text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34
"""
Domain middleware: enables multi-tenancy in a single process
"""
from anaf.core.domains import setup_domain, setup_domain_database
from anaf.core.db import DatabaseNotFound
from anaf.core.conf import settings
from django.http import HttpResponseRedirect
from django.db.utils import DatabaseError
from django.core.urlresolvers import reverse
from pandora import box
class DomainMiddleware(object):
"""Handles multiple domains within the same Django process"""
def process_request(self, request):
"""Identify the current domain and database, set up appropriate variables in the pandora box"""
domain = request.get_host().split('.')[0]
try:
setup_domain(domain)
except DatabaseNotFound:
evergreen_url = getattr(
settings, 'EVERGREEN_BASE_URL', 'http://tree.io/')
return HttpResponseRedirect(evergreen_url)
except DatabaseError:
from django.db import router
from anaf.core.models import ConfigSetting
setup_domain_database(router.db_for_read(ConfigSetting))
return HttpResponseRedirect(reverse('database_setup'))
box['request'] = request
def process_exception(self, request, exception):
if isinstance(exception, DatabaseNotFound):
evergreen_url = getattr(
settings, 'EVERGREEN_BASE_URL', 'http://tree.io/')
return HttpResponseRedirect(evergreen_url)
| tovmeod/anaf | anaf/core/middleware/domain.py | Python | bsd-3-clause | 1,463 | 0.000684 |
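This middleware only takes effect once Django loads it. A minimal registration sketch, assuming the old-style `MIDDLEWARE_CLASSES` setting that matches the `process_request`/`process_exception` API used above (the era implied by `django.core.urlresolvers`); the stock entry alongside it is illustrative:

```python
# settings.py (sketch) -- wiring DomainMiddleware into a pre-1.10 Django project.
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',     # illustrative stock entry
    'anaf.core.middleware.domain.DomainMiddleware',  # path from the record above
)
```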
# Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import configparser # For reading the legacy profile INI files.
import io
import json # For reading the Dictionary of Doom.
import math # For mathematical operations included in the Dictionary of Doom.
import os.path # For concatenating the path to the plugin and the relative path to the Dictionary of Doom.
from UM.Application import Application # To get the machine manager to create the new profile in.
from UM.Logger import Logger # Logging errors.
from UM.PluginRegistry import PluginRegistry # For getting the path to this plugin's directory.
from UM.Settings.ContainerRegistry import ContainerRegistry #To create unique profile IDs.
from UM.Settings.InstanceContainer import InstanceContainer # The new profile to make.
from cura.ProfileReader import ProfileReader # The plug-in type to implement.
from cura.Settings.ExtruderManager import ExtruderManager #To get the current extruder definition.
## A plugin that reads profile data from legacy Cura versions.
#
# It reads a profile from an .ini file, and performs some translations on it.
# Not all translations are correct, mind you, but it is a best effort.
class LegacyProfileReader(ProfileReader):
## Initialises the legacy profile reader.
#
# This does nothing since the only other function is basically stateless.
def __init__(self):
super().__init__()
## Prepares the default values of all legacy settings.
#
# These are loaded from the Dictionary of Doom.
#
# \param json The JSON file to load the default setting values from. This
# should not be a URL but a pre-loaded JSON handle.
# \return A dictionary of the default values of the legacy Cura version.
def prepareDefaults(self, json):
defaults = {}
for key in json["defaults"]: # We have to copy over all defaults from the JSON handle to a normal dict.
defaults[key] = json["defaults"][key]
return defaults
## Prepares the local variables that can be used in evaluation of computing
# new setting values from the old ones.
#
# This fills a dictionary with all settings from the legacy Cura version
# and their values, so that they can be used in evaluating the new setting
# values as Python code.
#
# \param config_parser The ConfigParser that finds the settings in the
# legacy profile.
# \param config_section The section in the profile where the settings
# should be found.
# \param defaults The default values for all settings in the legacy Cura.
# \return A set of local variables, one for each setting in the legacy
# profile.
def prepareLocals(self, config_parser, config_section, defaults):
copied_locals = defaults.copy() # Don't edit the original!
for option in config_parser.options(config_section):
copied_locals[option] = config_parser.get(config_section, option)
return copied_locals
## Reads a legacy Cura profile from a file and returns it.
#
# \param file_name The file to read the legacy Cura profile from.
# \return The legacy Cura profile that was in the file, if any. If the
# file could not be read or didn't contain a valid profile, \code None
# \endcode is returned.
def read(self, file_name):
if file_name.split(".")[-1] != "ini":
return None
global_container_stack = Application.getInstance().getGlobalContainerStack()
if not global_container_stack:
return None
multi_extrusion = global_container_stack.getProperty("machine_extruder_count", "value") > 1
if multi_extrusion:
Logger.log("e", "Unable to import legacy profile %s. Multi extrusion is not supported", file_name)
raise Exception("Unable to import legacy profile. Multi extrusion is not supported")
Logger.log("i", "Importing legacy profile from file " + file_name + ".")
container_registry = ContainerRegistry.getInstance()
profile_id = container_registry.uniqueName("Imported Legacy Profile")
profile = InstanceContainer(profile_id) # Create an empty profile.
parser = configparser.ConfigParser(interpolation = None)
try:
parser.read([file_name]) # Parse the INI file.
except Exception as e:
Logger.log("e", "Unable to open legacy profile %s: %s", file_name, str(e))
return None
# Legacy Cura saved the profile under the section "profile_N" where N is the ID of a machine, except when you export in which case it saves it in the section "profile".
# Since importing multiple machine profiles is out of scope, just import the first section we find.
section = ""
for found_section in parser.sections():
if found_section.startswith("profile"):
section = found_section
break
if not section: # No section starting with "profile" was found. Probably not a proper INI file.
return None
try:
with open(os.path.join(PluginRegistry.getInstance().getPluginPath("LegacyProfileReader"), "DictionaryOfDoom.json"), "r", -1, "utf-8") as f:
dict_of_doom = json.load(f) # Parse the Dictionary of Doom.
except IOError as e:
Logger.log("e", "Could not open DictionaryOfDoom.json for reading: %s", str(e))
return None
except Exception as e:
Logger.log("e", "Could not parse DictionaryOfDoom.json: %s", str(e))
return None
defaults = self.prepareDefaults(dict_of_doom)
legacy_settings = self.prepareLocals(parser, section, defaults) #Gets the settings from the legacy profile.
#Check the target version in the Dictionary of Doom with this application version.
if "target_version" not in dict_of_doom:
Logger.log("e", "Dictionary of Doom has no target version. Is it the correct JSON file?")
return None
if InstanceContainer.Version != dict_of_doom["target_version"]:
Logger.log("e", "Dictionary of Doom of legacy profile reader (version %s) is not in sync with the current instance container version (version %s)!", dict_of_doom["target_version"], str(InstanceContainer.Version))
return None
if "translation" not in dict_of_doom:
Logger.log("e", "Dictionary of Doom has no translation. Is it the correct JSON file?")
return None
current_printer_definition = global_container_stack.definition
profile.setDefinition(current_printer_definition.getId())
for new_setting in dict_of_doom["translation"]: # Evaluate all new settings that would get a value from the translations.
old_setting_expression = dict_of_doom["translation"][new_setting]
compiled = compile(old_setting_expression, new_setting, "eval")
try:
new_value = eval(compiled, {"math": math}, legacy_settings) # Pass the legacy settings as local variables to allow access to in the evaluation.
value_using_defaults = eval(compiled, {"math": math}, defaults) #Evaluate again using only the default values to try to see if they are default.
except Exception: # Probably some setting name that was missing or something else that went wrong in the ini file.
Logger.log("w", "Setting " + new_setting + " could not be set because the evaluation failed. Something is probably missing from the imported legacy profile.")
continue
definitions = current_printer_definition.findDefinitions(key = new_setting)
if definitions:
if new_value != value_using_defaults and definitions[0].default_value != new_value: # Not equal to the default in the new Cura OR the default in the legacy Cura.
profile.setProperty(new_setting, "value", new_value) # Store the setting in the profile!
if len(profile.getAllKeys()) == 0:
Logger.log("i", "A legacy profile was imported but everything evaluates to the defaults, creating an empty profile.")
profile.addMetaDataEntry("type", "profile")
# don't know what quality_type it is based on, so use "normal" by default
profile.addMetaDataEntry("quality_type", "normal")
profile.setName(profile_id)
profile.setDirty(True)
#Serialise and deserialise in order to perform the version upgrade.
parser = configparser.ConfigParser(interpolation=None)
data = profile.serialize()
parser.read_string(data)
parser["general"]["version"] = "1"
if parser.has_section("values"):
parser["settings"] = parser["values"]
del parser["values"]
stream = io.StringIO()
parser.write(stream)
data = stream.getvalue()
profile.deserialize(data)
#We need to return one extruder stack and one global stack.
global_container_id = container_registry.uniqueName("Global Imported Legacy Profile")
global_profile = profile.duplicate(new_id = global_container_id, new_name = profile_id) #Needs to have the same name as the extruder profile.
global_profile.setDirty(True)
#Only the extruder stack has an extruder metadata entry.
profile.addMetaDataEntry("extruder", ExtruderManager.getInstance().getActiveExtruderStack().definition.getId())
#Split all settings into per-extruder and global settings.
for setting_key in profile.getAllKeys():
settable_per_extruder = global_container_stack.getProperty(setting_key, "settable_per_extruder")
if settable_per_extruder:
global_profile.removeInstance(setting_key)
else:
profile.removeInstance(setting_key)
return [global_profile, profile]
| alephobjects/Cura2 | plugins/LegacyProfileReader/LegacyProfileReader.py | Python | lgpl-3.0 | 10,008 | 0.006695 |
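The reader above hinges on the structure of `DictionaryOfDoom.json`: a `target_version` that must equal `InstanceContainer.Version`, a `defaults` map of legacy settings, and a `translation` map whose values are Python expressions evaluated against those settings with `math` in scope. A hypothetical minimal instance, written as a Python literal with invented keys and expressions:

```python
# Hypothetical shape of DictionaryOfDoom.json, shown as a Python literal;
# the concrete setting names and expressions are made up for illustration.
dict_of_doom_example = {
    "target_version": 2,            # compared against InstanceContainer.Version
    "defaults": {                   # legacy settings and their default values
        "layer_height": "0.1",
    },
    "translation": {                # new setting -> expression over the legacy
                                    # settings ("math" is also in scope)
        "layer_height_0": "float(layer_height) * 2",
    },
}
```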
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets a user by its id. To create users, run
create_user.py."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
user_service = client.GetService('UserService', version='v201204')
# Set the id of the user to get.
user_id = 'INSERT_USER_ID_HERE'
# Get user.
user = user_service.GetUser(user_id)[0]
# Display results.
print ('User with id \'%s\', email \'%s\', and role \'%s\' was found.'
% (user['id'], user['email'], user['roleName']))
| krux/adspygoogle | examples/adspygoogle/dfp/v201204/get_user.py | Python | apache-2.0 | 1,504 | 0.001995 |
"""
WSGI config for yearendsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "yearendsite.settings")
application = get_wsgi_application()
| menzenski/django-year-end-site | yearendsite/wsgi.py | Python | mit | 399 | 0 |
# Copyright 2008, 2009, 2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
from __future__ import absolute_import
import sys
import textwrap
from distutils.spawn import find_executable
from gi.repository import Gtk, GLib
from . import Utils, Actions, Constants
from ..core import Messages
class SimpleTextDisplay(Gtk.TextView):
"""
A non user-editable gtk text view.
"""
def __init__(self, text=''):
"""
TextDisplay constructor.
Args:
text: the text to display (string)
"""
Gtk.TextView.__init__(self)
self.set_text = self.get_buffer().set_text
self.set_text(text)
self.set_editable(False)
self.set_cursor_visible(False)
self.set_wrap_mode(Gtk.WrapMode.WORD_CHAR)
class TextDisplay(SimpleTextDisplay):
"""
A non user-editable scrollable text view with popup menu.
"""
def __init__(self, text=''):
"""
TextDisplay constructor.
Args:
text: the text to display (string)
"""
SimpleTextDisplay.__init__(self, text)
self.scroll_lock = True
self.connect("populate-popup", self.populate_popup)
def insert(self, line):
"""
Append text after handling backspaces and auto-scroll.
Args:
line: the text to append (string)
"""
line = self._consume_backspaces(line)
self.get_buffer().insert(self.get_buffer().get_end_iter(), line)
self.scroll_to_end()
def _consume_backspaces(self, line):
"""
Removes text from the buffer if line starts with '\b'
Args:
line: a string which may contain backspaces
Returns:
The string that remains from 'line' with leading '\b's removed.
"""
        if not line:
            return ''  # keep the return type a string; insert() passes it to the buffer
# for each \b delete one char from the buffer
back_count = 0
start_iter = self.get_buffer().get_end_iter()
while len(line) > back_count and line[back_count] == '\b':
# stop at the beginning of a line
if not start_iter.starts_line():
start_iter.backward_char()
back_count += 1
# remove chars from buffer
self.get_buffer().delete(start_iter, self.get_buffer().get_end_iter())
return line[back_count:]
def scroll_to_end(self):
""" Update view's scroll position. """
if self.scroll_lock:
buf = self.get_buffer()
mark = buf.get_insert()
buf.move_mark(mark, buf.get_end_iter())
self.scroll_mark_onscreen(mark)
def clear(self):
""" Clear all text from buffer. """
buf = self.get_buffer()
buf.delete(buf.get_start_iter(), buf.get_end_iter())
def save(self, file_path):
"""
Save context of buffer to the given file.
Args:
file_path: location to save buffer contents
"""
with open(file_path, 'w') as logfile:
buf = self.get_buffer()
logfile.write(buf.get_text(buf.get_start_iter(),
buf.get_end_iter(), True))
# Action functions are set by the Application's init function
def clear_cb(self, menu_item, web_view):
""" Callback function to clear the text buffer """
Actions.CLEAR_CONSOLE()
def scroll_back_cb(self, menu_item, web_view):
""" Callback function to toggle scroll lock """
Actions.TOGGLE_SCROLL_LOCK()
def save_cb(self, menu_item, web_view):
""" Callback function to save the buffer """
Actions.SAVE_CONSOLE()
def populate_popup(self, view, menu):
"""Create a popup menu for the scroll lock and clear functions"""
menu.append(Gtk.SeparatorMenuItem())
lock = Gtk.CheckMenuItem(label = "Scroll Lock")
menu.append(lock)
lock.set_active(self.scroll_lock)
lock.connect('activate', self.scroll_back_cb, view)
save = Gtk.ImageMenuItem(label = "Save Console")
menu.append(save)
save.connect('activate', self.save_cb, view)
clear = Gtk.ImageMenuItem(label = "Clear Console")
menu.append(clear)
clear.connect('activate', self.clear_cb, view)
menu.show_all()
return False
class MessageDialogWrapper(Gtk.MessageDialog):
""" Run a message dialog. """
def __init__(self, parent, message_type, buttons, title=None, markup=None,
default_response=None, extra_buttons=None):
"""
Create a modal message dialog.
Args:
message_type: the type of message may be one of:
Gtk.MessageType.INFO
Gtk.MessageType.WARNING
Gtk.MessageType.QUESTION or Gtk.MessageType.ERROR
buttons: the predefined set of buttons to use:
Gtk.ButtonsType.NONE
Gtk.ButtonsType.OK
Gtk.ButtonsType.CLOSE
Gtk.ButtonsType.CANCEL
Gtk.ButtonsType.YES_NO
Gtk.ButtonsType.OK_CANCEL
title: the title of the window (string)
markup: the message text with pango markup
default_response: if set, determines which button is highlighted by default
extra_buttons: a tuple containing pairs of values:
each value is the button's text and the button's return value
"""
Gtk.MessageDialog.__init__(
self, transient_for=parent, modal=True, destroy_with_parent=True,
message_type=message_type, buttons=buttons
)
if title:
self.set_title(title)
if markup:
self.set_markup(markup)
if extra_buttons:
self.add_buttons(*extra_buttons)
if default_response:
self.set_default_response(default_response)
def run_and_destroy(self):
response = self.run()
self.hide()
return response
class ErrorsDialog(Gtk.Dialog):
""" Display flowgraph errors. """
def __init__(self, parent, flowgraph):
"""Create a listview of errors"""
Gtk.Dialog.__init__(
self,
title='Errors and Warnings',
transient_for=parent,
modal=True,
destroy_with_parent=True,
)
self.add_buttons(Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT)
self.set_size_request(750, Constants.MIN_DIALOG_HEIGHT)
self.set_border_width(10)
self.store = Gtk.ListStore(str, str, str)
self.update(flowgraph)
self.treeview = Gtk.TreeView(model=self.store)
for i, column_title in enumerate(["Block", "Aspect", "Message"]):
renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn(column_title, renderer, text=i)
column.set_sort_column_id(i) # liststore id matches treeview id
column.set_resizable(True)
self.treeview.append_column(column)
self.scrollable = Gtk.ScrolledWindow()
self.scrollable.set_vexpand(True)
self.scrollable.add(self.treeview)
self.vbox.pack_start(self.scrollable, True, True, 0)
self.show_all()
def update(self, flowgraph):
self.store.clear()
for element, message in flowgraph.iter_error_messages():
if element.is_block:
src, aspect = element.name, ''
elif element.is_connection:
src = element.source_block.name
aspect = "Connection to '{}'".format(element.sink_block.name)
elif element.is_port:
src = element.parent_block.name
aspect = "{} '{}'".format('Sink' if element.is_sink else 'Source', element.name)
elif element.is_param:
src = element.parent_block.name
aspect = "Param '{}'".format(element.name)
else:
src = aspect = ''
self.store.append([src, aspect, message])
def run_and_destroy(self):
response = self.run()
self.hide()
return response
def show_about(parent, config):
ad = Gtk.AboutDialog(transient_for=parent)
ad.set_program_name(config.name)
ad.set_name('')
ad.set_license(config.license)
py_version = sys.version.split()[0]
ad.set_version("{} (Python {})".format(config.version, py_version))
try:
ad.set_logo(Gtk.IconTheme().load_icon('gnuradio-grc', 64, 0))
except GLib.Error:
Messages.send("Failed to set window logo\n")
#ad.set_comments("")
ad.set_copyright(config.license.splitlines()[0])
ad.set_website(config.website)
ad.connect("response", lambda action, param: action.hide())
ad.show()
def show_help(parent):
""" Display basic usage tips. """
markup = textwrap.dedent("""\
<b>Usage Tips</b>
\n\
<u>Add block</u>: drag and drop or double click a block in the block
selection window.
<u>Rotate block</u>: Select a block, press left/right on the keyboard.
<u>Change type</u>: Select a block, press up/down on the keyboard.
<u>Edit parameters</u>: double click on a block in the flow graph.
<u>Make connection</u>: click on the source port of one block, then
click on the sink port of another block.
<u>Remove connection</u>: select the connection and press delete, or
drag the connection.
\n\
*Press Ctrl+K or see menu for Keyboard - Shortcuts
\
""")
markup = markup.replace("Ctrl", Utils.get_modifier_key())
MessageDialogWrapper(
parent, Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE, title='Help', markup=markup
).run_and_destroy()
def show_keyboard_shortcuts(parent):
""" Display keyboard shortcut-keys. """
markup = textwrap.dedent("""\
<b>Keyboard Shortcuts</b>
\n\
<u>Ctrl+N</u>: Create a new flowgraph.
<u>Ctrl+O</u>: Open an existing flowgraph.
<u>Ctrl+S</u>: Save the current flowgraph or save as for new.
<u>Ctrl+W</u>: Close the current flowgraph.
<u>Ctrl+Z</u>: Undo a change to the flowgraph.
<u>Ctrl+Y</u>: Redo a change to the flowgraph.
<u>Ctrl+A</u>: Selects all blocks and connections.
<u>Ctrl+P</u>: Screen Capture of the Flowgraph.
<u>Ctrl+Shift+P</u>: Save the console output to file.
<u>Ctrl+L</u>: Clear the console.
<u>Ctrl+E</u>: Show variable editor.
<u>Ctrl+F</u>: Search for a block by name.
<u>Ctrl+Q</u>: Quit.
<u>F1</u> : Help menu.
<u>F5</u> : Generate the Flowgraph.
<u>F6</u> : Execute the Flowgraph.
<u>F7</u> : Kill the Flowgraph.
<u>Ctrl+Shift+S</u>: Save as the current flowgraph.
<u>Ctrl+Shift+D</u>: Create a duplicate of current flow graph.
<u>Ctrl+X/C/V</u>: Edit-cut/copy/paste.
<u>Ctrl+D/B/R</u>: Toggle visibility of disabled blocks or
connections/block tree widget/console.
<u>Shift+T/M/B/L/C/R</u>: Vertical Align Top/Middle/Bottom and
Horizontal Align Left/Center/Right respectively of the
selected block.
\
""")
markup = markup.replace("Ctrl", Utils.get_modifier_key())
MessageDialogWrapper(
parent, Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE, title='Keyboard - Shortcuts', markup=markup
).run_and_destroy()
def show_get_involved(parent):
"""Get Involved Instructions"""
markup = textwrap.dedent("""\
<tt><b>Welcome to GNU Radio Community!</b></tt>
\n\
<tt>For more details on contributing to GNU Radio and getting engaged with our great community visit </tt><a href="https://www.gnuradio.org/get-involved">here</a>.
\n\
<tt>You can also join our <a href="https://slack.gnuradio.org/">Slack Channel</a>, IRC Channel (#gnuradio) or contact through our <a href="https://lists.gnu.org/mailman/listinfo/discuss-gnuradio">mailing list(discuss-gnuradio)</a></tt>.
\
""")
MessageDialogWrapper(
parent, Gtk.MessageType.QUESTION, Gtk.ButtonsType.CLOSE, title='Get - Involved', markup=markup
).run_and_destroy()
def show_types(parent):
""" Display information about standard data types. """
colors = [(name, color) for name, key, sizeof, color in Constants.CORE_TYPES]
max_len = 10 + max(len(name) for name, code in colors)
message = '\n'.join(
'<span background="{color}"><tt>{name}</tt></span>'
''.format(color=color, name=Utils.encode(name).center(max_len))
for name, color in colors
)
MessageDialogWrapper(
parent, Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE, title='Types - Color Mapping', markup=message
).run_and_destroy()
def show_missing_xterm(parent, xterm):
markup = textwrap.dedent("""\
The xterm executable {0!r} is missing.
You can change this setting in your gnuradio.conf, in section [grc], 'xterm_executable'.
\n\
(This message is shown only once)\
""").format(xterm)
MessageDialogWrapper(
parent, message_type=Gtk.MessageType.WARNING, buttons=Gtk.ButtonsType.OK,
title='Warning: missing xterm executable', markup=markup
).run_and_destroy()
def choose_editor(parent, config):
"""
Give the option to either choose an editor or use the default.
"""
if config.editor and find_executable(config.editor):
return config.editor
buttons = (
'Choose Editor', Gtk.ResponseType.YES,
'Use Default', Gtk.ResponseType.NO,
Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL
)
response = MessageDialogWrapper(
parent, message_type=Gtk.MessageType.QUESTION, buttons=Gtk.ButtonsType.NONE,
title='Choose Editor', markup='Would you like to choose the editor to use?',
default_response=Gtk.ResponseType.YES, extra_buttons=buttons
).run_and_destroy()
# Handle the initial default/choose/cancel response
# User wants to choose the editor to use
editor = ''
if response == Gtk.ResponseType.YES:
file_dialog = Gtk.FileChooserDialog(
'Select an Editor...', None,
Gtk.FileChooserAction.OPEN,
('gtk-cancel', Gtk.ResponseType.CANCEL, 'gtk-open', Gtk.ResponseType.OK),
transient_for=parent
)
file_dialog.set_select_multiple(False)
file_dialog.set_local_only(True)
file_dialog.set_current_folder('/usr/bin')
try:
if file_dialog.run() == Gtk.ResponseType.OK:
editor = file_dialog.get_filename()
finally:
file_dialog.hide()
# Go with the default editor
elif response == Gtk.ResponseType.NO:
try:
process = None
if sys.platform.startswith('linux'):
process = find_executable('xdg-open')
elif sys.platform.startswith('darwin'):
process = find_executable('open')
if process is None:
raise ValueError("Can't find default editor executable")
# Save
editor = config.editor = process
except Exception:
Messages.send('>>> Unable to load the default editor. Please choose an editor.\n')
if editor == '':
Messages.send('>>> No editor selected.\n')
return editor
| jdemel/gnuradio | grc/gui/Dialogs.py | Python | gpl-3.0 | 15,494 | 0.002259 |
# -*- encoding: utf-8 -*-
from pygithub3.requests.base import Request
from pygithub3.resources.git_data import Tag
class Get(Request):
uri = 'repos/{user}/{repo}/git/tags/{sha}'
resource = Tag
class Create(Request):
uri = 'repos/{user}/{repo}/git/tags'
resource = Tag
body_schema = {
'schema': ('tag', 'message', 'object', 'type', 'tagger'),
'required': ('type',),
}
| dongguangming/python-github3 | pygithub3/requests/git_data/tags.py | Python | isc | 412 | 0 |
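For context, the `uri` attributes are templates that the request machinery presumably fills with the caller's `user`/`repo`/`sha` values before issuing the GitHub API call. A toy expansion with made-up values:

```python
# Illustrative expansion of the Get request's uri template; values are invented.
uri = 'repos/{user}/{repo}/git/tags/{sha}'
print(uri.format(user='octocat', repo='hello-world', sha='abc123'))
# -> repos/octocat/hello-world/git/tags/abc123
```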
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.units import inch
from xml.etree.ElementTree import ElementTree
import Image, re, sys
class HocrConverter():
"""
A class for converting documents to/from the hOCR format.
For details of the hOCR format, see:
http://docs.google.com/View?docid=dfxcv4vc_67g844kf
See also:
http://code.google.com/p/hocr-tools/
Basic usage:
Create a PDF from an hOCR file and an image:
hocr = HocrConverter("path/to/hOCR/file")
hocr.to_pdf("path/to/image/file", "path/to/output/file")
"""
def __init__(self, hocrFileName = None):
self.hocr = None
self.xmlns = ''
    self.boxPattern = re.compile(r'bbox((\s+\d+){4})')
if hocrFileName is not None:
self.parse_hocr(hocrFileName)
def __str__(self):
"""
Return the textual content of the HTML body
"""
if self.hocr is None:
return ''
body = self.hocr.find(".//%sbody"%(self.xmlns))
if body:
return self._get_element_text(body).encode('utf-8') # XML gives unicode
else:
return ''
def _get_element_text(self, element):
"""
Return the textual content of the element and its children
"""
text = ''
if element.text is not None:
text = text + element.text
for child in element.getchildren():
text = text + self._get_element_text(child)
if element.tail is not None:
text = text + element.tail
return text
def element_coordinates(self, element):
"""
Returns a tuple containing the coordinates of the bounding box around
an element
"""
out = (0,0,0,0)
if 'title' in element.attrib:
matches = self.boxPattern.search(element.attrib['title'])
if matches:
coords = matches.group(1).split()
out = (int(coords[0]),int(coords[1]),int(coords[2]),int(coords[3]))
return out
def parse_hocr(self, hocrFileName):
"""
Reads an XML/XHTML file into an ElementTree object
"""
self.hocr = ElementTree()
self.hocr.parse(hocrFileName)
# if the hOCR file has a namespace, ElementTree requires its use to find elements
matches = re.match('({.*})html', self.hocr.getroot().tag)
if matches:
self.xmlns = matches.group(1)
else:
self.xmlns = ''
def to_pdf(self, imageFileName, outFileName, fontname="Courier", fontsize=8):
"""
Creates a PDF file with an image superimposed on top of the text.
Text is positioned according to the bounding box of the lines in
the hOCR file.
The image need not be identical to the image used to create the hOCR file.
It can be scaled, have a lower resolution, different color mode, etc.
"""
if self.hocr is None:
# warn that no text will be embedded in the output PDF
print "Warning: No hOCR file specified. PDF will be image-only."
im = Image.open(imageFileName)
imwidthpx, imheightpx = im.size
if 'dpi' in im.info:
width = float(im.size[0])/im.info['dpi'][0]
height = float(im.size[1])/im.info['dpi'][1]
else:
# we have to make a reasonable guess
# set to None for now and try again using info from hOCR file
width = height = None
ocr_dpi = (300, 300) # a default, in case we can't find it
# get dimensions of the OCR, which may not match the image
if self.hocr is not None:
for div in self.hocr.findall(".//%sdiv"%(self.xmlns)):
if div.attrib['class'] == 'ocr_page':
coords = self.element_coordinates(div)
ocrwidth = coords[2]-coords[0]
ocrheight = coords[3]-coords[1]
if width is None:
# no dpi info with the image
# assume OCR was done at 300 dpi
width = ocrwidth/300
height = ocrheight/300
ocr_dpi = (ocrwidth/width, ocrheight/height)
break # there shouldn't be more than one, and if there is, we don't want it
if width is None:
# no dpi info with the image, and no help from the hOCR file either
# this will probably end up looking awful, so issue a warning
print "Warning: DPI unavailable for image %s. Assuming 96 DPI."%(imageFileName)
width = float(im.size[0])/96
height = float(im.size[1])/96
# create the PDF file
pdf = Canvas(outFileName, pagesize=(width*inch, height*inch), pageCompression=1) # page size in points (1/72 in.)
# put the image on the page, scaled to fill the page
pdf.drawInlineImage(im, 0, 0, width=width*inch, height=height*inch)
if self.hocr is not None:
for line in self.hocr.findall(".//%sspan"%(self.xmlns)):
if line.attrib['class'] == 'ocr_line':
coords = self.element_coordinates(line)
text = pdf.beginText()
text.setFont(fontname, fontsize)
text.setTextRenderMode(3) # invisible
# set cursor to bottom left corner of line bbox (adjust for dpi)
text.setTextOrigin((float(coords[0])/ocr_dpi[0])*inch, (height*inch)-(float(coords[3])/ocr_dpi[1])*inch)
# scale the width of the text to fill the width of the line's bbox
text.setHorizScale((((float(coords[2])/ocr_dpi[0]*inch)-(float(coords[0])/ocr_dpi[0]*inch))/pdf.stringWidth(line.text.rstrip(), fontname, fontsize))*100)
# write the text to the page
text.textLine(line.text.rstrip())
pdf.drawText(text)
# finish up the page and save it
pdf.showPage()
pdf.save()
def to_text(self, outFileName):
"""
Writes the textual content of the hOCR body to a file.
"""
f = open(outFileName, "w")
f.write(self.__str__())
f.close()
if __name__ == "__main__":
if len(sys.argv) < 4:
print 'Usage: python HocrConverter.py inputHocrFile inputImageFile outputPdfFile'
sys.exit(1)
hocr = HocrConverter(sys.argv[1])
hocr.to_pdf(sys.argv[2], sys.argv[3])
| jbrinley/HocrConverter | HocrConverter.py | Python | mit | 5,986 | 0.019044 |
print "hello"
print "world"
print "hello world"
print "hello world"
# end
# haha
| fausthuang/faust-s-test-repo | hello world.py | Python | gpl-2.0 | 76 | 0.013158 |
import os
from distutils.util import strtobool
def is_int(value):
"""
Verifies that 'value' is an integer.
"""
try:
int(value)
except ValueError:
return False
else:
return True
def is_float(value):
"""
Verifies that 'value' is a float.
"""
try:
float(value)
except ValueError:
return False
else:
return True
def is_str(value):
"""
Verifies that 'value' is a string.
"""
if not type(value) is str:
return False
else:
return True
def is_bool(value):
"""
Verifies that 'value' is a boolean.
"""
try:
strtobool(value)
except ValueError:
return False
else:
return True
def is_dir(value):
"""
Verifies that 'value' is a path to an existing directory.
"""
if not (type(value) is str and os.path.isdir(value)):
return False
else:
return True
def is_file_i(value):
"""
Verifies that 'value' is a path to an existing file.
"""
if not (type(value) is str and os.path.isfile(value)):
return False
else:
return True
def is_file_o(value):
"""
Verifies that 'value' is a path to a valid directory to create an output
file.
"""
if not (type(value) is str and os.path.split(value)[0]):
return False
else:
return True
| INRA-LPGP/bio_tools | bio_tools/parameters/type_checks.py | Python | mit | 1,406 | 0 |
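A few illustrative calls against these validators; the import path is assumed from the record's file path, and `strtobool` accepts strings such as `y`/`yes`/`t`/`true`/`on`/`1` (and their negative counterparts):

```python
# Sketch of expected behaviour; the import path is assumed from the record above.
from bio_tools.parameters.type_checks import is_int, is_float, is_bool

assert is_int("42") and not is_int("4.2")
assert is_float("4.2") and not is_float("abc")
assert is_bool("yes") and not is_bool("maybe")
```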
# -*- coding: utf-8 -*-
#
# Anaconda documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 18 14:37:01 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os, shutil
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# configuration required to import test modules
for path in ["../pyanaconda/isys/.libs", "../pyanaconda", "../tests", "../tests/lib", "../dracut", "../widgets"]:
sys.path.append(os.path.abspath(path))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
if not os.environ.get("READTHEDOCS") == "True":
extensions.extend(['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.todo'])
shutil.copy2("../CONTRIBUTING.rst", "contributing.rst")
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Anaconda'
copyright = '2015, Red Hat, Inc.' # pylint: disable=redefined-builtin
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
def read_version():
""" Read version from ../configure.ac"""
import re
version_re = re.compile(r"AC_INIT\(\[(.*)\], \[(.*)\], \[(.*)\]\)")
with open("../configure.ac", "r") as f:
for line in f:
m = version_re.match(line)
if m:
return m.group(2)
# The short X.Y version.
version = read_version()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'html']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
autoclass_content = 'both'
# Inheritence diagram graphviz settings
inheritance_graph_attrs = dict(rankdir="UD", fontsize=14, ratio='auto')
inheritance_node_attrs = dict(style='rounded', margin='"0.07, 0.07"')
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Anacondadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Anaconda.tex', 'Anaconda Documentation',
'Anaconda Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'Anaconda', 'Anaconda Documentation',
['Anaconda Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Anaconda', 'Anaconda Documentation',
'Anaconda Team', 'Anaconda', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'Anaconda'
epub_author = 'Anaconda Team'
epub_publisher = 'Anaconda Team'
epub_copyright = '2015, Anaconda Team'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/3': None}
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme # pylint: disable=import-error
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Group by class
autodoc_member_order = 'source'
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# This was taken directly from here:
# http://read-the-docs.readthedocs.org/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
# I only added the __getitem__ method.
# NOTE: this can be removed whenever we move to sphinx-1.3, at which point we'll
# be able to use autodoc_mock_imports (value is a list of modules to be
# mocked).
class Mock(object):
__all__ = []
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return Mock()
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
elif name[0] == name[0].upper():
mockType = type(name, (), {})
mockType.__module__ = __name__
return mockType
else:
return Mock()
@classmethod
def __getitem__(cls, key):
return cls.__getattr__(key)
MOCK_MODULES = ['_isys']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
| jkonecny12/anaconda | docs/conf.py | Python | gpl-2.0 | 11,653 | 0.006522 |
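For reference, `read_version()` above scans `configure.ac` for an `AC_INIT` line and returns its second bracketed field. A hypothetical line of the shape `version_re` matches, with invented values:

```python
# Hypothetical configure.ac line matched by version_re; group(2) ("25.20" here)
# would become the Sphinx `version` string.
example_line = "AC_INIT([anaconda], [25.20], [anaconda-devel-list@redhat.com])"
```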
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='MissingTeacherEntry',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('hour', models.PositiveIntegerField(verbose_name='Heure de cours')),
('content', models.TextField(verbose_name='contenu', blank=True)),
('visible', models.BooleanField(default=True, verbose_name='visible ?')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='date de cr\xe9ation')),
('modified', models.DateTimeField(auto_now=True, verbose_name='date de modification')),
],
options={
'verbose_name': 'heure de cours',
'verbose_name_plural': 'heures de cours',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MissingTeacherWidget',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text="Le nom est obligatoire et permet d'identifier votre widget facilement.", max_length=100, verbose_name='nom')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='date de cr\xe9ation')),
('modified', models.DateTimeField(auto_now=True, verbose_name='date de modification')),
('missing', models.TextField(help_text='Un enseignant par ligne.', verbose_name='Enseignants absents', blank=True)),
('hide_empty', models.BooleanField(default=True, help_text="Masque les heures de cours pour lesquelles aucuneinformation n'a \xe9t\xe9 entr\xe9e.", verbose_name='Cacher les \xe9l\xe9ments vides')),
],
options={
'verbose_name': 'enseignant absent',
'verbose_name_plural': 'enseignants absents',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='missingteacherentry',
name='widget',
field=models.ForeignKey(related_name='hours', to='schoolwidget.MissingTeacherWidget'),
preserve_default=True,
),
]
| AlexandreDecan/Dashbird | widgets/schoolwidget/migrations/0001_initial.py | Python | gpl-3.0 | 2,474 | 0.005255 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from amaranth import Signal, unsigned
from amaranth.hdl.dsl import Module
from amaranth_cfu import InstructionBase, SimpleElaboratable
from .constants import Constants
from ..stream import Endpoint, connect
class ConfigurationRegister(SimpleElaboratable):
"""A register set by the CPU.
Allows a CPU to provide data to gateware.
Attributes
----------
output: Endpoint(unsigned(32))
An output stream of values. A new value onto the stream whenever
the register is set.
value: unsigned(32), out
The value held by the register.
new_en: Signal(1), in
Indicates to register that a new value is being presented on new_value
new_value: unsigned(32), in
New value for the register. Read when "set" is asserted.
"""
def __init__(self):
super().__init__()
self.output = Endpoint(unsigned(32))
self.value = self.output.payload
self.new_en = Signal()
self.new_value = Signal(32)
def elab(self, m):
with m.If(self.output.is_transferring()):
m.d.sync += self.output.valid.eq(0)
with m.If(self.new_en):
m.d.sync += self.value.eq(self.new_value)
m.d.sync += self.output.valid.eq(1)
class SetInstruction(InstructionBase):
"""An instruction used to set values into a register of the CFU.
Sets a configuration register from in0.
Attributes
----------
output_streams: dict[id, Endpoint[unsigned(32)]], out
Value output for each register.
values: dict[id, unsigned(32)], out
Values as set into registers.
write_strobes: dict[id, Signal(1)], out
Asserted for one cycle when the corresponding register id is written.
"""
# The list of all register IDs that may be set
REGISTER_IDS = [
Constants.REG_FILTER_NUM_WORDS,
Constants.REG_INPUT_NUM_WORDS,
Constants.REG_INPUT_OFFSET,
Constants.REG_SET_FILTER,
Constants.REG_SET_INPUT,
Constants.REG_OUTPUT_OFFSET,
Constants.REG_OUTPUT_MIN,
Constants.REG_OUTPUT_MAX,
Constants.REG_FILTER_INPUT_NEXT,
Constants.REG_VERIFY,
Constants.REG_OUTPUT_PARAMS_RESET,
Constants.REG_OUTPUT_BIAS,
Constants.REG_OUTPUT_MULTIPLIER,
Constants.REG_OUTPUT_SHIFT,
]
def __init__(self):
super().__init__()
self.output_streams = {
i: Endpoint(
unsigned(32)) for i in self.REGISTER_IDS}
self.values = {i: Signal(32) for i in self.REGISTER_IDS}
self.write_strobes = {i: Signal(1) for i in self.REGISTER_IDS}
def elab(self, m: Module):
registers = {i: ConfigurationRegister() for i in self.REGISTER_IDS}
for i, register in registers.items():
m.submodules[f"reg_{i:02x}"] = register
m.d.comb += connect(register.output, self.output_streams[i])
m.d.comb += self.values[i].eq(register.value)
m.d.comb += self.write_strobes[i].eq(0) # strobes off by default
with m.If(self.start):
# Consider making self.done.eq(1) combinatorial
m.d.sync += self.done.eq(1)
with m.Switch(self.funct7):
for i, register in registers.items():
with m.Case(i):
m.d.comb += register.new_en.eq(1)
m.d.comb += register.new_value.eq(self.in0)
m.d.comb += self.write_strobes[i].eq(1)
with m.Else():
m.d.sync += self.done.eq(0)
| google/CFU-Playground | proj/hps_accel/gateware/gen1/set.py | Python | apache-2.0 | 4,133 | 0 |
#!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from __future__ import print_function, unicode_literals
import traceback
import voodoo.log as log
from voodoo.gen import CoordAddress
class ResourcesChecker(object):
def __init__(self, coordinator):
self.coordinator = coordinator
self.locator = coordinator.locator
self.current_lab = None
def check(self):
try:
experiments_per_laboratory = self.coordinator.list_laboratories_addresses()
# Use a common broken_resources to avoid endless loops if a resource is registered
# in labs in more than one laboratory server (and one might state that it works while
# other might state that it doesn't).
broken_resources = {}
for laboratory_address_str in experiments_per_laboratory:
self.current_lab = laboratory_address_str
new_broken_resources = self.check_laboratory(laboratory_address_str, experiments_per_laboratory[laboratory_address_str])
for broken_resource in new_broken_resources:
if broken_resource in broken_resources:
broken_resources[broken_resource] += ';' + new_broken_resources[broken_resource]
else:
broken_resources[broken_resource] = new_broken_resources[broken_resource]
all_notifications = {
# (recipient1, recipient2) : [message1, message2, message3],
# (recipient1, ) : [message4, message5],
# (recipient3, ) : [message6, message7],
}
for laboratory_address_str in experiments_per_laboratory:
experiments = experiments_per_laboratory[laboratory_address_str]
for experiment in experiments:
laboratory_resource = experiments[experiment]
if laboratory_resource in broken_resources:
notifications = self.coordinator.mark_resource_as_broken(laboratory_resource, broken_resources[laboratory_resource])
else:
notifications = self.coordinator.mark_resource_as_fixed(laboratory_resource)
for recipients in notifications:
if recipients in all_notifications:
all_notifications[recipients].extend(notifications[recipients])
else:
all_notifications[recipients] = list(notifications[recipients])
if all_notifications:
self.coordinator.notify_status(all_notifications)
except:
traceback.print_exc()
log.log( ResourcesChecker, log.level.Critical,
"Error checking resources.")
log.log_exc(ResourcesChecker, log.level.Critical)
def check_laboratory(self, address_str, experiments):
""" Checks in that laboratory address which experiments are broken and which ones are working.
:param address_str: laboratory address, e.g. "laboratory:general_laboratory@server1"
:param experiments: dictionary of experiments: resources, e.g. { "exp1|ud-fpga|FPGA experiments" : "fpga1@fpga boards"}
"""
broken_resources = {
# resource_id : error_message
}
try:
address = CoordAddress.translate(address_str)
server = self.locator.get(address, timeout=1800) # Extended timeout for this method
failing_experiments = server.check_experiments_resources()
#
# failing_experiments is a dictionary such as:
# {
# experiment_instance_id : error_message
# }
#
for failing_experiment in failing_experiments:
if not failing_experiment in experiments:
log.log( ResourcesChecker, log.level.Error,
"Laboratory server %s reported that experiment %s was failing; however this laboratory does NOT manage this experiment. Attack?" % (address_str, failing_experiment))
continue
#
# The error for a resource will be concatenated
#
broken_resource = experiments[failing_experiment]
error_message = failing_experiments[failing_experiment]
if broken_resource in broken_resources:
broken_resources[broken_resource] = broken_resources[broken_resource] + ';' + error_message
else:
broken_resources[broken_resource] = error_message
except:
traceback.print_exc()
log.log( ResourcesChecker, log.level.Critical,
"Error checking resources of laboratory %s " % address_str)
log.log_exc(ResourcesChecker, log.level.Critical)
return broken_resources
| morelab/weblabdeusto | server/src/weblab/core/coordinator/checker.py | Python | bsd-2-clause | 5,307 | 0.006785 |
import numpy as np
class PointBrowser:
"""
Click on a point to select and highlight it -- the data that
generated the point will be shown in the lower axes. Use the 'n'
and 'p' keys to browse through the next and previous points
"""
def __init__(self):
self.lastind = 0
self.text = ax.text(0.05, 0.95, 'selected: none',
transform=ax.transAxes, va='top')
self.selected, = ax.plot([xs[0]], [ys[0]], 'o', ms=12, alpha=0.4,
color='yellow', visible=False)
def onpress(self, event):
if self.lastind is None: return
if event.key not in ('n', 'p'): return
if event.key=='n': inc = 1
else: inc = -1
self.lastind += inc
self.lastind = np.clip(self.lastind, 0, len(xs)-1)
self.update()
def onpick(self, event):
if event.artist!=line: return True
N = len(event.ind)
if not N: return True
# the click locations
x = event.mouseevent.xdata
y = event.mouseevent.ydata
distances = np.hypot(x-xs[event.ind], y-ys[event.ind])
indmin = distances.argmin()
dataind = event.ind[indmin]
self.lastind = dataind
self.update()
def update(self):
if self.lastind is None: return
dataind = self.lastind
ax2.cla()
ax2.plot(X[dataind])
ax2.text(0.05, 0.9, 'mu=%1.3f\nsigma=%1.3f'%(xs[dataind], ys[dataind]),
transform=ax2.transAxes, va='top')
ax2.set_ylim(-0.5, 1.5)
self.selected.set_visible(True)
self.selected.set_data(xs[dataind], ys[dataind])
self.text.set_text('selected: %d'%dataind)
fig.canvas.draw()
if __name__ == '__main__':
import matplotlib.pyplot as plt
X = np.random.rand(100, 200)
xs = np.mean(X, axis=1)
ys = np.std(X, axis=1)
fig, (ax, ax2) = plt.subplots(2, 1)
ax.set_title('click on point to plot time series')
line, = ax.plot(xs, ys, 'o', picker=5) # 5 points tolerance
browser = PointBrowser()
fig.canvas.mpl_connect('pick_event', browser.onpick)
fig.canvas.mpl_connect('key_press_event', browser.onpress)
plt.show()
| cactusbin/nyt | matplotlib/examples/event_handling/data_browser.py | Python | unlicense | 2,233 | 0.011196 |
# -*- coding: utf-8 -*-
"""
pygments.lexers.pascal
~~~~~~~~~~~~~~~~~~~~~~
Lexers for Pascal family languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from ..lexer import Lexer, RegexLexer, include, bygroups, words, \
using, this, default
from ..util import get_bool_opt, get_list_opt
from ..token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
from ..scanner import Scanner
from .modula2 import Modula2Lexer
__all__ = ['DelphiLexer', 'AdaLexer']
class DelphiLexer(Lexer):
"""
For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
Turbo Pascal and Free Pascal source code.
Additional options accepted:
`turbopascal`
Highlight Turbo Pascal specific keywords (default: ``True``).
`delphi`
Highlight Borland Delphi specific keywords (default: ``True``).
`freepascal`
Highlight Free Pascal specific keywords (default: ``True``).
`units`
A list of units that should be considered builtin, supported are
``System``, ``SysUtils``, ``Classes`` and ``Math``.
Default is to consider all of them builtin.
"""
name = 'Delphi'
aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
filenames = ['*.pas']
mimetypes = ['text/x-pascal']
TURBO_PASCAL_KEYWORDS = (
'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
'const', 'constructor', 'continue', 'destructor', 'div', 'do',
'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
)
DELPHI_KEYWORDS = (
'as', 'class', 'except', 'exports', 'finalization', 'finally',
'initialization', 'is', 'library', 'on', 'property', 'raise',
'threadvar', 'try'
)
FREE_PASCAL_KEYWORDS = (
'dispose', 'exit', 'false', 'new', 'true'
)
BLOCK_KEYWORDS = set((
'begin', 'class', 'const', 'constructor', 'destructor', 'end',
'finalization', 'function', 'implementation', 'initialization',
'label', 'library', 'operator', 'procedure', 'program', 'property',
'record', 'threadvar', 'type', 'unit', 'uses', 'var'
))
FUNCTION_MODIFIERS = set((
'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
'override', 'assembler'
))
    # XXX: these aren't global, but currently we know of no way to define
    # them just for the type context.
DIRECTIVES = set((
'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
'published', 'public'
))
BUILTIN_TYPES = set((
'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
'widechar', 'widestring', 'word', 'wordbool'
))
BUILTIN_UNITS = {
'System': (
'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
'append', 'arctan', 'assert', 'assigned', 'assignfile',
'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
'dispose', 'doubletocomp', 'endthread', 'enummodules',
'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
'findresourcehinstance', 'flush', 'frac', 'freemem',
'get8087cw', 'getdir', 'getlasterror', 'getmem',
'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
'randomize', 'read', 'readln', 'reallocmem',
'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
'set8087cw', 'setlength', 'setlinebreakstyle',
'setmemorymanager', 'setstring', 'settextbuf',
'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
'utf8tounicode', 'val', 'vararrayredim', 'varclear',
'widecharlentostring', 'widecharlentostrvar',
'widechartostring', 'widechartostrvar',
'widestringtoucs4string', 'write', 'writeln'
),
'SysUtils': (
'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
'allocmem', 'ansicomparefilename', 'ansicomparestr',
'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
'ansistrscan', 'ansistrupper', 'ansiuppercase',
'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
'callterminateprocs', 'changefileext', 'charlength',
'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
'comparetext', 'createdir', 'createguid', 'currentyear',
'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
'exceptionerrormessage', 'excludetrailingbackslash',
'excludetrailingpathdelimiter', 'expandfilename',
'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
'extractfiledrive', 'extractfileext', 'extractfilename',
'extractfilepath', 'extractrelativepath', 'extractshortpathname',
'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
'getenvironmentvariable', 'getfileversion', 'getformatsettings',
'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
'includetrailingbackslash', 'includetrailingpathdelimiter',
'incmonth', 'initializepackage', 'interlockeddecrement',
'interlockedexchange', 'interlockedexchangeadd',
'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
'outofmemoryerror', 'quotedstr', 'raiselastoserror',
'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
'strtotimedef', 'strupper', 'supports', 'syserrormessage',
'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
'wraptext'
),
'Classes': (
'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
'groupdescendantswith', 'hextobin', 'identtoint',
'initinheritedcomponent', 'inttoident', 'invalidpoint',
'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
'pointsequal', 'readcomponentres', 'readcomponentresex',
'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
'registerclasses', 'registercomponents', 'registerintegerconsts',
'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
'teststreamformat', 'unregisterclass', 'unregisterclasses',
'unregisterintegerconsts', 'unregistermoduleclasses',
'writecomponentresfile'
),
'Math': (
'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
'tan', 'tanh', 'totalvariance', 'variance'
)
}
ASM_REGISTERS = set((
'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
'xmm6', 'xmm7'
))
ASM_INSTRUCTIONS = set((
'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
        'jc', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
'xlatb', 'xor'
))
def __init__(self, **options):
Lexer.__init__(self, **options)
self.keywords = set()
if get_bool_opt(options, 'turbopascal', True):
self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
if get_bool_opt(options, 'delphi', True):
self.keywords.update(self.DELPHI_KEYWORDS)
if get_bool_opt(options, 'freepascal', True):
self.keywords.update(self.FREE_PASCAL_KEYWORDS)
self.builtins = set()
for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
self.builtins.update(self.BUILTIN_UNITS[unit])
def get_tokens_unprocessed(self, text):
scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
stack = ['initial']
in_function_block = False
in_property_block = False
was_dot = False
next_token_is_function = False
next_token_is_property = False
collect_labels = False
block_labels = set()
brace_balance = [0, 0]
while not scanner.eos:
token = Error
if stack[-1] == 'initial':
if scanner.scan(r'\s+'):
token = Text
elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
                    # compiler directives look like {$...} or (*$...*)
                    if scanner.match.startswith('{$') or scanner.match.startswith('(*$'):
token = Comment.Preproc
else:
token = Comment.Multiline
elif scanner.scan(r'//.*?$'):
token = Comment.Single
elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
token = Operator
# stop label highlighting on next ";"
if collect_labels and scanner.match == ';':
collect_labels = False
elif scanner.scan(r'[\(\)\[\]]+'):
token = Punctuation
# abort function naming ``foo = Function(...)``
next_token_is_function = False
                    # if we are in a function block we count the open
                    # braces because otherwise it's impossible to
                    # determine the end of the modifier context
if in_function_block or in_property_block:
if scanner.match == '(':
brace_balance[0] += 1
elif scanner.match == ')':
brace_balance[0] -= 1
elif scanner.match == '[':
brace_balance[1] += 1
elif scanner.match == ']':
brace_balance[1] -= 1
elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
lowercase_name = scanner.match.lower()
if lowercase_name == 'result':
token = Name.Builtin.Pseudo
elif lowercase_name in self.keywords:
token = Keyword
                        # if we are in a special block and a
                        # block-ending keyword occurs (and the parentheses
                        # are balanced) we end the current block context
if (in_function_block or in_property_block) and \
lowercase_name in self.BLOCK_KEYWORDS and \
brace_balance[0] <= 0 and \
brace_balance[1] <= 0:
in_function_block = False
in_property_block = False
brace_balance = [0, 0]
block_labels = set()
if lowercase_name in ('label', 'goto'):
collect_labels = True
elif lowercase_name == 'asm':
stack.append('asm')
elif lowercase_name == 'property':
in_property_block = True
next_token_is_property = True
elif lowercase_name in ('procedure', 'operator',
'function', 'constructor',
'destructor'):
in_function_block = True
next_token_is_function = True
# we are in a function block and the current name
# is in the set of registered modifiers. highlight
# it as pseudo keyword
elif in_function_block and \
lowercase_name in self.FUNCTION_MODIFIERS:
token = Keyword.Pseudo
# if we are in a property highlight some more
# modifiers
elif in_property_block and \
lowercase_name in ('read', 'write'):
token = Keyword.Pseudo
next_token_is_function = True
# if the last iteration set next_token_is_function
# to true we now want this name highlighted as
# function. so do that and reset the state
elif next_token_is_function:
# Look if the next token is a dot. If yes it's
# not a function, but a class name and the
# part after the dot a function name
if scanner.test(r'\s*\.\s*'):
token = Name.Class
# it's not a dot, our job is done
else:
token = Name.Function
next_token_is_function = False
# same for properties
elif next_token_is_property:
token = Name.Property
next_token_is_property = False
# Highlight this token as label and add it
# to the list of known labels
elif collect_labels:
token = Name.Label
block_labels.add(scanner.match.lower())
# name is in list of known labels
elif lowercase_name in block_labels:
token = Name.Label
elif lowercase_name in self.BUILTIN_TYPES:
token = Keyword.Type
elif lowercase_name in self.DIRECTIVES:
token = Keyword.Pseudo
# builtins are just builtins if the token
# before isn't a dot
elif not was_dot and lowercase_name in self.builtins:
token = Name.Builtin
else:
token = Name
elif scanner.scan(r"'"):
token = String
stack.append('string')
elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
token = String.Char
elif scanner.scan(r'\$[0-9A-Fa-f]+'):
token = Number.Hex
elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
token = Number.Integer
elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
token = Number.Float
else:
# if the stack depth is deeper than once, pop
if len(stack) > 1:
stack.pop()
scanner.get_char()
elif stack[-1] == 'string':
if scanner.scan(r"''"):
token = String.Escape
elif scanner.scan(r"'"):
token = String
stack.pop()
elif scanner.scan(r"[^']*"):
token = String
else:
scanner.get_char()
stack.pop()
elif stack[-1] == 'asm':
if scanner.scan(r'\s+'):
token = Text
elif scanner.scan(r'end'):
token = Keyword
stack.pop()
elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
                    # compiler directives look like {$...} or (*$...*)
                    if scanner.match.startswith('{$') or scanner.match.startswith('(*$'):
token = Comment.Preproc
else:
token = Comment.Multiline
elif scanner.scan(r'//.*?$'):
token = Comment.Single
elif scanner.scan(r"'"):
token = String
stack.append('string')
elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
token = Name.Label
elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
lowercase_name = scanner.match.lower()
if lowercase_name in self.ASM_INSTRUCTIONS:
token = Keyword
elif lowercase_name in self.ASM_REGISTERS:
token = Name.Builtin
else:
token = Name
elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
token = Operator
elif scanner.scan(r'[\(\)\[\]]+'):
token = Punctuation
elif scanner.scan(r'\$[0-9A-Fa-f]+'):
token = Number.Hex
elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
token = Number.Integer
elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
token = Number.Float
else:
scanner.get_char()
stack.pop()
            # remember whether the token just emitted was a dot
if scanner.match.strip():
was_dot = scanner.match == '.'
yield scanner.start_pos, token, scanner.match or ''
class AdaLexer(RegexLexer):
"""
For Ada source code.
.. versionadded:: 1.3
"""
name = 'Ada'
aliases = ['ada', 'ada95', 'ada2005']
filenames = ['*.adb', '*.ads', '*.ada']
mimetypes = ['text/x-ada']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'--.*?\n', Comment.Single),
(r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
(r'(subtype|type)(\s+)(\w+)',
bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
(r'task|protected', Keyword.Declaration),
(r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
(r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
(r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
Comment.Preproc)),
(r'(true|false|null)\b', Keyword.Constant),
(words((
'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor',
'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float',
'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive',
'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float',
'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'),
Keyword.Type),
            (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
(r'generic|private', Keyword.Declaration),
(r'package', Keyword.Declaration, 'package'),
(r'array\b', Keyword.Reserved, 'array_def'),
(r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(\w+)(\s*)(:)(\s*)(constant)',
bygroups(Name.Constant, Text, Punctuation, Text,
Keyword.Reserved)),
(r'<<\w+>>', Name.Label),
(r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
(words((
'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
'while', 'xor'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(r'"[^"]*"', String),
include('attribute'),
include('numbers'),
(r"'[^']'", String.Character),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
(r"(<>|=>|:=|[()|:;,.'])", Punctuation),
(r'[*<>+=/&-]', Operator),
(r'\n+', Text),
],
'numbers': [
(r'[0-9_]+#[0-9a-f]+#', Number.Hex),
(r'[0-9_]+\.[0-9_]*', Number.Float),
(r'[0-9_]+', Number.Integer),
],
'attribute': [
(r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
],
'subprogram': [
(r'\(', Punctuation, ('#pop', 'formal_part')),
(r';', Punctuation, '#pop'),
(r'is\b', Keyword.Reserved, '#pop'),
(r'"[^"]+"|\w+', Name.Function),
include('root'),
],
'end': [
('(if|case|record|loop|select)', Keyword.Reserved),
            (r'"[^"]+"|[\w.]+', Name.Function),
            (r'\s+', Text),
(';', Punctuation, '#pop'),
],
'type_def': [
(r';', Punctuation, '#pop'),
(r'\(', Punctuation, 'formal_part'),
(r'with|and|use', Keyword.Reserved),
(r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
            (r'record\b', Keyword.Reserved, 'record_def'),
(r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
include('root'),
],
'array_def': [
(r';', Punctuation, '#pop'),
(r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
include('root'),
],
'record_def': [
(r'end record', Keyword.Reserved, '#pop'),
include('root'),
],
'import': [
(r'[\w.]+', Name.Namespace, '#pop'),
default('#pop'),
],
'formal_part': [
(r'\)', Punctuation, '#pop'),
(r'\w+', Name.Variable),
(r',|:[^=]', Punctuation),
(r'(in|not|null|out|access)\b', Keyword.Reserved),
include('root'),
],
'package': [
('body', Keyword.Declaration),
            (r'is\s+new|renames', Keyword.Reserved),
('is', Keyword.Reserved, '#pop'),
(';', Punctuation, '#pop'),
            (r'\(', Punctuation, 'package_instantiation'),
            (r'([\w.]+)', Name.Class),
include('root'),
],
'package_instantiation': [
(r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
(r'[\w.\'"]', Text),
(r'\)', Punctuation, '#pop'),
include('root'),
],
}
|
facelessuser/sublime-markdown-popups
|
st3/mdpopups/pygments/lexers/pascal.py
|
Python
|
mit
| 32,536 | 0.001014 |
# coding: utf-8
from openerp import models, api, fields
class LibraryReturnsWizard(models.TransientModel):
_name = 'library.returns.wizard'
member_id = fields.Many2one('library.member', 'Member')
    # note: Many2many's second positional argument is the relation table
    # name, so the field label must be passed as string=
    book_ids = fields.Many2many('library.book', string='Books')
@api.multi
def record_returns(self):
loan = self.env['library.book.loan']
        for rec in self:
            # use rec (the record in the current iteration), not self
            loans = loan.search(
                [('state', '=', 'ongoing'),
                 ('book_id', 'in', rec.book_ids.ids),
                 ('member_id', '=', rec.member_id.id)]
            )
loans.write({'state': 'done'})
@api.onchange('member_id')
def onchange_member(self):
loan = self.env['library.book.loan']
loans = loan.search(
[('state', '=', 'ongoing'),
('member_id', '=', self.member_id.id)]
)
self.book_ids = loans.mapped('book_id')
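# Usage sketch (assumption; the matching views are not shown in this file):
# the wizard is opened from a window action on library.member, and
# `record_returns` is bound to a form button such as
#     <button name="record_returns" type="object" string="Record returns"/>
# `onchange_member` then pre-fills book_ids with the member's ongoing loans.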
|
vileopratama/vitech
|
docs/tutorials/ebook/Odoo Development Cookbook/OdooDevelopmentCookbook_Code/Chapter06_code/Ch06_R05/some_model_ch06r05/models.py
|
Python
|
mit
| 913 | 0 |
import boto3
import collections
import datetime
import time
import sys
import botocore
ec2_client = boto3.client('ec2',region_name='ap-southeast-2')
ec2_resource = boto3.resource('ec2',region_name='ap-southeast-2')
sns = boto3.client('sns')
images_all = ec2_resource.images.filter(Owners=["self"])
today_fmt = (datetime.datetime.now() + datetime.timedelta(hours=11)).strftime('%Y-%m-%d')  # roughly convert UTC to Sydney local time
today_date = time.strptime(today_fmt, '%Y-%m-%d')
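# Assumption: a companion backup job tags the AMIs it creates with a
# 'DeleteOn' tag holding a '%Y-%m-%d' date; this handler deregisters AMIs
# whose DeleteOn date has passed and deletes their backing snapshots.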
def lambda_handler(event, context):
try:
        images_to_remove = []
        toremoveimagecount = 0
        snapshotcount = 0
        totalimagecount = 0
for image in images_all:
            if image.name.startswith('Lambda-'):
                totalimagecount += 1
            delete_date = False
            try:
                if image.tags is not None:
                    deletion_date = [
                        t.get('Value') for t in image.tags
                        if t['Key'] == 'DeleteOn'][0]
                    delete_date = time.strptime(deletion_date, "%Y-%m-%d")
            except IndexError:
                # tagged, but no DeleteOn tag present
                delete_date = False
            # only purge images carrying a DeleteOn date of today or earlier;
            # untagged images were previously compared as False <= today_date
            if delete_date and delete_date <= today_date:
images_to_remove.append(image.id)
print "============="
print "About to deregister the following AMIs:"
print images_to_remove
snapshots = ec2_client.describe_snapshots(OwnerIds=["self"])['Snapshots']
for image in images_to_remove:
toremoveimagecount += 1
print "deregistering image %s" % image
amiResponse = ec2_client.deregister_image(
DryRun=False,
ImageId=image,
)
for snapshot in snapshots:
                if image in snapshot['Description']:
snapshotcount += 1
snap = ec2_client.delete_snapshot(SnapshotId=snapshot['SnapshotId'])
print "Deleting snapshot " + snapshot['SnapshotId']
print "-------------"
result = "Deleted %d AMIs and %d corresponding snapshots" %(toremoveimagecount,snapshotcount)
print result
response = sns.publish(
TopicArn='arn:aws:sns:ap-southeast-2:352138128272:lambda_ami_backup',
Message= result,
Subject='Purge Success')
#SNS email
except botocore.exceptions.ClientError as e:
result = e.response['Error']['Message']
print result
response = sns.publish(
TopicArn='arn:aws:sns:ap-southeast-2:352138128272:lambda_ami_backup',
Message= result,
Subject='Purge Failed')
#SNS email
|
RexChenjq/AWS-Image-Backup
|
purge latest.py
|
Python
|
mit
| 2,667 | 0.009374 |
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.network.generic_bsd import GenericBsdIfconfigNetwork
class DragonFlyNetwork(GenericBsdIfconfigNetwork):
"""
This is the DragonFly Network Class.
It uses the GenericBsdIfconfigNetwork unchanged.
"""
platform = 'DragonFly'
class DragonFlyNetworkCollector(NetworkCollector):
_fact_class = DragonFlyNetwork
_platform = 'DragonFly'
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/module_utils/facts/network/dragonfly.py
|
Python
|
bsd-3-clause
| 1,202 | 0.000832 |
"""Module containing a preprocessor that converts outputs in the notebook from
one format to another.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from .base import Preprocessor
from traitlets import Unicode
class ConvertFiguresPreprocessor(Preprocessor):
"""
Converts all of the outputs in a notebook from one format to another.
"""
from_format = Unicode(help='Format the converter accepts').tag(config=True)
to_format = Unicode(help='Format the converter writes').tag(config=True)
def __init__(self, **kw):
"""
Public constructor
"""
super(ConvertFiguresPreprocessor, self).__init__(**kw)
def convert_figure(self, data_format, data):
raise NotImplementedError()
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell,
See base.py
"""
# Loop through all of the datatypes of the outputs in the cell.
for output in cell.get('outputs', []):
if output.output_type in {'execute_result', 'display_data'} \
and self.from_format in output.data \
and self.to_format not in output.data:
output.data[self.to_format] = self.convert_figure(
self.from_format, output.data[self.from_format])
return cell, resources
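# Hypothetical example subclass (not part of nbconvert) showing the intended
# extension point: declare the two MIME types and implement convert_figure().
class PreToHtmlPreprocessor(ConvertFiguresPreprocessor):
    """Illustration only: wraps plain-text outputs in <pre> tags."""
    from_format = Unicode('text/plain').tag(config=True)
    to_format = Unicode('text/html').tag(config=True)
    def convert_figure(self, data_format, data):
        return u'<pre>{0}</pre>'.format(data)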
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/nbconvert/preprocessors/convertfigures.py
|
Python
|
bsd-2-clause
| 1,450 | 0.002759 |
#!env/bin/python
"""Create the database"""
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db
import os.path
db.create_all()
if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
else:
api.version_control(SQLALCHEMY_DATABASE_URI,
SQLALCHEMY_MIGRATE_REPO,
api.version(SQLALCHEMY_MIGRATE_REPO))
|
dianvaltodorov/happy-commas
|
db_create.py
|
Python
|
mit
| 566 | 0 |
# -*- coding: utf-8 -*-
import datetime
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.conf import settings
from django import forms
from commons.decorators import submitted_applicant_required
from commons.decorators import within_submission_deadline
from commons.utils import submission_deadline_passed, redirect_to_deadline_error, validate_phone_number
from application.views.form_views import prepare_major_form
from application.forms.handlers import handle_major_form
from application.forms.handlers import assign_major_pref_to_applicant
from application.forms.handlers import handle_education_form
from application.forms.handlers import handle_address_form
from application.forms.handlers import handle_personal_info_form
from application.forms import EducationForm, SingleMajorPreferenceForm
from application.models import Applicant, MajorPreference, Major, PersonalInfo
from commons.models import Log
from commons.local import APP_TITLE_FORM_CHOICES
from commons.email import send_sub_method_change_notice_by_email
from application.forms.widgets import ThaiSelectDateWidget
def update_major_single_choice(request):
applicant = request.applicant
if request.method == 'POST':
if 'cancel' not in request.POST:
form = SingleMajorPreferenceForm(request.POST)
if form.is_valid():
assign_major_pref_to_applicant(applicant,
[form.cleaned_data['major'].number])
request.session['notice'] = 'การแก้ไขอันดับสาขาวิชาเรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
else:
request.session['notice'] = 'อันดับสาขาวิชาไม่ถูกแก้ไข'
return HttpResponseRedirect(reverse('status-index'))
else:
        if applicant.has_major_preference():
            pref = applicant.preference.majors
            if len(pref) == 0:
                prev_major = None
            else:
                majors = dict([(int(m.number), m) for m in Major.get_all_majors()])
                prev_major = majors[pref[0]]
            # guard against an empty preference list: fall back to a blank form
            if prev_major is not None:
                form = SingleMajorPreferenceForm(initial={'major': prev_major.id})
            else:
                form = SingleMajorPreferenceForm()
        else:
            form = SingleMajorPreferenceForm()
# add step info
form_data = {}
form_data['step_name'] = 'แก้ไขอันดับสาขาวิชา'
form_data['can_log_out'] = True
form_data['form'] = form
return render_to_response('application/update/majors_single.html',
form_data)
def update_majors_as_major_lists(request):
"""
WARNINGS:
Unused in the current version.
This is for the case when the number of choices is small.
"""
if settings.MAX_MAJOR_RANK == 1:
return update_major_single_choice(request)
applicant = request.applicant
if request.method == 'POST':
if 'cancel' not in request.POST:
result, major_list, errors = handle_major_form(request)
if result:
request.session['notice'] = 'การแก้ไขอันดับสาขาวิชาเรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
else:
request.session['notice'] = 'อันดับสาขาวิชาไม่ถูกแก้ไข'
return HttpResponseRedirect(reverse('status-index'))
pref_ranks = MajorPreference.major_list_to_major_rank_list(major_list)
form_data = prepare_major_form(applicant, pref_ranks, errors)
else:
if applicant.has_major_preference():
pref_ranks = applicant.preference.to_major_rank_list()
else:
            pref_ranks = [None] * settings.MAX_MAJOR_RANK  # was len(majors); 'majors' is undefined in this scope
form_data = prepare_major_form(applicant, pref_ranks)
# add step info
form_data['step_name'] = 'แก้ไขอันดับสาขาวิชา'
form_data['can_log_out'] = True
return render_to_response('application/update/majors.html',
form_data)
@within_submission_deadline
@submitted_applicant_required
def update_majors(request):
max_major_rank = settings.MAX_MAJOR_RANK
if max_major_rank == 1:
return update_major_single_choice(request)
from form_views import prepare_major_selections
applicant = request.applicant
form_data = { 'majors': Major.get_all_majors() }
if request.method == 'POST':
if 'cancel' not in request.POST:
result, pref_list, errors = handle_major_form(request,
max_major_rank)
log = Log.create("Update major pref: %s from %s" %
(applicant.id,request.META['REMOTE_ADDR']))
if result:
request.session['notice'] = 'การแก้ไขอันดับสาขาวิชาเรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
selections = prepare_major_selections(pref_list, max_major_rank)
form_data['errors'] = errors
else:
request.session['notice'] = 'อันดับสาขาวิชาไม่ถูกแก้ไข'
return HttpResponseRedirect(reverse('status-index'))
else:
if applicant.has_major_preference():
pref_list = applicant.preference.get_major_list()
else:
pref_list = [None] * max_major_rank
selections = prepare_major_selections(pref_list, max_major_rank)
# add step info
form_data['step_name'] = 'แก้ไขอันดับสาขาวิชา'
form_data['can_log_out'] = True
form_data['selections'] = selections
form_data['max_major_rank'] = max_major_rank
return render_to_response('application/update/majors.html',
form_data)
@within_submission_deadline
@submitted_applicant_required
def update_education(request):
applicant = request.applicant
old_education = applicant.get_educational_info_or_none()
result, form = handle_education_form(request, old_education)
if result:
request.session['notice'] = 'การแก้ไขข้อมูลการศึกษาเรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
elif 'cancel' in request.POST:
request.session['notice'] = 'ข้อมูลการศึกษาไม่ถูกแก้ไข'
return HttpResponseRedirect(reverse('status-index'))
return render_to_response('application/update/education.html',
{'form': form,
'can_log_out': True,
'applicant': applicant })
@within_submission_deadline
@submitted_applicant_required
def update_address(request):
applicant = request.applicant
if not applicant.has_address():
return HttpResponseForbidden()
result, hform, cform = handle_address_form(request)
if result:
request.session['notice'] = 'การแก้ไขที่อยู่เรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
elif 'cancel' in request.POST:
request.session['notice'] = 'ข้อมูลที่อยู่ไม่ถูกแก้ไข'
return HttpResponseRedirect(reverse('status-index'))
return render_to_response('application/update/address.html',
{ 'home_address_form': hform,
'contact_address_form': cform,
'can_log_out': True,
'applicant': applicant })
THIS_YEAR = datetime.date.today().year
APPLICANT_BIRTH_YEARS = range(THIS_YEAR-30,THIS_YEAR-10)
class PersonalInfoWithFullnameForm(forms.Form):
title = forms.ChoiceField(choices=APP_TITLE_FORM_CHOICES)
first_name = forms.CharField(label=u'ชื่อ')
last_name = forms.CharField(label=u'นามสกุล')
birth_date = forms.DateField(
widget=ThaiSelectDateWidget(years=APPLICANT_BIRTH_YEARS),
label=u"วันเกิด")
phone_number = forms.CharField(label=u'หมายเลขโทรศัพท์')
nationality = forms.CharField(label="สัญชาติ")
ethnicity = forms.CharField(label="เชื้อชาติ")
def clean_phone_number(self):
if not validate_phone_number(self.cleaned_data['phone_number']):
raise forms.ValidationError("หมายเลขโทรศัพท์ไม่ถูกต้อง")
return self.cleaned_data['phone_number']
@within_submission_deadline
@submitted_applicant_required
def update_personal_info(request):
applicant = request.applicant
old_personal_info = applicant.get_personal_info_or_none()
if not old_personal_info:
return HttpResponseRedirect(reverse('status-index'))
    if request.method == 'POST' and 'cancel' not in request.POST:
form = PersonalInfoWithFullnameForm(
request.POST,
initial={
'birth_date': old_personal_info.birth_date,
'nationality': old_personal_info.nationality,
'ethnicity': old_personal_info.ethnicity,
'phone_number': old_personal_info.phone_number,
'title': applicant.title,
'first_name': applicant.first_name,
'last_name': applicant.last_name
})
if form.is_valid():
old_personal_info.birth_date = form.cleaned_data['birth_date']
old_personal_info.nationality = form.cleaned_data['nationality']
old_personal_info.ethnicity = form.cleaned_data['ethnicity']
old_personal_info.phone_number = form.cleaned_data['phone_number']
old_personal_info.save()
applicant.title = form.cleaned_data['title']
applicant.first_name = form.cleaned_data['first_name']
applicant.last_name = form.cleaned_data['last_name']
applicant.save()
request.session['notice'] = 'การแก้ไขข้อมูลการส่วนตัวเรียบร้อย'
return HttpResponseRedirect(reverse('status-index'))
elif 'cancel' in request.POST:
request.session['notice'] = 'ข้อมูลส่วนตัวไม่ถูกเปลี่ยนแปลง'
return HttpResponseRedirect(reverse('status-index'))
else:
form = PersonalInfoWithFullnameForm(
initial={
'birth_date': old_personal_info.birth_date,
'nationality': old_personal_info.nationality,
'ethnicity': old_personal_info.ethnicity,
'phone_number': old_personal_info.phone_number,
'title': applicant.title,
'first_name': applicant.first_name,
'last_name': applicant.last_name
})
return render_to_response('application/update/personal.html',
{'form': form,
'can_log_out': True,
'applicant': applicant })
@within_submission_deadline
@submitted_applicant_required
def update_to_postal_submission(request):
return HttpResponseRedirect(reverse('status-index'))
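    # NOTE: the unconditional return above disables this view; the code
    # below (reverting a postal submission) is currently unreachable.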
applicant = request.applicant
if request.method == 'POST':
if 'cancel' not in request.POST:
submission_info = applicant.submission_info
submission_info.delete()
applicant.doc_submission_method = Applicant.UNDECIDED_METHOD
applicant.is_submitted = False
applicant.save()
request.session['notice'] = 'คุณได้ยกเลิกการเลือกส่งหลักฐานทางไปรษณีย์แล้ว อย่าลืมว่าคุณจะต้องยืนยันข้อมูลอีกครั้ง'
send_sub_method_change_notice_by_email(applicant)
return HttpResponseRedirect(reverse('upload-index'))
else:
request.session['notice'] = 'วิธีการส่งยังคงเป็นแบบไปรษณีย์ไม่เปลี่ยนแปลง'
return HttpResponseRedirect(reverse('status-index'))
else:
return render_to_response('application/update/postal_sub.html',
{ 'can_log_out': False,
'applicant': applicant })
|
jittat/adm2
|
application/views/update.py
|
Python
|
agpl-3.0
| 12,858 | 0.003085 |
# Project Euler 144: count how many times a laser beam reflects inside
# the "white cell" ellipse 4x^2 + y^2 = 100 before escaping through the
# small gap (|x| <= 0.01, y > 0) at the top.
inside = lambda x, y: 4*x*x + y*y <= 100
def coll(sx, sy, dx, dy):
    # binary-search the largest step m along (dx, dy) that stays inside
    # the ellipse: the resulting point is the next wall collision
    m = 0
    for p in range(32):
        m2 = m + 2**(-p)
        if inside(sx + dx * m2, sy + dy * m2):
            m = m2
    return (sx + dx*m, sy + dy*m)
def norm(x, y):
    l = (x*x + y*y)**0.5
    return (x/l, y/l)
sx, sy = 0, 10.1     # the beam enters just above the gap
dx, dy = 1.4, -19.7  # the first segment ends at (1.4, -9.6)
for I in range(999):
    sx, sy = coll(sx, sy, dx, dy)
    if sy > 0 and abs(sx) <= 0.01:
        print(I)
        break
    # mirror the direction vector in the tangent (1, -4x/y) at the hit point
    mx, my = norm(1, -4*sx/sy)
    d = mx*dx + my*dy
    dx, dy = -dx + 2 * mx * d, -dy + 2 * my * d
|
jokkebk/euler
|
p144.py
|
Python
|
mit
| 538 | 0.013011 |
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions
import numpy as np
model = ResNet50(weights='imagenet')  # pretrained ImageNet classifier
img_path = 'elephant.jpg'
img = image.load_img(img_path, target_size=(224, 224))  # ResNet50's input size
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)  # add a batch dimension: (1, 224, 224, 3)
x = preprocess_input(x)  # channel reordering and mean subtraction for ResNet50
model.summary()
preds = model.predict(x)
# decode_predictions returns [(imagenet_id, label, score), ...] per sample
print('Predicted:', decode_predictions(preds, top=3)[0])
|
lukas/ml-class
|
examples/keras-transfer/resnet50-inspect.py
|
Python
|
gpl-2.0
| 517 | 0.001934 |
# pylint: skip-file
# flake8: noqa
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
    # Pylint limits functions to 5 arguments by default.
    # pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
updated = False
if content is not None:
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
updated = True
elif edits is not None:
results = Yedit.process_edits(edits, yed)
if results['changed']:
updated = True
if updated:
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-p')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None, field_selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
if field_selector is not None:
cmd.append('--field-selector={}'.format(field_selector))
# Name cannot be used with selector or field_selector.
if selector is None and field_selector is None and name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']:  # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(str(contents))
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# horrible hack to get openshift version in Openshift 3.2
        # By default "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
# "v3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = version[1:4]
return versions_dict
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import rpm
transaction_set = rpm.TransactionSet()
rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
return rpmquery.count() > 0
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
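    # Example: 'metadata' and 'status' are treated as autogenerated and are
    # skipped, so
    #   Utils.check_def_equal({'spec': {'replicas': 1}},
    #                         {'spec': {'replicas': 1}, 'status': {'phase': 'Active'}})
    # returns True.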
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
        ''' return the options hash as cli params in a list
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] is not None or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
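    # Example (hypothetical option values):
    #   cfg = OpenShiftCLIConfig('router', 'default', '/etc/origin/master/admin.kubeconfig',
    #                            {'replicas': {'value': 2, 'include': True},
    #                             'service_account': {'value': 'router', 'include': True}})
    #   cfg.to_option_list()  # -> ['--replicas=2', '--service-account=router']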
|
abutcher/openshift-ansible
|
roles/lib_openshift/src/lib/base.py
|
Python
|
apache-2.0
| 21,696 | 0.001244 |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='Unirest',
version='1.1.7',
author='Mashape',
author_email='opensource@mashape.com',
packages=['unirest'],
url='https://github.com/Mashape/unirest-python',
license='LICENSE',
description='Simplified, lightweight HTTP client library',
install_requires=[
"poster >= 0.8.1"
]
)
|
abhishekgahlot/unirest-python
|
setup.py
|
Python
|
mit
| 431 | 0 |
# -*- coding: utf-8 -*-
"""Core Service Interface Definitions
osid version 3.0.0
The Open Service Interface Definitions (OSIDs) is a service-based
architecture to promote software interoperability. The OSIDs are a large
suite of interface contract specifications that describe the integration
points among services and system components for the purpose of creating
choice among a variety of different and independently developed
applications and systems, allowing independent evolution of software
components within a complex system, and federated service providers.
The OSIDs were initially developed in 2001 as part of the MIT Open
Knowledge Initiative Project funded by the Andrew W. Mellon Foundation
to provide an architecture for higher education learning systems. OSID
3K development began in 2006 to redesign the capabilities of the
specifications to apply to a much broader range of service domains and
integration challenges among both small and large-scale enterprise
systems.
The ``osid`` package defines the building blocks for the OSIDs which are
defined in packages for their respective services. This package defines
the top-level interfaces used by all the OSIDs as well as specification
metadata and the OSID Runtime interface.
Meta Interfaces and Enumerations
* ``OSID:`` an enumeration listing the OSIDs defined in the
specification.
* ``Syntax:`` an enumeration listing primitive types
* ``Metadata:`` an interface for describing data constraints on a data
element
Interface Behavioral Markers
Interface behavioral markers are used to tag a behavioral pattern of the
interface used to construct other object interfaces.
* ``OsidPrimitive:`` marks an OSID interface used as a primitive. OSID
    primitives may take the form of interfaces if not bound to a language
primitive. Interfaces used as primitives are marked to indicate that
the underlying objects may be constructed by an OSID Consumer and an
OSID Provider must honor any OSID primitive regardless of its
origin.
* ``Identifiable:`` Marks an interface identifiable by an OSID ``Id.``
* ``Extensible:`` Marks an interface as extensible through
``OsidRecords.``
* ``Browsable:`` Marks an interface as providing ``Property``
inspection for its ``OsidRecords.``
* ``Suppliable:`` Marks an interface as accepting data from an OSID
Consumer.
* ``Temporal:`` Marks an interface that has a lifetime with begin and
end dates.
* ``Subjugateable:`` Marks an interface that is dependent on another
object.
* ``Aggregateable:`` Marks an interface that contains other objects
normally related through other services.
* ``Containable:`` Marks an interface that contains a recursive
reference to itself.
* ``Sourceable:`` Marks an interface as having a provider.
* ``Federateable:`` Marks an interface that can be federated using the
OSID Hierarchy pattern.
* ``Operable:`` Marks an interface as responsible for performing
    operations or tasks. ``Operables`` may be enabled or disabled.
Abstract service Interfaces
* ``OsidProfile:`` Defines interoperability methods used by
OsidManagers.
* ``OsidManager:`` The entry point into an OSID and provides access to
``OsidSessions.``
* ``OsidProxyManager:`` Another entry point into an OSID providing a
means for proxying data from a middle tier application server to an
underlying OSID Provider.
* ``OsidSession`` : A service interface accessible from an
``OsidManager`` that defines a set of methods for an aspect of a
service.
Object-like interfaces are generally defined along lines of
interoperability separating issues of data access from data management
and searching. These interfaces may also implement any of the abstract
behavioral interfaces listed above. The OSIDs do not adhere to a DAO/DTO
model in its service definitions in that there are service methods
defined on the objects (although they can be implemented using DTOs if
desired). For the sake of an outline, we'll pretend they are data
objects.
* ``OsidObject:`` Defines object data. ``OsidObjects`` are accessed
from ``OsidSessions.`` ``OsidObjects`` are part of an interface
hierarchy whose interfaces include the behavioral markers and a
variety of common ``OsidObjects.`` All ``OsidObjects`` are
``Identifiable,`` ``Extensible,`` and have a ``Type.`` There are
several variants of ``OsidObjects`` that indicate a more precise
behavior.
* ``OsidObjectQuery:`` Defines a set of methods to query an OSID for
its ``OsidObjects`` . An ``OsidQuery`` is accessed from an
``OsidSession.``
* ``OsidObjectQueryInspector:`` Defines a set of methods to examine an
``OsidQuery.``
* ``OsidObjectForm:`` Defines a set of methods to create and update
data. ``OsidForms`` are accessed from ``OsidSessions.``
* ``OsidObjectSearchOrder:`` Defines a set of methods to order search
results. ``OsidSearchOrders`` are accessed from ``OsidSessions.``
Most objects are or are derived from ``OsidObjects``. Some object
interfaces may not implement ``OsidObject`` but instead derive directly
from interface behavioral markers. Other ``OsidObjects`` may include
interface behavioral markers to indicate functionality beyond a plain
object. Several categories of ``OsidObjects`` have been defined to
cluster behaviors to semantically distinguish their function in the
OSIDs.
* ``OsidCatalog:`` At the basic level, a catalog represents a
collection of other ``OsidObjects.`` The collection may be physical
or virtual and may be federated to build larger ``OsidCatalogs``
using hierarchy services. ``OsidCatalogs`` may serve as a control
point to filter or constrain the ``OsidObjects`` that may be visible
or created. Each ``OsidCatalog`` may have its own provider identifty
apart from the service provider.
* ``OsidRelationship:`` Relates two ``OsidObjects.`` The
``OsidRelationship`` represents the edge in a graph that may have
its own relationship type and data. ``OsidRelationships`` are
``Temporal`` in that they have a time in which the relationship came
into being and a time when the relationship ends.
* ``OsidRule:`` Defines an injection point for logic. An ``OsidRule``
may represent some constraint, evaluation, or execution. While
authoring of ``OsidRules`` is outside the scope of the OSIDs, an
    ``OsidRule`` provides the means to identify the rule and map it to
certain ``OsidObjects`` to effect behavior of a service.
The most basic operations of an OSID center on retrieval, search, create
& update, and notifications on changes to an ``OsidObject``. The more
advanced OSIDs model a system behavior where a variety of implicit
relationships, constraints and rules come into play.
* ``OsidGovernator:`` Implies an activity or operation exists in the
OSID Provider acting as an ``Operable`` point for a set of rules
governing related ``OsidObjects.`` The ``OsidGovernator`` represents
an engine of sorts in an OSID Provider and may have its own provider
identity.
* ``OsidCompendium`` : ``OsidObjects`` which are reports or summaries
based on transactional data managed elsewhere.
Managing data governing rules occurs in a separate set of interfaces
from the effected ``OsidObjects`` (and often in a separate package).
This allows for a normalized set of rules managing a small set of
control points in a potentially large service.
* ``OsidEnabler:`` A managed control point to enable or disable the
operation or effectiveness of another ``OsidObject`` . Enablers
create a dynamic environment where behaviors and relationships can
    come and go based on rule evaluations.
* ``OsidConstrainer:`` A managed control point to configure the
constraints on the behavior of another ``OsidObject.``
* ``OsidProcessor:`` A managed control point to configure the behavior
    of another ``OsidObject`` where some kind of processing is implied.
Other Abstract Interfaces
* ``OsidSearch:`` Defines set of methods to manage search options for
performing searches.
* ``OsidSearchResults:`` Defines a set of methods to examine search
results.
* ``OsidReceiver:`` Defines a set of methods invoked for asynchronous
notification.
* ``OsidList:`` Defines a set of methods to sequentially access a set
of objects.
* ``OsidNode:`` An interface used by hierarchy nodes.
* ``OsidCondition:`` An input or "statement of fact" into an
``OsidRule`` evaluation.
* ``OsidInput:`` An input of source data into an ``OsidRule``
processor.
* ``OsidResult:`` The output from processing an ``OsidRule.``
* ``OsidRecord:`` An interface marker for an extension to another
    interface. ``OsidRecords`` are negotiated using OSID ``Types.``
* ``Property:`` Maps a name to a value. Properties are available in
OSID objects to provide a simplified view of data that may exist
within a typed interface.
* ``PropertyList:`` A list of properties.
Runtime
* ``OsidRuntimeProfile:`` The ``OsidProfile`` for the runtime
``OsidManager.``
* ``OsidRuntimeManager:`` The OSID Runtime service.
Abstract Flow
Generally, these definitions are abstract and not accessed directly. They
are used as building blocks to define interfaces in the OSIDs
themselves. OSIDs derive most of their definitions from a definition in
the osid package. The methods that are defined at this abstract level
versus the methods defined directly in a specific OSID is determined by
the typing in the method signatures. The osid package interfaces are a
means of ensuring consistency of common methods and not designed to
facilitate object polymorphism among different OSIDs. A language binder
may elect to alter the interface hierarchy presented in this
specification and a provider need not parallel these interfaces in their
implementations.
The flow of control through any OSID can be described in terms of these
definitions. An ``OsidManager`` or ``OsidProxyManager`` is retrieved
from the ``OsidRuntimeManager`` for a given service. Both types of
managers share an interface for describing what they support in the
``OsidProfile``.
``OsidSessions`` are created from the ``OsidManager``. ``OsidSessions``
tend to be organized along clusters of like-functionality. Lookup-
oriented sessions retrieve ``OsidObjects``. Return of multiple
``OsidObjects`` is done via the ``OsidList``. Search-oriented sessions
retrieve ``OsidObjects`` through searches provided through the
``OsidQuery`` and ``OsidSearch`` interfaces.
Administrative-oriented sessions create and update ``OsidObjects`` using
the ``OsidForm`` interface. The ``OsidForm`` makes available
``Metadata`` to help define its rules for setting and changing various
data elements.
``OsidObjects`` can be organized within ``OsidCatalogs``. An
``OsidCatalog`` is hierarchical and can be traversed through an
``OsidNode``. An ``OsidQuery`` or an ``OsidSearchOrder`` may be mapped
to a dynamic ``OsidCatalog``. Such a query may be examined using an
``OsidQueryInspector``.
A notification session provides a means for subscribing to events, "a
new object has been created", for example, and these events are received
from an ``OsidReceiver``.
Meta OSID Specification
The OSID Specification framework defines the interace and method
structures as well as the language primitives and errors used throughout
the OSIDs. The OSID Specifications are defined completely in terms of
interfaces and the elements specified in the meta specification.
Language Primitives
The meta OSID Specification enumerates the allowable language primitives
that can be used in OSID method signatures. Parameters and returns in
OSID methods may be specified in terms of other OSID interfaces or using
one of these primitives. An OSID Binder translates these language
primitives into an appropriate language primitive counterpart.
An OSID Primitive differs from a language primitive. An OSID Primitive
is an interface used to describe a more complex structure than a simple
language primitive can support. Both OSID Primitives and language
primitives have the same behavior in the OSIDs in that there is no
service encapsulation present, allowing OSID Primitives to be constructed
by an OSID Consumer.
Errors
OSID methods are required to return a value, if specified, or return one
of the errors specified in the method signature. The meta package
defines the set of errors that a method signature may use.
Errors should result when the contract of the interface has been violated
or cannot be fulfilled and it is necessary to disrupt the flow of
control for a consumer. Different errors are specified where it is
foreseen that a consumer may wish to execute a different action without
violating the encapsulation of internal provider operations. Such
actions do not include debugging or other detailed information which is
the responsibility of the provider to manage. As such, the number of
errors defined across all the interfaces is kept to a minimum and the
context of the error may vary from method to method in accordance with
the specification.
Errors are categorized to convey the audience to which the error
pertains.
* User Errors: Errors which may be the result of a user operation
intended for the user.
* Operational Errors: Errors which may be the result of a system or
some other problem intended for the user.
* Consumer Contract Errors: Software errors resulting in the use of
the OSIDs by an OSID Consumer intended for the application
programmer. These also include integration problems where the OSID
Consumer bypassed a method to test for support of a service or type.
* Provider Contract Errors: Software errors in the use of an OSID by
an OSID Provider intended for an implementation programmer.
Compliance
OSID methods include a compliance statement indicating whether a method
is required or optional to implement. An optional OSID method is one
that defines an UNIMPLEMENTED error and there is a corresponding method
to test for the existence of an implementation.
OSID 3K Acknowledgements
* Tom Coppeto (Editor & Architect)
* Scott Thorne (Architect)
The authors gratefully acknowledge the following individuals for their
time, wisdom, and contributions in shaping these specifications.
* Adam Franco, Middlebury College
* Jeffrey Merriman, Massachusetts Institute of Technology
* Charles Shubert, Massachusetts Institute of Technology
* Prof. Marc Alier, Universitat Politècnica de Catalunya
* Joshua Aresty, Massachusetts Institute of Technology
* Fabrizio Cardinali, Giunti Labs
* Pablo Casado, Universitat Politècnica de Catalunya
* Alex Chapin, Middlebury College
* Craig Counterman, Massachusetts Institute of Technology
* Francesc Santanach Delisau, Universitat Oberta de Catalunya
* Prof. Llorenç Valverde Garcia, Universitat Oberta de Catalunya
* Catherine Iannuzzo, Massachusetts Institute of Technology
* Jeffrey Kahn, Verbena Consulting
* Michael Korcynski, Tufts University
* Anoop Kumar, Tufts University
* Eva de Lera, Universitat Oberta de Catalunya
* Roberto García Marrodán, Universitat Oberta de Catalunya
* Andrew McKinney, Massachusetts Institute of Technology
* Scott Morris, Apple
* Mark Norton, Nolaria Consulting
* Mark O'Neill, Dartmouth College
* Prof. Charles Severance, University of Michigan
* Stuart Sim, Sun Microsystems/Common Need
* Colin Smythe, IMS Global Learning Consortium
* George Ward, California State University
* Peter Wilkins, Massachusetts Institute of Technology
* Norman Wright, Massachusetts Institute of Technology
O.K.I. Acknowledgements
OSID 3K is based on the O.K.I. OSIDs developed as part of the MIT Open
Knowledge Initiative (O.K.I) project 2001-2004.
* Vijay Kumar, O.K.I. Principal Investigator, Massachusetts Institute
of Technology
* Jeffrey Merriman, O.K.I. Project Director, Massachusetts Institute of
Technology
* Scott Thorne, O.K.I. Chief Architect, Massachusetts Institute of
Technology
* Charles Shubert, O.K.I. Architect, Massachusetts Institute of
Technology
* Lois Brooks, Project Coordinator, Stanford University
* Mark Brown, O.K.I. Project Manager, Massachusetts Institute of
Technology
* Bill Fitzgerald, O.K.I. Finance Manager, Massachusetts Institute of
Technology
* Judson Harward, Educational Systems Architect, Massachusetts
Institute of Technology
* Charles Kerns, Educational Systems Architect, Stanford University
* Jeffrey Kahn, O.K.I. Partner, Verbena Consulting
* Judith Leonard, O.K.I. Project Administrator, Massachusetts
Institute of Technology
* Phil Long, O.K.I. Outreach Coordinator, Massachusetts Institute of
Technology
* Cambridge University, O.K.I. Core Collaborator
* Dartmouth College, O.K.I. Core Collaborator
* Massachusetts Institute of Technology, O.K.I. Core Collaborator
* North Carolina State University, O.K.I. Core Collaborator
* Stanford University, O.K.I. Core Collaborator
* University of Michigan, O.K.I. Core Collaborator
* University of Pennsylvania, O.K.I. Core Collaborator
* University of Wisconsin, Madison, O.K.I. Core Collaborator
"""
|
birdland/dlkit-doc
|
dlkit/mongo/osid/summary_doc.py
|
Python
|
mit
| 17,271 | 0 |
"""
Django settings for {{ project_name }} project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
CONFIG_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = os.path.dirname(CONFIG_DIR)
# Absolute filesystem path to the apps folder:
APPS_DIR = os.path.join(SITE_ROOT, '{{project_name}}')
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
sys.path.append(SITE_ROOT)
########## END PATH CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key should only be used for development and testing.
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = r"{{ secret_key }}"
########## END SECRET CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
########## END DEBUG CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
########## END SITE CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Your Name', 'your_email@example.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
os.path.normpath(os.path.join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
# Third Party apps go here.
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
'django_extensions', # shell_plus
)
# Apps specific for this project go here.
LOCAL_APPS = (
'{{project_name}}.users', # custom users app
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
# MIGRATIONS CONFIGURATION
MIGRATION_MODULES = {
'sites': '{{project_name}}.contrib.sites.migrations'
}
########## END MIGRATIONS MODULES
########## TEMPLATE CONFIGURATION
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(APPS_DIR, 'templates'),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## END WSGI CONFIGURATION
########## INTERNATIONAL CONFIGURATION
# https://docs.djangoproject.com/en/1.8/topics/i18n/
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'Europe/Madrid'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'es-ES'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END INTERNATIONAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = os.path.normpath(os.path.join(APPS_DIR, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = os.path.normpath(os.path.join(APPS_DIR, 'staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
os.path.normpath(os.path.join(APPS_DIR, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
|
jmorenobl/django-template-project-1.8
|
project_name_project/config/settings/base.py
|
Python
|
mit
| 9,481 | 0.004324 |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._ast_to_string import ast_to_string
from ._clang_format import clang_format
from ._graphs import compute_condensation_in_topological_order
from ._ir_to_string import ir_to_string
|
google/tmppy
|
_py2tmp/utils/__init__.py
|
Python
|
apache-2.0
| 784 | 0 |
import numpy as np
import active_subspaces as ac
def borehole(xx):
#each row of xx should be [rw, r, Tu, Hu, Tl, Hl, L, Kw] in the normalized space
#returns column vector of borehole function at each row of inputs
x = xx.copy()
x = np.atleast_2d(x)
M = x.shape[0]
    #unnormalize inputs
xl = np.array([63070, 990, 63.1, 700, 1120, 9855])
xu = np.array([115600, 1110, 116, 820, 1680, 12045])
x[:,2:] = ac.utils.misc.BoundedNormalizer(xl, xu).unnormalize(x[:,2:])
x[:,0] = .0161812*x[:,0] + .1
x[:,1] = np.exp(1.0056*x[:,1] + 7.71)
rw = x[:,0]; r = x[:,1]; Tu = x[:,2]; Hu = x[:,3]
Tl = x[:,4]; Hl = x[:,5]; L = x[:,6]; Kw = x[:,7]
pi = np.pi
return (2*pi*Tu*(Hu - Hl)/(np.log(r/rw)*(1 + 2*L*Tu/(np.log(r/rw)*rw**2*Kw) + Tu/Tl))).reshape(M, 1)
def borehole_grad(xx):
#each row of xx should be [rw, r, Tu, Hu, Tl, Hl, L, Kw] in the normalized space
#returns matrix whose ith row is gradient of borehole function at ith row of inputs
x = xx.copy()
x = np.atleast_2d(x)
M = x.shape[0]
    #unnormalize inputs
xl = np.array([63070, 990, 63.1, 700, 1120, 9855])
xu = np.array([115600, 1110, 116, 820, 1680, 12045])
x[:,2:] = ac.utils.misc.BoundedNormalizer(xl, xu).unnormalize(x[:,2:])
x[:,0] = .0161812*x[:,0] + .1
x[:,1] = np.exp(1.0056*x[:,1] + 7.71)
rw = x[:,0]; r = x[:,1]; Tu = x[:,2]; Hu = x[:,3]
Tl = x[:,4]; Hl = x[:,5]; L = x[:,6]; Kw = x[:,7]
pi = np.pi
Q = 1 + 2*L*Tu/(np.log(r/rw)*rw**2*Kw) + Tu/Tl #Convenience variable
l = np.log(r/rw) #Convenience variable
dfdrw = (-2*pi*Tu*(Hu - Hl)*(Q*l)**-2*(-Q/rw - l*2*L*Tu/Kw*(l*rw**2)**-2*(-rw + 2*rw*l)))[:,None]
dfdr = (-2*pi*Tu*(Hu - Hl)*(l*Q)**-2*(Q/r - 2*L*Tu/(r*rw**2*Kw*l)))[:,None]
dfdTu = (2*pi*(Hu - Hl)/(l*Q) - 2*pi*Tu*(Hu - Hl)*(l*Q)**-2*(l*(2*L/(l*rw**2*Kw)+1./Tl)))[:,None]
dfdHu = (2*pi*Tu/(l*Q))[:,None]
dfdTl = (2*pi*Tu*(Hu - Hl)*(l*Q)**-2*l*Tu/Tl**2)[:,None]
dfdHl = (-2*pi*Tu/(l*Q))[:,None]
dfdL = (-2*pi*Tu*(Hu - Hl)*(l*Q)**-2*2*Tu/(rw**2*Kw))[:,None]
dfdKw = (2*pi*Tu*(Hu - Hl)*(l*Q)**-2*2*L*Tu/(rw**2*Kw**2))[:,None]
#The gradient components must be scaled in accordance with the chain rule: df/dx = df/dy*dy/dx
r = np.log(r); r = ((r - 7.71)/1.0056).reshape(M, 1)
return np.hstack((dfdrw*.0161812, dfdr*1.0056*np.exp(1.0056*r + 7.71), dfdTu*(115600 - 63070)/2., dfdHu*(1110 - 990)/2.,\
dfdTl*(116 - 63.1)/2., dfdHl*(820 - 700)/2., dfdL*(1680 - 1120)/2., dfdKw*(12045 - 9855)/2.))
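# --- Hypothetical usage sketch (not part of the upstream file) ---
# Sanity-checks borehole_grad against a central finite difference of
# borehole at one random point of the normalized input cube [-1, 1]^8;
# if the chain-rule scaling above is right, the two agree to roughly
# finite-difference precision.
if __name__ == '__main__':
    np.random.seed(0)
    x0 = np.random.uniform(-1, 1, (1, 8))
    grad = borehole_grad(x0)[0]        # analytic gradient, shape (8,)
    h = 1e-6
    fd = np.zeros(8)
    for i in range(8):
        xp = x0.copy(); xp[0, i] += h
        xm = x0.copy(); xm[0, i] -= h
        fd[i] = float(borehole(xp) - borehole(xm)) / (2*h)
    print(np.max(np.abs(fd - grad)))   # expect a tiny number (FD noise)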
|
paulcon/active_subspaces
|
tutorials/test_functions/borehole/borehole_functions.py
|
Python
|
mit
| 2,595 | 0.035067 |
#!/usr/bin/env python
f = open("repair.log", "r");
lines = f.readlines();
cnt = 0;
for line in lines:
tokens = line.strip().split();
if (len(tokens) > 3):
if (tokens[0] == "Total") and (tokens[1] == "return"):
cnt += int(tokens[3]);
if (tokens[0] == "Total") and (tokens[2] == "different") and (tokens[3] == "repair"):
cnt += int(tokens[1]);
print "Total size: " + str(cnt);
|
jyi/ITSP
|
prophet-gpl/tools/return_counter.py
|
Python
|
mit
| 423 | 0.018913 |
"""djangochat URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^', include('chatdemo.urls')),
url(r'^admin/', admin.site.urls),
]
|
ploggingdev/djangochat
|
djangochat/urls.py
|
Python
|
gpl-3.0
| 817 | 0 |
"""
Create outset.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.creation import lineation
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities import intercircle
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
globalExecutionOrder = 80
def getManipulatedPaths(close, loop, prefix, sideLength, xmlElement):
"Get outset path."
radius = lineation.getStrokeRadiusByPrefix(prefix, xmlElement )
return intercircle.getInsetLoopsFromVector3Loop(loop, -radius)
def processXMLElement(xmlElement):
"Process the xml element."
lineation.processXMLElementByFunction(getManipulatedPaths, xmlElement)
|
natetrue/ReplicatorG
|
skein_engines/skeinforge-40/fabmetheus_utilities/geometry/manipulation_paths/_outset.py
|
Python
|
gpl-2.0
| 1,041 | 0.009606 |
from django.db import models
class Article(models.Model):
posttitle = models.TextField(default="Post")
post = models.TextField()
piclink = models.TextField(blank=True)
pub_date = models.DateTimeField(auto_now_add=True)
class BlogPost(Article):
def __str__(self):
return self.posttitle
class PagePost(Article):
parentpage = models.ForeignKey('Page', null=True)
def __str__(self):
        return str(self.parentpage)+" "+self.posttitle
class Page(models.Model):
page_index = models.IntegerField(default=0)
name = models.CharField(max_length=200, unique=True)
def __str__(self):
return self.name
class Comment(models.Model):
name=models.CharField(max_length=20, blank=False)
email=models.CharField(max_length=120, blank=False)
text=models.CharField(max_length=512, blank=False)
parent_article=models.ForeignKey('BlogPost', null=False)
pub_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.email+" "+self.parent_article.__str__()
|
GabMus/simpelblog
|
blog/models.py
|
Python
|
gpl-3.0
| 969 | 0.034056 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All Rights Reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
""" This example shows spatial interpolation of images. """
import numpy as np
from glumpy import app, gl, gloo, data, library
vertex = """
attribute vec2 position;
attribute vec2 texcoord;
attribute float interpol;
varying vec2 v_texcoord;
varying float v_interpol;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
v_texcoord = texcoord;
v_interpol = interpol;
} """
fragment = """
#include "misc/spatial-filters.frag"
uniform sampler2D u_data;
uniform vec2 u_shape;
varying vec2 v_texcoord;
varying float v_interpol;
void main()
{
if (v_interpol < 0.5)
// gl_FragColor = Nearest(u_data, u_shape, v_texcoord);
gl_FragColor = texture2D(u_data, v_texcoord);
else if (v_interpol < 1.5)
gl_FragColor = Bilinear(u_data, u_shape, v_texcoord);
else if (v_interpol < 2.5)
gl_FragColor = Hanning(u_data, u_shape, v_texcoord);
else if (v_interpol < 3.5)
gl_FragColor = Hamming(u_data, u_shape, v_texcoord);
else if (v_interpol < 4.5)
gl_FragColor = Hermite(u_data, u_shape, v_texcoord);
else if (v_interpol < 5.5)
gl_FragColor = Kaiser(u_data, u_shape, v_texcoord);
else if (v_interpol < 6.5)
gl_FragColor = Quadric(u_data, u_shape, v_texcoord);
else if (v_interpol < 7.5)
gl_FragColor = Bicubic(u_data, u_shape, v_texcoord);
else if (v_interpol < 8.5)
gl_FragColor = CatRom(u_data, u_shape, v_texcoord);
else if (v_interpol < 9.5)
gl_FragColor = Mitchell(u_data, u_shape, v_texcoord);
else if (v_interpol < 10.5)
gl_FragColor = Spline16(u_data, u_shape, v_texcoord);
else if (v_interpol < 11.5)
gl_FragColor = Spline36(u_data, u_shape, v_texcoord);
else if (v_interpol < 12.5)
gl_FragColor = Gaussian(u_data, u_shape, v_texcoord);
else if (v_interpol < 13.5)
gl_FragColor = Bessel(u_data, u_shape, v_texcoord);
else if (v_interpol < 14.5)
gl_FragColor = Sinc(u_data, u_shape, v_texcoord);
else if (v_interpol < 15.5)
gl_FragColor = Lanczos(u_data, u_shape, v_texcoord);
else
gl_FragColor = Blackman(u_data, u_shape, v_texcoord);
} """
window = app.Window(width=4*512, height=2*512)
@window.event
def on_draw(dt):
window.clear()
program.draw(gl.GL_TRIANGLES, indices)
@window.event
def on_mouse_motion(x, y, dx, dy):
global zoom
dx, dy = 0.05*zoom, 0.05*zoom
x = min(max(x/1024.0, dx), 1.0-dx)
y = min(max(y/1024.0, dy), 1.0-dy)
    vertices[1:]['texcoord'] = (x-dx,y-dy), (x-dx,y+dy), (x+dx,y-dy), (x+dx,y+dy)
@window.event
def on_mouse_scroll(x, y, dx, dy):
global zoom
zoom = np.minimum(np.maximum(zoom*(1+dy/100.0), 0.001), 10.00)
on_mouse_motion(x,y,0,0)
zoom = 0.25
program = gloo.Program(vertex, fragment)
vertices = np.zeros((16+1,4),
[("position", np.float32, 2),
("texcoord", np.float32, 2),
("interpol", np.float32, 1)]).view(gloo.VertexBuffer)
vertices["position"][0] = (-1,+1), (-1,-1), (0,+1), (0,-1)
dx, dy = 1/4.0, 1/2.0
for j in range(4):
for i in range(4):
index = 1+j*4+i
x, y = i/4.0, -1 + j/2.0
vertices["position"][index] = (x,y+dy), (x,y), (x+dx,y+dy), (x+dx,y)
vertices['texcoord'] = ( 0, 0), ( 0,+1), (+1, 0), (+1,+1)
vertices['interpol'] = np.arange(17).reshape(17,1)
program.bind(vertices)
indices = np.zeros((17,6),np.uint32).view(gloo.IndexBuffer)
indices[:] = [0,1,2,1,2,3]
indices += 4*np.arange(17).reshape(17,1)
lena = data.get("lena.png")
program['u_data'] = lena
program['u_shape'] = lena.shape[1], lena.shape[0]
program['u_kernel'] = data.get("spatial-filters.npy")
program['u_data'].interpolation = gl.GL_NEAREST
program['u_data'].wrapping = gl.GL_CLAMP
x,y = 512,512
dx, dy = 0.05, 0.05
x = min(max(x/1024.0, dx), 1.0-dx)
y = min(max(y/1024.0, dy), 1.0-dy)
vertices['texcoord'][1:] = (x-dx,y-dy), (x-dx,y+dy), (x+dx,y-dy), (x+dx,y+dy)
app.run()
|
duyuan11/glumpy
|
examples/interpolations.py
|
Python
|
bsd-3-clause
| 4,303 | 0.008599 |
import os
from sqlalchemy import and_
from photomanager.lib.pmconst import TODO_INX_NAME
from photomanager.utils.imageutils import ImageInfo
from photomanager.db.helper import exif_to_model
from photomanager.db.models import ImageMeta
from photomanager.lib.helper import get_file_md5
from photomanager.db.config import Config
class ImageDBHandler:
def __init__(self, folder, session, skip_existed):
"""
        :param folder: root folder containing the image files
        :param session: db session
        :param skip_existed: if True, skip files that are already indexed in the db
"""
self.session = session
self.config = Config(self.session)
self.folder = folder
self.skip_existed = skip_existed
self._on_index_image = None
@property
def on_index_image(self):
return self._on_index_image
@on_index_image.setter
def on_index_image(self, func_on_index_image):
assert callable(func_on_index_image)
self._on_index_image = func_on_index_image
def do_index(self, filenames):
cnt = 0
for inx, filename in enumerate(filenames):
filename = filename.strip()
self.index_image(filename)
cnt += 1
if self.on_index_image:
self.on_index_image(inx)
if inx % 100 == 0:
self.session.commit()
self.session.commit()
return cnt
def index_image(self, filename):
folder = os.path.dirname(filename)
basename = os.path.basename(filename)
image_meta_existed = self.session.query(ImageMeta).filter(
and_(ImageMeta.filename == basename, ImageMeta.folder == folder)).first()
full_file_name = self.folder + '/' + filename
if image_meta_existed and (self.skip_existed or image_meta_existed.md5 == get_file_md5(full_file_name)):
return None
image_info = ImageInfo(full_file_name)
image_meta_new = exif_to_model(image_info)
image_meta_new.filename = basename
image_meta_new.folder = folder
if image_meta_existed:
image_meta_new.id = image_meta_existed.id
image_meta_new.uuid = image_meta_existed.uuid
self.session.merge(image_meta_new)
return image_meta_new
@property
def todo_index(self):
value = self.config.get_value(TODO_INX_NAME)
if value:
return int(value)
else:
return -1
@todo_index.setter
def todo_index(self, value):
assert isinstance(value, int)
self.config.set_value(TODO_INX_NAME, value)
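# --- Hypothetical usage sketch (not part of the upstream module) ---
# The session construction and file list are assumptions; any SQLAlchemy
# session bound to the photomanager schema should work. Filenames are
# relative to the handler's root folder, and a commit is issued every
# 100 files as implemented in do_index().
#
#   handler = ImageDBHandler('/photos', session, skip_existed=True)
#   handler.on_index_image = lambda inx: sys.stdout.write('.')
#   count = handler.do_index(['2020/trip/img_001.jpg',
#                             '2020/trip/img_002.jpg'])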
|
wrenchzc/photomanager
|
photomanager/db/imagehandler.py
|
Python
|
mit
| 2,547 | 0.000785 |
from mimetypes import guess_type
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.utils.text import Truncator
from django.utils.translation import ugettext as _
from storybase_asset.models import FEATURED_ASSET_THUMBNAIL_WIDTH, FEATURED_ASSET_THUMBNAIL_HEIGHT
from storybase_story.models import Story
from storybase_taxonomy.models import Category
class StoriesFeed(Feed):
"""
Generates a feed of the 25 most recently published stories
Allows basic filtering by topic slug by providing either a
``topics=SLUG`` or ``topics-exclude=SLUG`` querystring parameter to
the GET request.
"""
title = "%s %s" % (settings.STORYBASE_SITE_NAME, _("Stories"))
description = _("Recent stories from ") + settings.STORYBASE_SITE_NAME
# Map of query string parameters to Queryset filters
QUERY_MAP = {
'topics': 'topics__categorytranslation__slug',
}
def link(self):
return reverse('explore_stories')
def get_object(self, request, *args, **kwargs):
# HACK: Dummy get_object implementation that doesn't actually get an
        # object, but has the side effect of storing the request object as
# an attribute of the Feed object
self.request = request
return super(StoriesFeed, self).get_object(request, *args, **kwargs)
def get_filter_kwargs(self):
"""
Get queryset filter/exclude arguments from the request's GET parameters
Returns a tuple of dictionaries, the first providing arguments suitable
for a call to Queryset.filter() and the second providing arguments
        for a call to Queryset.exclude()
"""
filter_kwargs = {}
exclude_kwargs = {}
for param, lookup in self.QUERY_MAP.items():
exclude_param = '%s-exclude' % param
if param in self.request.GET:
filter_kwargs[lookup] = self.request.GET[param]
if exclude_param in self.request.GET:
exclude_kwargs[lookup] = self.request.GET[exclude_param]
return filter_kwargs, exclude_kwargs
def items(self):
# Only show non-connected, published stories in the feed
queryset = Story.objects.exclude(source__relation_type='connected').published()
filter_kwargs, exclude_kwargs = self.get_filter_kwargs()
if filter_kwargs:
queryset = queryset.filter(**filter_kwargs)
if exclude_kwargs:
queryset = queryset.exclude(**exclude_kwargs)
return queryset.order_by('-published')[:25]
def item_title(self, item):
return item.title
def item_description(self, item):
truncator = Truncator(item.summary)
return truncator.words(75, html=True)
def item_author_name(self, item):
return item.contributor_name
def item_pubdate(self, item):
return item.published
def item_updateddate(self, item):
return item.last_edited
def item_categories(self, item):
category_objs = list(item.projects.all()) + list(item.organizations.all()) + list(item.tags.all()) + list(item.topics.all())
return [obj.name for obj in category_objs]
def item_copyright(self, item):
return item.license_name()
def item_enclosure_url(self, item):
return item.featured_asset_thumbnail_url()
def item_enclosure_length(self, item):
asset = item.get_featured_asset()
thumbnail_options = {
'size': (FEATURED_ASSET_THUMBNAIL_WIDTH,FEATURED_ASSET_THUMBNAIL_HEIGHT),
}
try:
return asset.get_thumbnail(thumbnail_options).size
except AttributeError:
return 0
def item_enclosure_mime_type(self, item):
url = item.featured_asset_thumbnail_url()
(mtype, encoding) = guess_type(url)
return mtype
class TopicStoriesFeed(StoriesFeed):
"""
Generates a feed of the 25 most recently published stories in a particular
topic
The topic is passed to the feed via a ``slug`` keyword argument in the URL
configuration for the feed.
"""
def get_object(self, request, slug):
return get_object_or_404(Category, categorytranslation__slug=slug)
def title(self, obj):
return "%s %s %s" % (settings.STORYBASE_SITE_NAME, obj.name, _("Stories"))
def description(self, obj):
return _("Recent ") + obj.name + _(" stories from ") + settings.STORYBASE_SITE_NAME
def link(self, obj):
return "%s?topics=%s" % (reverse('explore_stories'), obj.pk)
def items(self, obj):
return Story.objects.exclude(source__relation_type='connected').published().filter(topics=obj).order_by('-published')[:25]
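# --- Hypothetical usage sketch (not part of the upstream module) ---
# The URL paths and names are assumptions; the querystring keys come from
# StoriesFeed.QUERY_MAP, as described in the class docstrings.
#
#   urlpatterns = patterns('',
#       url(r'^feeds/stories/$', StoriesFeed(), name='stories_feed'),
#       url(r'^feeds/topics/(?P<slug>[\w-]+)/$', TopicStoriesFeed(),
#           name='topic_stories_feed'),
#   )
#
#   GET /feeds/stories/?topics=education       -> only 'education' stories
#   GET /feeds/stories/?topics-exclude=sports  -> everything except 'sports'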
|
denverfoundation/storybase
|
apps/storybase_story/feeds.py
|
Python
|
mit
| 4,815 | 0.003323 |
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
from collections import defaultdict
from structlog import get_logger
from ..utils import epoch
logger = get_logger()
class TimeSeries(list):
def __init__(self, name, start, end, step, values, consolidate='average'):
list.__init__(self, values)
self.name = name
self.start = start
self.end = end
self.step = step
self.consolidationFunc = consolidate
self.valuesPerPoint = 1
self.options = {}
self.pathExpression = name
def __eq__(self, other):
if isinstance(other, TimeSeries):
color_eq = True
if hasattr(self, 'color'):
if hasattr(other, 'color'):
color_eq = (self.color == other.color)
else:
color_eq = False
elif hasattr(other, 'color'):
color_eq = False
return ((self.name, self.start, self.step, self.consolidationFunc,
self.valuesPerPoint, self.options) ==
(other.name, other.start, other.step,
other.consolidationFunc, other.valuesPerPoint,
other.options)) and list.__eq__(self, other) and color_eq
return False
def __iter__(self):
if self.valuesPerPoint > 1:
return self.__consolidatingGenerator(list.__iter__(self))
else:
return list.__iter__(self)
def consolidate(self, valuesPerPoint):
self.valuesPerPoint = int(valuesPerPoint)
def __consolidatingGenerator(self, gen):
buf = []
for x in gen:
buf.append(x)
if len(buf) == self.valuesPerPoint:
while None in buf:
buf.remove(None)
if buf:
yield self.__consolidate(buf)
buf = []
else:
yield None
while None in buf:
buf.remove(None)
if buf:
yield self.__consolidate(buf)
else:
yield None
return
def __consolidate(self, values):
usable = [v for v in values if v is not None]
if not usable:
return None
if self.consolidationFunc == 'sum':
return sum(usable)
if self.consolidationFunc == 'average':
return float(sum(usable)) / len(usable)
if self.consolidationFunc == 'max':
return max(usable)
if self.consolidationFunc == 'min':
return min(usable)
raise Exception(
"Invalid consolidation function: '%s'" % self.consolidationFunc)
def __repr__(self):
return 'TimeSeries(name=%s, start=%s, end=%s, step=%s)' % (
self.name, self.start, self.end, self.step)
class DataStore(object):
"""
Simple object to store results of multi fetches.
Also aids in looking up data by pathExpressions.
"""
def __init__(self):
self.paths = defaultdict(set)
self.data = defaultdict(list)
def get_paths(self, path_expr):
"""
Returns all paths found for path_expr
"""
return sorted(self.paths[path_expr])
def add_data(self, path, time_info, data, exprs):
"""
Stores data before it can be put into a time series
"""
        # Don't add if empty
if not nonempty(data):
for d in self.data[path]:
if nonempty(d['values']):
return
# Add data to path
for expr in exprs:
self.paths[expr].add(path)
self.data[path].append({
'time_info': time_info,
'values': data
})
def get_series_list(self, path_expr):
series_list = []
for path in self.get_paths(path_expr):
for data in self.data.get(path):
start, end, step = data['time_info']
series = TimeSeries(path, start, end, step, data['values'])
series.pathExpression = path_expr
series_list.append(series)
return series_list
def fetchData(requestContext, pathExprs):
from ..app import app
startTime = int(epoch(requestContext['startTime']))
endTime = int(epoch(requestContext['endTime']))
if 'now' in requestContext:
now = int(epoch(requestContext['now']))
else:
now = None
# Convert to list if given single path
if not isinstance(pathExprs, list):
pathExprs = [pathExprs]
data_store = DataStore()
multi_nodes = defaultdict(list)
single_nodes = []
path_to_exprs = defaultdict(list)
# Group nodes that support multiple fetches
for pathExpr in pathExprs:
for node in app.store.find(pathExpr, startTime, endTime):
if not node.is_leaf:
continue
if node.path not in path_to_exprs:
if hasattr(node, '__fetch_multi__'):
multi_nodes[node.__fetch_multi__].append(node)
else:
single_nodes.append(node)
path_to_exprs[node.path].append(pathExpr)
# Multi fetches
for finder in app.store.finders:
if not hasattr(finder, '__fetch_multi__'):
continue
nodes = multi_nodes[finder.__fetch_multi__]
if not nodes:
continue
try:
time_info, series = finder.fetch_multi(nodes, startTime, endTime,
now, requestContext)
except TypeError:
time_info, series = finder.fetch_multi(nodes, startTime, endTime)
for path, values in series.items():
data_store.add_data(path, time_info, values,
path_to_exprs[path])
# Single fetches
fetches = [
(node.path, node.fetch(startTime, endTime, now, requestContext))
for node in single_nodes
]
for path, results in fetches:
if not results:
logger.info("no results", path=path, start=startTime,
end=endTime)
continue
try:
time_info, values = results
except ValueError as e:
raise Exception("could not parse timeInfo/values from metric "
"'%s': %s" % (path, e))
data_store.add_data(path, time_info, values, path_to_exprs[path])
return data_store
def nonempty(series):
for value in series:
if value is not None:
return True
return False
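# --- Hypothetical usage sketch (not part of the upstream module; the
# relative imports above mean this file only runs inside the package) ---
# Consolidation averages each group of valuesPerPoint raw values, dropping
# None entries before aggregating, as implemented in __consolidate():
#
#   ts = TimeSeries('demo', 0, 60, 10, [1, 2, 3, 4, None, 6])
#   ts.consolidate(3)    # 3 raw values per consolidated point
#   list(ts)             # -> [2.0, 5.0]  ((1+2+3)/3 and (4+6)/2)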
|
brutasse/graphite-api
|
graphite_api/render/datalib.py
|
Python
|
apache-2.0
| 7,097 | 0 |
# -*- coding: utf-8 -*-
from chat.models import Message
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth.models import User
from django.views.generic.list import ListView
# Enable the admin:
admin.autodiscover()
urlpatterns = patterns('chat.views',
    # We are going to rewrite the home page URL
url(r'^check_messages_ajax', 'check_messages_ajax', name='check_messages_ajax'),
url(r'^connexion/$', 'connexion', name='connexion'),
url(r'^deconnexion/$', 'deconnexion', name='deconnexion'),
url(r'^$', 'connexion', name='connexion'),
url(r'^add_message', 'add_message', name='add_message'),
url(r'^$', ListView.as_view(model=Message, context_object_name="derniers_messages",
template_name="message_list.html")),
)
|
rewiko/chat_django
|
chat/urls.py
|
Python
|
gpl-2.0
| 881 | 0.013652 |
#!/usr/bin/env python
#
# Raman off-resonant activity calculator
# using VASP as a back-end.
#
# Contributors: Alexandr Fonari (Georgia Tech)
# Shannon Stauffer (UT Austin)
#
# MIT license, 2013
#
def parse_poscar_header(inp_fh):
import sys
from math import sqrt
#
inp_fh.seek(0) # just in case
poscar_header = ""
vol = 0.0
b = []
atom_numbers = []
#
inp_fh.readline() # skip title
scale = float(inp_fh.readline())
for i in range(3): b.append( [float(s) for s in inp_fh.readline().split()] )
#
if scale > 0.0:
        b = [[ b[i][j]*scale for j in range(3)] for i in range(3) ]
scale = 1.0
#
vol = b[0][0]*b[1][1]*b[2][2] + b[1][0]*b[2][1]*b[0][2] + b[2][0]*b[0][1]*b[1][2] - \
b[0][2]*b[1][1]*b[2][0] - b[2][1]*b[1][2]*b[0][0] - b[2][2]*b[0][1]*b[1][0]
else:
print "[parse_poscar]: ERROR negative scale not implemented."
vol = scale
sys.exit(1)
#
atom_labels = inp_fh.readline() # yes, it is hardcoded for VASP5
atom_numbers = [int(s) for s in inp_fh.readline().split()]
nat = sum(atom_numbers)
#
poscar_header += "%15.12f\n" % scale
poscar_header += "%15.12f %15.12f %15.12f\n" % (b[0][0], b[0][1], b[0][2])
poscar_header += "%15.12f %15.12f %15.12f\n" % (b[1][0], b[1][1], b[1][2])
poscar_header += "%15.12f %15.12f %15.12f\n" % (b[2][0], b[2][1], b[2][2])
poscar_header += atom_labels
poscar_header += " ".join(str(x) for x in atom_numbers)+"\n"
#
return nat, vol, poscar_header
#
def parse_env_params(params):
import sys
#
tmp = params.strip().split('_')
if len(tmp) != 4:
print "[parse_env_params]: ERROR there should be exactly four parameters"
sys.exit(1)
#
[first, last, nderiv, step_size] = [int(tmp[0]), int(tmp[1]), int(tmp[2]), float(tmp[3])]
#
return first, last, nderiv, step_size
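# Example: VASP_RAMAN_PARAMS='01_04_2_0.01' -> (1, 4, 2, 0.01), i.e. modes
# 1..4, three-point central differences (nderiv=2), and a displacement
# step of 0.01 (assumed to be in the POSCAR's Cartesian units, Angstrom).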
#
def get_modes_from_OUTCAR(outcar_fh, nat):
import sys
import re
from math import sqrt
eigvals = [ 0.0 for i in range(nat*3) ]
eigvecs = [ 0.0 for i in range(nat*3) ]
norms = [ 0.0 for i in range(nat*3) ]
pos = [ 0.0 for i in range(nat) ]
#
outcar_fh.seek(0) # just in case
while True:
line = outcar_fh.readline()
if not line:
break
#
if "Eigenvectors after division by SQRT(mass)" in line:
outcar_fh.readline() # empty line
outcar_fh.readline() # Eigenvectors and eigenvalues of the dynamical matrix
outcar_fh.readline() # ----------------------------------------------------
outcar_fh.readline() # empty line
#
for i in range(nat*3): # all frequencies should be supplied, regardless of those requested to calculate
outcar_fh.readline() # empty line
p = re.search(r'^\s*(\d+).+?([\.\d]+) cm-1', outcar_fh.readline())
eigvals[i] = float(p.group(2))
#
outcar_fh.readline() # X Y Z dx dy dz
eigvec = []
#
for j in range(nat):
tmp = outcar_fh.readline().split()
if i == 0: pos[j] = [ float(tmp[x]) for x in range(3) ] # get atomic positions only once
#
eigvec.append([ float(tmp[x]) for x in range(3,6) ])
#
eigvecs[i] = eigvec
norms[i] = sqrt( sum( [abs(x)**2 for sublist in eigvec for x in sublist] ) )
#
return pos, eigvals, eigvecs, norms
#
print "[get_modes_from_OUTCAR]: ERROR Couldn't find 'Eigenvectors after division by SQRT(mass)' in OUTCAR. Use 'NWRITE=3' in INCAR. Exiting..."
sys.exit(1)
#
def get_epsilon_from_OUTCAR(outcar_fh):
import re
import sys
epsilon = []
#
outcar_fh.seek(0) # just in case
while True:
line = outcar_fh.readline()
if not line:
break
#
if "MACROSCOPIC STATIC DIELECTRIC TENSOR" in line:
outcar_fh.readline()
epsilon.append([float(x) for x in outcar_fh.readline().split()])
epsilon.append([float(x) for x in outcar_fh.readline().split()])
epsilon.append([float(x) for x in outcar_fh.readline().split()])
return epsilon
#
    raise RuntimeError("[get_epsilon_from_OUTCAR]: ERROR Couldn't find dielectric tensor in OUTCAR")
#
if __name__ == '__main__':
import sys
from math import pi
from shutil import move
import os
import datetime
import time
#import argparse
import optparse
#
print ""
print " Raman off-resonant activity calculator,"
print " using VASP as a back-end."
print ""
print " Contributors: Alexandr Fonari (Georgia Tech)"
print " Shannon Stauffer (UT Austin)"
print " MIT License, 2013"
print " URL: http://raman-sc.github.io"
print " Started at: "+datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
print ""
#
description = "Before run, set environment variables:\n"
description += " VASP_RAMAN_RUN='mpirun vasp'\n"
description += " VASP_RAMAN_PARAMS='[first-mode]_[last-mode]_[nderiv]_[step-size]'\n\n"
description += "bash one-liner is:\n"
description += "VASP_RAMAN_RUN='mpirun vasp' VASP_RAMAN_PARAMS='1_2_2_0.01' python vasp_raman.py"
#
parser = optparse.OptionParser(description=description)
parser.add_option('-g', '--gen', help='Generate POSCAR only', action='store_true')
parser.add_option('-u', '--use_poscar', help='Use provided POSCAR in the folder, USE WITH CAUTION!!', action='store_true')
(options, args) = parser.parse_args()
#args = vars(parser.parse_args())
args = vars(options)
#
VASP_RAMAN_RUN = os.environ.get('VASP_RAMAN_RUN')
if VASP_RAMAN_RUN == None:
print "[__main__]: ERROR Set environment variable 'VASP_RAMAN_RUN'"
print ""
parser.print_help()
sys.exit(1)
print "[__main__]: VASP_RAMAN_RUN='"+VASP_RAMAN_RUN+"'"
#
VASP_RAMAN_PARAMS = os.environ.get('VASP_RAMAN_PARAMS')
if VASP_RAMAN_PARAMS == None:
print "[__main__]: ERROR Set environment variable 'VASP_RAMAN_PARAMS'"
print ""
parser.print_help()
sys.exit(1)
print "[__main__]: VASP_RAMAN_PARAMS='"+VASP_RAMAN_PARAMS+"'"
#
first, last, nderiv, step_size = parse_env_params(VASP_RAMAN_PARAMS)
assert first >= 1, '[__main__]: First mode should be equal or larger than 1'
assert last >= first, '[__main__]: Last mode should be equal or larger than first mode'
if args['gen']: assert last == first, "[__main__]: '-gen' mode -> only generation for the one mode makes sense"
assert nderiv == 2, '[__main__]: At this time, nderiv = 2 is the only supported'
disps = [-1, 1] # hardcoded for
coeffs = [-0.5, 0.5] # three point stencil (nderiv=2)
#
try:
poscar_fh = open('POSCAR.phon', 'r')
except IOError:
print "[__main__]: ERROR Couldn't open input file POSCAR.phon, exiting...\n"
sys.exit(1)
#
nat, vol, poscar_header = parse_poscar_header(poscar_fh)
poscar_fh.close()
#
try:
outcar_fh = open('OUTCAR.phon', 'r')
except IOError:
print "[__main__]: ERROR Couldn't open OUTCAR.phon, exiting...\n"
sys.exit(1)
#
pos, eigvals, eigvecs, norms = get_modes_from_OUTCAR(outcar_fh, nat)
outcar_fh.close()
#
output_fh = open('vasp_raman.dat', 'w')
output_fh.write("# mode freq(cm-1) alpha beta2 activity\n")
for i in range(first-1, last):
eigval = eigvals[i]
eigvec = eigvecs[i]
norm = norms[i]
#
print ""
print "[__main__]: Mode #%i: frequency %10.7f cm-1; norm: %10.7f" % ( i+1, eigval, norm )
#
ra = [[0.0 for x in range(3)] for y in range(3)]
for j in range(len(disps)):
disp_filename = 'OUTCAR.%04d.%+d.out' % (i+1, disps[j])
#
try:
outcar_fh = open(disp_filename, 'r')
print "[__main__]: File "+disp_filename+" exists, parsing..."
except IOError:
if args['use_poscar'] != True:
print "[__main__]: File "+disp_filename+" not found, preparing displaced POSCAR"
poscar_fh = open('POSCAR', 'w')
poscar_fh.write("%s %4.1e \n" % (disp_filename, step_size))
poscar_fh.write(poscar_header)
poscar_fh.write("Cartesian\n")
#
for k in range(nat):
pos_disp = [ pos[k][l] + eigvec[k][l]*step_size*disps[j]/norm for l in range(3)]
poscar_fh.write( '%15.10f %15.10f %15.10f\n' % (pos_disp[0], pos_disp[1], pos_disp[2]) )
#print '%10.6f %10.6f %10.6f %10.6f %10.6f %10.6f' % (pos[k][0], pos[k][1], pos[k][2], dis[k][0], dis[k][1], dis[k][2])
poscar_fh.close()
else:
print "[__main__]: Using provided POSCAR"
#
if args['gen']: # only generate POSCARs
poscar_fn = 'POSCAR.%+d.out' % disps[j]
move('POSCAR', poscar_fn)
print "[__main__]: '-gen' mode -> "+poscar_fn+" with displaced atoms have been generated"
#
if j+1 == len(disps): # last iteration for the current displacements list
print "[__main__]: '-gen' mode -> POSCAR files with displaced atoms have been generated, exiting now"
sys.exit(0)
else: # run VASP here
print "[__main__]: Running VASP..."
os.system(VASP_RAMAN_RUN)
try:
move('OUTCAR', disp_filename)
except IOError:
print "[__main__]: ERROR Couldn't find OUTCAR file, exiting..."
sys.exit(1)
#
outcar_fh = open(disp_filename, 'r')
#
try:
eps = get_epsilon_from_OUTCAR(outcar_fh)
outcar_fh.close()
except Exception, err:
print err
print "[__main__]: Moving "+disp_filename+" back to 'OUTCAR' and exiting..."
move(disp_filename, 'OUTCAR')
sys.exit(1)
#
for m in range(3):
for n in range(3):
ra[m][n] += eps[m][n] * coeffs[j]/step_size * norm * vol/(4.0*pi)
#units: A^2/amu^1/2 = dimless * 1/A * 1/amu^1/2 * A^3
#
alpha = (ra[0][0] + ra[1][1] + ra[2][2])/3.0
beta2 = ( (ra[0][0] - ra[1][1])**2 + (ra[0][0] - ra[2][2])**2 + (ra[1][1] - ra[2][2])**2 + 6.0 * (ra[0][1]**2 + ra[0][2]**2 + ra[1][2]**2) )/2.0
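        # Standard Raman combination: alpha is the mean (isotropic) part of
        # the polarizability derivative, beta2 its anisotropy, and the
        # activity printed below is 45*alpha**2 + 7*beta2.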
print ""
print "! %4i freq: %10.5f alpha: %10.7f beta2: %10.7f activity: %10.7f " % (i+1, eigval, alpha, beta2, 45.0*alpha**2 + 7.0*beta2)
output_fh.write("%i %10.5f %10.7f %10.7f %10.7f\n" % (i+1, eigval, alpha, beta2, 45.0*alpha**2 + 7.0*beta2))
output_fh.flush()
#
output_fh.close()
|
alexandr-fonari/raman-sc
|
VASP/vasp_raman.py
|
Python
|
mit
| 11,443 | 0.008914 |
import matplotlib as mpl
mpl.rcParams['font.size'] = 14
from morphforge.stdimports import *
from morphforgecontrib.stdimports import *
eqnset_txt_na = """
define_component hh_na {
i = g * (v-erev) * m**3*h
m_inf = m_alpha_rate / (m_alpha_rate + m_beta_rate)
m_tau = 1.0 / (m_alpha_rate + m_beta_rate)
m' = (m_inf-m) / m_tau
h_inf = h_alpha_rate / (h_alpha_rate + h_beta_rate)
h_tau = 1.0 / (h_alpha_rate + h_beta_rate)
h' = (h_inf-h) / h_tau
StdFormAB(V, a1, a2, a3, a4, a5) = (a1+a2*V)/(a3+std.math.exp((V+a4)/a5))
m_alpha_rate = StdFormAB(V=v, a1=m_a1, a2=m_a2, a3=m_a3, a4=m_a4, a5=m_a5)
m_beta_rate = StdFormAB(V=v, a1=m_b1, a2=m_b2, a3=m_b3, a4=m_b4, a5=m_b5)
h_alpha_rate = StdFormAB(V=v, a1=h_a1, a2=h_a2, a3=h_a3, a4=h_a4, a5=h_a5)
h_beta_rate = StdFormAB(V=v, a1=h_b1, a2=h_b2, a3=h_b3, a4=h_b4, a5=h_b5)
m_a1={-4.00 ms-1}; m_a2={-0.10 mV-1 ms-1}; m_a3={-1.00}; m_a4={40.00 mV}; m_a5={-10.00 mV};
m_b1={ 4.00 ms-1}; m_b2={ 0.00 mV-1 ms-1}; m_b3={ 0.00}; m_b4={65.00 mV}; m_b5={ 18.00 mV};
h_a1={ 0.07 ms-1}; h_a2={ 0.00 mV-1 ms-1}; h_a3={ 0.00}; h_a4={65.00 mV}; h_a5={ 20.00 mV};
h_b1={ 1.00 ms-1}; h_b2={ 0.00 mV-1 ms-1}; h_b3={ 1.00}; h_b4={35.00 mV}; h_b5={-10.00 mV};
erev = 50.0mV;
<=> PARAMETER g:(S/m2)
<=> OUTPUT i:(A/m2) METADATA {"mf":{"role":"TRANSMEMBRANECURRENT"} }
<=> INPUT v: V METADATA {"mf":{"role":"MEMBRANEVOLTAGE"} }
} """
eqnset_txt_k = """
define_component hh_k {
i = g * (v-erev) * n*n*n*n
n_inf = n_alpha_rate / (n_alpha_rate + n_beta_rate)
n_tau = 1.0 / (n_alpha_rate + n_beta_rate)
n' = (n_inf-n) / n_tau
StdFormAB(V, a1, a2, a3, a4, a5) = (a1 + a2*V)/(a3+std.math.exp((V+a4)/a5))
n_alpha_rate = StdFormAB(V=v, a1=n_a1, a2=n_a2, a3=n_a3, a4=n_a4, a5=n_a5)
n_beta_rate = StdFormAB(V=v, a1=n_b1, a2=n_b2, a3=n_b3, a4=n_b4, a5=n_b5)
n_a1={-0.55 ms-1}; n_a2={-0.01 mV-1 ms-1}; n_a3={-1.00}; n_a4={55.00 mV}; n_a5={-10.00 mV}
n_b1={0.125 ms-1}; n_b2={ 0.00 mV-1 ms-1}; n_b3={ 0.00}; n_b4={65.00 mV}; n_b5={ 80.00 mV}
g = {36.0mS/cm2}
erev = {-77.0mV}
<=> OUTPUT i:(A/m2) METADATA {"mf":{"role":"TRANSMEMBRANECURRENT"} }
<=> INPUT v: V METADATA {"mf":{"role":"MEMBRANEVOLTAGE"} }
} """
eqnset_txt_lk = """
define_component hh_lk {
i = {0.3mS/cm2} * (v- {-54.3mV})
<=> OUTPUT i:(A/m2) METADATA {"mf":{"role":"TRANSMEMBRANECURRENT"} }
<=> INPUT v: V METADATA {"mf":{"role":"MEMBRANEVOLTAGE"} }
} """
env = NEURONEnvironment()
sim = env.Simulation()
# Create a cell:
morph_dict = {'root': {'length': 18.8, 'diam': 18.8, 'id':'soma'} }
my_morph = MorphologyTree.fromDictionary(morph_dict)
cell = sim.create_cell(name="Cell1", morphology=my_morph)
#soma = cell.get_location("soma")
# Setup passive channels:
cell.set_passive( PassiveProperty.SpecificCapacitance, qty('1.0:uF/cm2'))
# Setup active channels:
na_chl = env.Channel(NeuroUnitEqnsetMechanism, name="NaChl", eqnset=eqnset_txt_na,
default_parameters={"g":qty("120:mS/cm2")}, )
k_chl = env.Channel(NeuroUnitEqnsetMechanism, name="KChl", eqnset=eqnset_txt_k, )
lk_chl = env.Channel(NeuroUnitEqnsetMechanism, name="LKChl", eqnset=eqnset_txt_lk, )
cell.apply_channel( na_chl)
cell.apply_channel( lk_chl)
cell.apply_channel( k_chl)
# Define what to record:
sim.record(cell, what=StandardTags.Voltage, name="SomaVoltage", cell_location = cell.soma)
sim.record(na_chl, what='m', cell_location=cell.soma, user_tags=[StandardTags.StateVariable])
sim.record(na_chl, what='h', cell_location=cell.soma, user_tags=[StandardTags.StateVariable])
sim.record(k_chl, what='n', cell_location=cell.soma, user_tags=[StandardTags.StateVariable])
# Create the stimulus and record the injected current:
cc = sim.create_currentclamp(name="CC1", amp=qty("100:pA"), dur=qty("100:ms"), delay=qty("100:ms"), cell_location=cell.soma)
sim.record(cc, what=StandardTags.Current)
# run the simulation
results = sim.run()
TagViewer(results, timerange=(50, 250)*units.ms, show=True)
|
mikehulluk/morphforge
|
doc/srcs_generated_examples/python_srcs/poster1.py
|
Python
|
bsd-2-clause
| 4,063 | 0.007138 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compute Wasserstein distances between different subsets of CIFAR.
Note: comparing two fixed sets is a sanity check, not the target use case.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import time
import tensorflow as tf
from dataset import Dataset
from wasserstein import Wasserstein
tf.flags.DEFINE_string('filepattern', '/tmp/cifar10/cifar_train_class_%d.pic',
'Filepattern from which to read the dataset.')
tf.flags.DEFINE_integer('batch_size', 1000, 'Batch size of generator.')
tf.flags.DEFINE_integer('loss_steps', 50, 'Number of optimization steps.')
FLAGS = tf.flags.FLAGS
def print_flush(string):
sys.stdout.write(string)
sys.stdout.flush()
def main(unused_argv):
# tf.logging.set_verbosity(tf.logging.INFO)
# load two copies of the dataset
print('Loading datasets...')
dataset = [Dataset(bs=FLAGS.batch_size, filepattern=FLAGS.filepattern,
label=i) for i in range(10)]
print('Computing Wasserstein distance(s)...')
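  # The nested loops emit a 10x10 matrix: entry (i, j) is the estimated
  # Wasserstein distance between the CIFAR subsets with labels i and j.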
for i in range(10):
for j in range(10):
with tf.Graph().as_default():
# compute Wasserstein distance between sets of labels i and j
wasserstein = Wasserstein(dataset[i], dataset[j])
loss = wasserstein.dist(C=.1, nsteps=FLAGS.loss_steps)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
res = sess.run(loss)
print_flush('%f ' % res)
print_flush('\n')
if __name__ == '__main__':
tf.app.run(main)
|
google/wasserstein-dist
|
compute_all.py
|
Python
|
apache-2.0
| 2,165 | 0.006467 |
# -*- coding: utf-8 -*-
#
# This program is part of GASP, a toolkit for newbie Python Programmers.
# Copyright (C) 2009, the GASP Development Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class MockBackEnd(object):
def __init__(self):
self.screen = None
self.rate = None
def create_screen(self, screen):
self.screen = screen
def set_frame_rate(self, rate):
self.rate = rate
|
tarzenda/gasp
|
tests/mockbackends.py
|
Python
|
gpl-3.0
| 772 | 0 |
import Image
import os
import math
import argparse
# Set the root directory
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
HTML_DIR = os.path.join(BASE_DIR, 'diced_images')
def dice(image_path, out_name, out_ext, outdir, slices):
img = Image.open(image_path) # Load image
imgdir = os.path.join(outdir, out_name)
if not os.path.exists(imgdir):
os.makedirs(imgdir)
imageWidth, imageHeight = img.size # Get image dimensions
# Make sure the integer widths are bigger than the floats to avoid
# making 1px wide slices at the edges
sliceWidth = int(math.ceil(float(imageWidth) / slices))
sliceHeight = int(math.ceil(float(imageHeight) / slices))
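    # Rounding example: a 1005-px-wide image cut into 10 slices gives
    # sliceWidth = ceil(100.5) = 101, so nine columns are 101 px wide and
    # the clamped last column is 1005 - 9*101 = 96 px wide.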
percent = 100.0 / slices
html_file = open(os.path.join(HTML_DIR, out_name + '.html'), 'w+')
html_file.write('''
<style>
.dicedimage {
padding: 0; margin: 0; border-width: 0;
height: 100%%; width: 100%%;
}
.dicedimage-row {
width: %(imageWidth)spx; height: %(sliceHeight)spx;
padding: 0; margin: 0; border-width: 0;
}
.dicedimage img {
display: inline;
padding: 0; margin: 0; border-width: 0;
}
</style>
<div class="dicedimage">
''' % locals())
left = 0 # Set the left-most edge
upper = 0 # Set the top-most edge
while (upper < imageHeight):
html_file.write('<div class="dicedimage-row"><!--\n')
while (left < imageWidth):
# If the bottom and right of the cropping box overruns the image.
if (upper + sliceHeight > imageHeight and \
left + sliceWidth > imageWidth):
bbox = (left, upper, imageWidth, imageHeight)
# If the right of the cropping box overruns the image
elif (left + sliceWidth > imageWidth):
bbox = (left, upper, imageWidth, upper + sliceHeight)
# If the bottom of the cropping box overruns the image
elif (upper + sliceHeight > imageHeight):
bbox = (left, upper, left + sliceWidth, imageHeight)
# If the entire cropping box is inside the image,
# proceed normally.
else:
bbox = (left, upper, left + sliceWidth, upper + sliceHeight)
working_slice = img.crop(bbox) # Crop image based on created bounds
# Save your new cropped image.
dice_filename = '_'.join(['dice', str(upper), str(left)]) + out_ext
dice_path = os.path.join(imgdir, dice_filename)
working_slice.save(dice_path)
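            # NOTE: diced_images_dir is a module-level name assigned in the
            # __main__ block below; dice() assumes it is defined before the
            # function runs.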
html_file.write(
'''
--><img class="dicedimage-piece" src="%s/%s"><!--\n
''' % (
diced_images_dir.split('/', 1)[1],
'/'.join([out_name, dice_filename])
)
)
left += sliceWidth # Increment the horizontal position
html_file.write('--></div>\n')
upper += sliceHeight # Increment the vertical position
left = 0
html_file.write('</div>')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("image_file", help="Path to an image file")
args = parser.parse_args()
image_path = args.image_file
try:
fileName, fileExtension = os.path.splitext(image_path.rsplit('/',1)[1])
except IndexError:
fileName, fileExtension = os.path.splitext(image_path)
diced_images_dir = os.path.join(HTML_DIR, '_'.join([fileName, 'pieces']))
if not os.path.exists(diced_images_dir):
os.makedirs(diced_images_dir)
dice(
image_path,
fileName,
fileExtension,
diced_images_dir,
10
)
print "Successfully diced %s" % image_path
|
skolsuper/imagedicer
|
imagedicer.py
|
Python
|
mit
| 3,913 | 0.004856 |
import os
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestAlias(scaffolds.ResultsTest):
"""..."""
def test_unknown_command(self):
"""Should fail if the command is not recognized."""
r = support.run_command('alias fake')
self.assertTrue(r.failed, 'should have failed')
self.assertEqual(r.errors[0].code, 'UNKNOWN_COMMAND')
def test_list(self):
"""..."""
r = support.run_command('alias list')
self.assertFalse(r.failed, 'should not have failed')
def test_add(self):
"""..."""
p = self.get_temp_path('aliaser')
r = support.run_command('alias add test "{}" --temporary'.format(p))
self.assertFalse(r.failed, 'should not have failed')
def test_remove(self):
"""..."""
directory = self.get_temp_path('aliaser')
path = os.path.join(directory, 'test.text')
with open(path, 'w+') as f:
f.write('This is a test')
support.run_command('alias add test "{}" --temporary'.format(path))
r = support.run_command('alias remove test --temporary')
self.assertFalse(r.failed, 'should not have failed')
def test_empty(self):
"""..."""
r = support.run_command('alias add')
self.assertTrue(r.failed, 'should have failed')
self.assertEqual(r.errors[0].code, 'MISSING_ARG')
def test_autocomplete_command(self):
"""..."""
result = support.autocomplete('alias ad')
self.assertEqual(len(result), 1)
self.assertEqual(result[0], 'add')
def test_autocomplete_alias(self):
"""..."""
result = support.autocomplete('alias add fake-alias-not-real')
self.assertEqual(len(result), 0)
def test_autocomplete_path(self):
"""..."""
path = os.path.dirname(os.path.realpath(__file__))
result = support.autocomplete('alias add test {}'.format(path))
self.assertIsNotNone(result)
|
sernst/cauldron
|
cauldron/test/cli/commands/test_alias.py
|
Python
|
mit
| 2,067 | 0 |
import django_filters
from dal import autocomplete
from .models import Alert
class AlertFilter(django_filters.FilterSet):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
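        # "icontains" makes the reason filter a case-insensitive substring
        # match.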
self.filters["reason"].lookup_expr = "icontains"
self.filters["author"].widget = autocomplete.ModelSelect2(
url="users:autocomplete"
)
class Meta:
model = Alert
fields = ["reason", "author", "status"]
|
watchdogpolska/feder
|
feder/alerts/filters.py
|
Python
|
mit
| 466 | 0 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Repository.study'
db.add_column(u'repos_repository', 'study',
self.gf('django.db.models.fields.related.ForeignKey')(related_name='repositories', null=True, to=orm['studies.Study']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Repository.study'
db.delete_column(u'repos_repository', 'study_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'organizations.organization': {
'Meta': {'ordering': "['name']", 'object_name': 'Organization', '_ormbases': [u'auth.Group']},
'gravatar': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'organization_users'", 'symmetrical': 'False', 'through': u"orm['organizations.OrganizationUser']", 'to': u"orm['auth.User']"})
},
u'organizations.organizationuser': {
'Meta': {'ordering': "['organization', 'user']", 'unique_together': "(('user', 'organization'),)", 'object_name': 'OrganizationUser'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'organization_user'", 'to': u"orm['organizations.Organization']"}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'organization_user'", 'to': u"orm['auth.User']"})
},
u'repos.relationship': {
'Meta': {'ordering': "['name']", 'object_name': 'Relationship'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'repo_child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_relations'", 'to': u"orm['repos.Repository']"}),
'repo_parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_relations'", 'to': u"orm['repos.Repository']"})
},
u'repos.repository': {
'Meta': {'ordering': "['org', 'name']", 'object_name': 'Repository'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mongo_id': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'org': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'repositories'", 'null': 'True', 'to': u"orm['organizations.Organization']"}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'repositories'", 'null': 'True', 'to': u"orm['studies.Study']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'repositories'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'studies.study': {
'Meta': {'ordering': "['name']", 'object_name': 'Study'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'org': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'studies'", 'null': 'True', 'to': u"orm['organizations.Organization']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'studies'", 'null': 'True', 'to': u"orm['auth.User']"})
}
}
complete_apps = ['repos']
|
9929105/KEEP
|
keep_backend/repos/migrations/0002_auto__add_field_repository_study.py
|
Python
|
mit
| 8,118 | 0.007761 |
import os
import sys
import threading
import SimpleHTTPServer
import SocketServer
import shutil
import enaml
from enaml.qt.qt_application import QtApplication
from PyQt4.QtCore import QFileSystemWatcher
from atom.api import Atom, Unicode, observe, Typed, Property, Int
from btsync import BTSync
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
# Directory where all synced sites will be stored. Each site will be synced to
# a directory whose name is the secret.
STORAGE_PATH = os.path.join(
os.path.expanduser('~'),
os.path.join('Documents','Syncnet','synced_secrets')
)
class SyncNet(Atom):
# The text currently entered in the secret field.
address = Unicode()
# The currently loaded secret.
current_secret = Unicode()
# A list of all locally synced secrets.
known_secrets = Property()
# Instance of the BTSync API wrapper.
btsync = Typed(BTSync, ())
# The QUrl object referencing the currently displayed resource. It must be
# replaced wholesale for the UI to react.
url = Unicode()
# Root path where all synced site directories are added.
storage_path = Unicode()
# The filesystem watcher that monitors all currently synced site
# directories.
_watcher = Typed(QFileSystemWatcher)
# This thread runs the simple http server.
_server_thread = Typed(threading.Thread)
# The simple http server's port
http_port = Int()
### Public Interface #####################################################
def init_secret(self, secret):
""" Creates a new directry at `self.storage_path` and adds it to the
BTSync object to be synced with the given `secret`. The secret is
assumed valid.
Parameters
----------
secret : str
BTSync secret string referencing a directory of html files. This
secret is assumed to already exist on the network.
Notes
-----
The newly created folder's name will be the given `secret`.
"""
path = os.path.join(self.storage_path, secret)
if not os.path.exists(path):
os.mkdir(path)
else:
msg = 'init_secret called with existing secret: {}'.format(path)
logger.debug(msg)
self._watcher.addPath(path)
self.btsync.add_folder(path, secret)
logger.debug('Directory added to BTSync: {}'.format(path))
def load_secret(self, secret):
""" Display the HTML files referenced by the given secret in the View.
If the secret is not synced locally, it will be initialized and synced.
Parameters
----------
secret : str
BTSync secret string
Raises
------
RuntimeError if `secret` is invalid
"""
secret = secret.upper()
if not self.is_valid_secret(secret):
msg = 'Attempted to load invalid secret: {}'.format(secret)
raise RuntimeError(msg)
if secret not in self.known_secrets:
self.init_secret(secret)
# Store the currently loaded secret so its directory can be monitored.
self.current_secret = secret
# Ensure the HTTP server is running before the url is set.
if self._server_thread is None:
logger.debug('Creating server thread')
self._server_thread = self._create_server_thread()
url = 'http://localhost:{}/{}'.format(self.http_port, secret)
self.url = '' # FIXME hack to get the webview to reload
self.url = url
logger.debug('URL set to: {}'.format(url))
def is_valid_secret(self, secret):
""" True if the given `secret` is a valid btsync secret string. A
valid secret is a 160 bit base32 encoded string with an 'A' or 'B'
prepended.
"""
if not (secret.startswith('A') or secret.startswith('B')):
return False
if len(secret) != 33:
return False
if not secret.isupper():
return False
# ensure only legal chars as defined by RFC 4648
for char in ('1', '8', '9', '='):
if char in secret:
return False
return True
### Observers ############################################################
@observe('address')
def _address_changed(self, change):
""" Check the text entered into the address field to see if it contains
a valid secret. If so, attempt to load that secret.
"""
address = self.address.upper()
if self.is_valid_secret(address):
self.load_secret(address)
def on_directory_changed(self, dirname):
""" Slot connected to the `QFileSystemWatcher.directoryChanged` Signal.
"""
# If the directory containing the currently loaded secret changes, it
# is reloaded.
_, secret = os.path.split(os.path.normpath(dirname))
if secret == self.current_secret:
self.load_secret(secret)
def on_link_clicked(self, url):
""" Slot connected to the `QWebView.linkClicked` Signal.
"""
self._update_address_bar(url)
if url.scheme() == 'sync':
secret = url.host().upper()
if self.is_valid_secret(secret):
self.load_secret(secret)
else:
msg = 'Attempted to load invalid secret: {}'
logger.debug(msg.format(url.toString()))
else:
self.url = url.toString()
def on_url_changed(self, url):
""" Slot connected to the `QWebView.urlChanged` Signal.
"""
self._update_address_bar(url)
### Default methods ######################################################
def _default__watcher(self):
_watcher = QFileSystemWatcher()
_watcher.directoryChanged.connect(self.on_directory_changed)
return _watcher
def _default_storage_path(self):
storage_path = STORAGE_PATH
if not os.path.exists(storage_path):
os.makedirs(storage_path)
logger.debug('Creating storage path: {}'.format(storage_path))
return storage_path
### Property getters #####################################################
def _get_known_secrets(self):
""" List of all locally synced secrets. Getter for known_secrets.
"""
directories = os.listdir(self.storage_path)
secrets = [x['secret'] for x in self.btsync.get_folders()]
tracked_directories = filter((lambda x:x in secrets), directories)
return tracked_directories
### Private Interface ####################################################
def _create_server_thread(self):
os.chdir(self.storage_path)
handler = SimpleHTTPServer.SimpleHTTPRequestHandler
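        # Binding to port 0 asks the OS for any free port; the port actually
        # chosen is read back from server_address below.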
httpd = SocketServer.TCPServer(('localhost', 0), handler)
_, port = httpd.server_address
self.http_port = port
logger.debug('Serving on port #{}'.format(port))
t = threading.Thread(target=httpd.serve_forever)
t.daemon = True # don't hang on exit
t.start()
return t
def _update_address_bar(self, url):
"""
Parameters
----------
url : QUrl
The currently displayed url
"""
if url.host() == 'localhost':
self.address = url.path()[1:]
elif url.scheme() == 'sync':
self.address = url.host().upper()
else:
self.address = url.toString()
if __name__ == '__main__':
with enaml.imports():
from syncnet_view import SyncNetView
syncnet = SyncNet()
if getattr(sys, 'frozen', False):
HERE = os.path.dirname(sys.executable)
btsync_path = os.path.join(
HERE, 'BitTorrent\ Sync.app/Contents/MacOS/BitTorrent\ Sync')
syncnet.btsync.btsync_path = btsync_path
syncnet.btsync.start()
app = QtApplication()
view = SyncNetView(model=syncnet)
view.show()
app.start()
|
jminardi/syncnet
|
syncnet/main.py
|
Python
|
mit
| 8,094 | 0.001483 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# Copyright (c) 2015 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
Description : manage config file $HOME/.broc.rc
Authors : zhousongsong(doublesongsong@gmail.com)
Date : 2015-09-18 10:28:23
"""
import os
import sys
import ConfigParser
broc_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
sys.path.insert(0, broc_dir)
from dependency import BrocModule_pb2
class BrocConfigError(Exception):
"""
"""
def __init__(self, msg):
"""
Args:
msg : the error msg
"""
self._msg = msg
def __str__(self):
"""
"""
return self._msg
class BrocConfig(object):
"""
this class manages the .broc.rc in $HOME
"""
class __impl(object):
"""Implementation of singleton interface"""
def __init__(self):
"""
"""
self._file = os.path.join(os.environ['HOME'], '.broc.rc')
self._svn_repo_domain = 'https://svn.github.com'
self._git_repo_domain = 'https://github.com'
self._svn_postfix_branch = "BRANCH"
self._svn_postfix_tag = "PD_BL"
def __str__(self):
"""
"""
return "svn repo domain: %s\ngit repo domain: %s\n \
svn postfix branch: %s\nsvn postfix tag: %s"% (self._svn_repo_domain,
self._git_repo_domain,
self._svn_postfix_branch,
self._svn_postfix_tag)
def Id(self):
"""
test method, return singleton id
"""
return id(self)
def load(self):
"""
load broc configurations
Raise:
if load config failed, raise BrocConfigError
"""
try:
# if configuration file does not exists in $HOME, create one
if not os.path.isfile(self._file):
cfgfile = open(self._file, 'w')
conf = ConfigParser.ConfigParser()
conf.add_section('repo')
conf.set('repo', 'svn_repo_domain', self._svn_repo_domain)
conf.set('repo', 'git_repo_domain', self._git_repo_domain)
conf.set('repo', 'svn_postfix_branch', 'BRANCH')
conf.set('repo', 'svn_postfix_tag', 'PD_BL')
conf.write(cfgfile)
cfgfile.close()
else:
cfgfile = open(self._file, 'r')
conf = ConfigParser.ConfigParser()
conf.read(self._file)
self._svn_repo_domain = conf.get('repo', 'svn_repo_domain')
self._git_repo_domain = conf.get('repo', 'git_repo_domain')
self._svn_postfix_branch = conf.get('repo', 'svn_postfix_branch')
self._svn_postfix_tag = conf.get('repo', 'svn_postfix_tag')
except ConfigParser.Error as e:
raise BrocConfigError(str(e))
def RepoDomain(self, repo_type):
"""
return repository domain
Args:
repo_type : BrocMode_pb2.Module.EnumRepo
"""
if repo_type == BrocModule_pb2.Module.SVN:
return self._svn_repo_domain
elif repo_type == BrocModule_pb2.Module.GIT:
return self._git_repo_domain
def SVNPostfixBranch(self):
"""
return postfix of svn branch
"""
return self._svn_postfix_branch
def SVNPostfixTag(self):
"""
return postfix of svn tag
"""
return self._svn_postfix_tag
def Dump(self):
"""
dump broc config
"""
print("-- svn domain : %s" % self._svn_repo_domain)
print("-- git domain : %s" % self._git_repo_domain)
print("-- svn branch posfix : %s" % self._svn_postfix_branch)
print("-- svn tag postfix : %s" % self._svn_postfix_tag)
# class BrocConfig
__instance = None
def __init__(self):
""" Create singleton instance """
# Check whether we already have an instance
if BrocConfig.__instance is None:
# Create and remember instance
BrocConfig.__instance = BrocConfig.__impl()
BrocConfig.__instance.load()
# Store instance reference as the only member in the handle
self.__dict__['_BrocConfig__instance'] = BrocConfig.__instance
def __getattr__(self, attr):
""" Delegate access to implementation """
return getattr(self.__instance, attr)
def __setattr__(self, attr, value):
""" Delegate access to implementation """
return setattr(self.__instance, attr, value)
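
# Illustrative usage sketch: every BrocConfig() handle delegates to the same
# shared __impl object, so Id() returns one value for all handles.
if __name__ == '__main__':
    first = BrocConfig()
    second = BrocConfig()
    assert first.Id() == second.Id()
    first.Dump()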
|
baidu/broc
|
dependency/BrocConfig.py
|
Python
|
apache-2.0
| 5,186 | 0.003471 |
import csv
import collections
import itertools
def evaluateDuplicates(found_dupes, true_dupes):
true_positives = found_dupes.intersection(true_dupes)
false_positives = found_dupes.difference(true_dupes)
uncovered_dupes = true_dupes.difference(found_dupes)
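    # precision = TP / |found| (equivalently 1 - FP / |found|),
    # recall = TP / |true|, as printed below.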
    print('number of found duplicate pairs')
    print(len(found_dupes))
    print('number of true duplicate pairs')
    print(len(true_dupes))
print('precision')
print(1 - len(false_positives) / float(len(found_dupes)))
print('recall')
print(len(true_positives) / float(len(true_dupes)))
def dupePairs(filename, colname):
dupe_d = collections.defaultdict(list)
with open(filename) as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
dupe_d[row[colname]].append(row['person_id'])
    if 'x' in dupe_d:
del dupe_d['x']
dupe_s = set([])
for (unique_id, cluster) in dupe_d.items():
if len(cluster) > 1:
for pair in itertools.combinations(cluster, 2):
dupe_s.add(frozenset(pair))
return dupe_s
dedupe_clusters = 'patstat_output.csv'
manual_clusters = 'patstat_reference.csv'
test_dupes = dupePairs(dedupe_clusters, 'Cluster ID')
true_dupes = dupePairs(manual_clusters, 'leuven_id')
evaluateDuplicates(test_dupes, true_dupes)
|
dedupeio/dedupe-examples
|
patent_example/patent_evaluation.py
|
Python
|
mit
| 1,311 | 0.006102 |
from flex.datastructures import (
ValidationDict,
)
from flex.constants import (
OBJECT,
)
from flex.validation.common import (
generate_object_validator,
)
from .operation import (
operation_validator,
)
from .parameters import (
parameters_validator,
)
path_item_schema = {
'type': OBJECT,
}
non_field_validators = ValidationDict()
non_field_validators.add_property_validator('get', operation_validator)
non_field_validators.add_property_validator('put', operation_validator)
non_field_validators.add_property_validator('post', operation_validator)
non_field_validators.add_property_validator('delete', operation_validator)
non_field_validators.add_property_validator('options', operation_validator)
non_field_validators.add_property_validator('head', operation_validator)
non_field_validators.add_property_validator('patch', operation_validator)
non_field_validators.add_property_validator('parameters', parameters_validator)
path_item_validator = generate_object_validator(
schema=path_item_schema,
non_field_validators=non_field_validators,
)
|
pipermerriam/flex
|
flex/loading/schema/paths/path_item/__init__.py
|
Python
|
mit
| 1,084 | 0 |
class Solution(object):
def isPalindrome(self, x):
"""
:type x: int
:rtype: bool
"""
        s = str(x)
        # A palindrome reads the same forwards and backwards, so compare
        # the string form with its reverse.
        return s == s[::-1]
|
thydeyx/LeetCode-Python
|
Palindrome Number.py
|
Python
|
mit
| 302 | 0.023179 |
# Copyright 2006 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# Authors:
# Daniel Benamy <dbenamy@redhat.com>
import os
import sys
sys.path.append("/usr/share/rhn/up2date_client/")
sys.path.append("/usr/share/rhn")
import rhnreg
import rhnregGui
import up2dateErrors
from rhn_register_firstboot_gui_window import RhnRegisterFirstbootGuiWindow
import gtk
from gtk import glade
import gettext
t = gettext.translation('rhn-client-tools', fallback=True)
_ = t.ugettext
gtk.glade.bindtextdomain("rhn-client-tools")
class RhnProvideCertificateWindow(RhnRegisterFirstbootGuiWindow, rhnregGui.ProvideCertificatePage):
runPriority=107
moduleName = _("Provide Certificate")
windowTitle = moduleName
shortMessage = _("Provide a certificate for this Red Hat Satellite server")
needsparent = 1
needsnetwork = 1
noSidebar = True
def __init__(self):
RhnRegisterFirstbootGuiWindow.__init__(self)
rhnregGui.ProvideCertificatePage.__init__(self)
if rhnreg.registered():
self.skipme = True
def _getVbox(self):
return self.provideCertificatePageVbox()
def apply(self, *args):
"""Returns True to change the page or None to stay on the same page."""
status = self.provideCertificatePageApply()
if status == 0: # cert was installed
return True
elif status == 1: # the user doesn't want to provide a cert right now
# TODO write a message to disk like the other cases? need to decide
# how we want to do error handling in general.
self.parent.setPage("rhn_finish_gui")
return True
else: # an error occurred and the user was notified
assert status == 2
return None
childWindow = RhnProvideCertificateWindow
|
aronparsons/spacewalk
|
client/rhel/rhn-client-tools/src/firstboot-legacy-rhel5/rhn_provide_certificate_gui.py
|
Python
|
gpl-2.0
| 2,411 | 0.004977 |
from tower import ugettext_lazy as _
# Add-on and File statuses.
STATUS_NULL = 0
STATUS_PENDING = 2
STATUS_PUBLIC = 4
STATUS_DISABLED = 5
STATUS_DELETED = 11
STATUS_REJECTED = 12
STATUS_APPROVED = 13
STATUS_BLOCKED = 15
STATUS_UNLISTED = 16
# AMO-only statuses. Kept here only for memory and to not re-use the IDs.
_STATUS_UNREVIEWED = 1
_STATUS_NOMINATED = 3
_STATUS_LISTED = 6 # See bug 616242.
_STATUS_BETA = 7
_STATUS_LITE = 8
_STATUS_LITE_AND_NOMINATED = 9
_STATUS_PURGATORY = 10 # A temporary home; bug 614686
_STATUS_REVIEW_PENDING = 14 # Themes queue, reviewed, needs further action.
STATUS_CHOICES = {
STATUS_NULL: _(u'Incomplete'),
STATUS_PENDING: _(u'Pending approval'),
STATUS_PUBLIC: _(u'Published'),
STATUS_DISABLED: _(u'Banned from Marketplace'),
STATUS_DELETED: _(u'Deleted'),
STATUS_REJECTED: _(u'Rejected'),
    # Approved, but the developer can make it public whenever they want.
    # They need to go to the Marketplace and actually make it public.
STATUS_APPROVED: _(u'Approved but private'),
STATUS_BLOCKED: _(u'Blocked'),
STATUS_UNLISTED: _(u'Unlisted'),
}
# Marketplace file status terms.
MKT_STATUS_FILE_CHOICES = STATUS_CHOICES.copy()
MKT_STATUS_FILE_CHOICES[STATUS_DISABLED] = _(u'Obsolete')
MKT_STATUS_FILE_CHOICES[STATUS_APPROVED] = _(u'Approved')
MKT_STATUS_FILE_CHOICES[STATUS_PUBLIC] = _(u'Published')
# We need to expose nice values that aren't localisable.
STATUS_CHOICES_API = {
STATUS_NULL: 'incomplete',
STATUS_PENDING: 'pending',
STATUS_PUBLIC: 'public',
STATUS_DISABLED: 'disabled', # TODO: Change to 'banned' for API v2.
STATUS_DELETED: 'deleted',
STATUS_REJECTED: 'rejected',
STATUS_APPROVED: 'waiting', # TODO: Change to 'private' for API v2.
STATUS_BLOCKED: 'blocked',
STATUS_UNLISTED: 'unlisted',
}
STATUS_CHOICES_API_LOOKUP = {
'incomplete': STATUS_NULL,
'pending': STATUS_PENDING,
'public': STATUS_PUBLIC,
'disabled': STATUS_DISABLED, # TODO: Change to 'banned' for API v2.
'deleted': STATUS_DELETED,
'rejected': STATUS_REJECTED,
'waiting': STATUS_APPROVED, # TODO: Change to 'private' for API v2.
'blocked': STATUS_BLOCKED,
'unlisted': STATUS_UNLISTED,
}
STATUS_CHOICES_API_v2 = {
STATUS_NULL: 'incomplete',
STATUS_PENDING: 'pending',
STATUS_PUBLIC: 'public',
STATUS_DISABLED: 'banned',
STATUS_DELETED: 'deleted',
STATUS_REJECTED: 'rejected',
STATUS_APPROVED: 'private',
STATUS_BLOCKED: 'blocked',
STATUS_UNLISTED: 'unlisted',
}
STATUS_CHOICES_API_LOOKUP_v2 = {
'incomplete': STATUS_NULL,
'pending': STATUS_PENDING,
'public': STATUS_PUBLIC,
'banned': STATUS_DISABLED,
'deleted': STATUS_DELETED,
'rejected': STATUS_REJECTED,
'private': STATUS_APPROVED,
'blocked': STATUS_BLOCKED,
'unlisted': STATUS_UNLISTED,
}
# Publishing types.
PUBLISH_IMMEDIATE = 0
PUBLISH_HIDDEN = 1
PUBLISH_PRIVATE = 2
REVIEWED_STATUSES = (STATUS_PUBLIC, STATUS_APPROVED, STATUS_UNLISTED)
UNREVIEWED_STATUSES = (STATUS_PENDING,)
VALID_STATUSES = (STATUS_PENDING, STATUS_PUBLIC, STATUS_UNLISTED,
STATUS_APPROVED)
# LISTED_STATUSES are statuses that should return a 200 on the app detail page
# for anonymous users.
LISTED_STATUSES = (STATUS_PUBLIC, STATUS_UNLISTED)
# An add-on in one of these statuses can become premium.
PREMIUM_STATUSES = (STATUS_NULL, STATUS_PENDING)
# Newly submitted apps begin life at this status.
WEBAPPS_UNREVIEWED_STATUS = STATUS_PENDING
# These apps have been approved and are listed; or could be without further
# review.
WEBAPPS_APPROVED_STATUSES = (STATUS_PUBLIC, STATUS_UNLISTED, STATUS_APPROVED)
# An app with this status makes its detail page "invisible".
WEBAPPS_UNLISTED_STATUSES = (STATUS_DISABLED, STATUS_PENDING, STATUS_APPROVED,
STATUS_REJECTED)
# These apps shouldn't be considered anymore in mass-emailing etc.
WEBAPPS_EXCLUDED_STATUSES = (STATUS_DISABLED, STATUS_DELETED, STATUS_REJECTED)
# Add-on author roles.
AUTHOR_ROLE_VIEWER = 1
AUTHOR_ROLE_DEV = 4
AUTHOR_ROLE_OWNER = 5
AUTHOR_ROLE_SUPPORT = 6
AUTHOR_CHOICES = (
(AUTHOR_ROLE_OWNER, _(u'Owner')),
(AUTHOR_ROLE_DEV, _(u'Developer')),
(AUTHOR_ROLE_VIEWER, _(u'Viewer')),
(AUTHOR_ROLE_SUPPORT, _(u'Support')),
)
AUTHOR_CHOICES_NAMES = dict(AUTHOR_CHOICES)
# ADDON_WEBAPP Types
ADDON_WEBAPP_HOSTED = 1
ADDON_WEBAPP_PACKAGED = 2
ADDON_WEBAPP_PRIVILEGED = 3
ADDON_WEBAPP_TYPES = {
ADDON_WEBAPP_HOSTED: 'hosted',
ADDON_WEBAPP_PACKAGED: 'packaged',
ADDON_WEBAPP_PRIVILEGED: 'privileged',
}
ADDON_WEBAPP_TYPES_LOOKUP = dict((v, k) for k, v in ADDON_WEBAPP_TYPES.items())
ADDON_FREE = 0
ADDON_PREMIUM = 1
ADDON_PREMIUM_INAPP = 2
ADDON_FREE_INAPP = 3
# The addon will have payments, but they aren't using our payment system.
ADDON_OTHER_INAPP = 4
ADDON_PREMIUM_TYPES = {
ADDON_FREE: _('Free'),
ADDON_PREMIUM: _('Premium'),
ADDON_PREMIUM_INAPP: _('Premium with in-app payments'),
ADDON_FREE_INAPP: _('Free with in-app payments'),
ADDON_OTHER_INAPP: _("I'll use my own system for in-app payments")
}
# Non-locale versions for the API.
ADDON_PREMIUM_API = {
ADDON_FREE: 'free',
ADDON_PREMIUM: 'premium',
ADDON_PREMIUM_INAPP: 'premium-inapp',
ADDON_FREE_INAPP: 'free-inapp',
ADDON_OTHER_INAPP: 'other',
}
ADDON_PREMIUM_API_LOOKUP = dict((v, k) for k, v in ADDON_PREMIUM_API.items())
# Apps that require some sort of payment prior to installing.
ADDON_PREMIUMS = (ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
# Apps that do *not* require a payment prior to installing.
ADDON_FREES = (ADDON_FREE, ADDON_FREE_INAPP, ADDON_OTHER_INAPP)
ADDON_INAPPS = (ADDON_PREMIUM_INAPP, ADDON_FREE_INAPP)
ADDON_HAS_PAYMENTS = (ADDON_FREE_INAPP, ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
# Edit addon information
MAX_TAGS = 20
MIN_TAG_LENGTH = 2
MAX_CATEGORIES = 2
# Icon sizes we want to generate and expose in the API.
CONTENT_ICON_SIZES = [32, 48, 64, 128]
# Promo img sizes we want to generate and expose in the API.
PROMO_IMG_SIZES = [320, 640, 1050]
PROMO_IMG_MINIMUMS = (1050, 300)
# Preview upload sizes [thumb, full]
ADDON_PREVIEW_SIZES = [(200, 150), (700, 525)]
# Accepted image MIME-types
IMG_TYPES = ('image/png', 'image/jpeg', 'image/jpg')
VIDEO_TYPES = ('video/webm',)
# Editor Tools
EDITOR_VIEWING_INTERVAL = 8 # How often we ping for "who's watching?"
# For use in urls.
ADDON_UUID = r'(?P<uuid>[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12})'
APP_SLUG = r"""(?P<app_slug>[^/<>"']+)"""
# Reviewer Incentive Scores.
# Note: Don't change these since they're used as keys in the database.
REVIEWED_MANUAL = 0
REVIEWED_WEBAPP_HOSTED = 70
REVIEWED_WEBAPP_PACKAGED = 71
REVIEWED_WEBAPP_REREVIEW = 72
REVIEWED_WEBAPP_UPDATE = 73
REVIEWED_WEBAPP_PRIVILEGED = 74
REVIEWED_WEBAPP_PRIVILEGED_UPDATE = 75
REVIEWED_WEBAPP_PLATFORM_EXTRA = 76 # Not used as a key
REVIEWED_APP_REVIEW = 81
REVIEWED_APP_REVIEW_UNDO = 82
REVIEWED_WEBAPP_TARAKO = 90
REVIEWED_APP_ABUSE_REPORT = 100
REVIEWED_WEBSITE_ABUSE_REPORT = 101
REVIEWED_CHOICES = {
REVIEWED_MANUAL: _('Manual Reviewer Points'),
REVIEWED_WEBAPP_HOSTED: _('Web App Review'),
REVIEWED_WEBAPP_PACKAGED: _('Packaged App Review'),
REVIEWED_WEBAPP_PRIVILEGED: _('Privileged App Review'),
REVIEWED_WEBAPP_REREVIEW: _('Web App Re-review'),
REVIEWED_WEBAPP_UPDATE: _('Updated Packaged App Review'),
REVIEWED_WEBAPP_PRIVILEGED_UPDATE: _('Updated Privileged App Review'),
REVIEWED_APP_REVIEW: _('Moderated App Review'),
REVIEWED_APP_REVIEW_UNDO: _('App Review Moderation Reverted'),
REVIEWED_WEBAPP_TARAKO: _('Tarako App Review'),
REVIEWED_APP_ABUSE_REPORT: _('App Abuse Report Read'),
REVIEWED_WEBSITE_ABUSE_REPORT: _('Website Abuse Report Read'),
}
REVIEWED_SCORES = {
REVIEWED_MANUAL: 0,
REVIEWED_WEBAPP_HOSTED: 60,
REVIEWED_WEBAPP_PACKAGED: 60,
REVIEWED_WEBAPP_PRIVILEGED: 120,
REVIEWED_WEBAPP_REREVIEW: 30,
REVIEWED_WEBAPP_UPDATE: 40,
REVIEWED_WEBAPP_PRIVILEGED_UPDATE: 80,
REVIEWED_APP_REVIEW: 1,
REVIEWED_APP_REVIEW_UNDO: -1, # -REVIEWED_APP_REVIEW
REVIEWED_WEBAPP_TARAKO: 30,
REVIEWED_WEBAPP_PLATFORM_EXTRA: 10,
REVIEWED_APP_ABUSE_REPORT: 2,
REVIEWED_WEBSITE_ABUSE_REPORT: 2,
}
REVIEWED_MARKETPLACE = (
REVIEWED_WEBAPP_HOSTED,
REVIEWED_WEBAPP_PACKAGED,
REVIEWED_WEBAPP_PRIVILEGED,
REVIEWED_WEBAPP_REREVIEW,
REVIEWED_WEBAPP_UPDATE,
REVIEWED_WEBAPP_PRIVILEGED_UPDATE,
REVIEWED_APP_REVIEW,
REVIEWED_APP_REVIEW_UNDO,
REVIEWED_WEBAPP_TARAKO,
REVIEWED_APP_ABUSE_REPORT,
REVIEWED_WEBSITE_ABUSE_REPORT,
)
REVIEWED_LEVELS = [
{'name': _('Level 1'), 'points': 2160},
{'name': _('Level 2'), 'points': 4320},
{'name': _('Level 3'), 'points': 8700},
{'name': _('Level 4'), 'points': 21000},
{'name': _('Level 5'), 'points': 45000},
{'name': _('Level 6'), 'points': 96000},
{'name': _('Level 7'), 'points': 300000},
{'name': _('Level 8'), 'points': 1200000},
{'name': _('Level 9'), 'points': 3000000},
]
# Login credential source. We'll also include the site source in that.
# All the old existing AMO users and anyone before we started tracking this.
LOGIN_SOURCE_UNKNOWN = 0
# Most likely everyone who signed up for the marketplace.
LOGIN_SOURCE_BROWSERID = 1
# Everyone who signed up for the marketplace using BrowserID.
LOGIN_SOURCE_MMO_BROWSERID = 2
# Everyone who signed up for AMO once it uses BrowserID.
LOGIN_SOURCE_AMO_BROWSERID = 3
# Signups via Firefox Accounts.
LOGIN_SOURCE_FXA = 4
# Signups via Webpay Purchases
LOGIN_SOURCE_WEBPAY = 5
LOGIN_SOURCE_LOOKUP = {
LOGIN_SOURCE_UNKNOWN: 'unknown',
LOGIN_SOURCE_BROWSERID: 'persona',
LOGIN_SOURCE_MMO_BROWSERID: 'mmo-persona',
LOGIN_SOURCE_AMO_BROWSERID: 'amo-persona',
LOGIN_SOURCE_FXA: 'firefox-accounts',
LOGIN_SOURCE_WEBPAY: 'webpay',
}
# Add slug ~> id to the dict so lookups can be done with id or slug.
for source_id, source_slug in LOGIN_SOURCE_LOOKUP.items():
LOGIN_SOURCE_LOOKUP[source_slug] = source_id
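# After the loop the mapping works both ways, e.g.
# LOGIN_SOURCE_LOOKUP['persona'] == LOGIN_SOURCE_BROWSERID and
# LOGIN_SOURCE_LOOKUP[LOGIN_SOURCE_BROWSERID] == 'persona'.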
# These are logins that use BrowserID.
LOGIN_SOURCE_BROWSERIDS = [LOGIN_SOURCE_BROWSERID, LOGIN_SOURCE_AMO_BROWSERID,
LOGIN_SOURCE_MMO_BROWSERID, LOGIN_SOURCE_WEBPAY]
|
tsl143/zamboni
|
mkt/constants/base.py
|
Python
|
bsd-3-clause
| 10,226 | 0 |
from .app import App
from .reviews import Reviews
from .r_longboarding import RLongboarding
|
Widdershin/community-review-poster
|
autoposter/__init__.py
|
Python
|
mit
| 91 | 0.010989 |
# -*- coding: utf-8 -*-
"""
object file io is a Python object to single file I/O framework. The word
'framework' means you can use any serialization/deserialization algorithm here.
- dump: dump python object to a file.
- safe_dump: add atomic writing guarantee for ``dump``.
- load: load python object from a file.
Features:
1. ``compress``: built-in compress/decompress options.
2. ``overwrite``: an option to prevent overwriting an existing file.
3. ``verbose``: an optional built-in logger can display help information.
Usage:
suppose you have a dumper function that takes a Python object as input and
returns a binary object, dumping the Python object to binary::

    import pickle

    def dump(obj):
        return pickle.dumps(obj)

    def load(binary):
        return pickle.loads(binary)

You just need to add a decorator naming the serializer type, and the new
function will do all the magic for you::

    from obj_file_io import dump_func, safe_dump_func, load_func

    @dump_func("binary")
    def dump(obj):
        return pickle.dumps(obj)

    @safe_dump_func("binary")
    def safe_dump(obj):
        return pickle.dumps(obj)

    @load_func("binary")
    def load(binary):
        return pickle.loads(binary)
object file io is a framework for doing I/O between a Python object and a
single local file.
"""
import os
import time
import zlib
import logging
import inspect
from atomicwrites import atomic_write
# logging util
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
logger.addHandler(stream_handler)
def prt_console(message, verbose):
"""Print message to console, if ``verbose`` is True.
"""
if verbose:
logger.info(message)
def _check_serializer_type(serializer_type):
if serializer_type not in ["binary", "str"]:
raise ValueError("serializer_type has to be one of 'binary' or 'str'!")
# dump, load
def _dump(obj, abspath, serializer_type,
dumper_func=None,
compress=True,
overwrite=False,
verbose=False,
**kwargs):
"""Dump object to file.
:param abspath: The file path you want dump to.
:type abspath: str
:param serializer_type: 'binary' or 'str'.
:type serializer_type: str
:param dumper_func: A dumper function that takes an object as input, return
binary or string.
:type dumper_func: callable function
:param compress: default ``False``. If True, then compress binary.
:type compress: bool
:param overwrite: default ``False``, If ``True``, when you dump to
existing file, it silently overwrite it. If ``False``, an alert
message is shown. Default setting ``False`` is to prevent overwrite
file by mistake.
:type overwrite: boolean
:param verbose: default True, help-message-display trigger.
:type verbose: boolean
"""
_check_serializer_type(serializer_type)
if not inspect.isfunction(dumper_func):
raise TypeError("dumper_func has to be a function take object as input "
"and return binary!")
prt_console("\nDump to '%s' ..." % abspath, verbose)
if os.path.exists(abspath):
if not overwrite:
prt_console(
" Stop! File exists and overwrite is not allowed",
verbose,
)
return
st = time.clock()
b_or_str = dumper_func(obj, **kwargs)
    if serializer_type == "str":
b = b_or_str.encode("utf-8")
else:
b = b_or_str
if compress:
b = zlib.compress(b)
with atomic_write(abspath, overwrite=overwrite, mode="wb") as f:
f.write(b)
elapsed = time.clock() - st
prt_console(" Complete! Elapse %.6f sec." % elapsed, verbose)
    if serializer_type == "str":
return b_or_str
else:
return b
def _load(abspath, serializer_type,
loader_func=None,
decompress=True,
verbose=False,
**kwargs):
"""load object from file.
:param abspath: The file path you want load from.
:type abspath: str
:param serializer_type: 'binary' or 'str'.
:type serializer_type: str
:param loader_func: A loader function that takes binary as input, return
an object.
:type loader_func: callable function
:param decompress: default ``False``. If True, then decompress binary.
:type decompress: bool
:param verbose: default True, help-message-display trigger.
:type verbose: boolean
"""
_check_serializer_type(serializer_type)
if not inspect.isfunction(loader_func):
raise TypeError("loader_func has to be a function take binary as input "
"and return an object!")
prt_console("\nLoad from '%s' ..." % abspath, verbose)
if not os.path.exists(abspath):
raise ValueError("'%s' doesn't exist." % abspath)
st = time.clock()
with open(abspath, "rb") as f:
b = f.read()
if decompress:
b = zlib.decompress(b)
    if serializer_type == "str":
obj = loader_func(b.decode("utf-8"), **kwargs)
else:
obj = loader_func(b, **kwargs)
elapsed = time.clock() - st
prt_console(" Complete! Elapse %.6f sec." % elapsed, verbose)
return obj
def dump_func(serializer_type):
"""A decorator for ``_dump(dumper_func=dumper_func, **kwargs)``
"""
def outer_wrapper(dumper_func):
def wrapper(*args, **kwargs):
return _dump(
*args,
dumper_func=dumper_func, serializer_type=serializer_type,
**kwargs
)
return wrapper
return outer_wrapper
def load_func(serializer_type):
"""A decorator for ``_load(loader_func=loader_func, **kwargs)``
"""
def outer_wrapper(loader_func):
def wrapper(*args, **kwargs):
return _load(
*args,
loader_func=loader_func, serializer_type=serializer_type,
**kwargs
)
return wrapper
return outer_wrapper
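

# Minimal usage sketch (illustrative; assumes json for a "str"-type
# serializer and a writable /tmp path):
if __name__ == "__main__":
    import json

    @dump_func("str")
    def json_dump(obj):
        return json.dumps(obj)

    @load_func("str")
    def json_load(text):
        return json.loads(text)

    json_dump({"a": 1}, "/tmp/obj_file_io_demo.bin",
              overwrite=True, verbose=True)
    assert json_load("/tmp/obj_file_io_demo.bin", verbose=True) == {"a": 1}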
|
MacHu-GWU/single_file_module-project
|
sfm/obj_file_io.py
|
Python
|
mit
| 6,059 | 0.000332 |
import numpy as np
from menpo.image import Image, BooleanImage, MaskedImage
from menpo.shape import PointCloud
from menpo.testing import is_same_array
def test_image_copy():
pixels = np.ones([1, 10, 10])
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = Image(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
def test_booleanimage_copy():
pixels = np.ones([10, 10], dtype=np.bool)
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = BooleanImage(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
def test_maskedimage_copy():
pixels = np.ones([1, 10, 10])
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = MaskedImage(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
|
grigorisg9gr/menpo
|
menpo/image/test/image_copy_test.py
|
Python
|
bsd-3-clause
| 1,372 | 0 |
from sqlalchemy import BigInteger, Column, String
from tornado.gen import coroutine
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler
from tornado_sqlalchemy import (
SessionMixin,
as_future,
set_max_workers,
SQLAlchemy,
)
db = SQLAlchemy()
set_max_workers(10)
class User(db.Model):
__tablename__ = 'users'
id = Column(BigInteger, primary_key=True)
username = Column(String(255))
class Foo(db.Model):
__bind_key__ = 'foo'
__tablename__ = 'foo'
id = Column(BigInteger, primary_key=True)
foo = Column(String(255))
class Bar(db.Model):
__bind_key__ = 'bar'
__tablename__ = 'bar'
id = Column(BigInteger, primary_key=True)
bar = Column(String(255))
class SynchronousRequestHandler(SessionMixin, RequestHandler):
def get(self):
with self.make_session() as session:
count = session.query(User).count()
# OR count = self.session.query(User).count()
self.write('{} users so far!'.format(count))
class GenCoroutinesRequestHandler(SessionMixin, RequestHandler):
@coroutine
def get(self):
with self.make_session() as session:
session.add(User(username='b'))
session.add(Foo(foo='foo'))
session.add(Bar(bar='bar'))
session.commit()
count = yield as_future(session.query(User).count)
self.write('{} users so far!'.format(count))
class NativeCoroutinesRequestHandler(SessionMixin, RequestHandler):
async def get(self):
with self.make_session() as session:
session.add(User(username='c'))
session.add(Foo(foo='d'))
session.add(Bar(bar='e'))
session.commit()
count = await as_future(session.query(User).count)
self.write('{} users so far!'.format(count))
if __name__ == '__main__':
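    # One default engine plus two extra engines keyed by __bind_key__:
    # Foo rows are routed to foo.db and Bar rows to bar.db.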
db.configure(
url='sqlite://',
binds={'foo': 'sqlite:///foo.db', 'bar': 'sqlite:///bar.db'},
)
app = Application(
[
(r'/sync', SynchronousRequestHandler),
(r'/gen-coroutines', GenCoroutinesRequestHandler),
(r'/native-coroutines', NativeCoroutinesRequestHandler),
],
db=db,
autoreload=True,
)
db.create_all()
session = db.sessionmaker()
session.add(User(id=1, username='a'))
session.commit()
session.close()
print('Listening on port 8888')
app.listen(8888)
IOLoop.current().start()
|
siddhantgoel/tornado-sqlalchemy
|
examples/multiple_databases.py
|
Python
|
mit
| 2,508 | 0 |
##############################################################################
#
# Copyright (C) 2009 EduSense BV (<http://www.edusense.nl>).
# Copyright (C) 2011 credativ Ltd (<http://www.credativ.co.uk>).
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import date
from openerp.osv import orm, fields
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as OE_DATEFORMAT
class hsbc_export(orm.Model):
"""HSBC Export"""
_name = 'banking.export.hsbc'
_description = __doc__
_rec_name = 'execution_date'
_columns = {
'payment_order_ids': fields.many2many(
'payment.order',
'account_payment_order_hsbc_rel',
'banking_export_hsbc_id', 'account_order_id',
'Payment Orders',
readonly=True),
'identification':
fields.char('Identification', size=15, readonly=True, select=True),
'execution_date':
fields.date('Execution Date', readonly=True),
'no_transactions':
fields.integer('Number of Transactions', readonly=True),
'total_amount':
fields.float('Total Amount', readonly=True),
'date_generated':
fields.datetime('Generation Date', readonly=True, select=True),
'file':
fields.binary('HSBC File', readonly=True),
'state':
fields.selection([
('draft', 'Draft'),
('sent', 'Sent'),
('done', 'Reconciled'),
], 'State', readonly=True),
}
_defaults = {
'date_generated': lambda *a: date.today().strftime(OE_DATEFORMAT),
'state': 'draft',
}
class payment_line(orm.Model):
"""The standard payment order is using a mixture of details from the
partner record and the res.partner.bank record. For, instance, the account
holder name is coming from the res.partner.bank record, but the company
name and address are coming from the partner address record. This is
problematic because the HSBC payment format is validating for alphanumeric
characters in the company name and address. So, "Great Company Ltd." and
"Great Company s.a." will cause an error because they have full-stops in
the name.
A better approach is to use the name and address details from the
res.partner.bank record always. This way, the address details can be
sanitized for the payments, whilst being able to print the proper name and
address throughout the rest of the system e.g. on invoices.
"""
_inherit = 'payment.line'
def info_owner(self, cr, uid, ids, name=None, args=None, context=None):
if not ids:
return {}
result = {}
info = ''
for line in self.browse(cr, uid, ids, context=context):
owner = line.order_id.mode.bank_id
name = owner.owner_name or owner.partner_id.name
            st = owner.street or ''
            st1 = ''  # no street2 in res.partner.bank
            zip_city = (owner.zip or '') + ' ' + (owner.city or '')
            cntry = owner.country_id and owner.country_id.name or ''
            # Name on its own line, followed by the address parts.
            info = name + "\n" + "\n".join((st + " " + st1, zip_city, cntry))
result[line.id] = info
return result
def info_partner(self, cr, uid, ids, name=None, args=None, context=None):
if not ids:
return {}
result = {}
info = ''
for line in self.browse(cr, uid, ids, context=context):
partner = line.bank_id
name = partner.owner_name or partner.partner_id.name
            st = partner.street or ''
            st1 = ''  # no street2 in res.partner.bank
            zip_city = (partner.zip or '') + ' ' + (partner.city or '')
            cntry = partner.country_id and partner.country_id.name or ''
            # Name on its own line, followed by the address parts.
            info = name + "\n" + "\n".join((st + " " + st1, zip_city, cntry))
result[line.id] = info
return result
# Define the info_partner and info_owner so we can override the methods
_columns = {
'info_owner': fields.function(
info_owner,
string="Owner Account",
type="text",
help='Address of the Main Partner',
),
'info_partner': fields.function(
info_partner,
string="Destination Account",
type="text",
help='Address of the Ordering Customer.'
),
}
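
# --- Illustrative sketch (not part of the original module) ------------------
# The payment_line docstring above argues for sanitising the bank-account
# name and address for the HSBC format. Assuming the format accepts only
# alphanumerics and spaces, a minimal helper could look like this:
#
#   import re
#
#   def _sanitize_for_hsbc(text):
#       """Drop everything except alphanumerics and spaces."""
#       return re.sub(r'[^A-Za-z0-9 ]', '', text or '')
#
# e.g. _sanitize_for_hsbc("Great Company s.a.") -> "Great Company sa"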
|
rschnapka/bank-payment
|
account_banking_uk_hsbc/account_banking_uk_hsbc.py
|
Python
|
agpl-3.0
| 5,362 | 0 |
import importlib.util
class Config:
def __init__(self, path: str) -> None:
spec = importlib.util.spec_from_file_location('conf', path)
self.module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(self.module)
@property
def rst_epilog(self) -> str:
"""A string that is appended to the end of every rst file. Useful for
replacements. Defaults to an empty string."""
return str(self.get('rst_epilog', ''))
@property
def source_suffix(self) -> str:
"""The file extension used for source files. Defaults to ".txt"."""
return str(self.get('source_suffix', '.txt'))
def get(self, key: str, default: object) -> object:
try:
return self[key]
except AttributeError:
return default
def __getitem__(self, key: str) -> object:
return getattr(self.module, key)
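
# --- Usage sketch (not part of the original module) --------------------------
# Assuming a conf.py on disk that defines, say, source_suffix = '.rst':
#
#   cfg = Config('/path/to/conf.py')
#   cfg.source_suffix        # '.rst' (falls back to '.txt' if undefined)
#   cfg.get('missing', 'x')  # 'x' -- __getitem__ raises AttributeError,
#                            # which get() turns into the default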
|
jeff-allen-mongo/mut
|
mut/tuft/config.py
|
Python
|
apache-2.0
| 911 | 0 |
"""
WSGI config for dtcbusroutes project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dtcbusroutes.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
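#
# A minimal middleware sketch in the same spirit (hypothetical, not part of
# the original project) -- it wraps the Django application and stamps an
# extra response header:
#
# class HeaderMiddleware(object):
#     def __init__(self, app):
#         self.app = app
#     def __call__(self, environ, start_response):
#         def _start_response(status, headers, exc_info=None):
#             headers.append(('X-Served-By', 'dtcbusroutes'))
#             return start_response(status, headers, exc_info)
#         return self.app(environ, _start_response)
#
# application = HeaderMiddleware(application)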
|
dhirajt/dtc
|
wsgi/dtcbusroutes/dtcbusroutes/wsgi.py
|
Python
|
gpl-3.0
| 1,146 | 0.000873 |
"""Undocumented Module"""
__all__ = ['doTest']
from panda3d.core import *
from panda3d.direct import *
from .IntervalGlobal import *
def doTest():
smiley = loader.loadModel('models/misc/smiley')
smiley.reparentTo(render)
    pi = ProjectileInterval(smiley, startPos=Point3(0, 0, 0),
                            endZ=-10, wayPoint=Point3(10, 0, 0),
                            timeToWayPoint=3)
pi.loop()
return pi
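
# Interactive usage sketch (assumes a running ShowBase, so that the global
# `loader` and `render` used above exist):
#
#   pi = doTest()  # smiley follows a ballistic arc, looping forever
#   pi.finish()    # stop the interval when done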
|
brakhane/panda3d
|
direct/src/interval/ProjectileIntervalTest.py
|
Python
|
bsd-3-clause
| 437 | 0.009153 |
from .constants import *
from ._xlcall import *
|
thatcr/cffi-xll
|
src/xlcall/templates/__init__.py
|
Python
|
mit
| 47 | 0.021277 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import subprocess
from devtoolslib.shell import Shell
from devtoolslib import http_server
class LinuxShell(Shell):
"""Wrapper around Mojo shell running on Linux.
Args:
executable_path: path to the shell binary
command_prefix: optional list of arguments to prepend to the shell command,
allowing e.g. to run the shell under debugger.
"""
def __init__(self, executable_path, command_prefix=None):
self.executable_path = executable_path
self.command_prefix = command_prefix if command_prefix else []
def ServeLocalDirectory(self, local_dir_path, port=0):
"""Serves the content of the local (host) directory, making it available to
the shell under the url returned by the function.
The server will run on a separate thread until the program terminates. The
call returns immediately.
Args:
local_dir_path: path to the directory to be served
port: port at which the server will be available to the shell
Returns:
The url that the shell can use to access the content of |local_dir_path|.
"""
return 'http://%s:%d/' % http_server.StartHttpServer(local_dir_path, port)
def Run(self, arguments):
"""Runs the shell with given arguments until shell exits, passing the stdout
mingled with stderr produced by the shell onto the stdout.
Returns:
Exit code retured by the shell or None if the exit code cannot be
retrieved.
"""
command = self.command_prefix + [self.executable_path] + arguments
return subprocess.call(command, stderr=subprocess.STDOUT)
def RunAndGetOutput(self, arguments):
"""Runs the shell with given arguments until shell exits.
Args:
arguments: list of arguments for the shell
Returns:
A tuple of (return_code, output). |return_code| is the exit code returned
by the shell or None if the exit code cannot be retrieved. |output| is the
stdout mingled with the stderr produced by the shell.
"""
command = self.command_prefix + [self.executable_path] + arguments
p = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(output, _) = p.communicate()
return p.returncode, output
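
# Usage sketch (paths and arguments are hypothetical):
#
#   shell = LinuxShell('/path/to/mojo_shell', command_prefix=['gdb', '--args'])
#   url = shell.ServeLocalDirectory('out/apps')  # e.g. 'http://127.0.0.1:31337/'
#   exit_code = shell.Run([url + 'hello_world.mojo'])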
|
collinjackson/mojo
|
mojo/devtools/common/devtoolslib/linux_shell.py
|
Python
|
bsd-3-clause
| 2,385 | 0.002935 |
#(C) Copyright Syd Logan 2017-2020
#(C) Copyright Thousand Smiles Foundation 2017-2020
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#
#You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
'''
unit tests for clinic station application. Assumes django server is up
and running on the specified host and port
'''
import unittest
import getopt, sys
import json
from tschartslib.service.serviceapi import ServiceAPI
from tschartslib.tscharts.tscharts import Login, Logout
from tschartslib.clinic.clinic import CreateClinic, DeleteClinic
from tschartslib.station.station import CreateStation, DeleteStation
from tschartslib.patient.patient import CreatePatient, GetPatient, DeletePatient
class CreateClinicStation(ServiceAPI):
def __init__(self, host, port, token, clinic, station, active=False, away=True, finished=False, name="", name_es="", level=None):
super(CreateClinicStation, self).__init__()
self.setHttpMethod("POST")
self.setHost(host)
self.setPort(port)
self.setToken(token)
payload = {"clinic": clinic, "away": away, "station": station, "active": active, "name": name, "finished": finished, "name_es": name_es, "level": level}
self.setPayload(payload)
self.setURL("tscharts/v1/clinicstation/")
class GetClinicStation(ServiceAPI):
def makeURL(self):
hasQArgs = False
if not self._id == None:
base = "tscharts/v1/clinicstation/{}/".format(self._id)
else:
base = "tscharts/v1/clinicstation/"
if not self._clinic == None:
if not hasQArgs:
base += "?"
else:
base += "&"
base += "clinic={}".format(self._clinic)
hasQArgs = True
if not self._active == None:
if not hasQArgs:
base += "?"
else:
base += "&"
base += "active={}".format(self._active)
hasQArgs = True
if not self._level == None:
if not hasQArgs:
base += "?"
else:
base += "&"
base += "level={}".format(self._level)
hasQArgs = True
if not self._away == None:
if not hasQArgs:
base += "?"
else:
base += "&"
base += "away={}".format(self._away)
hasQArgs = True
if not self._finished == None:
if not hasQArgs:
base += "?"
else:
base += "&"
base += "finished={}".format(self._finished)
hasQArgs = True
self.setURL(base)
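    # For example (hypothetical values): after setClinic(5) and
    # setActive(True), makeURL() produces
    #   tscharts/v1/clinicstation/?clinic=5&active=True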
def __init__(self, host, port, token, id=None):
super(GetClinicStation, self).__init__()
self.setHttpMethod("GET")
self.setHost(host)
self.setPort(port)
self.setToken(token)
self._id = None
self._away = None
self._active = None
self._finished = None
self._level = None
self._clinic = None
        self.makeURL()
def setId(self, id):
        self._id = id
self.makeURL()
def setAway(self, away):
self._away = away
self.makeURL()
def setFinished(self, finished):
self._finished = finished
self.makeURL()
def setActive(self, active):
self._active = active
self.makeURL()
def setClinic(self, clinic):
self._clinic = clinic
self.makeURL()
def setLevel(self, level):
self._level = level
self.makeURL()
class UpdateClinicStation(ServiceAPI):
def __init__(self, host, port, token, id):
super(UpdateClinicStation, self).__init__()
self.setHttpMethod("PUT")
self.setHost(host)
self.setPort(port)
self.setToken(token)
self._payload = {}
self.setPayload(self._payload)
self.setURL("tscharts/v1/clinicstation/{}/".format(id))
def setAway(self, away):
self._payload["away"] = away
self.setPayload(self._payload)
def setFinished(self, finished):
self._payload["finished"] = finished
self.setPayload(self._payload)
def setActive(self, active):
self._payload["active"] = active
self.setPayload(self._payload)
def setName(self, name):
self._payload["name"] = name
self.setPayload(self._payload)
def setNameES(self, name):
self._payload["name_es"] = name
self.setPayload(self._payload)
def setLevel(self, level):
self._payload["level"] = level
self.setPayload(self._payload)
def setActivePatient(self, patient):
self._payload["activepatient"] = patient
self.setPayload(self._payload)
def setNextPatient(self, patient):
self._payload["nextpatient"] = patient
self.setPayload(self._payload)
def setAwayTime(self, minutes):
self._payload["awaytime"] = minutes
self.setPayload(self._payload)
class DeleteClinicStation(ServiceAPI):
def __init__(self, host, port, token, id):
super(DeleteClinicStation, self).__init__()
self.setHttpMethod("DELETE")
self.setHost(host)
self.setPort(port)
self.setToken(token)
self.setURL("tscharts/v1/clinicstation/{}/".format(id))
class TestTSClinicStation(unittest.TestCase):
def setUp(self):
login = Login(host, port, username, password)
ret = login.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("token" in ret[1])
global token
token = ret[1]["token"]
def testCreateClinicStation(self):
x = CreateClinic(host, port, token, "Ensenada", "02/05/2016", "02/06/2016")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("id" in ret[1])
clinicid = int(ret[1]["id"])
x = CreateStation(host, port, token, "ENT")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
stationid = int(ret[1]["id"])
# default active and away state
x = CreateClinicStation(host, port, token, clinicid, stationid, name="test1")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "test1")
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
# explicit active state
x = CreateClinicStation(host, port, token, clinicid, stationid, active=False, name="test2")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "test2")
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
x = CreateClinicStation(host, port, token, clinicid, stationid, active=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == True)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "")
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
# explicit away state
x = CreateClinicStation(host, port, token, clinicid, stationid, away=False)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == False)
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
x = CreateClinicStation(host, port, token, clinicid, stationid, away=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
# explicit finished state
x = CreateClinicStation(host, port, token, clinicid, stationid, finished=False)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("finished" in ret[1])
self.assertTrue(ret[1]["finished"] == False)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
x = CreateClinicStation(host, port, token, clinicid, stationid, finished=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
id = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("clinic" in ret[1])
clinicId = int(ret[1]["clinic"])
self.assertTrue(clinicId == clinicid)
self.assertTrue("station" in ret[1])
stationId = int(ret[1]["station"])
self.assertTrue(stationId == stationid)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["finished"] == True)
x = DeleteClinicStation(host, port, token, id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(id)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
# non-existent clinic param
x = CreateClinicStation(host, port, token, 9999, stationid, active=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404)
# non-existent station param
x = CreateClinicStation(host, port, token, clinicid, 9999, active=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404)
# bogus active param
x = CreateClinicStation(host, port, token, clinicid, stationid, active="Hello")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 400)
# bogus away param
x = CreateClinicStation(host, port, token, clinicid, stationid, away="Hello")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 400)
x = DeleteStation(host, port, token, stationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteClinic(host, port, token, clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
def testNextAndActivePatient(self):
data = {}
data["paternal_last"] = "abcd1234"
data["maternal_last"] = "yyyyyy"
data["first"] = "zzzzzzz"
data["middle"] = ""
data["suffix"] = "Jr."
data["prefix"] = ""
data["dob"] = "04/01/1962"
data["gender"] = "Female"
data["street1"] = "1234 First Ave"
data["street2"] = ""
data["city"] = "Ensenada"
data["colonia"] = ""
data["state"] = u"Baja California"
data["phone1"] = "1-111-111-1111"
data["phone2"] = ""
data["email"] = "patient@example.com"
data["emergencyfullname"] = "Maria Sanchez"
data["emergencyphone"] = "1-222-222-2222"
data["emergencyemail"] = "maria.sanchez@example.com"
x = CreatePatient(host, port, token, data)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
patientid = int(ret[1]["id"])
x = GetPatient(host, port, token)
x.setId(patientid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = CreateClinic(host, port, token, "Ensenada", "02/05/2016", "02/06/2016")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("id" in ret[1])
clinicid = int(ret[1]["id"])
x = CreateStation(host, port, token, "ENT")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
stationid = int(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, stationid, active=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
clinicstationid = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == True)
self.assertTrue("name" in ret[1])
self.assertTrue("name_es" in ret[1])
self.assertTrue(ret[1]["name"] == "")
self.assertTrue(ret[1]["name_es"] == "")
self.assertTrue(ret[1]["activepatient"] == None)
self.assertTrue(ret[1]["nextpatient"] == None)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setActive(False)
x.setAway(True)
x.setAwayTime(15)
x.setName("Dental 1")
x.setActivePatient(patientid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("awaytime" in ret[1])
self.assertTrue(ret[1]["awaytime"] == 15)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "Dental 1")
self.assertTrue("willreturn" in ret[1])
self.assertTrue(ret[1]["activepatient"] == patientid)
self.assertTrue(ret[1]["nextpatient"] == None)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setActive(True)
x.setAway(False)
x.setActivePatient(None)
x.setNextPatient(patientid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == True)
self.assertTrue(ret[1]["away"] == False)
self.assertTrue(ret[1]["activepatient"] == None)
self.assertTrue(ret[1]["nextpatient"] == patientid)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setLevel(15)
x.setActivePatient(None)
x.setNextPatient(None)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("level" in ret[1])
self.assertTrue(int(ret[1]["level"]) == 15)
self.assertTrue(ret[1]["active"] == True)
self.assertTrue(ret[1]["away"] == False)
self.assertTrue(ret[1]["activepatient"] == None)
self.assertTrue(ret[1]["nextpatient"] == None)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setLevel(0)
x.setAwayTime(23)
x.setActive(False)
x.setAway(True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("level" in ret[1])
self.assertTrue(int(ret[1]["level"]) == 0)
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("awaytime" in ret[1])
self.assertTrue(ret[1]["awaytime"] == 23)
self.assertTrue("willreturn" in ret[1])
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
self.assertTrue(ret[1]["activepatient"] == None)
self.assertTrue(ret[1]["nextpatient"] == None)
x = DeleteClinicStation(host, port, token, clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteStation(host, port, token, stationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteClinic(host, port, token, clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeletePatient(host, port, token, patientid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
def testDeleteClinicStation(self):
x = CreateClinic(host, port, token, "Ensenada", "02/05/2016", "02/06/2016")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("id" in ret[1])
clinicid = int(ret[1]["id"])
x = CreateStation(host, port, token, "ENT")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
stationid = int(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, stationid, True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
clinicstationid = int(ret[1]["id"])
x = DeleteClinicStation(host, port, token, clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404) # not found
x = DeleteClinicStation(host, port, token, clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404)
x = DeleteStation(host, port, token, stationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteClinic(host, port, token, clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
def testUpdateClinicStation(self):
x = CreateClinic(host, port, token, "Ensenada", "02/05/2016", "02/06/2016")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("id" in ret[1])
clinicid = int(ret[1]["id"])
x = CreateStation(host, port, token, "ENT")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
stationid = int(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, stationid, active=True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
clinicstationid = int(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == True)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "")
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setActive(False)
x.setAway(True)
x.setAwayTime(15)
x.setName("Dental 1")
x.setNameES("Dental Uno")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("awaytime" in ret[1])
self.assertTrue(ret[1]["awaytime"] == 15)
self.assertTrue("name" in ret[1])
self.assertTrue(ret[1]["name"] == "Dental 1")
self.assertTrue(ret[1]["name_es"] == "Dental Uno")
self.assertTrue("willreturn" in ret[1])
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setActive(True)
x.setAway(False)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("active" in ret[1])
self.assertTrue(ret[1]["active"] == True)
self.assertTrue(ret[1]["away"] == False)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setLevel(15)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("level" in ret[1])
self.assertTrue(int(ret[1]["level"]) == 15)
self.assertTrue(ret[1]["active"] == True)
self.assertTrue(ret[1]["away"] == False)
x = UpdateClinicStation(host, port, token, clinicstationid)
x.setLevel(0)
x.setAwayTime(23)
x.setActive(False)
x.setAway(True)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setId(clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("level" in ret[1])
self.assertTrue(int(ret[1]["level"]) == 0)
self.assertTrue(ret[1]["active"] == False)
self.assertTrue("awaytime" in ret[1])
self.assertTrue(ret[1]["awaytime"] == 23)
self.assertTrue("willreturn" in ret[1])
self.assertTrue("away" in ret[1])
self.assertTrue(ret[1]["away"] == True)
x = DeleteClinicStation(host, port, token, clinicstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteStation(host, port, token, stationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
x = DeleteClinic(host, port, token, clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
def testGetAllClinicStations(self):
x = CreateClinic(host, port, token, "Ensenada", "02/05/2016", "02/06/2016")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
self.assertTrue("id" in ret[1])
clinicid = int(ret[1]["id"])
x = CreateStation(host, port, token, "ENT")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
entstationid = int(ret[1]["id"])
x = CreateStation(host, port, token, "Dental")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
dentalstationid = int(ret[1]["id"])
x = CreateStation(host, port, token, "Ortho")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
orthostationid = int(ret[1]["id"])
x = CreateStation(host, port, token, "Screening")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
screeningstationid = int(ret[1]["id"])
x = CreateStation(host, port, token, "Speech")
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
speechstationid = int(ret[1]["id"])
ids = []
delids = []
x = CreateClinicStation(host, port, token, clinicid, entstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
ids.append(ret[1]["id"])
delids.append(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, dentalstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
ids.append(ret[1]["id"])
delids.append(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, orthostationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
ids.append(ret[1]["id"])
delids.append(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, screeningstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
ids.append(ret[1]["id"])
delids.append(ret[1]["id"])
x = CreateClinicStation(host, port, token, clinicid, speechstationid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
ids.append(ret[1]["id"])
delids.append(ret[1]["id"])
x = GetClinicStation(host, port, token)
x.setClinic(clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 200)
stations = ret[1]
self.assertTrue(len(stations) == 5)
for x in stations:
if x["id"] in ids:
ids.remove(x["id"])
        if len(ids):
            self.fail("failed to find all created clinicstation items {}".format(ids))
for x in delids:
y = DeleteClinicStation(host, port, token, x)
ret = y.send(timeout=30)
self.assertEqual(ret[0], 200)
x = GetClinicStation(host, port, token)
x.setClinic(clinicid)
ret = x.send(timeout=30)
self.assertEqual(ret[0], 404)
stations = ret[1]
self.assertTrue(len(stations) == 0)
def usage():
print("clinicstations [-h host] [-p port] [-u username] [-w password]")
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "h:p:u:w:")
except getopt.GetoptError as err:
print(str(err))
usage()
sys.exit(2)
global host
host = "127.0.0.1"
global port
port = 8000
global username
username = None
global password
password = None
for o, a in opts:
if o == "-h":
host = a
elif o == "-p":
port = int(a)
elif o == "-u":
username = a
elif o == "-w":
password = a
else:
assert False, "unhandled option"
unittest.main(argv=[sys.argv[0]])
if __name__ == "__main__":
main()
|
slogan621/tscharts
|
tschartslib/clinicstation/clinicstation.py
|
Python
|
apache-2.0
| 30,523 | 0.003407 |
#coding:utf8
from flask_sqlalchemy import SQLAlchemy
from . import app
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///relative/../../test.db'
db = SQLAlchemy(app)
|
prikevs/PasteSite
|
paste/database.py
|
Python
|
mit
| 173 | 0.00578 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import rdmo.core.models
class Migration(migrations.Migration):
dependencies = [
('domain', '0001_initial_after_reset'),
]
operations = [
migrations.CreateModel(
name='Catalog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(verbose_name='created', editable=False)),
('updated', models.DateTimeField(verbose_name='updated', editable=False)),
('order', models.IntegerField(null=True)),
('title_en', models.CharField(max_length=256)),
('title_de', models.CharField(max_length=256)),
],
options={
'ordering': ('order',),
'verbose_name': 'Catalog',
'verbose_name_plural': 'Catalogs',
},
bases=(models.Model, rdmo.core.models.TranslationMixin),
),
migrations.CreateModel(
name='QuestionEntity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(verbose_name='created', editable=False)),
('updated', models.DateTimeField(verbose_name='updated', editable=False)),
('order', models.IntegerField(null=True)),
('help_en', models.TextField(null=True, blank=True)),
('help_de', models.TextField(null=True, blank=True)),
],
options={
'ordering': ('subsection__section__catalog__order', 'subsection__section__order', 'subsection__order'),
'verbose_name': 'QuestionEntity',
'verbose_name_plural': 'QuestionEntities',
},
bases=(models.Model, rdmo.core.models.TranslationMixin),
),
migrations.CreateModel(
name='Section',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(verbose_name='created', editable=False)),
('updated', models.DateTimeField(verbose_name='updated', editable=False)),
('order', models.IntegerField(null=True)),
('title_en', models.CharField(max_length=256)),
('title_de', models.CharField(max_length=256)),
('catalog', models.ForeignKey(related_name='sections', to='questions.Catalog', on_delete=models.CASCADE)),
],
options={
'ordering': ('catalog__order', 'order'),
'verbose_name': 'Section',
'verbose_name_plural': 'Sections',
},
bases=(models.Model, rdmo.core.models.TranslationMixin),
),
migrations.CreateModel(
name='Subsection',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(verbose_name='created', editable=False)),
('updated', models.DateTimeField(verbose_name='updated', editable=False)),
('order', models.IntegerField(null=True)),
('title_en', models.CharField(max_length=256)),
('title_de', models.CharField(max_length=256)),
('section', models.ForeignKey(related_name='subsections', to='questions.Section', on_delete=models.CASCADE)),
],
options={
'ordering': ('section__catalog__order', 'section__order', 'order'),
'verbose_name': 'Subsection',
'verbose_name_plural': 'Subsections',
},
bases=(models.Model, rdmo.core.models.TranslationMixin),
),
migrations.CreateModel(
name='Question',
fields=[
('questionentity_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='questions.QuestionEntity', on_delete=models.CASCADE)),
('text_en', models.TextField()),
('text_de', models.TextField()),
('widget_type', models.CharField(max_length=12, choices=[('text', 'Text'), ('textarea', 'Textarea'), ('yesno', 'Yes/No'), ('checkbox', 'Checkboxes'), ('radio', 'Radio buttons'), ('select', 'Select drop-down'), ('range', 'Range slider'), ('date', 'Date picker')])),
],
options={
'ordering': ('subsection__section__catalog__order', 'subsection__section__order', 'subsection__order'),
'verbose_name': 'Question',
'verbose_name_plural': 'Questions',
},
bases=('questions.questionentity',),
),
migrations.AddField(
model_name='questionentity',
name='attribute_entity',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='domain.AttributeEntity', null=True),
),
migrations.AddField(
model_name='questionentity',
name='subsection',
field=models.ForeignKey(related_name='entities', to='questions.Subsection', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='question',
name='parent_entity',
field=models.ForeignKey(related_name='questions', blank=True, to='questions.QuestionEntity', null=True, on_delete=models.CASCADE),
),
]
|
DMPwerkzeug/DMPwerkzeug
|
rdmo/questions/migrations/0001_initial_after_reset.py
|
Python
|
apache-2.0
| 5,793 | 0.003798 |
# Sum of numbers divisible by 3 or 5 below a given limit (Project Euler #1)
def sumN(n):
return ((n * (n+1)) // 2)
def sumDivisibleBy(num, upto):
linearUpto = (upto-1)//num
return num * sumN(linearUpto)
upto = int(input())
ans = sumDivisibleBy(3,upto) + sumDivisibleBy(5,upto) - sumDivisibleBy(15,upto)
print(ans)
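
# Worked example for upto = 10: the multiples of 3 or 5 below 10 are
# 3, 5, 6 and 9, which sum to 23. The closed form agrees:
#   sumDivisibleBy(3, 10)  = 3 * sumN(3) = 18   (3 + 6 + 9)
#   sumDivisibleBy(5, 10)  = 5 * sumN(1) = 5
#   sumDivisibleBy(15, 10) = 0                  (nothing below 10)
#   18 + 5 - 0 = 23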
|
iammrdollar/ProjectEulerSolutions
|
Problem 1 - Multiples of 3 and 5/mul_3_5.py
|
Python
|
mit
| 296 | 0.016892 |
from flask import Flask
from flask_restful import reqparse, abort, Api, Resource
app = Flask(__name__)
api = Api(app)
TODOS = {
'todo1': {'task': 'build an API'},
'todo2': {'task': '?????'},
'todo3': {'task': 'profit!'},
}
def abort_if_todo_doesnt_exist(todo_id):
if todo_id not in TODOS:
abort(404, message="Todo {} doesn't exist".format(todo_id))
parser = reqparse.RequestParser()
parser.add_argument('task', type=str)
# Todo
# show a single todo item and lets you delete them
class Todo(Resource):
def get(self, todo_id):
abort_if_todo_doesnt_exist(todo_id)
return TODOS[todo_id]
def delete(self, todo_id):
abort_if_todo_doesnt_exist(todo_id)
del TODOS[todo_id]
return '', 204
def put(self, todo_id):
args = parser.parse_args()
task = {'task': args['task']}
TODOS[todo_id] = task
return task, 201
# TodoList
# shows a list of all todos, and lets you POST to add new tasks
class TodoList(Resource):
def get(self):
return TODOS
def post(self):
args = parser.parse_args()
todo_id = 'todo%d' % (len(TODOS) + 1)
TODOS[todo_id] = {'task': args['task']}
return TODOS[todo_id], 201
##
## Actually setup the Api resource routing here
##
api.add_resource(TodoList, '/todos')
api.add_resource(Todo, '/todos/<string:todo_id>')
if __name__ == '__main__':
app.run(debug=True)
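
# Example requests against the running server (Flask's default port 5000):
#
#   curl http://localhost:5000/todos
#   curl http://localhost:5000/todos/todo3
#   curl -X POST   http://localhost:5000/todos       -d 'task=something new'
#   curl -X PUT    http://localhost:5000/todos/todo3 -d 'task=profit harder'
#   curl -X DELETE http://localhost:5000/todos/todo2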
|
CanalTP/flask-restful
|
examples/todo.py
|
Python
|
bsd-3-clause
| 1,446 | 0.003458 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_config import cfg
from senlin.cmd import conductor
from senlin.common import config
from senlin.common import consts
from senlin.common import messaging
from senlin.common import profiler
from senlin.conductor import service
from senlin.tests.unit.common import base
CONF = cfg.CONF
class TestConductor(base.SenlinTestCase):
def setUp(self):
super(TestConductor, self).setUp()
@mock.patch('oslo_log.log.setup')
@mock.patch('oslo_log.log.set_defaults')
@mock.patch('oslo_service.service.launch')
@mock.patch.object(config, 'parse_args')
@mock.patch.object(messaging, 'setup')
@mock.patch.object(profiler, 'setup')
@mock.patch.object(service, 'ConductorService')
def test_main(self, mock_service, mock_profiler_setup,
mock_messaging_setup, mock_parse_args, mock_launch,
mock_log_set_defaults, mock_log_setup):
conductor.main()
mock_parse_args.assert_called_once()
mock_log_setup.assert_called_once()
mock_log_set_defaults.assert_called_once()
mock_messaging_setup.assert_called_once()
mock_profiler_setup.assert_called_once()
mock_service.assert_called_once_with(
mock.ANY, consts.CONDUCTOR_TOPIC
)
mock_launch.assert_called_once_with(
mock.ANY, mock.ANY, workers=1, restart_method='mutate'
)
|
openstack/senlin
|
senlin/tests/unit/cmd/test_conductor.py
|
Python
|
apache-2.0
| 1,992 | 0 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Htop(AutotoolsPackage):
"""htop is an interactive text-mode process viewer for Unix systems."""
homepage = "https://github.com/hishamhm/htop"
url = "https://hisham.hm/htop/releases/2.0.2/htop-2.0.2.tar.gz"
list_url = "https://hisham.hm/htop/releases"
list_depth = 1
version('2.0.2', '7d354d904bad591a931ad57e99fea84a')
depends_on('ncurses')
def configure_args(self):
return ['--enable-shared']
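
# Typical CLI usage once this package file is available to a Spack
# installation (commands shown for illustration):
#
#   spack install htop
#   spack load htop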
|
skosukhin/spack
|
var/spack/repos/builtin/packages/htop/package.py
|
Python
|
lgpl-2.1
| 1,706 | 0.000586 |
import ctypes
import graph_partitioning.partitioners.utils as putils
from graph_partitioning.partitioners.patoh.parameters import PATOHParameters
'''
Usage:
# Load the library
libPath = 'path/to/libpatoh.dylib' # .so for linux
lib = LibPatoh(libraryPath = libPath)
lib.load()
# Check the library actually loaded
if lib.libIsLoaded() == False:
    raise Exception(...)
# Prepare the data for partitioning
G = nx.Graph()
... load data into G ...
fixedNodes = None
... if some of the nodes are already fixed ...
fixedNodes = [-1, -1, 0, -1, -1, 2, -1, ...]
data = PatohData()
data.fromNetworkxGraph(G, num_partitions=4, partvec=fixedNodes)
# Perform partitioning
lib.initializeParameters(data, num_partitions)
if lib.checkUserParameters(data, verbose = True):
if lib.alloc(data) == True:
if lib.part(data) == True:
# do something with partition data...
# free memory
lib.free(data)
'''
class LibPatoh(putils.CLibInterface):
def __init__(self, libraryPath = None):
super().__init__(libraryPath=libraryPath)
def _getDefaultLibPath(self):
return putils.defaultPATOHLibraryPath()
def _loadLibraryFunctions(self):
self.PATOH_Version = self.clib.Patoh_VersionStr
self.PATOH_Version.restype = (ctypes.c_char_p)
self.PATOH_InitializeParameters = self.clib.Patoh_Initialize_Parameters
self.PATOH_InitializeParameters.argtypes = (ctypes.POINTER(PATOHParameters), ctypes.c_int, ctypes.c_int)
self.PATOH_checkUserParameters = self.clib.Patoh_Check_User_Parameters
self.PATOH_checkUserParameters.argtypes = (ctypes.POINTER(PATOHParameters), ctypes.c_int)
self.PATOH_Alloc = self.clib.Patoh_Alloc
self.PATOH_Alloc.argtypes = (ctypes.POINTER(PATOHParameters), ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p)
self.PATOH_Part = self.clib.Patoh_Part
self.PATOH_Part.argtypes = (ctypes.POINTER(PATOHParameters), ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p)
self.PATOH_Free = self.clib.Patoh_Free
self.cfree = self.clib.free
self.cfree.argtypes = (ctypes.c_void_p,)
def version(self):
return self.PATOH_Version().decode('utf-8')
def initializeParameters(self, patohData, num_partitions = 2):
if(isinstance(num_partitions, int) == False):
num_partitions = 2
patohData.params = PATOHParameters()
ok = self.PATOH_InitializeParameters(ctypes.byref(patohData.params), 1, 0)
if(ok == 0):
patohData.params._k = num_partitions
return True
else:
patohData.params = None
return False
def checkUserParameters(self, patohData, verbose = True):
if (isinstance(patohData.params, PATOHParameters) == False):
print('Cannot check parameters as params is not of type PATOHParameters')
return False
# check verbosity mode
v = 0
if verbose == True:
v = 1
# perform parameter check
ok = self.PATOH_checkUserParameters(ctypes.byref(patohData.params), v)
if(ok == 0):
#print('User Parameters Valid')
return True
else:
print('Error in the user parameters. Use verbose mode for greater details.')
return False
def alloc(self, patohData):
#if (isinstance(patohData, patdata.PatohData) == False):
# return False
#PPaToH_Parameters pargs, int _c, int _n, int _nconst, int *cwghts, int *nwghts, int *xpins, int *pins
ok = self.PATOH_Alloc(ctypes.byref(patohData.params), patohData._c, patohData._n, patohData._nconst, patohData._cwghts.ctypes, patohData._nwghts.ctypes, patohData._xpins.ctypes, patohData._pins.ctypes)
if (ok == 0):
return True
return False
def part(self, patohData):
'''
int PaToH_Part(PPaToH_Parameters pargs, int _c, int _n, int _nconst, int useFixCells,
int *cwghts, int *nwghts, int *xpins, int *pins, float *targetweights,
int *partvec, int *partweights, int *cut);
'''
cut_val = ctypes.c_int(patohData.cut)
cut_addr = ctypes.addressof(cut_val)
ok = self.PATOH_Part(ctypes.byref(patohData.params), patohData._c, patohData._n, patohData._nconst, patohData.useFixCells, patohData._cwghts.ctypes, patohData._nwghts.ctypes, patohData._xpins.ctypes, patohData._pins.ctypes, patohData._targetweights.ctypes, patohData._partvec.ctypes, patohData._partweights.ctypes, cut_addr)
if (ok == 0):
            # copy the cut size back out of the ctypes buffer
            patohData.cut = cut_val.value
return True
return False
def free(self, patohData):
#self.cfree(patohData._cwghts.ctypes)
#self.cfree(patohData._nwghts.ctypes)
#self.cfree(patohData._xpins.ctypes)
#self.cfree(patohData._pins.ctypes)
#self.cfree(patohData._partweights.ctypes)
#self.cfree(patohData._partvec.ctypes)
ok = self.PATOH_Free()
if ok == 0:
return True
return False
|
sbarakat/graph-partitioning
|
graph_partitioning/partitioners/patoh/patoh.py
|
Python
|
mit
| 5,305 | 0.005844 |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from prov.model import *
EX_NS = Namespace('ex', 'http://example.org/')
EX_OTHER_NS = Namespace('other', 'http://example.org/')
class TestAttributesBase(object):
"""This is the base class for testing support for various datatypes.
It is not runnable and needs to be included in a subclass of RoundTripTestCase.
"""
attribute_values = [
"un lieu",
Literal("un lieu", langtag='fr'),
Literal("a place", langtag='en'),
Literal(1, XSD_INT),
Literal(1, XSD_LONG),
Literal(1, XSD_SHORT),
Literal(2.0, XSD_DOUBLE),
Literal(1.0, XSD_FLOAT),
Literal(10, XSD_DECIMAL),
True,
False,
Literal(10, XSD_BYTE),
Literal(10, XSD_UNSIGNEDINT),
Literal(10, XSD_UNSIGNEDLONG),
Literal(10, XSD_INTEGER),
Literal(10, XSD_UNSIGNEDSHORT),
Literal(10, XSD_NONNEGATIVEINTEGER),
Literal(-10, XSD_NONPOSITIVEINTEGER),
Literal(10, XSD_POSITIVEINTEGER),
Literal(10, XSD_UNSIGNEDBYTE),
Identifier('http://example.org'),
Literal('http://example.org', XSD_ANYURI),
EX_NS['abc'],
EX_OTHER_NS['abcd'],
Namespace('ex', 'http://example4.org/')['zabc'],
Namespace('other', 'http://example4.org/')['zabcd'],
datetime.datetime.now(),
Literal(datetime.datetime.now().isoformat(), XSD_DATETIME)
]
def new_document(self):
return ProvDocument()
def run_entity_with_one_type_attribute(self, n):
document = self.new_document()
document.entity(EX_NS['et%d' % n], {'prov:type': self.attribute_values[n]})
self.assertRoundTripEquivalence(document)
def test_entity_with_one_type_attribute_0(self):
self.run_entity_with_one_type_attribute(0)
def test_entity_with_one_type_attribute_1(self):
self.run_entity_with_one_type_attribute(1)
def test_entity_with_one_type_attribute_2(self):
self.run_entity_with_one_type_attribute(2)
def test_entity_with_one_type_attribute_3(self):
self.run_entity_with_one_type_attribute(3)
def test_entity_with_one_type_attribute_4(self):
self.run_entity_with_one_type_attribute(4)
def test_entity_with_one_type_attribute_5(self):
self.run_entity_with_one_type_attribute(5)
def test_entity_with_one_type_attribute_6(self):
self.run_entity_with_one_type_attribute(6)
def test_entity_with_one_type_attribute_7(self):
self.run_entity_with_one_type_attribute(7)
def test_entity_with_one_type_attribute_8(self):
self.run_entity_with_one_type_attribute(8)
def test_entity_with_one_type_attribute_9(self):
self.run_entity_with_one_type_attribute(9)
def test_entity_with_one_type_attribute_10(self):
self.run_entity_with_one_type_attribute(10)
def test_entity_with_one_type_attribute_11(self):
self.run_entity_with_one_type_attribute(11)
def test_entity_with_one_type_attribute_12(self):
self.run_entity_with_one_type_attribute(12)
def test_entity_with_one_type_attribute_13(self):
self.run_entity_with_one_type_attribute(13)
def test_entity_with_one_type_attribute_14(self):
self.run_entity_with_one_type_attribute(14)
def test_entity_with_one_type_attribute_15(self):
self.run_entity_with_one_type_attribute(15)
def test_entity_with_one_type_attribute_16(self):
self.run_entity_with_one_type_attribute(16)
def test_entity_with_one_type_attribute_17(self):
self.run_entity_with_one_type_attribute(17)
def test_entity_with_one_type_attribute_18(self):
self.run_entity_with_one_type_attribute(18)
def test_entity_with_one_type_attribute_19(self):
self.run_entity_with_one_type_attribute(19)
def test_entity_with_one_type_attribute_20(self):
self.run_entity_with_one_type_attribute(20)
def test_entity_with_one_type_attribute_21(self):
self.run_entity_with_one_type_attribute(21)
def test_entity_with_one_type_attribute_22(self):
self.run_entity_with_one_type_attribute(22)
def test_entity_with_one_type_attribute_23(self):
self.run_entity_with_one_type_attribute(23)
def test_entity_with_one_type_attribute_24(self):
self.run_entity_with_one_type_attribute(24)
def test_entity_with_one_type_attribute_25(self):
self.run_entity_with_one_type_attribute(25)
def test_entity_with_one_type_attribute_26(self):
self.run_entity_with_one_type_attribute(26)
def test_entity_with_one_type_attribute_27(self):
self.run_entity_with_one_type_attribute(27)
def test_entity_with_multiple_attribute(self):
document = self.new_document()
attributes = [
            (EX_NS['v_%d' % i], value) for i, value in enumerate(self.attribute_values)
]
document.entity(EX_NS['emov'], attributes)
self.assertRoundTripEquivalence(document)
def test_entity_with_multiple_value_attribute(self):
document = self.new_document()
attributes = [
('prov:value', value) for i, value in enumerate(self.attribute_values)
]
document.entity(EX_NS['emv'], attributes)
self.assertRoundTripEquivalence(document)
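
# Minimal illustration of the pattern these tests exercise, using the
# namespaces defined at module level (entity name is hypothetical):
#
#   doc = ProvDocument()
#   doc.entity(EX_NS['weather'], {'prov:type': Literal(2.0, XSD_DOUBLE)})
#
# The round-trip harness then serialises `doc` and asserts the deserialised
# document compares equal.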
|
krischer/prov
|
prov/tests/attributes.py
|
Python
|
mit
| 5,400 | 0.000926 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import json
from sverchok.utils.logging import error
def pack_monad(node, node_items, groups_dict, create_dict_of_tree):
"""
we can not rely on .items() to be present for various reasons, so we must gather
something to fill .params with - due to dynamic nature of node.
"""
name = node.monad.name
node_items['all_props'] = node.monad.get_all_props()
node_items['monad'] = name
node_items['cls_dict'] = {}
node_items['cls_dict']['cls_bl_idname'] = node.bl_idname
for template in ['input_template', 'output_template']:
node_items['cls_dict'][template] = getattr(node, template)
if name not in groups_dict:
group_ng = bpy.data.node_groups[name]
group_dict = create_dict_of_tree(group_ng)
group_dict['bl_idname'] = group_ng.bl_idname
group_dict['cls_bl_idname'] = node.bl_idname
group_json = json.dumps(group_dict)
groups_dict[name] = group_json
# [['Y', 'StringsSocket', {'prop_name': 'y'}], [....
for idx, (socket_name, socket_type, prop_dict) in enumerate(node.input_template):
socket = node.inputs[idx]
if not socket.is_linked and prop_dict:
prop_name = prop_dict['prop_name']
v = getattr(node, prop_name)
if not isinstance(v, (float, int, str)):
v = v[:]
node_items[prop_name] = v
def unpack_monad(nodes, node_ref):
params = node_ref.get('params')
if params:
socket_prop_data = params.get('all_props')
monad_name = params.get('monad')
monad = bpy.data.node_groups[monad_name]
if socket_prop_data:
# including this to keep bw comp for trees that don't include this info.
monad.set_all_props(socket_prop_data)
cls_ref = monad.update_cls()
node = nodes.new(cls_ref.bl_idname)
# -- addition 1 --------- setting correct properties on sockets.
cls_dict = params.get('cls_dict')
input_template = cls_dict['input_template']
for idx, (sock_name, sock_type, sock_props) in enumerate(input_template):
socket_reference = node.inputs[idx]
if sock_props:
for prop, val in sock_props.items():
setattr(socket_reference, prop, val)
# -- addition 2 --------- force push param values
# -- (this step is skipped by apply_core_props because this node has a cls_dict)
for prop_data in ('float_props', 'int_props'):
data_list = socket_prop_data.get(prop_data)
if not data_list:
continue
for k, v in data_list.items():
if hasattr(node, k):
if k in params:
setattr(node, k, params[k])
# else:
# print(k, 'not in', params)
#else:
# print('node name:', node, node.name, 'has no property called', k, 'yet..')
# node.output_template = cls_dict['output_template']
return node
else:
error('no parameters found! .json might be broken')
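
# Sketch of the round trip these helpers support (variable names are
# hypothetical):
#
#   node_items, groups_dict = {}, {}
#   pack_monad(node, node_items, groups_dict, create_dict_of_tree)  # export
#   restored = unpack_monad(tree.nodes, {'params': node_items})     # import
#
# pack_monad fills node_items/groups_dict for the JSON exporter, and
# unpack_monad rebuilds the monad node from that 'params' payload.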
|
portnov/sverchok
|
utils/sv_IO_monad_helpers.py
|
Python
|
gpl-3.0
| 3,948 | 0.002786 |
import codecs
import json
import os
import shutil
import socket
import subprocess
import tempfile
from contextlib import contextmanager
from cStringIO import StringIO
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
import mock
from nose.tools import eq_, ok_
from PIL import Image
from requests import RequestException
import mkt
import mkt.site.tests
from mkt.users.models import UserProfile
from mkt.developers import tasks
from mkt.files.models import FileUpload
from mkt.site.fixtures import fixture
from mkt.site.tests.test_utils_ import get_image_path
from mkt.site.utils import app_factory, ImageCheck
from mkt.submit.tests.test_views import BaseWebAppTest
from mkt.webapps.models import AddonExcludedRegion as AER
from mkt.webapps.models import Preview, Webapp
def test_resize_icon_shrink():
""" Image should be shrunk so that the longest side is 32px. """
resize_size = [32]
final_size = [(32, 12)]
_uploader(resize_size, final_size)
def test_resize_icon_enlarge():
""" Image stays the same, since the new size is bigger than both sides. """
resize_size = [1000]
final_size = [(339, 128)]
_uploader(resize_size, final_size)
def test_resize_icon_same():
""" Image stays the same, since the new size is the same. """
resize_size = [339]
final_size = [(339, 128)]
_uploader(resize_size, final_size)
def test_resize_icon_list():
""" Resize multiple images at once. """
resize_size = [32, 82, 100]
final_size = [(32, 12), (82, 30), (100, 37)]
_uploader(resize_size, final_size)
def _uploader(resize_size, final_size):
img = get_image_path('mozilla.png')
original_size = (339, 128)
for rsize, fsize in zip(resize_size, final_size):
dest_name = os.path.join(settings.ADDON_ICONS_PATH, '1234')
        src = tempfile.NamedTemporaryFile(mode='w+b', suffix='.png',
                                          delete=False)
# resize_icon removes the original, copy it to a tempfile and use that.
shutil.copyfile(img, src.name)
# Sanity check.
with storage.open(src.name) as fp:
src_image = Image.open(fp)
src_image.load()
eq_(src_image.size, original_size)
val = tasks.resize_icon(src.name, dest_name, resize_size, locally=True)
eq_(val, {'icon_hash': 'bb362450'})
with storage.open('%s-%s.png' % (dest_name, rsize)) as fp:
dest_image = Image.open(fp)
dest_image.load()
# Assert that the width is always identical.
eq_(dest_image.size[0], fsize[0])
# Assert that the height can be a wee bit fuzzy.
        assert -1 <= dest_image.size[1] - fsize[1] <= 1, (
            'Got height %d, expected %d' % (
                dest_image.size[1], fsize[1]))
if os.path.exists(dest_image.filename):
os.remove(dest_image.filename)
assert not os.path.exists(dest_image.filename)
assert not os.path.exists(src.name)
class TestPngcrushImage(mkt.site.tests.TestCase):
def setUp(self):
img = get_image_path('mozilla.png')
self.src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
delete=False)
shutil.copyfile(img, self.src.name)
patcher = mock.patch('subprocess.Popen')
self.mock_popen = patcher.start()
attrs = {
'returncode': 0,
            'communicate.return_value': ('output', 'error')
}
self.mock_popen.return_value.configure_mock(**attrs)
self.addCleanup(patcher.stop)
def tearDown(self):
os.remove(self.src.name)
@mock.patch('shutil.move')
def test_pngcrush_image_is_called(self, mock_move):
name = self.src.name
expected_suffix = '.opti.png'
expected_cmd = ['pngcrush', '-q', '-rem', 'alla', '-brute', '-reduce',
'-e', expected_suffix, name]
rval = tasks.pngcrush_image(name)
self.mock_popen.assert_called_once_with(
expected_cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE,
stdout=subprocess.PIPE)
mock_move.assert_called_once_with(
'%s%s' % (os.path.splitext(name)[0], expected_suffix), name)
eq_(rval, {'image_hash': 'bb362450'})
@mock.patch('mkt.webapps.models.Webapp.update')
@mock.patch('shutil.move')
def test_set_modified(self, mock_move, update_mock):
"""Test passed instance is updated with the hash."""
name = self.src.name
obj = app_factory()
ret = tasks.pngcrush_image(name, 'some_hash', set_modified_on=[obj])
ok_('some_hash' in ret)
eq_(update_mock.call_args_list[-1][1]['some_hash'], ret['some_hash'])
ok_('modified' in update_mock.call_args_list[-1][1])
class TestValidator(mkt.site.tests.TestCase):
def setUp(self):
self.upload = FileUpload.objects.create()
assert not self.upload.valid
def get_upload(self):
return FileUpload.objects.get(pk=self.upload.pk)
@mock.patch('mkt.developers.tasks.run_validator')
def test_pass_validation(self, _mock):
_mock.return_value = '{"errors": 0}'
tasks.validator(self.upload.pk)
assert self.get_upload().valid
@mock.patch('mkt.developers.tasks.run_validator')
def test_fail_validation(self, _mock):
_mock.return_value = '{"errors": 2}'
tasks.validator(self.upload.pk)
assert not self.get_upload().valid
@mock.patch('mkt.developers.tasks.run_validator')
def test_validation_error(self, _mock):
_mock.side_effect = Exception
eq_(self.upload.task_error, None)
tasks.validator(self.upload.pk)
error = self.get_upload().task_error
assert error is not None
assert error.startswith('Traceback (most recent call last)'), error
@mock.patch('mkt.developers.tasks.validate_app')
@mock.patch('mkt.developers.tasks.storage.open')
def test_validate_manifest(self, _open, _mock):
_open.return_value = StringIO('')
_mock.return_value = '{"errors": 0}'
tasks.validator(self.upload.pk)
assert _mock.called
@mock.patch('mkt.developers.tasks.validate_packaged_app')
@mock.patch('zipfile.is_zipfile')
def test_validate_packaged_app(self, _zipfile, _mock):
_zipfile.return_value = True
_mock.return_value = '{"errors": 0}'
tasks.validator(self.upload.pk)
assert _mock.called
storage_open = storage.open
def _mock_hide_64px_icon(path, *args, **kwargs):
"""
A function that mocks `storage.open` and throws an IOError if you try to
open a 128x128px icon.
"""
if '128' in path:
raise IOError('No 128px icon for you!')
return storage_open(path, *args, **kwargs)
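# Typical usage sketch (an assumption; the helper is patched in wherever a
# test needs the large icon to be missing):
#     @mock.patch('mkt.developers.tasks.storage.open', _mock_hide_64px_icon)
#     def test_missing_icon(self):
#         ...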
@override_settings(
PREVIEW_FULL_PATH='/tmp/uploads-tests/previews/full/%s/%d.%s',
PREVIEW_THUMBNAIL_PATH='/tmp/uploads-tests/previews/thumbs/%s/%d.png')
class TestResizePreview(mkt.site.tests.TestCase):
fixtures = fixture('webapp_337141')
def setUp(self):
# Make sure there are no leftover files in the test directory before
# launching tests that depend on the files presence/absence.
shutil.rmtree('/tmp/uploads-tests/previews/', ignore_errors=True)
def get_image(self, filename):
"""Copy image to tmp and return tmp path.
We do this because the task `resize_preview` removes the src file when
finished.
"""
src = get_image_path(filename)
dst = os.path.join(settings.TMP_PATH, 'preview', filename)
shutil.copy(src, dst)
return dst
def test_preview(self):
addon = Webapp.objects.get(pk=337141)
preview = Preview.objects.create(addon=addon)
src = self.get_image('preview.jpg')
tasks.resize_preview(src, preview.pk)
preview = preview.reload()
eq_(preview.image_size, [400, 533])
eq_(preview.thumbnail_size, [100, 133])
eq_(preview.is_landscape, False)
with storage.open(preview.thumbnail_path) as fp:
im = Image.open(fp)
eq_(list(im.size), [100, 133])
with storage.open(preview.image_path) as fp:
im = Image.open(fp)
eq_(list(im.size), [400, 533])
def test_preview_rotated(self):
addon = Webapp.objects.get(pk=337141)
preview = Preview.objects.create(addon=addon)
src = self.get_image('preview_landscape.jpg')
tasks.resize_preview(src, preview.pk)
preview = preview.reload()
eq_(preview.image_size, [533, 400])
eq_(preview.thumbnail_size, [133, 100])
eq_(preview.is_landscape, True)
with storage.open(preview.thumbnail_path) as fp:
im = Image.open(fp)
eq_(list(im.size), [133, 100])
with storage.open(preview.image_path) as fp:
im = Image.open(fp)
eq_(list(im.size), [533, 400])
def test_preview_dont_generate_image(self):
addon = Webapp.objects.get(pk=337141)
preview = Preview.objects.create(addon=addon)
src = self.get_image('preview.jpg')
tasks.resize_preview(src, preview.pk, generate_image=False)
preview = preview.reload()
eq_(preview.image_size, [])
eq_(preview.thumbnail_size, [100, 133])
eq_(preview.sizes, {u'thumbnail': [100, 133]})
with storage.open(preview.thumbnail_path) as fp:
im = Image.open(fp)
eq_(list(im.size), [100, 133])
assert not os.path.exists(preview.image_path), preview.image_path
class TestFetchManifest(mkt.site.tests.TestCase):
def setUp(self):
self.upload = FileUpload.objects.create()
self.content_type = 'application/x-web-app-manifest+json'
patcher = mock.patch('mkt.developers.tasks.requests.get')
self.requests_mock = patcher.start()
self.addCleanup(patcher.stop)
def get_upload(self):
return FileUpload.objects.get(pk=self.upload.pk)
def file(self, name):
return os.path.join(os.path.dirname(__file__), 'addons', name)
@contextmanager
def patch_requests(self):
response_mock = mock.Mock(status_code=200)
response_mock.iter_content.return_value = mock.Mock(
next=lambda: '<default>')
response_mock.headers = {'content-type': self.content_type}
        self.requests_mock.return_value = response_mock
        yield response_mock
@mock.patch('mkt.developers.tasks.validator')
def test_success_add_file(self, validator_mock):
with self.patch_requests() as ur:
ur.iter_content.return_value = mock.Mock(next=lambda: 'woo')
tasks.fetch_manifest('http://xx.com/manifest.json', self.upload.pk)
upload = FileUpload.objects.get(pk=self.upload.pk)
eq_(upload.name, 'http://xx.com/manifest.json')
eq_(storage.open(upload.path).read(), 'woo')
@mock.patch('mkt.developers.tasks.validator')
def test_success_call_validator(self, validator_mock):
with self.patch_requests() as ur:
ct = self.content_type + '; charset=utf-8'
ur.headers = {'content-type': ct}
tasks.fetch_manifest('http://xx.com/manifest.json', self.upload.pk)
assert validator_mock.called
assert self.requests_mock.called
eq_(self.requests_mock.call_args[1]['headers'], tasks.REQUESTS_HEADERS)
def check_validation(self, msg=''):
upload = self.get_upload()
if msg:
validation = json.loads(upload.validation)
eq_([m['message'] for m in validation['messages']], [msg])
eq_(validation['errors'], 1)
eq_(validation['success'], False)
eq_(len(validation['messages']), 1)
else:
validation_output = upload.validation
if not validation_output:
return
validation = json.loads(validation_output)
assert not validation['messages']
eq_(validation['errors'], 0)
eq_(validation['success'], True)
def test_connection_error(self):
reason = socket.gaierror(8, 'nodename nor servname provided')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
def test_url_timeout(self):
reason = socket.timeout('too slow')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
def test_other_url_error(self):
reason = Exception('Some other failure.')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_no_content_type(self):
with self.patch_requests() as ur:
ur.headers = {}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_bad_content_type(self):
with self.patch_requests() as ur:
ur.headers = {'Content-Type': 'x'}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'Manifests must be served with the HTTP header "Content-Type: '
'application/x-web-app-manifest+json". See %s for more '
'information.' % tasks.CT_URL)
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_good_charset(self):
with self.patch_requests() as ur:
ur.headers = {
'content-type': 'application/x-web-app-manifest+json;'
'charset=utf-8'
}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation()
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_bad_charset(self):
with self.patch_requests() as ur:
ur.headers = {
'content-type': 'application/x-web-app-manifest+json;'
'charset=ISO-1234567890-LOL'
}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation("The manifest's encoding does not match the "
'charset provided in the HTTP Content-Type.')
def test_response_too_large(self):
with self.patch_requests() as ur:
content = 'x' * (settings.MAX_WEBAPP_UPLOAD_SIZE + 1)
ur.iter_content.return_value = mock.Mock(next=lambda: content)
tasks.fetch_manifest('url', self.upload.pk)
max_webapp_size = settings.MAX_WEBAPP_UPLOAD_SIZE
self.check_validation('Your manifest must be less than %s bytes.' %
max_webapp_size)
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_http_error(self):
with self.patch_requests() as ur:
ur.status_code = 404
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
def test_strip_utf8_bom(self):
with self.patch_requests() as ur:
with open(self.file('utf8bom.webapp')) as fp:
content = fp.read()
ur.iter_content.return_value = mock.Mock(next=lambda: content)
tasks.fetch_manifest('url', self.upload.pk)
# Should not be called with anything else (e.g., `decode_unicode`).
ur.iter_content.assert_called_with(
chunk_size=settings.MAX_WEBAPP_UPLOAD_SIZE + 1)
upload = self.get_upload()
with storage.open(upload.path, 'rb') as fp:
manifest = fp.read()
json.loads(manifest) # No parse error.
assert not manifest.startswith(codecs.BOM_UTF8)
def test_non_utf8_encoding(self):
with self.patch_requests() as ur:
with open(self.file('utf8bom.webapp')) as fp:
# Set encoding to utf16 which will be invalid.
content = fp.read().decode('utf8').encode('utf16')
ur.iter_content.return_value = mock.Mock(next=lambda: content)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'Your manifest file was not encoded as valid UTF-8.')
class TestFetchIcon(BaseWebAppTest):
def setUp(self):
super(TestFetchIcon, self).setUp()
self.content_type = 'image/png'
self.apps_path = os.path.join(settings.ROOT, 'mkt', 'developers',
'tests', 'addons')
patcher = mock.patch('mkt.developers.tasks.requests.get')
self.requests_mock = patcher.start()
self.requests_mock.return_value = StringIO('mozballin')
self.addCleanup(patcher.stop)
def webapp_from_path(self, path):
self.upload = self.get_upload(abspath=path,
user=UserProfile.objects.get(pk=999))
self.url = reverse('submit.app')
self.login('regular@mozilla.com')
return self.post_addon()
def test_no_version(self):
app = app_factory()
eq_(tasks.fetch_icon(app.pk), None)
def test_no_icons(self):
path = os.path.join(self.apps_path, 'noicon.webapp')
iconless_app = self.webapp_from_path(path)
tasks.fetch_icon(iconless_app.pk,
iconless_app.latest_version.all_files[0].pk)
assert not self.requests_mock.called
def test_bad_icons(self):
path = os.path.join(self.apps_path, 'badicon.webapp')
iconless_app = self.webapp_from_path(path)
tasks.fetch_icon(iconless_app.pk,
iconless_app.latest_version.all_files[0].pk)
assert not self.requests_mock.called
def check_icons(self, webapp, file_obj=None):
manifest = webapp.get_manifest_json(file_obj)
biggest = max([int(size) for size in manifest['icons']])
icon_dir = webapp.get_icon_dir()
for size in mkt.CONTENT_ICON_SIZES:
if not size <= biggest:
continue
icon_path = os.path.join(icon_dir, '%s-%s.png'
% (str(webapp.id), size))
with open(icon_path, 'r') as img:
checker = ImageCheck(img)
assert checker.is_image()
eq_(checker.img.size, (size, size))
def test_data_uri(self):
app_path = os.path.join(self.apps_path, 'dataicon.webapp')
webapp = self.webapp_from_path(app_path)
file_obj = webapp.latest_version.all_files[0]
tasks.fetch_icon(webapp.pk, file_obj.pk)
eq_(webapp.icon_type, self.content_type)
self.check_icons(webapp, file_obj)
def test_hosted_icon(self):
app_path = os.path.join(self.apps_path, 'mozball.webapp')
webapp = self.webapp_from_path(app_path)
file_obj = webapp.latest_version.all_files[0]
img_path = os.path.join(self.apps_path, 'mozball-128.png')
with open(img_path, 'r') as content:
tasks.save_icon(webapp, content.read())
eq_(webapp.icon_type, self.content_type)
self.check_icons(webapp, file_obj)
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
@mock.patch('mkt.developers.tasks._fetch_content')
@mock.patch('mkt.developers.tasks.save_icon')
def test_cdn_icon(self, save, fetch, json):
response = mock.Mock()
response.read.return_value = ''
webapp = app_factory()
url = 'http://foo.com/bar'
json.return_value = {'icons': {'128': url}}
tasks.fetch_icon(webapp.pk, webapp.latest_version.all_files[0].pk)
assert url in fetch.call_args[0][0]
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
@mock.patch('mkt.developers.tasks.SafeUnzip')
@mock.patch('mkt.developers.tasks.save_icon')
def test_packaged_icon(self, save, zip, json):
response = mock.Mock()
response.read.return_value = ''
zf = mock.Mock()
zip.return_value = zf
webapp = app_factory(is_packaged=True)
file_obj = webapp.latest_version.all_files[0]
url = '/path/to/icon.png'
json.return_value = {'icons': {'128': url}}
tasks.fetch_icon(webapp.pk, file_obj.pk)
assert url[1:] in zf.extract_path.call_args[0][0]
class TestRegionEmail(mkt.site.tests.WebappTestCase):
@mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
def test_email_for_one_new_region(self):
tasks.region_email([self.app.id], [mkt.regions.BRA.id])
msg = mail.outbox[0]
eq_(msg.subject, '%s: Brazil region added to the Firefox Marketplace'
% self.app.name)
eq_(msg.to, ['steamcube@mozilla.com'])
dev_url = ('http://omg.org/developers/app/something-something/'
'edit#details')
assert unicode(self.app.name) in msg.body
assert dev_url in msg.body
assert ' added a new ' in msg.body
assert ' for Brazil.' in msg.body
# TODO: Re-enable this when we bring back Unsubscribe (bug 802379).
# assert 'Unsubscribe' in msg.body
@mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
def test_email_for_two_new_regions(self):
tasks.region_email([self.app.id],
[mkt.regions.GBR.id, mkt.regions.BRA.id])
msg = mail.outbox[0]
eq_(msg.subject, '%s: New regions added to the Firefox Marketplace'
% self.app.name)
eq_(msg.to, ['steamcube@mozilla.com'])
dev_url = ('http://omg.org/developers/app/something-something/'
'edit#details')
assert unicode(self.app.name) in msg.body
assert dev_url in msg.body
assert ' added two new ' in msg.body
assert ': Brazil and United Kingdom.' in msg.body
# TODO: Re-enable this when we bring back Unsubscribe (bug 802379).
# assert 'Unsubscribe' in msg.body
@mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
def test_email_for_several_new_regions(self):
tasks.region_email([self.app.id],
[mkt.regions.GBR.id, mkt.regions.USA.id,
mkt.regions.BRA.id])
msg = mail.outbox[0]
eq_(msg.subject,
'%s: New regions added to the Firefox Marketplace' % self.app.name)
assert ' added a few new ' in msg.body
assert ': Brazil, United Kingdom, and United States.' in msg.body
class TestRegionExclude(mkt.site.tests.WebappTestCase):
def test_exclude_no_apps(self):
tasks.region_exclude([], [])
eq_(AER.objects.count(), 0)
tasks.region_exclude([], [mkt.regions.GBR.id])
eq_(AER.objects.count(), 0)
def test_exclude_no_regions(self):
tasks.region_exclude([self.app.id], [])
eq_(AER.objects.count(), 0)
def test_exclude_one_new_region(self):
tasks.region_exclude([self.app.id], [mkt.regions.GBR.id])
excluded = list(AER.objects.filter(addon=self.app)
.values_list('region', flat=True))
eq_(excluded, [mkt.regions.GBR.id])
def test_exclude_several_new_regions(self):
tasks.region_exclude([self.app.id], [mkt.regions.USA.id,
mkt.regions.GBR.id])
excluded = sorted(AER.objects.filter(addon=self.app)
.values_list('region', flat=True))
eq_(excluded, sorted([mkt.regions.USA.id, mkt.regions.GBR.id]))
|
kumar303/zamboni
|
mkt/developers/tests/test_tasks.py
|
Python
|
bsd-3-clause
| 24,331 | 0 |
import sys
import solution
# from classes import ?
class TestSuite:
def run(self):
self.test000()
self.test001()
self.test002()
# self.test003()
# self.test004()
def test000(self):
print 'test 000\n'
n = 13
r = solution.answer(n)
print ' input:\t', n
print ' expect:\t', 4
print ' output:\t', r
print
def test001(self):
        print 'test 001\n'
n = 1235
r = solution.answer(n)
print ' input:\t', n
print ' expect:\t', 2
print ' output:\t', r
print
def test002(self):
print 'test 002\n'
n = 6471289
r = solution.answer(n)
print ' input:\t', n
print ' expect:\t', 1
print ' output:\t', r
print
def main(argv):
TestSuite().run()
if __name__ == '__main__':
main(sys.argv)
|
54lihaoxin/GoogleFooBar
|
src/guard_game/test_suite.py
|
Python
|
apache-2.0
| 938 | 0.007463 |
from fabric.api import *
from fabric.utils import *
from fabric.contrib import *
class Apt(object):
def __init__(self):
return
def update(self):
cmd = 'sudo apt update'
run(cmd)
print(cmd)
def purge(self, package):
cmd = 'sudo apt purge -y %(package)s' % {'package': package}
# print(cmd)
run(cmd)
def upgrade(self):
cmd = 'sudo apt upgrade -y'
run(cmd)
# print(cmd)
def install(self, package):
        if package is not None:
cmd = 'sudo apt -y install %(package)s' % {'package': package}
run(cmd)
# print(cmd)
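# Example usage from a fabfile (a sketch; fabric host/env configuration is
# assumed to be set up elsewhere, and 'nginx' is just an example package):
#     apt = Apt()
#     apt.update()
#     apt.install('nginx')
#     apt.upgrade()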
|
stregatto/fabric_lib
|
apt.py
|
Python
|
gpl-2.0
| 651 | 0.003072 |
"""
WSGI config for pelawak project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pelawak.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
mclumd/pelawak
|
pelawak/wsgi.py
|
Python
|
mit
| 482 | 0 |
# -*- coding: utf-8 -*-
import io
import os
import pytest
import six
from anymarkup_core import *
from test import *
class TestSerialize(object):
"""Note: testing serialization is a bit tricky, since serializing dicts can result
in different order of values in serialized string in different runs.
That means that we can't just test whether the serialized string equals to expected
string. To solve this, we rather parse the serialized string back and make sure
that it equals the original structure.
"""
fixtures = os.path.join(os.path.dirname(__file__), 'fixtures')
def _read_decode(self, file):
if isinstance(file, six.string_types):
file = open(file, 'rb')
else:
file.seek(0)
return file.read().decode('utf-8')
@pytest.mark.parametrize(('struct', 'format'), [
(example_as_dict, 'ini'),
(example_as_dict, 'json'),
(example_as_dict, 'json5'),
(toml_example_as_dict, 'toml'),
(example_as_ordered_dict, 'xml'),
(example_as_dict, 'yaml'),
(example_as_ordered_dict, 'yaml'),
])
def test_serialize_basic(self, struct, format):
serialized = serialize(struct, format)
parsed_back = parse(serialized, format)
assert parsed_back == struct
assert type(parsed_back) == type(struct)
def test_serialize_works_with_wb_opened_file(self, tmpdir):
f = os.path.join(str(tmpdir), 'foo.xml')
fhandle = open(f, 'wb+')
serialize(example_as_ordered_dict, 'xml', fhandle)
assert self._read_decode(fhandle) == example_xml
def test_serialize_raises_with_unicode_opened_file(self, tmpdir):
# on Python 2, this can only be simulated with io.open
f = os.path.join(str(tmpdir), 'foo.json')
fhandle = io.open(f, 'w+', encoding='utf-8')
with pytest.raises(AnyMarkupError):
serialize(example_as_dict, 'json', fhandle)
@pytest.mark.parametrize(('struct', 'fmt', 'fname'), [
(example_as_dict, None, 'example.ini'),
(example_as_dict, None, 'example.json'),
(example_as_dict, 'json5', 'example.json5'),
(toml_example_as_dict, 'toml', 'example.toml'),
(example_as_ordered_dict, None, 'example.xml'),
(example_as_dict, None, 'example.yaml'),
(example_as_ordered_dict, None, 'example_ordered.yaml'),
])
def test_serialize_file_basic(self, struct, fmt, fname, tmpdir):
f = os.path.join(str(tmpdir), fname)
serialize_file(struct, f)
parsed_back = parse(self._read_decode(f), fmt)
assert parsed_back == struct
assert type(parsed_back) == type(struct)
def test_serialize_file_format_overrides_extension(self, tmpdir):
f = os.path.join(str(tmpdir), 'foo.ini')
serialize_file(example_as_dict, f, 'json')
assert parse(self._read_decode(f)) == example_as_dict
def test_parse_and_serialize_yaml_multiline_string(self):
# https://github.com/bkabrda/anymarkup-core/issues/1
inp = b'foo: |-\n line1\n line2\n line3\n'
assert serialize(parse(inp), 'yaml') == inp
|
bkabrda/anymarkup-core
|
test/test_serialize.py
|
Python
|
bsd-3-clause
| 3,156 | 0.000951 |
"""Handle automations."""
# Copyright 2013-2017 The Home Assistant Authors
# https://github.com/home-assistant/home-assistant/blob/master/LICENSE.md
# This file was modified by The Camacq Authors.
import logging
from collections import deque
from functools import partial
import voluptuous as vol
from camacq.exceptions import TemplateError
from camacq.helper import BASE_ACTION_SCHEMA, get_module, has_at_least_one_key
from camacq.helper.template import make_template, render_template
from camacq.const import CAMACQ_STOP_EVENT, CONF_DATA, CONF_ID
_LOGGER = logging.getLogger(__name__)
CONF_AUTOMATIONS = "automations"
CONF_ACTION = "action"
CONF_CONDITION = "condition"
CONF_CONDITIONS = "conditions"
CONF_NAME = "name"
CONF_TRIGGER = "trigger"
CONF_TYPE = "type"
ENABLED = "enabled"
NAME = "name"
ACTION_DELAY = "delay"
ACTION_TOGGLE = "toggle"
DATA_AUTOMATIONS = "automations"
TRIGGER_ACTION_SCHEMA = vol.Schema(
[
{
vol.Required(CONF_TYPE): vol.Coerce(str),
vol.Required(CONF_ID): vol.Coerce(str),
vol.Optional(CONF_DATA, default={}): dict,
}
],
)
CONDITION_SCHEMA = vol.All(
has_at_least_one_key(CONF_TYPE, CONF_CONDITION),
{
# pylint: disable=no-value-for-parameter
vol.Inclusive(CONF_TYPE, "condition"): vol.All(
vol.Upper, vol.In(["AND", "OR"])
),
vol.Inclusive(CONF_CONDITIONS, "condition"): [
# pylint: disable=unnecessary-lambda
lambda value: CONDITION_SCHEMA(value)
],
vol.Exclusive(CONF_CONDITION, "condition"): vol.Coerce(str),
},
)
CONFIG_SCHEMA = vol.Schema(
[
{
vol.Required(CONF_NAME): vol.Coerce(str),
vol.Required(CONF_TRIGGER): TRIGGER_ACTION_SCHEMA,
vol.Required(CONF_ACTION): TRIGGER_ACTION_SCHEMA,
vol.Optional(
CONF_CONDITION, default={CONF_CONDITION: "true"}
): CONDITION_SCHEMA,
}
]
)
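# A config that satisfies CONFIG_SCHEMA might look like this in YAML form
# (a sketch; the trigger/action types, ids, and data depend entirely on the
# plugins that are installed):
#     automations:
#       - name: set_sample_on_start
#         trigger:
#           - type: event
#             id: camacq_start_event
#         action:
#           - type: command
#             id: start_imaging
#             data: {}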
async def setup_module(center, config):
"""Set up automations package.
Parameters
----------
center : Center instance
The Center instance.
config : dict
The config dict.
"""
_process_automations(center, config)
automations = center.data[DATA_AUTOMATIONS]
async def handle_action(**kwargs):
"""Enable or disable an automation."""
name = kwargs[NAME]
automation = automations[name]
enabled = kwargs.get(ENABLED, not automation.enabled)
if enabled:
automation.enable()
else:
automation.disable()
toggle_action_schema = BASE_ACTION_SCHEMA.extend(
{
vol.Required(NAME): vol.All(vol.Coerce(str), vol.In(automations)),
ENABLED: vol.Boolean(), # pylint: disable=no-value-for-parameter
}
)
# register action to enable/disable automation
center.actions.register(
"automations", ACTION_TOGGLE, handle_action, toggle_action_schema
)
def _process_automations(center, config):
"""Process automations from config."""
automations = center.data.setdefault(DATA_AUTOMATIONS, {})
conf = config[CONF_AUTOMATIONS]
for block in conf:
name = block[CONF_NAME]
_LOGGER.debug("Setting up automation %s", name)
action_sequence = _get_actions(center, block[CONF_ACTION])
cond_func = _process_condition(center, block[CONF_CONDITION])
# use partial to get a function with args to call later
attach_triggers = partial(_process_trigger, center, block[CONF_TRIGGER])
automations[name] = Automation(
center, name, attach_triggers, cond_func, action_sequence
)
def _get_actions(center, config_block):
"""Return actions."""
actions = (TemplateAction(center, action_conf) for action_conf in config_block)
return ActionSequence(center, actions)
def _process_condition(center, config_block):
"""Return a function that parses the condition."""
if CONF_TYPE in config_block:
checks = []
condition_type = config_block[CONF_TYPE]
conditions = config_block[CONF_CONDITIONS]
for cond in conditions:
check = _process_condition(center, cond)
checks.append(check)
return make_checker(condition_type, checks)
data = config_block[CONF_CONDITION]
template = make_template(center, data)
return partial(render_template, template)
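# A nested condition config handled above might look like this (a sketch; the
# template expression syntax is an assumption based on the helper.template
# module used for rendering):
#     condition:
#       type: AND
#       conditions:
#         - condition: "{{ should_run }}"
#         - condition: "true"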
def make_checker(condition_type, checks):
"""Return a function to check condition."""
def check_condition(variables):
"""Return True if all or any condition(s) pass."""
if condition_type.lower() == "and":
return all(template_check(check(variables)) for check in checks)
if condition_type.lower() == "or":
return any(template_check(check(variables)) for check in checks)
return False
return check_condition
def template_check(value):
"""Check if a rendered template string equals true.
If value is not a string, return value as is.
"""
if isinstance(value, str):
return value.lower() == "true"
return value
def _process_trigger(center, config_block, trigger):
"""Process triggers for an automation."""
remove_funcs = []
for conf in config_block:
trigger_id = conf[CONF_ID]
trigger_type = conf[CONF_TYPE]
trigger_mod = get_module(__name__, trigger_type)
if not trigger_mod:
continue
_LOGGER.debug("Setting up trigger %s", trigger_id)
remove = trigger_mod.handle_trigger(center, conf, trigger)
if not remove:
_LOGGER.error("Setting up trigger %s failed", trigger_id)
continue
remove_funcs.append(remove)
if not remove_funcs:
return None
def remove_triggers():
"""Remove attached triggers."""
for remove in remove_funcs:
remove()
return remove_triggers
class Automation:
"""Automation class."""
# pylint: disable=too-many-arguments
def __init__(
self, center, name, attach_triggers, cond_func, action_sequence, enabled=True
):
"""Set up instance."""
self._center = center
self.name = name
self.enabled = False
self._action_sequence = action_sequence
self._attach_triggers = attach_triggers
self._detach_triggers = None
self._cond_func = cond_func
if enabled:
self.enable()
def __repr__(self):
"""Return the representation."""
return (
f"Automation(center={self._center}, name={self.name}, "
f"attach_triggers={self._attach_triggers}, cond_func={self._cond_func}, "
f"action_sequence={self._action_sequence}, enabled={self.enabled})"
)
def enable(self):
"""Enable automation."""
if self.enabled:
return
self._detach_triggers = self._attach_triggers(self.trigger)
self.enabled = True
def disable(self):
"""Disable automation."""
if not self.enabled:
return
if self._detach_triggers is not None:
self._detach_triggers()
self._detach_triggers = None
self.enabled = False
async def trigger(self, variables):
"""Run actions of this automation."""
variables["samples"] = self._center.samples
_LOGGER.debug("Triggered automation %s", self.name)
try:
cond = self._cond_func(variables)
except TemplateError as exc:
_LOGGER.error("Failed to render condition for %s: %s", self.name, exc)
return
if cond:
_LOGGER.debug("Condition passed for %s", self.name)
await self._action_sequence(variables)
class ActionSequence:
"""Represent a sequence of actions."""
# pylint: disable=too-few-public-methods
def __init__(self, center, actions):
"""Set up instance."""
self._center = center
self.actions = list(actions) # copy to list to make sure it's a list
async def __call__(self, variables):
"""Start action sequence."""
waiting = deque(self.actions)
while waiting:
action = waiting.popleft()
if action.action_type == "automations" and action.action_id == ACTION_DELAY:
rendered_kwargs = action.render(variables)
seconds = rendered_kwargs.get("seconds")
self.delay(float(seconds), variables, waiting)
else:
_LOGGER.debug(
"Calling action %s.%s", action.action_type, action.action_id
)
await action(variables)
def delay(self, seconds, variables, waiting):
"""Delay action sequence.
Parameters
----------
seconds : float
A time interval to delay the pending action sequence.
variables : dict
A dict of template variables.
"""
sequence = ActionSequence(self._center, waiting)
callback = partial(self._center.create_task, sequence(variables))
waiting.clear()
_LOGGER.info("Action delay for %s seconds", seconds)
        timer = self._center.loop.call_later(seconds, callback)
        async def cancel_pending_actions(center, event):
            """Cancel pending actions."""
            timer.cancel()
self._center.bus.register(CAMACQ_STOP_EVENT, cancel_pending_actions)
class TemplateAction:
"""Representation of an action with template data."""
# pylint: disable=too-few-public-methods
def __init__(self, center, action_conf):
"""Set up instance."""
self._center = center
self.action_id = action_conf[CONF_ID]
self.action_type = action_conf[CONF_TYPE]
action_data = action_conf[CONF_DATA]
self.template = make_template(center, action_data)
async def __call__(self, variables=None):
"""Execute action with optional template variables."""
try:
rendered = self.render(variables)
except TemplateError:
return
await self._center.actions.call(self.action_type, self.action_id, **rendered)
def render(self, variables):
"""Render the template with the kwargs for the action."""
variables = variables or {}
try:
rendered = render_template(self.template, variables)
except TemplateError as exc:
_LOGGER.error(
"Failed to render variables for %s.%s: %s",
self.action_type,
self.action_id,
exc,
)
raise
return rendered
|
CellProfiling/cam_acq
|
camacq/plugins/automations/__init__.py
|
Python
|
apache-2.0
| 10,741 | 0.000745 |
# -*- coding: utf-8 -*-
# @Date : Jul 13, 2016
# @Author : Ram Prakash, Sharath Puranik
# @Version : 1
import CART
from QuillLanguage import QuillLanguage
import pickle
class QuillTrainer(object):
def __init__(self,quillLang):
if isinstance(quillLang,QuillLanguage):
self.language = quillLang
else:
raise Exception,'Invalid parameter. Not of type QuillLanguage'
def train(self,uWords,scope=4,splRulesFlag=True):
self.language.setKnowledge(self.__buildKeyToCARTMap(uWords,scope,splRulesFlag,"primary"),"primary")
self.language.setKnowledge(self.__buildKeyToCARTMap(uWords,scope,splRulesFlag,"predictive"),"predictive")
return self.language
def getLanguage(self):
return self.language
def store(self,fname=None):
if fname == None:
fname = self.language.language+'.qil'
keyToCARTMap = self.language.keyToCARTMap
keyToCARTMapPrimary = self.language.keyToCARTMapPrimary
f = file(fname,'w')
f.write('<QuillLanguage lang="%s" script="%s" deffont="%s" epsilon="%s">\n'%(self.language.language,self.language.script,self.language.default_font,self.language.epsilon.encode('utf-8')))
for (key,keyCart) in keyToCARTMap.items():
keyCart.storeCart(f,"predictive")
for (key,keyCart) in keyToCARTMapPrimary.items():
keyCart.storeCart(f,"primary")
f.write('</QuillLanguage>')
f.close()
def load(self, trainedData):
pass
def __buildKeyToCARTMap ( self, uWords,scope=4,splRulesFlag=True,type="predictive" ):
contextLen = scope
splRules = []
if splRulesFlag == True:
splRules = self.language.getSpecialRules(type)
keyToCARTMap = {}
data={}
for uWord in uWords:
try:
trainPairs = self.language.getTrainingPairs(uWord,type)
except KeyError:
trainPairs = None
if trainPairs != None:
data1 = CART.CART.prepareTrainingData(trainPairs,contextLen,1)
for key in data1.keys():
if data.has_key(key):
data[key].extend( data1[key] )
else:
data.update({key:data1[key]})
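        # contextPrefOrder appears to rank context positions by distance from
        # the focus character, alternating right (+) and left (-) of it
        # (an interpretation; the ordering is not documented in the source).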
if type == "primary":
            contextPrefOrder = [0, 1, 2, -1, 3, -2, 4, -3, -4]
elif type == "predictive":
contextPrefOrder = None
for key in data.keys():
keyCart = CART.CART(key,data[key],contextLen, splRules,contextPrefOrder)
keyCart.build()
keyToCARTMap.update( {key:keyCart } )
return keyToCARTMap
def createTrainingData( self, uWords,scope=4,splRulesType='predictive',fname = None ):
contextLen = scope
splRules = []
if splRulesType != None:
splRules = self.language.getSpecialRules(splRulesType)
if fname == None:
fname = self.language.language+'.data'
f = file(fname,'w')
f.write('<QuillTrainData lang="%s" script="%s" deffont="%s" epsilon="%s" context-len="%s">\n'%(self.language.language,self.language.script,self.language.default_font,self.language.epsilon.encode('utf-8'),scope))
f.write('\t<SpecialRules>\n')
for eachRule in splRules:
f.write('\t\t<SpecialRule>')
f.write(repr(eachRule))
f.write('</SpecialRule>')
f.write('\n')
f.write('\t\t</SpecialRules>\n')
keyToCARTMap = {}
data={}
for uWord in uWords:
try:
trainPairs = self.language.getTrainingPairs(uWord)
except KeyError:
trainPairs = None
if trainPairs != None:
data1 = CART.CART.prepareTrainingData(trainPairs,contextLen,1)
for key in data1.keys():
if data.has_key(key):
data[key].extend( data1[key] )
else:
data.update({key:data1[key]})
for key in data.keys():
keyData = data[key];
f.write('\t<QuillWordList key="%s">\n'%key)
for cWord in keyData:
f.write('\t\t<QuillWord>\n')
f.write('\t\t\t<Literal>%s</Literal>\n'%cWord.word)
f.write('\t\t\t<Focus>%s</Focus>\n'%cWord.focus)
f.write('\t\t\t<ClassAssign>%s</ClassAssign>\n'%cWord.classID.encode('utf-8'))
f.write('\t\t\t<Count>%s</Count>\n'%cWord.count)
f.write('\t\t</QuillWord>\n')
f.write('\t</QuillWordList>\n')
f.write('</QuillTrainData>\n')
f.close()
|
teamtachyon/Quillpad-Server
|
QuillTrainer.py
|
Python
|
bsd-3-clause
| 4,979 | 0.022896 |
"""
Module for storing static data structures
"""
import os
import sys
VERSION = 0.9999
PID = os.getpid()
S3SITE_CFG_DIR = os.path.join(os.path.expanduser('~'), '.s3site')
S3SITE_CFG_FILE = os.path.join(S3SITE_CFG_DIR, 'config')
S3SITE_LOG_DIR = os.path.join(S3SITE_CFG_DIR, 'logs')
S3SITE_META_FILE = '__s3site.cfg'
DEBUG_FILE = os.path.join(S3SITE_LOG_DIR, 'debug.log')
AWS_DEBUG_FILE = os.path.join(S3SITE_LOG_DIR, 'aws-debug.log')
CRASH_FILE = os.path.join(S3SITE_LOG_DIR, 'crash-report-%d.txt' % PID)
GLOBAL_SETTINGS = {
# setting, type, required?, default, options, callback
'enable_experimental': (bool, False, False, None, None),
'web_browser': (str, False, None, None, None),
'include': (list, False, [], None, None),
}
AWS_SETTINGS = {
'aws_access_key_id': (str, True, None, None, None),
'aws_secret_access_key': (str, True, None, None, None),
'aws_user_id': (str, False, None, None, None),
'aws_port': (int, False, None, None, None),
'aws_ec2_path': (str, False, '/', None, None),
'aws_s3_path': (str, False, '/', None, None),
'aws_is_secure': (bool, False, True, None, None),
'aws_region_name': (str, False, None, None, None),
'aws_region_host': (str, False, None, None, None),
'aws_s3_host': (str, False, None, None, None),
'aws_proxy': (str, False, None, None, None),
'aws_proxy_port': (int, False, None, None, None),
'aws_proxy_user': (str, False, None, None, None),
'aws_proxy_pass': (str, False, None, None, None),
}
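# Reading the tuples above: each entry is (type, required, default, options,
# callback); e.g. AWS_SETTINGS['aws_is_secure'] declares an optional bool
# that defaults to True.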
def __expand_all(path):
path = os.path.expanduser(path)
path = os.path.expandvars(path)
return path
def __makedirs(path, exit_on_failure=False):
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError:
            if exit_on_failure:
                sys.stderr.write("!!! ERROR - %s *must* be a directory\n" %
                                 path)
                sys.exit(1)
elif not os.path.isdir(path) and exit_on_failure:
sys.stderr.write("!!! ERROR - %s *must* be a directory\n" % path)
sys.exit(1)
def create_config_dirs():
__makedirs(S3SITE_CFG_DIR, exit_on_failure=True)
__makedirs(S3SITE_LOG_DIR)
|
jtriley/s3site
|
s3site/static.py
|
Python
|
gpl-3.0
| 2,179 | 0 |
from pylab import *
data = loadtxt('Data/dummy_data.dat')
posterior_sample = atleast_2d(loadtxt('posterior_sample.txt'))
ion()
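# Overlay each posterior sample's model curve (red) on the data (blue); the
# last data.shape[0] columns of each sample are assumed to hold the model.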
for i in xrange(0, posterior_sample.shape[0]):
hold(False)
plot(data[:,0], data[:,1], 'bo')
hold(True)
plot(data[:,0], posterior_sample[i, -data.shape[0]:], 'r-')
ylim([0, 1.1*data[:,1].max()])
draw()
ioff()
show()
hist(posterior_sample[:,9], 20)
xlabel('Number of Bursts')
show()
pos = posterior_sample[:, 10:110]
pos = pos[pos != 0.]
hist(pos, 1000)
xlabel('Time')
title('Positions of Bursts')
show()
|
jchiang87/TimeBombs
|
display.py
|
Python
|
gpl-3.0
| 548 | 0.021898 |
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Written (W) 2013 Heiko Strathmann
Written (W) 2013 Dino Sejdinovic
"""
from kameleon_mcmc.distribution.Gaussian import Gaussian
from matplotlib.patches import Ellipse
from matplotlib.pyplot import imshow, ylim, xlim, contour, plot, hold, gca
from numpy import linspace
from numpy.linalg.linalg import eigh
from numpy import zeros, array, exp, arctan2, sqrt
import numpy
class Visualise(object):
def __init__(self):
pass
@staticmethod
def get_plotting_arrays(distribution):
bounds = distribution.get_plotting_bounds()
assert(len(bounds) == 2)
Xs = linspace(bounds[0][0], bounds[0][1])
Ys = linspace(bounds[1][0], bounds[1][1])
return Xs, Ys
@staticmethod
def visualise_distribution(distribution, Z=None, log_density=False, Xs=None, Ys=None):
"""
Plots the density of a given Distribution instance and plots some
samples on top.
"""
if Xs is None or Ys is None:
Xs, Ys = Visualise.get_plotting_arrays(distribution)
Visualise.plot_density(distribution, Xs, Ys)
if Z is not None:
hold(True)
Visualise.plot_data(Z)
hold(False)
@staticmethod
def plot_density(distribution, Xs, Ys, log_domain=False):
"""
Plots a 2D density
density - density - distribution instance to plot
Xs - x values the density is evaluated at
Ys - y values the density is evaluated at
log_domain - if False, density will be put into exponential function
"""
assert(distribution.dimension == 2)
        D = zeros((len(Ys), len(Xs)))
# compute log-density
for i in range(len(Xs)):
for j in range(len(Ys)):
x = array([[Xs[i], Ys[j]]])
D[j, i] = distribution.log_pdf(x)
if log_domain == False:
D = exp(D)
im = imshow(D, origin='lower')
im.set_extent([Xs.min(), Xs.max(), Ys.min(), Ys.max()])
im.set_interpolation('nearest')
im.set_cmap('gray')
ylim([Ys.min(), Ys.max()])
xlim([Xs.min(), Xs.max()])
@staticmethod
def contour_plot_density(distribution, Xs=None, Ys=None, log_domain=False):
"""
Contour-plots a 2D density. If Gaussian, plots 1.96 interval contour only
density - distribution instance to plot
Xs - x values the density is evaluated at
Ys - y values the density is evaluated at
log_domain - if False, density will be put into exponential function
"""
if isinstance(distribution, Gaussian) and log_domain == False:
gca().add_artist(Visualise.get_gaussian_ellipse_artist(distribution))
gca().plot(distribution.mu[0], distribution.mu[1], 'r*', \
markersize=3.0, markeredgewidth=.1)
return
assert(distribution.dimension == 2)
if Xs is None:
(xmin, xmax), _ = distribution.get_plotting_bounds()
Xs = linspace(xmin, xmax)
if Ys is None:
_, (ymin, ymax) = distribution.get_plotting_bounds()
Ys = linspace(ymin, ymax)
D = zeros((len(Ys), len(Xs)))
# compute log-density
for i in range(len(Xs)):
for j in range(len(Ys)):
x = array([[Xs[i], Ys[j]]])
D[j, i] = distribution.log_pdf(x)
if log_domain == False:
D = exp(D)
contour(Xs, Ys, D, origin='lower')
@staticmethod
def plot_array(Xs, Ys, D):
"""
Plots a 2D array
Xs - x values the density is evaluated at
Ys - y values the density is evaluated at
D - array to plot
"""
im = imshow(D, origin='lower')
im.set_extent([Xs.min(), Xs.max(), Ys.min(), Ys.max()])
im.set_interpolation('nearest')
im.set_cmap('gray')
ylim([Ys.min(), Ys.max()])
xlim([Xs.min(), Xs.max()])
@staticmethod
def plot_data(Z, y=None):
"""
Plots collection of 2D points and optionally adds a marker to one of them
Z - set of row-vectors points to plot
y - one point that is marked in red, might be None
"""
plot(Z[:, 0], Z[:, 1], '*', markersize=3.0, markeredgewidth=.1)
if y is not None:
plot(y[0, 0], y[0, 1], 'r*', markersize=10.0, markeredgewidth=.1)
@staticmethod
def get_gaussian_ellipse_artist(gaussian, nstd=1.96, linewidth=1):
"""
Returns an allipse artist for nstd times the standard deviation of this
Gaussian
"""
assert(isinstance(gaussian, Gaussian))
assert(gaussian.dimension == 2)
# compute eigenvalues (ordered)
vals, vecs = eigh(gaussian.L.dot(gaussian.L.T))
order = vals.argsort()[::-1]
vals, vecs = vals[order], vecs[:, order]
theta = numpy.degrees(arctan2(*vecs[:, 0][::-1]))
# width and height are "full" widths, not radius
width, height = 2 * nstd * sqrt(vals)
e = Ellipse(xy=gaussian.mu, width=width, height=height, angle=theta, \
edgecolor="red", fill=False, linewidth=linewidth)
return e
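# Usage sketch (assumes a 2D Gaussian instance `g` from this package):
#     from matplotlib.pyplot import gca, show
#     gca().add_artist(Visualise.get_gaussian_ellipse_artist(g))
#     show()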
|
karlnapf/kameleon-mcmc
|
kameleon_mcmc/tools/Visualise.py
|
Python
|
bsd-2-clause
| 5,656 | 0.006895 |
import os
from twisted.trial import unittest
from lisa.server.plugins.PluginManager import PluginManagerSingleton
class LisaPluginTestCase(unittest.TestCase):
def setUp(self):
self.pluginManager = PluginManagerSingleton.get()
def test_a_install_plugin_ok(self):
answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True, version='0.1.6')
self.assertEqual(answer['status'], "success")
def test_aa_install_plugin_fail(self):
answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "fail")
def test_b_disable_plugin_ok(self):
answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_bb_disable_plugin_fail(self):
answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")
def test_c_enable_plugin_ok(self):
answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_cc_enable_plugin_fail(self):
answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")
def test_d_upgrade_plugin_ok(self):
answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "success")
def test_dd_upgrade_plugin_fail(self):
answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "fail")
def test_e_load_plugin(self):
answer = self.pluginManager.loadPlugins()
test_list = ['UnitTest']
self.assertListEqual(answer, test_list)
def test_f_methodList_plugin(self):
answer = self.pluginManager.methodListPlugin()
methodlist = [{'methods': ['test'], 'plugin': u'UnitTest'}, {'core': 'intents', 'methods': ['list']}]
self.assertListEqual(answer, methodlist)
def test_g_create_plugin(self):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lisa.server.web.weblisa.settings")
answer = self.pluginManager.createPlugin(plugin_name="TestPlugin", author_name="TestAuthor",
author_email="test@test.com")
self.assertEqual(answer['status'], "success")
def test_h_uninstall_plugin(self):
answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_hh_uninstall_plugin(self):
answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")
|
Seraf/LISA
|
lisa/server/tests/test_plugins.py
|
Python
|
mit
| 2,790 | 0.002867 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-03 18:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
('wiblog', '0003_auto_20160325_1441'),
]
operations = [
migrations.AlterModelManagers(
name='comment',
managers=[
('approved', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='post',
managers=[
('published', django.db.models.manager.Manager()),
],
),
migrations.AlterField(
model_name='comment',
name='url',
field=models.URLField(blank=True, null=True),
),
migrations.AlterField(
model_name='tag',
name='desc',
field=models.SlugField(unique=True, verbose_name='Tag'),
),
]
|
lo-windigo/fragdev
|
wiblog/migrations/0004_auto_20170703_1156.py
|
Python
|
agpl-3.0
| 1,015 | 0 |
"""
This scripts compares the autocorrelation in statsmodels with
the one that you can build using only correlate.
"""
import numpy as np
import matplotlib.pyplot as plt
import scipy.signal as signal
import statsmodels.api as sm
from signals.time_series_class import MixAr, AR
from signals.aux_functions import sidekick
plot = False
plot2 = True
# Time parameters
dt = 0.1
Tmax = 100
# Let's get the axuiliary class
amplitude = 1
w1 = 1
w2 = 5
beta = sidekick(w1, w2, dt, Tmax, amplitude)
# First we need the phi's vector
phi0 = 0.0
phi1 = -0.8
phi2 = 0.3
phi = np.array((phi0, phi1, phi2))
# Now we need the initial conditions
x0 = 1
x1 = 1
x2 = 0
initial_conditions = np.array((x0, x1, x2))
# First we construct the series without the sidekick
B = AR(phi, dt=dt, Tmax=Tmax)
B.initial_conditions(initial_conditions)
normal_series = B.construct_series()
# Second we construct the series with the mix
A = MixAr(phi, dt=dt, Tmax=Tmax, beta=beta)
A.initial_conditions(initial_conditions)
mix_series = A.construct_series()
time = A.time
if plot:
plt.subplot(3, 1, 1)
plt.plot(time, beta)
plt.subplot(3, 1, 2)
plt.plot(time, normal_series)
plt.subplot(3, 1, 3)
plt.plot(time, mix_series)
plt.show()
# Let's calculate the auto correlation
nlags = 40
normal_series -= normal_series.mean()
var = np.var(normal_series)
n = len(normal_series)
nlags1 = nlags
normalizing = np.arange(n, n - nlags1, -1)
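# normalizing[k] = n - k is the number of overlapping terms at lag k, so
# dividing the raw correlation by (n - k) * var gives the unbiased sample
# autocorrelation; statsmodels' acf is the comparison target below.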
auto_correlation1 = np.correlate(normal_series, normal_series, mode='full')
aux = auto_correlation1.size/2
auto_correlation1 = auto_correlation1[aux:aux + nlags1] / (normalizing * var)
auto_correlation2 = sm.tsa.stattools.acf(normal_series, nlags=nlags)
print 'result', np.sum(auto_correlation1 - auto_correlation2)
if plot2:
plt.subplot(2, 1, 1)
plt.plot(auto_correlation1)
plt.subplot(2, 1, 2)
plt.plot(auto_correlation2)
plt.show()
|
h-mayorquin/time_series_basic
|
examples/auto_correlations_compare.py
|
Python
|
bsd-3-clause
| 1,897 | 0 |
from logbook import Logger
from ..core.local import get_current_conf
from ..core.connection import autoccontext
from .. import db
from datetime import timedelta, datetime
log = Logger(__name__)
def del_inactive_queries():
conf = get_current_conf()
with autoccontext(commit=True) as conn:
before = db.get_query_count(conn)
db.del_inactive_queries(
conn,
before=datetime.utcnow() - timedelta(days=conf['TORABOT_DELETE_INACTIVE_QUERIES_BEFORE_DAYS']),
limit=conf['TORABOT_DELETE_INACTIVE_QUERIES_LIMIT']
)
after = db.get_query_count(conn)
log.info('delete inactive queries, from {} to {}, deleted {}', before, after, before - after)
return before - after
def del_old_changes():
conf = get_current_conf()
with autoccontext(commit=True) as conn:
before = db.get_change_count(conn)
db.del_old_changes(
conn,
before=datetime.utcnow() - timedelta(days=conf['TORABOT_DELETE_OLD_CHANGES_BEFORE_DAYS']),
limit=conf['TORABOT_DELETE_OLD_CHANGES_LIMIT']
)
after = db.get_change_count(conn)
log.info('delete old changes, from {} to {}, deleted {}', before, after, before - after)
return before - after
|
Answeror/torabot
|
torabot/tasks/delete.py
|
Python
|
mit
| 1,277 | 0.003132 |
import os
from autotest.client import test, utils
# tests is a simple list of ["cmd", "arguments"] pairs
tests = [["aio-dio-invalidate-failure", "poo"],
["aio-dio-subblock-eof-read", "eoftest"],
["aio-free-ring-with-bogus-nr-pages", ""],
["aio-io-setup-with-nonwritable-context-pointer", ""],
["aio-dio-extend-stat", "file"],
]
name = 0
arglist = 1
class aio_dio_bugs(test.test):
version = 5
preserve_srcdir = True
def initialize(self):
self.job.require_gcc()
self.job.setup_dep(['libaio'])
ldflags = '-L ' + self.autodir + '/deps/libaio/lib'
cflags = '-I ' + self.autodir + '/deps/libaio/include'
self.gcc_flags = ldflags + ' ' + cflags
def setup(self):
os.chdir(self.srcdir)
utils.make('"CFLAGS=' + self.gcc_flags + '"')
def execute(self, args = ''):
os.chdir(self.tmpdir)
libs = self.autodir + '/deps/libaio/lib/'
ld_path = utils.prepend_path(libs,
utils.environ('LD_LIBRARY_PATH'))
var_ld_path = 'LD_LIBRARY_PATH=' + ld_path
for test in tests:
cmd = self.srcdir + '/' + test[name] + ' ' + args + ' ' \
+ test[arglist]
utils.system(var_ld_path + ' ' + cmd)
|
nacc/autotest
|
client/tests/aio_dio_bugs/aio_dio_bugs.py
|
Python
|
gpl-2.0
| 1,337 | 0.005236 |
#!/usr/bin/env python
"""
Copyright (c) 2021 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import cocotb_test.simulator
import pytest
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.axi import AxiStreamBus, AxiStreamFrame, AxiStreamSource, AxiStreamSink
from cocotbext.axi.stream import define_stream
StatusBus, StatusTransaction, StatusSource, StatusSink, StatusMonitor = define_stream("Status",
signals=["frame_pad", "frame_truncate", "frame_length", "frame_original_length", "valid"],
optional_signals=["ready"]
)
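# define_stream generates bus/transaction/source/sink/monitor classes for a
# simple valid/ready stream; the DUT is expected to expose the signals listed
# above under the "status" prefix used below (an assumption from from_prefix).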
class TB(object):
def __init__(self, dut):
self.dut = dut
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.fork(Clock(dut.clk, 10, units="ns").start())
self.source = AxiStreamSource(AxiStreamBus.from_prefix(dut, "s_axis"), dut.clk, dut.rst)
self.sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "m_axis"), dut.clk, dut.rst)
# Status
self.status_sink = StatusSink(StatusBus.from_prefix(dut, "status"), dut.clk, dut.rst)
self.dut.length_min.setimmediatevalue(0)
self.dut.length_max.setimmediatevalue(2048)
def set_idle_generator(self, generator=None):
if generator:
self.source.set_pause_generator(generator())
def set_backpressure_generator(self, generator=None):
if generator:
self.sink.set_pause_generator(generator())
async def reset(self):
self.dut.rst.setimmediatevalue(0)
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst <= 1
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst <= 0
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
async def run_test(dut, payload_lengths=None, payload_data=None, idle_inserter=None, backpressure_inserter=None):
tb = TB(dut)
data_width = len(tb.source.bus.tkeep)
byte_width = data_width // 8
id_count = 2**len(tb.source.bus.tid)
cur_id = 1
await tb.reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
for length_max in range(1, 4):
for length_min in range(0, length_max+1):
tb.log.info("length_min %d, length_max %d", length_min, length_max)
await RisingEdge(dut.clk)
tb.dut.length_min <= length_min
tb.dut.length_max <= length_max
await RisingEdge(dut.clk)
test_frames = []
for test_data in [payload_data(x) for x in payload_lengths()]:
test_frame = AxiStreamFrame(test_data, tid=cur_id, tdest=cur_id)
test_frames.append(test_frame)
await tb.source.send(test_frame)
cur_id = (cur_id + 1) % id_count
for test_frame in test_frames:
rx_frame = await tb.sink.recv()
len_rx = len(rx_frame.tdata)
len_test = len(test_frame.tdata)
len_min = min(len_rx, len_test)
assert len_rx >= length_min
assert len_rx <= length_max
assert rx_frame.tdata[:len_min] == test_frame.tdata[:len_min]
assert rx_frame.tid == test_frame.tid
assert rx_frame.tdest == test_frame.tdest
assert not rx_frame.tuser
status = await tb.status_sink.recv()
tb.log.info("Status: %s", status)
assert status.frame_pad == int(len_test < length_min)
assert status.frame_truncate == int(len_test > length_max)
assert status.frame_length == len_rx
assert status.frame_original_length == len_test
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_tuser_assert(dut):
tb = TB(dut)
await tb.reset()
test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
test_frame = AxiStreamFrame(test_data, tuser=1)
await tb.source.send(test_frame)
rx_frame = await tb.sink.recv()
assert rx_frame.tdata == test_data
assert rx_frame.tuser
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_init_sink_pause(dut):
tb = TB(dut)
await tb.reset()
tb.sink.pause = True
test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
test_frame = AxiStreamFrame(test_data)
await tb.source.send(test_frame)
for k in range(64):
await RisingEdge(dut.clk)
tb.sink.pause = False
rx_frame = await tb.sink.recv()
assert rx_frame.tdata == test_data
assert not rx_frame.tuser
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_init_sink_pause_reset(dut):
tb = TB(dut)
await tb.reset()
tb.sink.pause = True
test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
test_frame = AxiStreamFrame(test_data)
await tb.source.send(test_frame)
for k in range(64):
await RisingEdge(dut.clk)
await tb.reset()
tb.sink.pause = False
for k in range(64):
await RisingEdge(dut.clk)
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_overflow(dut):
tb = TB(dut)
await tb.reset()
tb.sink.pause = True
test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 2048))
test_frame = AxiStreamFrame(test_data)
await tb.source.send(test_frame)
for k in range(2048):
await RisingEdge(dut.clk)
tb.sink.pause = False
rx_frame = await tb.sink.recv()
assert rx_frame.tdata == test_data
assert not rx_frame.tuser
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
def cycle_pause():
return itertools.cycle([1, 1, 1, 0])
def size_list():
data_width = len(cocotb.top.m_axis_tdata)
byte_width = data_width // 8
return list(range(1, byte_width*4+1))+[512]+[1]*64
def incrementing_payload(length):
return bytearray(itertools.islice(itertools.cycle(range(256)), length))
if cocotb.SIM_NAME:
factory = TestFactory(run_test)
factory.add_option("payload_lengths", [size_list])
factory.add_option("payload_data", [incrementing_payload])
factory.add_option("idle_inserter", [None, cycle_pause])
factory.add_option("backpressure_inserter", [None, cycle_pause])
factory.generate_tests()
for test in [run_test_tuser_assert, run_test_init_sink_pause, run_test_init_sink_pause_reset, run_test_overflow]:
factory = TestFactory(test)
factory.generate_tests()
# cocotb-test
tests_dir = os.path.dirname(__file__)
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
@pytest.mark.parametrize("data_width", [8, 16, 32])
def test_axis_frame_length_adjust_fifo(request, data_width):
dut = "axis_frame_length_adjust_fifo"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(rtl_dir, f"{dut}.v"),
os.path.join(rtl_dir, f"axis_frame_length_adjust.v"),
os.path.join(rtl_dir, f"axis_fifo.v"),
]
parameters = {}
parameters['DATA_WIDTH'] = data_width
parameters['KEEP_ENABLE'] = int(parameters['DATA_WIDTH'] > 8)
parameters['KEEP_WIDTH'] = parameters['DATA_WIDTH'] // 8
parameters['ID_ENABLE'] = 1
parameters['ID_WIDTH'] = 8
parameters['DEST_ENABLE'] = 1
parameters['DEST_WIDTH'] = 8
parameters['USER_ENABLE'] = 1
parameters['USER_WIDTH'] = 1
parameters['LEN_WIDTH'] = 16
parameters['FRAME_FIFO_DEPTH'] = 1024
parameters['HEADER_FIFO_DEPTH'] = 8
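    # cocotb-test passes `parameters` to the simulator as module parameters;
    # they are also exported as PARAM_<NAME> environment variables,
    # presumably so the Python testbench side can read them back.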
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
|
alexforencich/xfcp
|
lib/eth/lib/axis/tb/axis_frame_length_adjust_fifo/test_axis_frame_length_adjust_fifo.py
|
Python
|
mit
| 9,383 | 0.001172 |
import sys
sys.path.append("../")
sys.path.append("../neuralmind")
import gzip
import cPickle
import numpy as np
import theano
import theano.tensor as T
from neuralmind import NeuralNetwork
from layers import HiddenLayer
from layers import DropoutLayer
import activations
from trainers import SGDTrainer
from trainers import ExponentialDecay
import datasets
# Load CIFAR-10 (bind to `data` to avoid shadowing the imported datasets module)
#data = datasets.load_cifar10("/home/miguel/deeplearning/datasets")
data = datasets.load_cifar10("/home/ubuntu/deeplearning/datasets")
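# load_cifar10 returns the dataset splits; index 0 is the training split and
# index 1 is presumably the validation split consumed by model.train() below.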
"""
model = NeuralNetwork(
n_inputs=32*32*3,
layers = [
(HiddenLayer,
{
'n_units': 512,
'non_linearity': activations.rectify
}),
(HiddenLayer,
{
'n_units': 512,
'non_linearity': activations.rectify
}),
(HiddenLayer,
{
'n_units': 10,
'non_linearity': activations.softmax
})
],
trainer=(SGDTrainer,
{
'batch_size': 20,
'learning_rate': 0.1,
'n_epochs': 400,
'global_L2_regularization': 0.0001,
'dynamic_learning_rate': (ExponentialDecay, {'decay': 0.99}),
}
)
)
"""
model = NeuralNetwork(
n_inputs=32*32*3,
layers = [
(HiddenLayer,
{
'n_units': 1024,
'non_linearity': activations.rectify
}),
(DropoutLayer, {'probability': 0.5}),
(HiddenLayer,
{
'n_units': 1024,
'non_linearity': activations.rectify
}),
(DropoutLayer, {'probability': 0.5}),
(HiddenLayer,
{
'n_units': 10,
'non_linearity': activations.softmax
})
],
trainer=(SGDTrainer,
{
'batch_size': 512,
'learning_rate': 0.1,
'n_epochs': 400,
#'global_L2_regularization': 0.0001,
'dynamic_learning_rate': (ExponentialDecay, {'decay': 0.99}),
}
)
)
model.train(data[0], data[1])
|
miguelpedroso/neuralmind
|
examples/cifar10_mlp2.py
|
Python
|
mit
| 1,690 | 0.049112 |
# -*- coding: utf-8 -*-
#
# Atizo - The Open Innovation Platform
# http://www.atizo.com/
#
# Copyright (c) 2008-2010 Atizo AG. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from django.forms.util import flatatt
from django.forms.widgets import Select
from django.utils.safestring import mark_safe
from django import forms
from widgets import Html5DateTimeInput
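# A Select subclass that, given a "label" attribute, renders it as an
# empty-value first option ("- label -") acting as an inline placeholder.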
class InputLabelWidget(Select):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
label = final_attrs.get('label','')
if label:
del final_attrs['label']
output = [u'<select%s>' % flatatt(final_attrs)]
if label:
output.append(self.render_option([], '', '- %s -' % label))
options = self.render_options(choices, [value])
if options:
output.append(options)
output.append(u'</select>')
return mark_safe(u'\n'.join(output))
class LabelCharField(forms.CharField):
widget = InputLabelWidget
def __init__(self, *args, **kwargs):
super(LabelCharField, self).__init__(*args, **kwargs)
self.label = kwargs.get('label', '')
def widget_attrs(self, widget):
if self.label:
return {'label': u'%s' % self.label}
return {}
class LabelIntegerField(forms.IntegerField):
widget = InputLabelWidget
def __init__(self, *args, **kwargs):
super(LabelIntegerField, self).__init__(*args, **kwargs)
self.label = kwargs.get('label', '')
def widget_attrs(self, widget):
if self.label:
return {'label': u'%s' % self.label}
return {}
|
atizo/djangojames
|
djangojames/forms/fields.py
|
Python
|
gpl-2.0
| 2,436 | 0.006568 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2016 Marek Marczykowski-Górecki
# <marmarek@invisiblethingslab.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <https://www.gnu.org/licenses/>.
#
#
import os
import qubes.tests
import time
import subprocess
from unittest import expectedFailure
class TC_00_HVM(qubes.tests.SystemTestsMixin, qubes.tests.QubesTestCase):
def setUp(self):
super(TC_00_HVM, self).setUp()
self.vm = self.qc.add_new_vm("QubesHVm",
name=self.make_vm_name('vm1'))
self.vm.create_on_disk(verbose=False)
@expectedFailure
def test_000_pci_passthrough_presence(self):
pcidev = os.environ.get('QUBES_TEST_PCIDEV', None)
if pcidev is None:
self.skipTest('Specify PCI device with QUBES_TEST_PCIDEV '
'environment variable')
self.vm.pcidevs = [pcidev]
self.vm.pci_strictreset = False
self.qc.save()
self.qc.unlock_db()
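        # Minimal guest init: write the PCI listing to the second disk
        # (the VM's private image, read back on the host below) and power
        # off so the host can inspect it.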
init_script = (
"#!/bin/sh\n"
"set -e\n"
"lspci -n > /dev/xvdb\n"
"poweroff\n"
)
self.prepare_hvm_system_linux(self.vm, init_script,
['/usr/sbin/lspci'])
self.vm.start()
timeout = 60
while timeout > 0:
if not self.vm.is_running():
break
time.sleep(1)
timeout -= 1
if self.vm.is_running():
self.fail("Timeout while waiting for VM shutdown")
with open(self.vm.storage.private_img, 'r') as f:
lspci_vm = f.read(512).strip('\0')
p = subprocess.Popen(['lspci', '-ns', pcidev], stdout=subprocess.PIPE)
(lspci_host, _) = p.communicate()
# strip BDF, as it is different in VM
pcidev_desc = ' '.join(lspci_host.strip().split(' ')[1:])
self.assertIn(pcidev_desc, lspci_vm)
|
woju/qubes-core-admin
|
tests/hardware.py
|
Python
|
lgpl-2.1
| 2,564 | 0.00078 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .enums import ProbingState, MachineState
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJP_SM_MODEL
class EUCJPProber(MultiByteCharSetProber):
def __init__(self):
super(EUCJPProber, self).__init__()
self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
self.distribution_analyzer = EUCJPDistributionAnalysis()
self.context_analyzer = EUCJPContextAnalysis()
self.reset()
def reset(self):
super(EUCJPProber, self).reset()
self.context_analyzer.reset()
@property
def charset_name(self):
return "EUC-JP"
def feed(self, byte_str):
for i in range(len(byte_str)):
# PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
coding_state = self.coding_sm.next_state(byte_str[i])
if coding_state == MachineState.error:
self.logger.debug('%s prober hit error at byte %s',
self.charset_name, i)
self._state = ProbingState.not_me
break
elif coding_state == MachineState.its_me:
self._state = ProbingState.found_it
break
elif coding_state == MachineState.start:
char_len = self.coding_sm.get_current_charlen()
if i == 0:
self._last_char[1] = byte_str[0]
self.context_analyzer.feed(self._last_char, char_len)
self.distribution_analyzer.feed(self._last_char, char_len)
else:
self.context_analyzer.feed(byte_str[i - 1:i + 1],
char_len)
self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
char_len)
self._last_char[0] = byte_str[-1]
if self.state == ProbingState.detecting:
if (self.context_analyzer.got_enough_data() and
(self.get_confidence() > self.SHORTCUT_THRESHOLD)):
self._state = ProbingState.found_it
return self.state
def get_confidence(self):
context_conf = self.context_analyzer.get_confidence()
distrib_conf = self.distribution_analyzer.get_confidence()
return max(context_conf, distrib_conf)
|
adam111316/SickGear
|
lib/chardet/eucjpprober.py
|
Python
|
gpl-3.0
| 3,754 | 0.001332 |
"""
Serialization
``django.core.serializers`` provides interfaces to converting Django
``QuerySet`` objects to and from "flat" data (i.e. strings).
"""
from decimal import Decimal
from django.db import models
class CategoryMetaDataManager(models.Manager):
def get_by_natural_key(self, kind, name):
return self.get(kind=kind, name=name)
class CategoryMetaData(models.Model):
kind = models.CharField(max_length=10)
name = models.CharField(max_length=10)
value = models.CharField(max_length=10)
objects = CategoryMetaDataManager()
class Meta:
unique_together = (('kind', 'name'),)
def __str__(self):
return '[%s:%s]=%s' % (self.kind, self.name, self.value)
def natural_key(self):
return (self.kind, self.name)
class Category(models.Model):
name = models.CharField(max_length=20)
meta_data = models.ForeignKey(CategoryMetaData, models.SET_NULL, null=True, default=None)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Author(models.Model):
name = models.CharField(max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Article(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
headline = models.CharField(max_length=50)
pub_date = models.DateTimeField()
categories = models.ManyToManyField(Category)
meta_data = models.ManyToManyField(CategoryMetaData)
class Meta:
ordering = ('pub_date',)
def __str__(self):
return self.headline
class AuthorProfile(models.Model):
author = models.OneToOneField(Author, models.CASCADE, primary_key=True)
date_of_birth = models.DateField()
def __str__(self):
return "Profile of %s" % self.author
class Actor(models.Model):
name = models.CharField(max_length=20, primary_key=True)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Movie(models.Model):
actor = models.ForeignKey(Actor, models.CASCADE)
title = models.CharField(max_length=50)
price = models.DecimalField(max_digits=6, decimal_places=2, default=Decimal('0.00'))
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
class Score(models.Model):
score = models.FloatField()
class Team:
def __init__(self, title):
self.title = title
def __str__(self):
raise NotImplementedError("Not so simple")
def to_string(self):
return "%s" % self.title
class TeamField(models.CharField):
def __init__(self):
super(TeamField, self).__init__(max_length=100)
def get_db_prep_save(self, value, connection):
return str(value.title)
def to_python(self, value):
if isinstance(value, Team):
return value
return Team(value)
def from_db_value(self, value, expression, connection, context):
return Team(value)
def value_to_string(self, obj):
return self.value_from_object(obj).to_string()
def deconstruct(self):
name, path, args, kwargs = super(TeamField, self).deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
class Player(models.Model):
name = models.CharField(max_length=50)
rank = models.IntegerField()
team = TeamField()
def __str__(self):
return '%s (%d) playing for %s' % (self.name, self.rank, self.team.to_string())
class BaseModel(models.Model):
parent_data = models.IntegerField()
class ProxyBaseModel(BaseModel):
class Meta:
proxy = True
class ProxyProxyBaseModel(ProxyBaseModel):
class Meta:
proxy = True
class ComplexModel(models.Model):
field1 = models.CharField(max_length=10)
field2 = models.CharField(max_length=10)
field3 = models.CharField(max_length=10)
|
mattseymour/django
|
tests/serializers/models/base.py
|
Python
|
bsd-3-clause
| 3,903 | 0.000769 |
import io
import sys
import threading
import six
from chainer.dataset import dataset_mixin
class TextDataset(dataset_mixin.DatasetMixin):
"""Dataset of a line-oriented text file.
This dataset reads each line of text file(s) on every call of the
:meth:`__getitem__` operator.
    Positions of line boundaries are cached so that you can quickly
    random-access the text file by line number.
.. note::
Cache will be built in the constructor.
        You can pickle and unpickle the dataset to reuse the cache, but in
        that case you are responsible for guaranteeing that the files are
        not modified after the cache has been built.
Args:
paths (str or list of str):
Path to the text file(s).
If it is a string, this dataset reads a line from the text file
and emits it as :class:`str`.
If it is a list of string, this dataset reads lines from each
text file and emits it as a tuple of :class:`str`. In this case,
number of lines in all files must be the same.
encoding (str or list of str):
Name of the encoding used to decode the file.
See the description in :func:`open` for the supported options and
how it works.
When reading from multiple text files, you can also pass a list of
:class:`str` to use different encoding for each file.
errors (str or list of str):
String that specifies how decoding errors are to be handled.
See the description in :func:`open` for the supported options and
how it works.
When reading from multiple text files, you can also pass a list of
:class:`str` to use different error handling policy for each file.
newline (str or list of str):
Controls how universal newlines mode works.
See the description in :func:`open` for the supported options and
how it works.
When reading from multiple text files, you can also pass a list of
:class:`str` to use different mode for each file.
filter_func (callable):
Function to filter each line of the text file.
            It should be a function that takes as many arguments as the
            number of files; the arguments are the corresponding lines
            loaded from each file.
The filter function must return True to accept the line, or
return False to skip the line.
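    .. admonition:: Example

        A minimal usage sketch (the file names are hypothetical)::

            dataset = TextDataset(['source.txt', 'target.txt'])
            src_line, tgt_line = dataset[0]  # first line of each file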
"""
def __init__(
self, paths, encoding=None, errors=None, newline=None,
filter_func=None):
if isinstance(paths, six.string_types):
paths = [paths]
elif not paths:
raise ValueError('at least one text file must be specified')
if isinstance(encoding, six.string_types) or encoding is None:
encoding = [encoding] * len(paths)
if isinstance(errors, six.string_types) or errors is None:
errors = [errors] * len(paths)
if isinstance(newline, six.string_types) or newline is None:
newline = [newline] * len(paths)
if not (len(paths) == len(encoding) == len(errors) == len(newline)):
raise ValueError(
'length of each option must match with the number of '
'text files to read')
self._paths = paths
self._encoding = encoding
self._errors = errors
self._newline = newline
self._fps = None
self._open()
# Line number is 0-origin.
# `lines` is a list of line numbers not filtered; if no filter_func is
# given, it is range(linenum)).
# `bounds` is a list of cursor positions of line boundaries for each
# file, i.e. i-th line of k-th file starts at `bounds[k][i]`.
linenum = 0
lines = []
bounds = tuple([[0] for _ in self._fps])
while True:
data = [fp.readline() for fp in self._fps]
if not all(data): # any of files reached EOF
if any(data): # not all files reached EOF
raise ValueError(
'number of lines in files does not match')
break
for i, fp in enumerate(self._fps):
bounds[i].append(fp.tell())
if filter_func is not None and filter_func(*data):
lines.append(linenum)
linenum += 1
if filter_func is None:
lines = six.moves.range(linenum)
self._bounds = bounds
self._lines = lines
self._lock = threading.Lock()
def __getstate__(self):
state = self.__dict__.copy()
del state['_fps']
del state['_lock']
return state
def __setstate__(self, state):
self.__dict__ = state
self._open()
self._lock = threading.Lock()
def __len__(self):
return len(self._lines)
def _open(self):
self._fps = [
io.open(
path,
mode='rt',
encoding=encoding,
errors=errors,
newline=newline,
) for path, encoding, errors, newline in
six.moves.zip(self._paths, self._encoding, self._errors,
self._newline)
]
def close(self):
"""Manually closes all text files.
        In most cases, you do not have to call this method, because the
        files are closed automatically when the TextDataset instance goes
        out of scope.
"""
exc = None
for fp in self._fps:
try:
fp.close()
except Exception:
exc = sys.exc_info()
if exc is not None:
six.reraise(*exc)
def get_example(self, idx):
if idx < 0 or len(self._lines) <= idx:
raise IndexError
linenum = self._lines[idx]
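        # The file objects are shared across threads, so seeking and reading
        # must happen atomically under the lock.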
        with self._lock:
            for k, fp in enumerate(self._fps):
                fp.seek(self._bounds[k][linenum])
            lines = [fp.readline() for fp in self._fps]
            if len(lines) == 1:
                return lines[0]
            return tuple(lines)
|
okuta/chainer
|
chainer/datasets/text_dataset.py
|
Python
|
mit
| 6,272 | 0 |
"""Authentication for HTTP component."""
import base64
import logging
from aiohttp import hdrs
from aiohttp.web import middleware
import jwt
from homeassistant.auth.providers import legacy_api_password
from homeassistant.auth.util import generate_secret
from homeassistant.const import HTTP_HEADER_HA_AUTH
from homeassistant.core import callback
from homeassistant.util import dt as dt_util
from .const import KEY_AUTHENTICATED, KEY_HASS_USER, KEY_REAL_IP
_LOGGER = logging.getLogger(__name__)
DATA_API_PASSWORD = "api_password"
DATA_SIGN_SECRET = "http.auth.sign_secret"
SIGN_QUERY_PARAM = "authSig"
@callback
def async_sign_path(hass, refresh_token_id, path, expiration):
"""Sign a path for temporary access without auth header."""
secret = hass.data.get(DATA_SIGN_SECRET)
if secret is None:
secret = hass.data[DATA_SIGN_SECRET] = generate_secret()
now = dt_util.utcnow()
return "{}?{}={}".format(
path,
SIGN_QUERY_PARAM,
jwt.encode(
{
"iss": refresh_token_id,
"path": path,
"iat": now,
"exp": now + expiration,
},
secret,
algorithm="HS256",
).decode(),
)
@callback
def setup_auth(hass, app):
"""Create auth middleware for the app."""
old_auth_warning = set()
support_legacy = hass.auth.support_legacy
if support_legacy:
_LOGGER.warning("legacy_api_password support has been enabled.")
trusted_networks = []
for prv in hass.auth.auth_providers:
if prv.type == "trusted_networks":
trusted_networks += prv.trusted_networks
async def async_validate_auth_header(request):
"""
Test authorization header against access token.
        Basic-auth support is legacy code and should be removed together
        with api_password.
"""
try:
auth_type, auth_val = request.headers.get(hdrs.AUTHORIZATION).split(" ", 1)
except ValueError:
# If no space in authorization header
return False
if auth_type == "Bearer":
refresh_token = await hass.auth.async_validate_access_token(auth_val)
if refresh_token is None:
return False
request[KEY_HASS_USER] = refresh_token.user
return True
if auth_type == "Basic" and support_legacy:
decoded = base64.b64decode(auth_val).decode("utf-8")
try:
username, password = decoded.split(":", 1)
except ValueError:
# If no ':' in decoded
return False
if username != "homeassistant":
return False
user = await legacy_api_password.async_validate_password(hass, password)
if user is None:
return False
request[KEY_HASS_USER] = user
_LOGGER.info(
"Basic auth with api_password is going to deprecate,"
" please use a bearer token to access %s from %s",
request.path,
request[KEY_REAL_IP],
)
old_auth_warning.add(request.path)
return True
return False
async def async_validate_signed_request(request):
"""Validate a signed request."""
secret = hass.data.get(DATA_SIGN_SECRET)
if secret is None:
return False
signature = request.query.get(SIGN_QUERY_PARAM)
if signature is None:
return False
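        # Decode with the shared secret; the "iss" claim is verified manually
        # below against the stored refresh tokens instead of by the JWT
        # library (verify_iss is disabled).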
try:
claims = jwt.decode(
signature, secret, algorithms=["HS256"], options={"verify_iss": False}
)
except jwt.InvalidTokenError:
return False
if claims["path"] != request.path:
return False
refresh_token = await hass.auth.async_get_refresh_token(claims["iss"])
if refresh_token is None:
return False
request[KEY_HASS_USER] = refresh_token.user
return True
async def async_validate_trusted_networks(request):
"""Test if request is from a trusted ip."""
ip_addr = request[KEY_REAL_IP]
if not any(ip_addr in trusted_network for trusted_network in trusted_networks):
return False
user = await hass.auth.async_get_owner()
if user is None:
return False
request[KEY_HASS_USER] = user
return True
async def async_validate_legacy_api_password(request, password):
"""Validate api_password."""
user = await legacy_api_password.async_validate_password(hass, password)
if user is None:
return False
request[KEY_HASS_USER] = user
return True
@middleware
async def auth_middleware(request, handler):
"""Authenticate as middleware."""
authenticated = False
if HTTP_HEADER_HA_AUTH in request.headers or DATA_API_PASSWORD in request.query:
if request.path not in old_auth_warning:
_LOGGER.log(
logging.INFO if support_legacy else logging.WARNING,
"api_password is going to deprecate. You need to use a"
" bearer token to access %s from %s",
request.path,
request[KEY_REAL_IP],
)
old_auth_warning.add(request.path)
if hdrs.AUTHORIZATION in request.headers and await async_validate_auth_header(
request
):
            # This covers both bearer-token auth and legacy api_password
            # Basic auth.
authenticated = True
# We first start with a string check to avoid parsing query params
# for every request.
elif (
request.method == "GET"
and SIGN_QUERY_PARAM in request.query
and await async_validate_signed_request(request)
):
authenticated = True
elif trusted_networks and await async_validate_trusted_networks(request):
if request.path not in old_auth_warning:
# When removing this, don't forget to remove the print logic
# in http/view.py
request["deprecate_warning_message"] = (
"Access from trusted networks without auth token is "
"going to be removed in Home Assistant 0.96. Configure "
"the trusted networks auth provider or use long-lived "
"access tokens to access {} from {}".format(
request.path, request[KEY_REAL_IP]
)
)
old_auth_warning.add(request.path)
authenticated = True
elif (
support_legacy
and HTTP_HEADER_HA_AUTH in request.headers
and await async_validate_legacy_api_password(
request, request.headers[HTTP_HEADER_HA_AUTH]
)
):
authenticated = True
elif (
support_legacy
and DATA_API_PASSWORD in request.query
and await async_validate_legacy_api_password(
request, request.query[DATA_API_PASSWORD]
)
):
authenticated = True
request[KEY_AUTHENTICATED] = authenticated
return await handler(request)
app.middlewares.append(auth_middleware)
|
fbradyirl/home-assistant
|
homeassistant/components/http/auth.py
|
Python
|
apache-2.0
| 7,407 | 0.001215 |
# -*- coding: utf-8 -*-
"""
Project Bluebox
Copyright (C) <2015> <University of Stuttgart>
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE file for details.
"""
from mcm.Bluebox import app
from mcm.Bluebox import configuration
# socketio.run(
# app,
app.run(
host=configuration.my_bind_host,
port=int(configuration.my_endpoint_port),
debug=False,
threaded=True
)
|
timwaizenegger/swift-bluebox
|
_runApp_Development_nodebug.py
|
Python
|
mit
| 442 | 0.011312 |
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import FieldDoesNotExist
from django.core.exceptions import ImproperlyConfigured
from django.utils.timezone import now
from model_utils.managers import QueryManager
from model_utils.fields import AutoCreatedField, AutoLastModifiedField, \
StatusField, MonitorField
class TimeStampedModel(models.Model):
"""
An abstract base class model that provides self-updating
``created`` and ``modified`` fields.
"""
created = AutoCreatedField(_('created'))
modified = AutoLastModifiedField(_('modified'))
class Meta:
abstract = True
class TimeFramedModel(models.Model):
"""
An abstract base class model that provides ``start``
and ``end`` fields to record a timeframe.
"""
start = models.DateTimeField(_('start'), null=True, blank=True)
end = models.DateTimeField(_('end'), null=True, blank=True)
class Meta:
abstract = True
class StatusModel(models.Model):
"""
An abstract base class model with a ``status`` field that
automatically uses a ``STATUS`` class attribute of choices, a
``status_changed`` date-time field that records when ``status``
was last modified, and an automatically-added manager for each
status that returns objects with that status only.
"""
status = StatusField(_('status'))
status_changed = MonitorField(_('status changed'), monitor='status')
class Meta:
abstract = True
def add_status_query_managers(sender, **kwargs):
"""
    Add a QueryManager for each status item dynamically.
"""
if not issubclass(sender, StatusModel):
return
for value, display in getattr(sender, 'STATUS', ()):
if _field_exists(sender, value):
raise ImproperlyConfigured(
"StatusModel: Model '%s' has a field named '%s' which "
"conflicts with a status of the same name."
% (sender.__name__, value)
)
sender.add_to_class(value, QueryManager(status=value))
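# A minimal sketch of what this enables (model and statuses are hypothetical):
#
#     class Article(StatusModel):
#         STATUS = (('draft', 'Draft'), ('published', 'Published'))
#
#     Article.draft.all()      # only articles with status == 'draft'
#     Article.published.all()  # only articles with status == 'published'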
def add_timeframed_query_manager(sender, **kwargs):
"""
Add a QueryManager for a specific timeframe.
"""
if not issubclass(sender, TimeFramedModel):
return
if _field_exists(sender, 'timeframed'):
raise ImproperlyConfigured(
"Model '%s' has a field named 'timeframed' "
"which conflicts with the TimeFramedModel manager."
% sender.__name__
)
sender.add_to_class('timeframed', QueryManager(
(models.Q(start__lte=now) | models.Q(start__isnull=True)) &
(models.Q(end__gte=now) | models.Q(end__isnull=True))
))
models.signals.class_prepared.connect(add_status_query_managers)
models.signals.class_prepared.connect(add_timeframed_query_manager)
def _field_exists(model_class, field_name):
return field_name in [f.attname for f in model_class._meta.local_fields]
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/model_utils/models.py
|
Python
|
agpl-3.0
| 3,021 | 0 |
import logging
from dao import DAO, TableDesc, FieldDesc
log = logging.getLogger(__name__)
card_td = TableDesc("Cards", "multiverseid",
[FieldDesc("multiverseid", "int"),
FieldDesc("set_code", "text"),
FieldDesc("number", "int"),
FieldDesc("name", "text"),
FieldDesc("language", "text"),
FieldDesc("translation_of", "int"),
FieldDesc("back_face_of", "int"),
FieldDesc("equivalent_to", "int")])
class CardDAO(DAO):
@staticmethod
def create_table(conn):
card_td.create_table(conn)
def __init__(self, card, conn):
super(CardDAO, self).__init__(card_td, conn)
self.card = card
def get_pkey(self):
return self.card.multiverseid
def get_values(self):
return [self.card.multiverseid,
self.card.set_code,
self.card.number,
self.card.name.decode('utf-8'),
self.card.language,
self.card.translation_of.multiverseid if self.card.translation_of else None,
self.card.back_face_of.multiverseid if self.card.back_face_of else None,
self.card.equivalent_to]
def __str__(self):
return str(self.card)
|
nuxgu/magic_db
|
sqldb/card_dao.py
|
Python
|
gpl-3.0
| 1,343 | 0.001489 |
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core.timeline import model
from telemetry.core.backends.chrome import tracing_timeline_data
class TimelineModelUnittest(unittest.TestCase):
def testEmptyImport(self):
model.TimelineModel(
tracing_timeline_data.TracingTimelineData([]))
model.TimelineModel(
tracing_timeline_data.TracingTimelineData(''))
|
patrickm/chromium.src
|
tools/telemetry/telemetry/core/timeline/model_unittest.py
|
Python
|
bsd-3-clause
| 529 | 0.003781 |