import time
from pygame.locals import *
import gui
MOUSE_LEFT_BUTTON = 1
MOUSE_MIDDLE_BUTTON = 2
MOUSE_RIGHT_BUTTON = 3
MOUSE_WHEELUP = 4
MOUSE_WHEELDOWN = 5
class Screen(object):
"""Base gui screen class
every game screen class should inherit from this one
"""
def __init__(self):
# Per-instance state: a class-level mutable trigger list or hover state
# would be shared (and leak) between all Screen instances.
self.__triggers = []
self.__old_hover = None
self.__hover = None
self.__hover_changed = False
def log_info(self, message):
"""Prints an INFO message to standard output"""
ts = int(time.time())
print("# INFO %i ... %s" % (ts, message))
def log_error(self, message):
"""Prints an ERROR message to standard output"""
ts = int(time.time())
print("! ERROR %i ... %s" % (ts, message))
def reset_triggers_list(self):
"""Clears the screen's trigger list"""
self.__triggers = []
def add_trigger(self, trigger):
"""Appends given trigger to the end of screen's trigger list"""
if 'hover_id' not in trigger:
trigger['hover_id'] = None
self.__triggers.append(trigger)
def list_triggers(self):
"""Returns the screen's list of triggers"""
return self.__triggers
def get_timestamp(self, zoom = 1):
"""Returns an actual timestamp"""
return int(time.time() * zoom)
def get_image(self, img_key, subkey1 = None, subkey2 = None, subkey3 = None):
"""Returns an image object from GUI engine, identified by its key(s)"""
return gui.GUI.get_image(img_key, subkey1, subkey2, subkey3)
def redraw_flip(self):
"""Redraws the screen, takes care about mouse cursor and flips the graphic buffer to display"""
self.draw()
gui.GUI.highlight_triggers(self.list_triggers())
gui.GUI.flip()
def redraw_noflip(self):
"""Redraws the screen, takes care about mouse cursor but doesn't flip the buffer to display"""
self.draw()
gui.GUI.highlight_triggers(self.list_triggers())
def prepare(self):
"""This method should be implemented by screens that require some
special actions each time before the screen is run.
For example, to reset the screen to a well-known state to prevent unexpected behaviour.
"""
pass
def draw(self):
"""All static graphic output should be implemented in this method.
Unless there is only a dynamic graphic (animations),
every screen should implement this method.
"""
pass
def animate(self):
"""Entry point for Screen animations, e.g. ship trajectory on MainScreen.
GUI engine calls this method periodically.
Animations should be time-dependent; such screens have to implement the timing themselves!
"""
pass
def get_escape_trigger(self):
"""Returns standard trigger for sending escape action"""
return {'action': "ESCAPE"}
def on_mousebuttonup(self, event):
"""Default implementation of mouse click event serving.
Checks the mouse wheel events (up and down scrolling) and regular mouse buttons.
If the event's subject is the left mouse button it checks the mouse position against the trigger list and
returns the first trigger where mouse positions is within its rectangle.
There is a good chance that no screen would have to override this method.
"""
if event.button == MOUSE_MIDDLE_BUTTON:
print(event)
elif event.button == MOUSE_WHEELUP:
return {'action': "SCROLL_UP"}
elif event.button == MOUSE_WHEELDOWN:
return {'action': "SCROLL_DOWN"}
else:
triggers_list = self.list_triggers()
for trigger in triggers_list:
if trigger['rect'].collidepoint(event.pos):
if event.button == MOUSE_LEFT_BUTTON:
trigger['mouse_pos'] = event.pos
return trigger
elif event.button == MOUSE_RIGHT_BUTTON:
return {'action': "help", 'help': trigger['action']}
def on_keydown(self, event):
"""Default implementation of a keyboard event handling.
If keypress is detected by a GUI engine it calls this method.
The pressed key is checked against the trigger list.
Returns the first trigger where the key matches the pressed or
None if no trigger matches the keypress
There is a good chance that no screen would have to override this method.
"""
print("@ screen.Screen::on_keydown()")
print(" scancode = %i" % event.scancode)
print(" key = %i" % event.key)
if event.key == K_ESCAPE:
return {'action': "ESCAPE"}
else:
triggers_list = self.list_triggers()
for trigger in triggers_list:
if 'key' in trigger and trigger['key'] == event.key:
return trigger
return {'action': "key", 'key': event.key}
def update_hover(self, mouse_pos):
"""This method is invoked by a GUI engine on every pure mouse move
and right before the screen's on_mousemotion() method.
Mouse position is checked against screen's trigger list.
If hover is detected (=mouse position is inside the trigger's rectangle)
the trigger is copied and can be returned by get_hover() method
Also if the previously stored value is different than the new one,
the __hover_changed flag is set to True
The idea is to handle mouse hover detection separately,
so other methods can rely on the get_hover() and hover_changed() methods.
Probably no screen should need to override this method.
"""
for trigger in self.list_triggers():
if 'hover_id' in trigger and trigger['rect'].collidepoint(mouse_pos):
if self.__hover != trigger:
self.__hover_changed = True
self.__hover = trigger
break
def get_hover(self):
"""Returns the current hover trigger"""
return self.__hover
def hover_changed(self):
"""Returns True if screen's hover has changed since last call of this method"""
if self.__hover_changed:
self.__hover_changed = False
return True
else:
return False
def on_mousemotion(self, event):
"""Invoked by a GUI engine on every pure (non-dragging) mouse move.
Currently no screen needs to override this empty implementation.
"""
pass
def get_drag_item(self, mouse_pos):
""""""
for trigger in self.list_triggers():
if 'drag_id' in trigger and trigger['rect'].collidepoint(mouse_pos):
return trigger['drag_id']
return None
def on_mousedrag(self, drag_item, pos, rel):
"""Invoked by a GUI engine when left mouse button is being held, drag item is set and mouse moves"""
pass
def on_mousedrop(self, drag_item, pos):
"""Invoked by a GUI engine when mouse dragging stops
(drag item was set and left mouse button was released).
pos is the (mouse_x, mouse_y) drop position.
"""
pass
def process_trigger(self, trigger):
"""Empty implementation of a trigger handling
If a screen trigger is positively evaluated
(e.g. returned from on_mousebuttonup() or on_keydown() methods)
it's passed as a trigger argument to this method
Every screen should override this method to handle the proper actions.
"""
pass
def enter(self):
""" Called by GUI engine right before gui_client::run_screen() is invoked
Suitable for saving initial state that can be reversed by the screen's leave_cancel() method
"""
pass
def leave_confirm(self):
""" Called by GUI engine when CONFIRM trigger is activated
Every screen that sends data to the game server should implement this method
"""
pass
def leave_cancel(self):
""" Called by GUI engine when ESCAPE trigger is activated
This is the right place to implement things like getting the screen to state before any changes were made
"""
pass
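# A minimal usage sketch (hypothetical subclass and values, not part of the
# original module): a screen registers rect-based triggers, the GUI engine
# feeds events to on_mousebuttonup()/on_keydown(), and the matched trigger is
# handed back to process_trigger().
#
#     import pygame
#
#     class MainMenuScreen(Screen):
#         def prepare(self):
#             self.reset_triggers_list()
#             self.add_trigger({'action': "NEW_GAME",
#                               'rect': pygame.Rect(100, 100, 200, 40),
#                               'key': K_n})
#
#         def process_trigger(self, trigger):
#             if trigger['action'] == "NEW_GAME":
#                 self.log_info("starting a new game")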
# ===== pjotrligthart/openmoo2-unofficial :: game/gui/screen.py (Python, gpl-2.0) =====
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from dataclasses import dataclass
from pathlib import Path
from textwrap import dedent
from typing import Mapping, Optional, Tuple, cast
from pants.core.util_rules import subprocess_environment
from pants.core.util_rules.subprocess_environment import SubprocessEnvironmentVars
from pants.engine import process
from pants.engine.engine_aware import EngineAwareReturnType
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.process import BinaryPath, BinaryPathRequest, BinaryPaths, BinaryPathTest
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.option.global_options import GlobalOptions
from pants.option.subsystem import Subsystem
from pants.python.python_setup import PythonSetup
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.memo import memoized_method
from pants.util.ordered_set import OrderedSet
from pants.util.strutil import create_path_env_var
class PexRuntimeEnvironment(Subsystem):
options_scope = "pex"
help = "How Pants uses Pex to run Python subprocesses."
@classmethod
def register_options(cls, register):
super().register_options(register)
# TODO(#9760): We'll want to deprecate this in favor of a global option which allows for a
# per-process override.
register(
"--executable-search-paths",
advanced=True,
type=list,
default=["<PATH>"],
metavar="<binary-paths>",
help=(
"The PATH value that will be used by the PEX subprocess and any subprocesses it "
'spawns.\n\nThe special string "<PATH>" will expand to the contents of the PATH '
"env var."
),
)
register(
"--bootstrap-interpreter-names",
advanced=True,
type=list,
default=["python", "python3", "python2"],
metavar="<bootstrap-python-names>",
help=(
"The names of Python binaries to search for to bootstrap PEX files with.\n\nThis "
"does not impact which Python interpreter is used to run your code, only what is "
"used to run the PEX tool. See the `interpreter_search_paths` option in "
"`[python-setup]` to influence where interpreters are searched for."
),
)
register(
"--verbosity",
advanced=True,
type=int,
default=0,
help=(
"Set the verbosity level of PEX logging, from 0 (no logging) up to 9 (max logging)."
),
)
@memoized_method
def path(self, env: Environment) -> Tuple[str, ...]:
def iter_path_entries():
for entry in self.options.executable_search_paths:
if entry == "<PATH>":
path = env.get("PATH")
if path:
for path_entry in path.split(os.pathsep):
yield path_entry
else:
yield entry
return tuple(OrderedSet(iter_path_entries()))
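# Worked example (illustrative values, not from the source): with
# `--executable-search-paths=['/opt/tools', '<PATH>']` and an ambient
# PATH of `/usr/bin:/bin`, this returns ('/opt/tools', '/usr/bin', '/bin');
# the OrderedSet preserves order while dropping duplicate entries.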
@property
def bootstrap_interpreter_names(self) -> Tuple[str, ...]:
return tuple(self.options.bootstrap_interpreter_names)
@property
def verbosity(self) -> int:
level = cast(int, self.options.verbosity)
if level < 0 or level > 9:
raise ValueError("verbosity level must be between 0 and 9")
return level
class PythonExecutable(BinaryPath, EngineAwareReturnType):
"""The BinaryPath of a Python executable."""
def message(self) -> str:
return f"Selected {self.path} to run PEXes with."
@dataclass(frozen=True)
class PexEnvironment(EngineAwareReturnType):
path: Tuple[str, ...]
interpreter_search_paths: Tuple[str, ...]
subprocess_environment_dict: FrozenDict[str, str]
named_caches_dir: str
bootstrap_python: Optional[PythonExecutable] = None
def create_argv(
self, pex_filepath: str, *args: str, python: Optional[PythonExecutable] = None
) -> Tuple[str, ...]:
python = python or self.bootstrap_python
if python:
return (python.path, pex_filepath, *args)
if os.path.basename(pex_filepath) == pex_filepath:
return (f"./{pex_filepath}", *args)
return (pex_filepath, *args)
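# For illustration (hypothetical paths): create_argv("my.pex", "--version")
# yields ("/usr/bin/python3.8", "my.pex", "--version") when a bootstrap
# interpreter is available, and ("./my.pex", "--version") otherwise, since a
# bare filename must be prefixed with "./" to be executable from the CWD.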
def environment_dict(self, *, python_configured: bool) -> Mapping[str, str]:
"""The environment to use for running anything with PEX.
If the Process is run with a pre-selected Python interpreter, set `python_configured=True`
to avoid PEX from trying to find a new interpreter.
"""
d = dict(
PATH=create_path_env_var(self.path),
PEX_INHERIT_PATH="false",
PEX_IGNORE_RCFILES="true",
PEX_ROOT=os.path.join(self.named_caches_dir, "pex_root"),
**self.subprocess_environment_dict,
)
# NB: We only set `PEX_PYTHON_PATH` if the Python interpreter has not already been
# pre-selected by Pants. Otherwise, Pex would inadvertently try to find another interpreter
# when running PEXes. (Creating a PEX will ignore this env var in favor of `--python-path`.)
if not python_configured:
d["PEX_PYTHON_PATH"] = create_path_env_var(self.interpreter_search_paths)
return d
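# Sketch of a resulting environment (values are illustrative only):
#   PATH=/usr/bin:/bin
#   PEX_INHERIT_PATH=false
#   PEX_IGNORE_RCFILES=true
#   PEX_ROOT=~/.cache/pants/named_caches/pex_root
#   PEX_PYTHON_PATH=...   # only when python_configured=False
# plus any configured subprocess environment vars.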
def level(self) -> LogLevel:
return LogLevel.DEBUG if self.bootstrap_python else LogLevel.WARN
def message(self) -> str:
if not self.bootstrap_python:
return (
"No bootstrap Python executable could be found from the option "
"`interpreter_search_paths` in the `[python-setup]` scope. Will attempt to run "
"PEXes directly."
)
return f"Selected {self.bootstrap_python.path} to bootstrap PEXes with."
@rule(desc="Find Python interpreter to bootstrap PEX", level=LogLevel.DEBUG)
async def find_pex_python(
python_setup: PythonSetup,
pex_runtime_env: PexRuntimeEnvironment,
subprocess_env_vars: SubprocessEnvironmentVars,
global_options: GlobalOptions,
) -> PexEnvironment:
pex_relevant_environment = await Get(
Environment, EnvironmentRequest(["PATH", "HOME", "PYENV_ROOT"])
)
# PEX files are compatible with bootstrapping via Python 2.7 or Python 3.5+. The bootstrap
# code will then re-exec itself if the underlying PEX user code needs a more specific python
# interpreter. As such, we look for many Pythons usable by the PEX bootstrap code here for
# maximum flexibility.
all_python_binary_paths = await MultiGet(
Get(
BinaryPaths,
BinaryPathRequest(
search_path=python_setup.interpreter_search_paths(pex_relevant_environment),
binary_name=binary_name,
test=BinaryPathTest(
args=[
"-c",
# N.B.: The following code snippet must be compatible with Python 2.7 and
# Python 3.5+.
#
# We hash the underlying Python interpreter executable to ensure we detect
# changes in the real interpreter that might otherwise be masked by Pyenv
# shim scripts found on the search path. Naively, just printing out the full
# version_info would be enough, but that does not account for supported abi
# changes (e.g.: a pyenv switch from a py27mu interpreter to a py27m
# interpreter.)
#
# When hashing, we pick 8192 for efficiency of reads and fingerprint updates
# (writes) since it's a common OS buffer size and an even multiple of the
# hash block size.
dedent(
"""\
import sys
major, minor = sys.version_info[:2]
if (major, minor) != (2, 7) and not (major == 3 and minor >= 5):
sys.exit(1)
import hashlib
hasher = hashlib.sha256()
with open(sys.executable, "rb") as fp:
for chunk in iter(lambda: fp.read(8192), b""):
hasher.update(chunk)
sys.stdout.write(hasher.hexdigest())
"""
),
],
fingerprint_stdout=False, # We already emit a usable fingerprint to stdout.
),
),
)
for binary_name in pex_runtime_env.bootstrap_interpreter_names
)
def first_python_binary() -> Optional[PythonExecutable]:
for binary_paths in all_python_binary_paths:
if binary_paths.first_path:
return PythonExecutable(
path=binary_paths.first_path.path,
fingerprint=binary_paths.first_path.fingerprint,
)
return None
return PexEnvironment(
path=pex_runtime_env.path(pex_relevant_environment),
interpreter_search_paths=tuple(
python_setup.interpreter_search_paths(pex_relevant_environment)
),
subprocess_environment_dict=subprocess_env_vars.vars,
# TODO: This path normalization is duplicated with `engine_initializer.py`. How can we do
# the normalization only once, via the options system?
named_caches_dir=Path(global_options.options.named_caches_dir).resolve().as_posix(),
bootstrap_python=first_python_binary(),
)
def rules():
return [*collect_rules(), *process.rules(), *subprocess_environment.rules()]
# ===== jsirois/pants :: src/python/pants/backend/python/util_rules/pex_environment.py (Python, apache-2.0) =====
#!/usr/bin/env python3
import os
import pathlib
import sysconfig
import compileall
import subprocess
prefix = pathlib.Path(os.environ.get('MESON_INSTALL_PREFIX', '/usr/local'))
datadir = prefix / 'share'
destdir = os.environ.get('DESTDIR', '')
if not destdir:
print('Compiling gsettings schemas...')
subprocess.call(['glib-compile-schemas', str(datadir / 'glib-2.0' / 'schemas')])
print('Updating icon cache...')
subprocess.call(['gtk-update-icon-cache', '-qtf', str(datadir / 'icons' / 'hicolor')])
print('Updating desktop database...')
subprocess.call(['update-desktop-database', '-q', str(datadir / 'applications')])
print('Compiling python bytecode...')
moduledir = sysconfig.get_path('purelib', vars={'base': str(prefix)})
compileall.compile_dir(destdir + os.path.join(moduledir, 'eidisi'), optimize=2)
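# How this hooks in (assumed wiring, not shown in this snippet): the top-level
# meson.build would register the script via
#
#   meson.add_install_script('scripts/meson_post_install.py')
#
# Meson then exports MESON_INSTALL_PREFIX (and propagates DESTDIR) into the
# script's environment, which the code above reads.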
# ===== sramkrishna/eidisi :: scripts/meson_post_install.py (Python, gpl-3.0) =====
from __future__ import unicode_literals
def execute():
"""Make standard print formats readonly for system manager"""
import webnotes.model.doc
new_perms = [
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'System Manager',
'permlevel': 1,
'read': 1,
},
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'Administrator',
'permlevel': 1,
'read': 1,
'write': 1
},
]
for perms in new_perms:
doc = webnotes.model.doc.Document('DocPerm')
doc.fields.update(perms)
doc.save()
webnotes.conn.commit()
webnotes.conn.begin()
webnotes.reload_doc('core', 'doctype', 'print_format')
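# Hedged usage sketch (assumed harness, not part of this file): the patch
# framework imports the module by its dotted path and calls execute(), e.g.
#
#   from patches.may_2012 import std_pf_readonly
#   std_pf_readonly.execute()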
# ===== gangadhar-kadam/mtn-erpnext :: patches/may_2012/std_pf_readonly.py (Python, agpl-3.0) =====
# Copyright 2016 Huawei Technologies India Pvt. Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from neutron._i18n import _
from neutron._i18n import _LW
from neutron.db import agents_db
from neutron.db import agentschedulers_db as as_db
from neutron.db import model_base
from neutron.extensions import bgp_dragentscheduler as bgp_dras_ext
from neutron.services.bgp.common import constants as bgp_consts
LOG = logging.getLogger(__name__)
BGP_DRAGENT_SCHEDULER_OPTS = [
cfg.StrOpt(
'bgp_drscheduler_driver',
default='neutron.services.bgp.scheduler'
'.bgp_dragent_scheduler.ChanceScheduler',
help=_('Driver used for scheduling BGP speakers to BGP DrAgent'))
]
cfg.CONF.register_opts(BGP_DRAGENT_SCHEDULER_OPTS)
class BgpSpeakerDrAgentBinding(model_base.BASEV2):
"""Represents a mapping between BGP speaker and BGP DRAgent"""
__tablename__ = 'bgp_speaker_dragent_bindings'
bgp_speaker_id = sa.Column(sa.String(length=36),
sa.ForeignKey("bgp_speakers.id",
ondelete='CASCADE'),
nullable=False)
dragent = orm.relation(agents_db.Agent)
agent_id = sa.Column(sa.String(length=36),
sa.ForeignKey("agents.id",
ondelete='CASCADE'),
primary_key=True)
class BgpDrAgentSchedulerDbMixin(bgp_dras_ext.BgpDrSchedulerPluginBase,
as_db.AgentSchedulerDbMixin):
bgp_drscheduler = None
def schedule_unscheduled_bgp_speakers(self, context, host):
if self.bgp_drscheduler:
return self.bgp_drscheduler.schedule_unscheduled_bgp_speakers(
context, host)
else:
LOG.warning(_LW("Cannot schedule BgpSpeaker to DrAgent. "
"Reason: No scheduler registered."))
def schedule_bgp_speaker(self, context, created_bgp_speaker):
if self.bgp_drscheduler:
agents = self.bgp_drscheduler.schedule(context,
created_bgp_speaker)
for agent in agents:
self._bgp_rpc.bgp_speaker_created(context,
created_bgp_speaker['id'],
agent.host)
else:
LOG.warning(_LW("Cannot schedule BgpSpeaker to DrAgent. "
"Reason: No scheduler registered."))
def add_bgp_speaker_to_dragent(self, context, agent_id, speaker_id):
"""Associate a BgpDrAgent with a BgpSpeaker."""
try:
self._save_bgp_speaker_dragent_binding(context,
agent_id,
speaker_id)
except db_exc.DBDuplicateEntry:
raise bgp_dras_ext.DrAgentAssociationError(
agent_id=agent_id)
LOG.debug('BgpSpeaker %(bgp_speaker_id)s added to '
'BgpDrAgent %(agent_id)s',
{'bgp_speaker_id': speaker_id, 'agent_id': agent_id})
def _save_bgp_speaker_dragent_binding(self, context,
agent_id, speaker_id):
with context.session.begin(subtransactions=True):
agent_db = self._get_agent(context, agent_id)
agent_up = agent_db['admin_state_up']
is_agent_bgp = (agent_db['agent_type'] ==
bgp_consts.AGENT_TYPE_BGP_ROUTING)
if not is_agent_bgp or not agent_up:
raise bgp_dras_ext.DrAgentInvalid(id=agent_id)
binding = BgpSpeakerDrAgentBinding()
binding.bgp_speaker_id = speaker_id
binding.agent_id = agent_id
context.session.add(binding)
self._bgp_rpc.bgp_speaker_created(context, speaker_id, agent_db.host)
def remove_bgp_speaker_from_dragent(self, context, agent_id, speaker_id):
with context.session.begin(subtransactions=True):
agent_db = self._get_agent(context, agent_id)
is_agent_bgp = (agent_db['agent_type'] ==
bgp_consts.AGENT_TYPE_BGP_ROUTING)
if not is_agent_bgp:
raise bgp_dras_ext.DrAgentInvalid(id=agent_id)
query = context.session.query(BgpSpeakerDrAgentBinding)
query = query.filter_by(bgp_speaker_id=speaker_id,
agent_id=agent_id)
num_deleted = query.delete()
if not num_deleted:
raise bgp_dras_ext.DrAgentNotHostingBgpSpeaker(
bgp_speaker_id=speaker_id,
agent_id=agent_id)
LOG.debug('BgpSpeaker %(bgp_speaker_id)s removed from '
'BgpDrAgent %(agent_id)s',
{'bgp_speaker_id': speaker_id,
'agent_id': agent_id})
self._bgp_rpc.bgp_speaker_removed(context, speaker_id, agent_db.host)
def get_dragents_hosting_bgp_speakers(self, context, bgp_speaker_ids,
active=None, admin_state_up=None):
query = context.session.query(BgpSpeakerDrAgentBinding)
query = query.options(orm.contains_eager(
BgpSpeakerDrAgentBinding.dragent))
query = query.join(BgpSpeakerDrAgentBinding.dragent)
if len(bgp_speaker_ids) == 1:
query = query.filter(
BgpSpeakerDrAgentBinding.bgp_speaker_id == (
bgp_speaker_ids[0]))
elif bgp_speaker_ids:
# Use SQLAlchemy's in_() to generate a SQL IN clause; the bare Python
# "in" operator would evaluate to a constant boolean instead.
query = query.filter(
BgpSpeakerDrAgentBinding.bgp_speaker_id.in_(bgp_speaker_ids))
if admin_state_up is not None:
query = query.filter(agents_db.Agent.admin_state_up ==
admin_state_up)
return [binding.dragent
for binding in query
if as_db.AgentSchedulerDbMixin.is_eligible_agent(
active, binding.dragent)]
def get_dragent_bgp_speaker_bindings(self, context):
return context.session.query(BgpSpeakerDrAgentBinding).all()
def list_dragent_hosting_bgp_speaker(self, context, speaker_id):
dragents = self.get_dragents_hosting_bgp_speakers(context,
[speaker_id])
agent_ids = [dragent.id for dragent in dragents]
if not agent_ids:
return {'agents': []}
return {'agents': self.get_agents(context, filters={'id': agent_ids})}
def list_bgp_speaker_on_dragent(self, context, agent_id):
query = context.session.query(BgpSpeakerDrAgentBinding.bgp_speaker_id)
query = query.filter_by(agent_id=agent_id)
bgp_speaker_ids = [item[0] for item in query]
if not bgp_speaker_ids:
# Exception will be thrown if the requested agent does not exist.
self._get_agent(context, agent_id)
return {'bgp_speakers': []}
return {'bgp_speakers':
self.get_bgp_speakers(context,
filters={'id': bgp_speaker_ids})}
def get_bgp_speakers_for_agent_host(self, context, host):
agent = self._get_agent_by_type_and_host(
context, bgp_consts.AGENT_TYPE_BGP_ROUTING, host)
if not agent.admin_state_up:
return {}
query = context.session.query(BgpSpeakerDrAgentBinding)
query = query.filter(BgpSpeakerDrAgentBinding.agent_id == agent.id)
try:
binding = query.one()
except exc.NoResultFound:
return []
bgp_speaker = self.get_bgp_speaker_with_advertised_routes(
context, binding['bgp_speaker_id'])
return [bgp_speaker]
def get_bgp_speaker_by_speaker_id(self, context, bgp_speaker_id):
try:
return self.get_bgp_speaker(context, bgp_speaker_id)
except exc.NoResultFound:
return {}
def get_bgp_peer_by_peer_id(self, context, bgp_peer_id):
try:
return self.get_bgp_peer(context, bgp_peer_id)
except exc.NoResultFound:
return {}
# ===== dims/neutron :: neutron/db/bgp_dragentscheduler_db.py (Python, apache-2.0) =====
from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("MLPRegressor" , "freidman1" , "mysql")
# ===== antoinecarme/sklearn2sql_heroku :: tests/regression/freidman1/ws_freidman1_MLPRegressor_mysql_code_gen.py (Python, bsd-3-clause) =====
from __future__ import absolute_import
from datetime import timedelta
import pytest
import logging
import re
import mock
from tornado import gen
from tornado.ioloop import PeriodicCallback, IOLoop
from tornado.httpclient import HTTPError
import bokeh.server.server as server
from bokeh.application import Application
from bokeh.application.handlers import Handler
from bokeh.model import Model
from bokeh.core.properties import List, String
from bokeh.client import pull_session
from bokeh.server.server import Server
from bokeh.util.session_id import check_session_id_signature
from .utils import ManagedServerLoop, url, ws_url, http_get, websocket_open
logging.basicConfig(level=logging.DEBUG)
def test__create_hosts_whitelist_no_host():
hosts = server._create_hosts_whitelist(None, 1000)
assert hosts == ["localhost:1000"]
hosts = server._create_hosts_whitelist([], 1000)
assert hosts == ["localhost:1000"]
def test__create_hosts_whitelist_host_value_with_port_use_port():
hosts = server._create_hosts_whitelist(["foo:1000"], 1000)
assert hosts == ["foo:1000"]
hosts = server._create_hosts_whitelist(["foo:1000","bar:2100"], 1000)
assert hosts == ["foo:1000","bar:2100"]
def test__create_hosts_whitelist_host_without_port_use_port_80():
hosts = server._create_hosts_whitelist(["foo"], 1000)
assert hosts == ["foo:80"]
hosts = server._create_hosts_whitelist(["foo","bar"], 1000)
assert hosts == ["foo:80","bar:80"]
def test__create_hosts_whitelist_host_non_int_port_raises():
with pytest.raises(ValueError):
server._create_hosts_whitelist(["foo:xyz"], 1000)
def test__create_hosts_whitelist_bad_host_raises():
with pytest.raises(ValueError):
server._create_hosts_whitelist([""], 1000)
with pytest.raises(ValueError):
server._create_hosts_whitelist(["a:b:c"], 1000)
with pytest.raises(ValueError):
server._create_hosts_whitelist([":80"], 1000)
@gen.coroutine
def async_value(value):
yield gen.moment # this ensures we actually return to the loop
raise gen.Return(value)
class HookListModel(Model):
hooks = List(String)
class HookTestHandler(Handler):
def __init__(self):
super(HookTestHandler, self).__init__()
self.load_count = 0
self.unload_count = 0
self.session_creation_async_value = 0
self.hooks = []
self.server_periodic_remover = None
self.session_periodic_remover = None
def modify_document(self, doc):
# this checks that the session created hook has run
# and session destroyed has not.
assert self.session_creation_async_value == 3
doc.title = "Modified"
doc.roots[0].hooks.append("modify")
self.hooks.append("modify")
def on_server_loaded(self, server_context):
assert len(server_context.sessions) == 0
self.load_count += 1
self.hooks.append("server_loaded")
server_context.add_next_tick_callback(self.on_next_tick_server)
server_context.add_timeout_callback(self.on_timeout_server, 2)
server_context.add_periodic_callback(self.on_periodic_server, 3)
def remover():
server_context.remove_periodic_callback(self.on_periodic_server)
self.server_periodic_remover = remover
def on_server_unloaded(self, server_context):
self.unload_count += 1
self.hooks.append("server_unloaded")
# important to test that this can be async
@gen.coroutine
def on_session_created(self, session_context):
@gen.coroutine
def setup_document(doc):
# session creation hook is allowed to init the document
# before any modify_document() handlers kick in
from bokeh.document import DEFAULT_TITLE
hook_list = HookListModel()
assert doc.title == DEFAULT_TITLE
assert len(doc.roots) == 0
hook_list.hooks.append("session_created")
doc.add_root(hook_list)
self.session_creation_async_value = yield async_value(1)
self.session_creation_async_value = yield async_value(2)
self.session_creation_async_value = yield async_value(3)
yield session_context.with_locked_document(setup_document)
server_context = session_context.server_context
server_context.add_next_tick_callback(self.on_next_tick_session)
server_context.add_timeout_callback(self.on_timeout_session, 2)
server_context.add_periodic_callback(self.on_periodic_session, 3)
def remover():
server_context.remove_periodic_callback(self.on_periodic_session)
self.session_periodic_remover = remover
self.hooks.append("session_created")
# this has to be async too
@gen.coroutine
def on_session_destroyed(self, session_context):
@gen.coroutine
def shutdown_document(doc):
doc.roots[0].hooks.append("session_destroyed")
self.session_creation_async_value = yield async_value(4)
self.session_creation_async_value = yield async_value(5)
self.session_creation_async_value = yield async_value(6)
yield session_context.with_locked_document(shutdown_document)
self.hooks.append("session_destroyed")
def on_next_tick_server(self):
self.hooks.append("next_tick_server")
def on_timeout_server(self):
self.hooks.append("timeout_server")
def on_periodic_server(self):
self.hooks.append("periodic_server")
self.server_periodic_remover()
def on_next_tick_session(self):
self.hooks.append("next_tick_session")
def on_timeout_session(self):
self.hooks.append("timeout_session")
def on_periodic_session(self):
self.hooks.append("periodic_session")
self.session_periodic_remover()
def test__lifecycle_hooks():
application = Application()
handler = HookTestHandler()
application.add(handler)
with ManagedServerLoop(application, check_unused_sessions_milliseconds=30) as server:
# wait for server callbacks to run before we mix in the
# session, this keeps the test deterministic
def check_done():
if len(handler.hooks) == 4:
server.io_loop.stop()
server_load_checker = PeriodicCallback(check_done, 1,
io_loop=server.io_loop)
server_load_checker.start()
server.io_loop.start()
server_load_checker.stop()
# now we create a session
client_session = pull_session(session_id='test__lifecycle_hooks',
url=url(server),
io_loop=server.io_loop)
client_doc = client_session.document
assert len(client_doc.roots) == 1
server_session = server.get_session('/', client_session.id)
server_doc = server_session.document
assert len(server_doc.roots) == 1
client_session.close()
# expire the session quickly rather than after the
# usual timeout
server_session.request_expiration()
def on_done():
server.io_loop.stop()
server.io_loop.call_later(0.1, on_done)
server.io_loop.start()
assert handler.hooks == ["server_loaded",
"next_tick_server",
"timeout_server",
"periodic_server",
"session_created",
"next_tick_session",
"modify",
"timeout_session",
"periodic_session",
"session_destroyed",
"server_unloaded"]
client_hook_list = client_doc.roots[0]
server_hook_list = server_doc.roots[0]
assert handler.load_count == 1
assert handler.unload_count == 1
assert handler.session_creation_async_value == 6
assert client_doc.title == "Modified"
assert server_doc.title == "Modified"
# the client session doesn't see the event that adds "session_destroyed" since
# we shut down at that point.
assert client_hook_list.hooks == ["session_created", "modify"]
assert server_hook_list.hooks == ["session_created", "modify", "session_destroyed"]
def test_get_sessions():
application = Application()
with ManagedServerLoop(application) as server:
server_sessions = server.get_sessions('/')
assert len(server_sessions) == 0
http_get(server.io_loop, url(server))
server_sessions = server.get_sessions('/')
assert len(server_sessions) == 1
http_get(server.io_loop, url(server))
server_sessions = server.get_sessions('/')
assert len(server_sessions) == 2
server_sessions = server.get_sessions()
assert len(server_sessions) == 2
with pytest.raises(ValueError):
server.get_sessions("/foo")
with ManagedServerLoop({"/foo": application, "/bar": application}) as server:
http_get(server.io_loop, url(server) + "foo")
server_sessions = server.get_sessions('/foo')
assert len(server_sessions) == 1
server_sessions = server.get_sessions('/bar')
assert len(server_sessions) == 0
server_sessions = server.get_sessions()
assert len(server_sessions) == 1
http_get(server.io_loop, url(server) + "foo")
server_sessions = server.get_sessions('/foo')
assert len(server_sessions) == 2
server_sessions = server.get_sessions('/bar')
assert len(server_sessions) == 0
server_sessions = server.get_sessions()
assert len(server_sessions) == 2
http_get(server.io_loop, url(server) + "bar")
server_sessions = server.get_sessions('/foo')
assert len(server_sessions) == 2
server_sessions = server.get_sessions('/bar')
assert len(server_sessions) == 1
server_sessions = server.get_sessions()
assert len(server_sessions) == 3
def test__request_in_session_context():
application = Application()
with ManagedServerLoop(application) as server:
response = http_get(server.io_loop,
url(server) + "?foo=10")
html = response.body
sessionid = extract_sessionid_from_json(html)
server_session = server.get_session('/', sessionid)
server_doc = server_session.document
session_context = server_doc.session_context
# do we have a request
assert session_context.request is not None
def test__request_in_session_context_has_arguments():
application = Application()
with ManagedServerLoop(application) as server:
response = http_get(server.io_loop,
url(server) + "?foo=10")
html = response.body
sessionid = extract_sessionid_from_json(html)
server_session = server.get_session('/', sessionid)
server_doc = server_session.document
session_context = server_doc.session_context
# test if we can get the argument from the request
assert session_context.request.arguments['foo'] == [b'10']
def test__no_request_arguments_in_session_context():
application = Application()
with ManagedServerLoop(application) as server:
response = http_get(server.io_loop,
url(server))
html = response.body
sessionid = extract_sessionid_from_json(html)
server_session = server.get_session('/', sessionid)
server_doc = server_session.document
session_context = server_doc.session_context
# if we do not pass any arguments to the url, the request arguments
# should be empty
assert len(session_context.request.arguments) == 0
# examples:
# "sessionid" : "NzlNoPfEYJahnPljE34xI0a5RSTaU1Aq1Cx5"
# 'sessionid':'NzlNoPfEYJahnPljE34xI0a5RSTaU1Aq1Cx5'
sessionid_in_json = re.compile("""["']sessionid["'] *: *["']([^"]+)["']""")
def extract_sessionid_from_json(html):
from six import string_types
if not isinstance(html, string_types):
import codecs
html = codecs.decode(html, 'utf-8')
match = sessionid_in_json.search(html)
return match.group(1)
# examples:
# "use_for_title" : "false"
# 'use_for_title':'true'
use_for_title_in_json = re.compile("""["']use_for_title["'] *: *(false|true)""")
def extract_use_for_title_from_json(html):
from six import string_types
if not isinstance(html, string_types):
import codecs
html = codecs.decode(html, 'utf-8')
match = use_for_title_in_json.search(html)
return match.group(1)
def autoload_url(server):
return url(server) + \
"autoload.js?bokeh-protocol-version=1.0&bokeh-autoload-element=foo"
def resource_files_requested(response, requested=True):
from six import string_types
if not isinstance(response, string_types):
import codecs
response = codecs.decode(response, 'utf-8')
for file in [
'static/css/bokeh.min.css', 'static/css/bokeh-widgets.min.css',
'static/js/bokeh.min.js', 'static/js/bokeh-widgets.min.js']:
if requested:
assert file in response
else:
assert file not in response
def test_use_xheaders():
application = Application()
with ManagedServerLoop(application, use_xheaders=True) as server:
assert server._http.xheaders == True
@pytest.mark.parametrize("querystring,requested", [
("", True),
("&resources=default", True),
("&resources=whatever", True),
("&resources=none", False),
])
def test__resource_files_requested(querystring, requested):
"""
Checks if the loading of resource files is requested by the autoload.js
response based on the value of the "resources" parameter.
"""
application = Application()
with ManagedServerLoop(application) as server:
response = http_get(server.io_loop,
autoload_url(server) + querystring)
resource_files_requested(response.body, requested=requested)
def test__autocreate_session_autoload():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
response = http_get(server.io_loop,
autoload_url(server))
js = response.body
sessionid = extract_sessionid_from_json(js)
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert sessionid == sessions[0].id
def test__no_set_title_autoload():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
response = http_get(server.io_loop,
autoload_url(server))
js = response.body
use_for_title = extract_use_for_title_from_json(js)
assert use_for_title == "false"
def test__autocreate_session_doc():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
response = http_get(server.io_loop,
url(server))
html = response.body
sessionid = extract_sessionid_from_json(html)
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert sessionid == sessions[0].id
def test__no_autocreate_session_websocket():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
websocket_open(server.io_loop,
ws_url(server) + "?bokeh-protocol-version=1.0")
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__use_provided_session_autoload():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
response = http_get(server.io_loop,
autoload_url(server) + "&bokeh-session-id=" + expected)
js = response.body
sessionid = extract_sessionid_from_json(js)
assert expected == sessionid
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert expected == sessions[0].id
def test__use_provided_session_doc():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
response = http_get(server.io_loop,
url(server) + "?bokeh-session-id=" + expected)
html = response.body
sessionid = extract_sessionid_from_json(html)
assert expected == sessionid
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert expected == sessions[0].id
def test__use_provided_session_websocket():
application = Application()
with ManagedServerLoop(application) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
url = ws_url(server) + \
"?bokeh-protocol-version=1.0" + \
"&bokeh-session-id=" + expected
websocket_open(server.io_loop,
url)
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert expected == sessions[0].id
def test__autocreate_signed_session_autoload():
application = Application()
with ManagedServerLoop(application, sign_sessions=True, secret_key='foo') as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
response = http_get(server.io_loop,
autoload_url(server))
js = response.body
sessionid = extract_sessionid_from_json(js)
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert sessionid == sessions[0].id
assert check_session_id_signature(sessionid, signed=True, secret_key='foo')
def test__autocreate_signed_session_doc():
application = Application()
with ManagedServerLoop(application, sign_sessions=True, secret_key='foo') as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
response = http_get(server.io_loop,
url(server))
html = response.body
sessionid = extract_sessionid_from_json(html)
sessions = server.get_sessions('/')
assert 1 == len(sessions)
assert sessionid == sessions[0].id
assert check_session_id_signature(sessionid, signed=True, secret_key='foo')
def test__reject_unsigned_session_autoload():
application = Application()
with ManagedServerLoop(application, sign_sessions=True, secret_key='bar') as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
with (pytest.raises(HTTPError)) as info:
http_get(server.io_loop,
autoload_url(server) + "&bokeh-session-id=" + expected)
assert 'Invalid session ID' in repr(info.value)
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__reject_unsigned_session_doc():
application = Application()
with ManagedServerLoop(application, sign_sessions=True, secret_key='bar') as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
with (pytest.raises(HTTPError)) as info:
http_get(server.io_loop, url(server) + "?bokeh-session-id=" + expected)
assert 'Invalid session ID' in repr(info.value)
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__reject_unsigned_session_websocket():
application = Application()
with ManagedServerLoop(application, sign_sessions=True, secret_key='bar') as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
expected = 'foo'
url = ws_url(server) + \
"?bokeh-protocol-version=1.0" + \
"&bokeh-session-id=" + expected
websocket_open(server.io_loop,
url)
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__no_generate_session_autoload():
application = Application()
with ManagedServerLoop(application, generate_session_ids=False) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
with (pytest.raises(HTTPError)) as info:
http_get(server.io_loop, autoload_url(server))
assert 'No bokeh-session-id provided' in repr(info.value)
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__no_generate_session_doc():
application = Application()
with ManagedServerLoop(application, generate_session_ids=False) as server:
sessions = server.get_sessions('/')
assert 0 == len(sessions)
with (pytest.raises(HTTPError)) as info:
http_get(server.io_loop, url(server))
assert 'No bokeh-session-id provided' in repr(info.value)
sessions = server.get_sessions('/')
assert 0 == len(sessions)
def test__server_multiple_processes():
with mock.patch('tornado.process.fork_processes') as tornado_fp:
application = Application()
with ManagedServerLoop(application, num_procs=3):
pass
tornado_fp.assert_called_with(3)
def test__existing_ioloop_with_multiple_processes_exception():
application = Application()
ioloop_instance = IOLoop.instance() ; ioloop_instance # silence flake8
with pytest.raises(RuntimeError):
with ManagedServerLoop(application, num_procs=3):
pass
def test__actual_port_number():
application = Application()
with ManagedServerLoop(application, port=0) as server:
port = server.port
assert port > 0
http_get(server.io_loop, url(server))
def test__ioloop_not_forcibly_stopped():
# Issue #5494
application = Application()
loop = IOLoop()
loop.make_current()
server = Server(application, ioloop=loop)
server.start()
result = []
def f():
server.unlisten()
server.stop()
# If server.stop() were to stop the Tornado IO loop,
# g() wouldn't be called and `result` would remain empty.
loop.add_timeout(timedelta(seconds=0.01), g)
def g():
result.append(None)
loop.stop()
loop.add_callback(f)
loop.start()
assert result == [None]
# ===== Ziqi-Li/bknqgis :: bokeh/bokeh/server/tests/test_server.py (Python, gpl-2.0) =====
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for type_info module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import cfg
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct.static_analysis import activity
from tensorflow.python.autograph.pyct.static_analysis import live_values
from tensorflow.python.autograph.pyct.static_analysis import reaching_definitions
from tensorflow.python.autograph.pyct.static_analysis import type_info
from tensorflow.python.client import session
from tensorflow.python.platform import test
from tensorflow.python.training import training
class ScopeTest(test.TestCase):
def test_basic(self):
scope = type_info.Scope(None)
self.assertFalse(scope.hasval('foo'))
scope.setval('foo', 'bar')
self.assertTrue(scope.hasval('foo'))
self.assertFalse(scope.hasval('baz'))
def test_nesting(self):
scope = type_info.Scope(None)
scope.setval('foo', '')
child = type_info.Scope(scope)
self.assertTrue(child.hasval('foo'))
self.assertTrue(scope.hasval('foo'))
child.setval('bar', '')
self.assertTrue(child.hasval('bar'))
self.assertFalse(scope.hasval('bar'))
class TypeInfoResolverTest(test.TestCase):
def _parse_and_analyze(self,
test_fn,
namespace,
arg_types=None):
node, source = parser.parse_entity(test_fn)
entity_info = transformer.EntityInfo(
source_code=source,
source_file=None,
namespace=namespace,
arg_values=None,
arg_types=arg_types,
owner_type=None)
node = qual_names.resolve(node)
graphs = cfg.build(node)
ctx = transformer.Context(entity_info)
node = activity.resolve(node, ctx)
node = reaching_definitions.resolve(node, ctx, graphs,
reaching_definitions.Definition)
node = live_values.resolve(node, ctx, {})
node = type_info.resolve(node, ctx)
node = live_values.resolve(node, ctx, {})
return node
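# Note the pass ordering above: qual_names and cfg run first, then activity,
# reaching_definitions and live_values; live_values runs a second time after
# type_info so that values resolved via type information become visible.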
def test_constructor_detection(self):
def test_fn():
opt = training.GradientDescentOptimizer(0.1)
return opt
node = self._parse_and_analyze(test_fn, {'training': training})
call_node = node.body[0].body[0].value
self.assertEquals(training.GradientDescentOptimizer,
anno.getanno(call_node, 'type'))
self.assertEquals((training.__name__, 'GradientDescentOptimizer'),
anno.getanno(call_node, 'type_fqn'))
def test_class_members_of_detected_constructor(self):
def test_fn():
opt = training.GradientDescentOptimizer(0.1)
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertEquals(training.GradientDescentOptimizer.minimize,
anno.getanno(method_call, 'live_val'))
def test_class_members_in_with_stmt(self):
def test_fn(x):
with session.Session() as sess:
sess.run(x)
node = self._parse_and_analyze(test_fn, {'session': session})
constructor_call = node.body[0].body[0].items[0].context_expr
self.assertEquals(session.Session, anno.getanno(constructor_call, 'type'))
self.assertEquals((session.__name__, 'Session'),
anno.getanno(constructor_call, 'type_fqn'))
method_call = node.body[0].body[0].body[0].value.func
self.assertEquals(session.Session.run, anno.getanno(method_call,
'live_val'))
def test_constructor_data_dependent(self):
def test_fn(x):
if x > 0:
opt = training.GradientDescentOptimizer(0.1)
else:
opt = training.GradientDescentOptimizer(0.01)
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_parameter_class_members(self):
def test_fn(opt):
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {})
method_call = node.body[0].body[0].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_parameter_class_members_with_value_hints(self):
def test_fn(opt):
opt.minimize(0)
node = self._parse_and_analyze(
test_fn, {},
arg_types={
'opt': (training.GradientDescentOptimizer.__name__,
training.GradientDescentOptimizer)
})
method_call = node.body[0].body[0].value.func
self.assertEquals(training.GradientDescentOptimizer.minimize,
anno.getanno(method_call, 'live_val'))
def test_function_variables(self):
def bar():
pass
def test_fn():
foo = bar
foo()
node = self._parse_and_analyze(test_fn, {'bar': bar})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_nested_members(self):
def test_fn():
foo = training.GradientDescentOptimizer(0.1)
foo.bar.baz()
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_nested_unpacking(self):
class Foo(object):
pass
class Bar(object):
pass
def test_fn():
a, (b, c) = (Foo(), (Bar(), Foo()))
return a, b, c
node = self._parse_and_analyze(test_fn, {'Foo': Foo, 'Bar': Bar})
a, b, c = node.body[0].body[1].value.elts
self.assertEquals(anno.getanno(a, 'type'), Foo)
self.assertEquals(anno.getanno(b, 'type'), Bar)
self.assertEquals(anno.getanno(c, 'type'), Foo)
self.assertFalse(anno.hasanno(a, 'live_val'))
self.assertFalse(anno.hasanno(b, 'live_val'))
self.assertFalse(anno.hasanno(c, 'live_val'))
if __name__ == '__main__':
test.main()
# ===== apark263/tensorflow :: tensorflow/python/autograph/pyct/static_analysis/type_info_test.py (Python, apache-2.0) =====
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
#Used in subsequent imports from params
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from install_params import exclude_packages
from status_params import *
config = Script.get_config()
hadoop_conf_dir = None
hbase_conf_dir = None
hadoop_home = None
try:
hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
hadoop_home = os.environ["HADOOP_HOME"]
except:
pass
#directories & files
dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
#decomission
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
include_file_path = default("/configurations/hdfs-site/dfs.hosts", None)
hdfs_include_file = None
manage_include_files = default("/configurations/hdfs-site/manage.include.files", False)
if include_file_path and manage_include_files:
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
hdfs_include_file = slave_hosts
update_files_only = default("/commandParams/update_files_only",False)
# HDFS High Availability properties
dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.internal.nameservices", None)
dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
namenode_id = None
namenode_rpc = None
hostname = config["hostname"]
if dfs_ha_namenode_ids:
dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
if dfs_ha_namenode_ids_array_len > 1:
dfs_ha_enabled = True
if dfs_ha_enabled:
for nn_id in dfs_ha_namemodes_ids_list:
nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
if hostname.lower() in nn_host.lower():
namenode_id = nn_id
namenode_rpc = nn_host
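# Illustrative example (assumed configuration values): with
#   dfs.internal.nameservices = mycluster
#   dfs.ha.namenodes.mycluster = nn1,nn2
#   dfs.namenode.rpc-address.mycluster.nn1 = host1.example.com:8020
# a host named host1.example.com resolves namenode_id = 'nn1' and
# namenode_rpc = 'host1.example.com:8020' in the loop above.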
hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
hdfs_user = hadoop_user
grep_exe = "findstr"
name_node_params = default("/commandParams/namenode", None)
service_map = {
"datanode" : datanode_win_service_name,
"journalnode" : journalnode_win_service_name,
"namenode" : namenode_win_service_name,
"secondarynamenode" : snamenode_win_service_name,
"zkfc_slave": zkfc_win_service_name
}
# ===== radicalbit/ambari :: ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py (Python, apache-2.0) =====
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
ISO PART YET TO BE ADDED:: remove this after adding it.
'''
import sys, os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/'+'../lib'))
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import unittest, time
import initialize
import Global_Locators
class Template_Add(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templateadd(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
# Add Template
driver.find_element_by_xpath(Global_Locators.AddTemplate_xpath).click()
# Following have names.. so they do not have their global entries.
driver.find_element_by_id("label_name").clear()
driver.find_element_by_id("label_name").send_keys("Test Template Ubuntu")
driver.find_element_by_id("label_description").clear()
driver.find_element_by_id("label_description").send_keys("Ubuntu 10.04")
driver.find_element_by_id("URL").clear()
driver.find_element_by_id("URL").send_keys("http://nfs1.lab.vmops.com/templates/Ubuntu/Ubuntuu-10-04-64bit-server.vhd")
Select(driver.find_element_by_id("label_os_type")).select_by_visible_text("Ubuntu 10.04 (64-bit)")
driver.find_element_by_id("label_public").click()
driver.find_element_by_id("label_featured").click()
driver.find_element_by_xpath("//button[@type='button']").click()
time.sleep(2)
# Go to Dash Board
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(600)
## Verification: the new template should show up in the table and be editable.
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException: return False
return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Template_Edit(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templateedit(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
linkclass = None
linkclass = driver.find_elements_by_xpath(Global_Locators.template_table_xpath) # This returns a list
for link in linkclass:
if link.text == "Test Template Ubuntu": # We will search for our VM in this table
link.click()
time.sleep(2)
# Change name
driver.find_element_by_name("name").clear()
driver.find_element_by_name("name").send_keys("Test template")
# Change Description
driver.find_element_by_name("displaytext").clear()
driver.find_element_by_name("displaytext").send_keys("ubuntu")
driver.find_element_by_css_selector(Global_Locators.template_editdone_css).click()
time.sleep(2)
#Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(10)
def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
# Now we will find this template and delete it.
class Template_Delete(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templatedelete(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
linkclass = None
linkclass = driver.find_elements_by_xpath(Global_Locators.template_table_xpath) # This returns a list
for link in linkclass:
if link.text == "Test Template": # We will search for our VM in this table
link.click()
time.sleep(2)
        driver.find_element_by_css_selector(Global_Locators.template_delete_css).click()
driver.find_element_by_xpath(Global_Locators.yesconfirmation_xapth).click()
time.sleep(2)
#Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(20)
def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
|
GabrielBrascher/cloudstack
|
test/selenium/smoke/TemplatesAndISO.py
|
Python
|
apache-2.0
| 6,757 | 0.027823 |
from unittest import TestCase
from django.conf import settings
from django.test.utils import override_settings
from mock import patch
from elastic_django.client import ElasticsearchClient
from elastic_django.exceptions import ElasticsearchClientConfigurationError
class ElasticsearchClientTestCase(TestCase):
def test_client_constructor_sanity_check(self):
"""
Tests sanity checks in ``ElasticsearchClient.__init__``.
"""
self.assertRaises(
ElasticsearchClientConfigurationError,
ElasticsearchClient,
hosts='not a list or tuple'
)
@override_settings(ELASTICSEARCH_HOSTS=None)
@patch('elasticsearch.Elasticsearch.ping')
def test_no_hosts_given_nor_configured(self, mock):
"""
        Tests client behaviour when called with no hosts specified and no
        hosts defined in Django settings. It should fall back to the default
        Elasticsearch configuration (localhost, port 9200).
"""
# Delete setting.
del settings.ELASTICSEARCH_HOSTS
# Mock ES backend ping response to pass test.
mock.return_value = True
client = ElasticsearchClient()
self.assertEqual(client.hosts, [{'host': 'localhost', 'port': '9200'}])
@override_settings(
ELASTICSEARCH_HOSTS=[{'host': '127.0.0.1', 'port': '443'}])
@patch('elasticsearch.Elasticsearch.ping')
def test_no_hosts_given_and_configured(self, mock):
"""
        Tests client behaviour when called with no hosts specified and
        hosts already defined in Django settings.
"""
# Mock ES backend ping response to pass test.
mock.return_value = True
client = ElasticsearchClient()
self.assertEqual(client.hosts, [{'host': '127.0.0.1', 'port': '443'}])
@override_settings(
ELASTICSEARCH_HOSTS=[{'host': '127.0.0.1', 'port': '9999'}])
def test_no_ping_response(self):
"""
        Tests the exception raised when the backend doesn't respond to ping,
        i.e. the specified backend is unavailable.
"""
self.assertRaises(
ElasticsearchClientConfigurationError, ElasticsearchClient)
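# A typical settings entry this client reads (a sketch; the key name and
# shape are taken from the tests above):
#     ELASTICSEARCH_HOSTS = [{'host': '127.0.0.1', 'port': '9200'}]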
|
jose-lpa/elastic-django
|
tests/test_client.py
|
Python
|
bsd-3-clause
| 2,202 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, OVH SAS
#
# This file is part of ip-reputation-monitoring.
#
# ip-reputation-monitoring is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Blocklist CSV parser
"""
from datetime import datetime
from parsing.csv.csvparser import CSVParser
PARSER_NAME = 'BlockList'
def compute_weight(service):
"""
    Map a service to a weight. All reported services have a
    default weight of 10, except ssh failed attempts (1), manual
    list additions (5) and the catch-all category "all" (5).
"""
return {
'ssh': 1,
'all': 5,
'manually added': 5
}.get(service, 10)
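# For example (a sketch of the mapping above; 'apache' is just an
# illustrative unlisted service):
#     compute_weight('ssh') -> 1
#     compute_weight('apache') -> 10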
class BlockListParser(CSVParser):
"""
Blocklist.de dedicated csv parser
"""
def __init__(self, path):
CSVParser.__init__(self, path, ':')
def compute_weight(self, data):
return compute_weight(self._get_service(data[3]))
def get_date(self, data):
timestamp = float(data[4].strip()[:10])
return datetime.utcfromtimestamp(timestamp)
def get_source(self, data):
return PARSER_NAME
def get_ip(self, data):
if len(data) != 6:
return None
return data[0]
def _get_service(self, cell):
""" Try to extract service associated to the issue from a cell """
return cell.strip().split(',')[0]
@staticmethod
def get_description():
""" Mandatory method for auto-registration """
return {
'name': PARSER_NAME,
'shortened': 'BLCK'
}
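# A minimal usage sketch (the report path is hypothetical):
#     parser = BlockListParser('/tmp/blocklist_report.csv')
#     # Rows are colon-separated, 6-field records; get_ip() returns None
#     # for any row that does not have exactly 6 fields.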
|
ovh/ip-reputation-monitoring
|
reputation/parsing/csv/blocklistde.py
|
Python
|
gpl-3.0
| 2,133 | 0.000469 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('home', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Assignment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Chore',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('title', models.CharField(max_length=255)),
('description', models.TextField()),
('assigned_to', models.ManyToManyField(to=settings.AUTH_USER_MODEL, through='chores.Assignment')),
('home', models.ForeignKey(related_name='chores', to='home.Home')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.AddField(
model_name='assignment',
name='chore',
field=models.ForeignKey(to='chores.Chore'),
preserve_default=True,
),
migrations.AddField(
model_name='assignment',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
|
birkholz/homeboard
|
chores/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 1,978 | 0.002022 |
# Define settings that are specific to the local environment.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'database.db',
}
}
INTERNAL_IPS = ('127.0.0.1',)
from custom_settings import INSTALLED_APPS
INSTALLED_APPS += (
# 'debug_toolbar',
)
|
django-settings/django-settings
|
myproject/myproject/local_settings.py
|
Python
|
unlicense
| 301 | 0.003322 |
import operator
import random
import pytest
from diofant import (And, Eq, Equality, FiniteSet, Float, Function, Ge,
GreaterThan, Gt, I, Implies, Integer, Interval, Le,
LessThan, Lt, Ne, Not, Or, Rational, Rel, Relational,
StrictGreaterThan, StrictLessThan, Symbol, Unequality,
Wild, Xor, ceiling, false, floor, nan, oo, pi, simplify,
sqrt, true, zoo)
from diofant.abc import t, w, x, y, z
from diofant.core.relational import _Inequality as Inequality
__all__ = ()
def test_rel_ne():
assert Relational(x, y, '!=') == Ne(x, y)
def test_rel_subs():
e = Relational(x, y, '==')
e = e.subs({x: z})
assert isinstance(e, Equality)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>=')
e = e.subs({x: z})
assert isinstance(e, GreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<=')
e = e.subs({x: z})
assert isinstance(e, LessThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>')
e = e.subs({x: z})
assert isinstance(e, StrictGreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<')
e = e.subs({x: z})
assert isinstance(e, StrictLessThan)
assert e.lhs == z
assert e.rhs == y
e = Eq(x, 0)
assert e.subs({x: 0}) is true
assert e.subs({x: 1}) is false
def test_wrappers():
e = x + x**2
res = Relational(y, e, '==')
assert Rel(y, x + x**2, '==') == res
assert Eq(y, x + x**2) == res
res = Relational(y, e, '<')
assert Lt(y, x + x**2) == res
res = Relational(y, e, '<=')
assert Le(y, x + x**2) == res
res = Relational(y, e, '>')
assert Gt(y, x + x**2) == res
res = Relational(y, e, '>=')
assert Ge(y, x + x**2) == res
res = Relational(y, e, '!=')
assert Ne(y, x + x**2) == res
def test_Eq():
assert Eq(x**2, 0) == Eq(x**2, 0)
assert Eq(x**2, 0) != Eq(x**2, 1)
pytest.raises(TypeError, lambda: Eq(x))
assert Eq(x, x) # issue sympy/sympy#5719
# issue sympy/sympy#6116
p = Symbol('p', positive=True)
assert Eq(p, 0) is false
def test_rel_Infinity():
# pylint: disable=comparison-with-itself
# NOTE: All of these are actually handled by diofant.core.Number, and do
# not create Relational objects.
assert (oo > oo) is false
assert (oo > -oo) is true
assert (oo > 1) is true
assert (oo < oo) is false
assert (oo < -oo) is false
assert (oo < 1) is false
assert (oo >= oo) is true
assert (oo >= -oo) is true
assert (oo >= 1) is true
assert (oo <= oo) is true
assert (oo <= -oo) is false
assert (oo <= 1) is false
assert (-oo > oo) is false
assert (-oo > -oo) is false
assert (-oo > 1) is false
assert (-oo < oo) is true
assert (-oo < -oo) is false
assert (-oo < 1) is true
assert (-oo >= oo) is false
assert (-oo >= -oo) is true
assert (-oo >= 1) is false
assert (-oo <= oo) is true
assert (-oo <= -oo) is true
assert (-oo <= 1) is true
def test_bool():
assert Eq(0, 0) is true
assert Eq(1, 0) is false
assert Ne(0, 0) is false
assert Ne(1, 0) is true
assert Lt(0, 1) is true
assert Lt(1, 0) is false
assert Le(0, 1) is true
assert Le(1, 0) is false
assert Le(0, 0) is true
assert Gt(1, 0) is true
assert Gt(0, 1) is false
assert Ge(1, 0) is true
assert Ge(0, 1) is false
assert Ge(1, 1) is true
assert Eq(I, 2) is false
assert Ne(I, 2) is true
pytest.raises(TypeError, lambda: Gt(I, 2))
pytest.raises(TypeError, lambda: Ge(I, 2))
pytest.raises(TypeError, lambda: Lt(I, 2))
pytest.raises(TypeError, lambda: Le(I, 2))
a = Float('.000000000000000000001')
b = Float('.0000000000000000000001')
assert Eq(pi + a, pi + b) is false
def test_rich_cmp():
assert (x < y) == Lt(x, y)
assert (x <= y) == Le(x, y)
assert (x > y) == Gt(x, y)
assert (x >= y) == Ge(x, y)
def test_doit():
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
np = Symbol('np', nonpositive=True)
nn = Symbol('nn', nonnegative=True)
assert Gt(p, 0).doit() is true
assert Gt(p, 1).doit() == Gt(p, 1)
assert Ge(p, 0).doit() is true
assert Le(p, 0).doit() is false
assert Lt(n, 0).doit() is true
assert Le(np, 0).doit() is true
assert Gt(nn, 0).doit() == Gt(nn, 0)
assert Lt(nn, 0).doit() is false
assert Eq(x, 0).doit() == Eq(x, 0)
def test_new_relational():
assert Eq(x, 0) == Relational(x, 0) # None ==> Equality
assert Eq(x, 0) == Relational(x, 0, '==')
assert Eq(x, 0) == Relational(x, 0, 'eq')
assert Eq(x, 0) == Equality(x, 0)
assert Eq(x, -1) == Relational(x, -1) # None ==> Equality
assert Eq(x, -1) == Relational(x, -1, '==')
assert Eq(x, -1) == Relational(x, -1, 'eq')
assert Eq(x, -1) == Equality(x, -1)
assert Eq(x, 0) != Relational(x, 1) # None ==> Equality
assert Eq(x, 0) != Relational(x, 1, '==')
assert Eq(x, 0) != Relational(x, 1, 'eq')
assert Eq(x, 0) != Equality(x, 1)
assert Eq(x, -1) != Relational(x, 1) # None ==> Equality
assert Eq(x, -1) != Relational(x, 1, '==')
assert Eq(x, -1) != Relational(x, 1, 'eq')
assert Eq(x, -1) != Equality(x, 1)
assert Ne(x, 0) == Relational(x, 0, '!=')
assert Ne(x, 0) == Relational(x, 0, '<>')
assert Ne(x, 0) == Relational(x, 0, 'ne')
assert Ne(x, 0) == Unequality(x, 0)
assert Ne(x, 0) != Relational(x, 1, '!=')
assert Ne(x, 0) != Relational(x, 1, '<>')
assert Ne(x, 0) != Relational(x, 1, 'ne')
assert Ne(x, 0) != Unequality(x, 1)
assert Ge(x, 0) == Relational(x, 0, '>=')
assert Ge(x, 0) == Relational(x, 0, 'ge')
assert Ge(x, 0) == GreaterThan(x, 0)
assert Ge(x, 1) != Relational(x, 0, '>=')
assert Ge(x, 1) != Relational(x, 0, 'ge')
assert Ge(x, 1) != GreaterThan(x, 0)
assert (x >= 1) == Relational(x, 1, '>=')
assert (x >= 1) == Relational(x, 1, 'ge')
assert (x >= 1) == GreaterThan(x, 1)
assert (x >= 0) != Relational(x, 1, '>=')
assert (x >= 0) != Relational(x, 1, 'ge')
assert (x >= 0) != GreaterThan(x, 1)
assert Le(x, 0) == Relational(x, 0, '<=')
assert Le(x, 0) == Relational(x, 0, 'le')
assert Le(x, 0) == LessThan(x, 0)
assert Le(x, 1) != Relational(x, 0, '<=')
assert Le(x, 1) != Relational(x, 0, 'le')
assert Le(x, 1) != LessThan(x, 0)
assert (x <= 1) == Relational(x, 1, '<=')
assert (x <= 1) == Relational(x, 1, 'le')
assert (x <= 1) == LessThan(x, 1)
assert (x <= 0) != Relational(x, 1, '<=')
assert (x <= 0) != Relational(x, 1, 'le')
assert (x <= 0) != LessThan(x, 1)
assert Gt(x, 0) == Relational(x, 0, '>')
assert Gt(x, 0) == Relational(x, 0, 'gt')
assert Gt(x, 0) == StrictGreaterThan(x, 0)
assert Gt(x, 1) != Relational(x, 0, '>')
assert Gt(x, 1) != Relational(x, 0, 'gt')
assert Gt(x, 1) != StrictGreaterThan(x, 0)
assert (x > 1) == Relational(x, 1, '>')
assert (x > 1) == Relational(x, 1, 'gt')
assert (x > 1) == StrictGreaterThan(x, 1)
assert (x > 0) != Relational(x, 1, '>')
assert (x > 0) != Relational(x, 1, 'gt')
assert (x > 0) != StrictGreaterThan(x, 1)
assert Lt(x, 0) == Relational(x, 0, '<')
assert Lt(x, 0) == Relational(x, 0, 'lt')
assert Lt(x, 0) == StrictLessThan(x, 0)
assert Lt(x, 1) != Relational(x, 0, '<')
assert Lt(x, 1) != Relational(x, 0, 'lt')
assert Lt(x, 1) != StrictLessThan(x, 0)
assert (x < 1) == Relational(x, 1, '<')
assert (x < 1) == Relational(x, 1, 'lt')
assert (x < 1) == StrictLessThan(x, 1)
assert (x < 0) != Relational(x, 1, '<')
assert (x < 0) != Relational(x, 1, 'lt')
assert (x < 0) != StrictLessThan(x, 1)
# finally, some fuzz testing
for _ in range(100):
while 1:
strtype, length = (chr, 65535) if random.randint(0, 1) else (chr, 255)
relation_type = strtype(random.randint(0, length))
if random.randint(0, 1):
relation_type += strtype(random.randint(0, length))
if relation_type not in ('==', 'eq', '!=', '<>', 'ne', '>=', 'ge',
'<=', 'le', '>', 'gt', '<', 'lt', ':='):
break
pytest.raises(ValueError, lambda: Relational(x, 1, relation_type))
assert all(Relational(x, 0, op).rel_op == '==' for op in ('eq', '=='))
assert all(Relational(x, 0, op).rel_op == '!=' for op in ('ne', '<>', '!='))
assert all(Relational(x, 0, op).rel_op == '>' for op in ('gt', '>'))
assert all(Relational(x, 0, op).rel_op == '<' for op in ('lt', '<'))
assert all(Relational(x, 0, op).rel_op == '>=' for op in ('ge', '>='))
assert all(Relational(x, 0, op).rel_op == '<=' for op in ('le', '<='))
# issue sympy/sympy#10633
assert Eq(True, False) is false
assert Eq(False, True) is false
assert Eq(True, True) is true
assert Eq(False, False) is true
def test_relational_bool_output():
# https://github.com/sympy/sympy/issues/5931
pytest.raises(TypeError, lambda: bool(x > 3))
pytest.raises(TypeError, lambda: bool(x >= 3))
pytest.raises(TypeError, lambda: bool(x < 3))
pytest.raises(TypeError, lambda: bool(x <= 3))
pytest.raises(TypeError, lambda: bool(Eq(x, 3)))
pytest.raises(TypeError, lambda: bool(Ne(x, 3)))
def test_relational_logic_symbols():
# See issue sympy/sympy#6204
assert (x < y) & (z < t) == And(x < y, z < t)
assert (x < y) | (z < t) == Or(x < y, z < t)
assert ~(x < y) == Not(x < y)
assert (x < y) >> (z < t) == Implies(x < y, z < t)
assert (x < y) << (z < t) == Implies(z < t, x < y)
assert (x < y) ^ (z < t) == Xor(x < y, z < t)
assert isinstance((x < y) & (z < t), And)
assert isinstance((x < y) | (z < t), Or)
assert isinstance(~(x < y), GreaterThan)
assert isinstance((x < y) >> (z < t), Implies)
assert isinstance((x < y) << (z < t), Implies)
assert isinstance((x < y) ^ (z < t), (Or, Xor))
def test_univariate_relational_as_set():
assert (x > 0).as_set() == Interval(0, oo, True)
assert (x >= 0).as_set() == Interval(0, oo)
assert (x < 0).as_set() == Interval(-oo, 0, False, True)
assert (x <= 0).as_set() == Interval(-oo, 0)
assert Eq(x, 0).as_set() == FiniteSet(0)
assert Ne(x, 0).as_set() == Interval(-oo, 0, False, True) + Interval(0, oo, True)
assert (x**2 >= 4).as_set() == Interval(-oo, -2) + Interval(2, oo)
@pytest.mark.xfail
def test_multivariate_relational_as_set():
(x*y >= 0).as_set()
# Interval(0, oo)*Interval(0, oo) + Interval(-oo, 0)*Interval(-oo, 0)
def test_Not():
assert Not(Equality(x, y)) == Unequality(x, y)
assert Not(Unequality(x, y)) == Equality(x, y)
assert Not(StrictGreaterThan(x, y)) == LessThan(x, y)
assert Not(StrictLessThan(x, y)) == GreaterThan(x, y)
assert Not(GreaterThan(x, y)) == StrictLessThan(x, y)
assert Not(LessThan(x, y)) == StrictGreaterThan(x, y)
def test_evaluate():
assert str(Eq(x, x, evaluate=False)) == 'Eq(x, x)'
assert Eq(x, x, evaluate=False).doit() == true
assert str(Ne(x, x, evaluate=False)) == 'Ne(x, x)'
assert Ne(x, x, evaluate=False).doit() == false
assert str(Ge(x, x, evaluate=False)) == 'x >= x'
assert str(Le(x, x, evaluate=False)) == 'x <= x'
assert str(Gt(x, x, evaluate=False)) == 'x > x'
assert str(Lt(x, x, evaluate=False)) == 'x < x'
def assert_all_ineq_raise_TypeError(a, b):
pytest.raises(TypeError, lambda: a > b)
pytest.raises(TypeError, lambda: a >= b)
pytest.raises(TypeError, lambda: a < b)
pytest.raises(TypeError, lambda: a <= b)
pytest.raises(TypeError, lambda: b > a)
pytest.raises(TypeError, lambda: b >= a)
pytest.raises(TypeError, lambda: b < a)
pytest.raises(TypeError, lambda: b <= a)
def assert_all_ineq_give_class_Inequality(a, b):
"""All inequality operations on `a` and `b` result in class Inequality."""
assert isinstance(a > b, Inequality)
assert isinstance(a >= b, Inequality)
assert isinstance(a < b, Inequality)
assert isinstance(a <= b, Inequality)
assert isinstance(b > a, Inequality)
assert isinstance(b >= a, Inequality)
assert isinstance(b < a, Inequality)
assert isinstance(b <= a, Inequality)
def test_imaginary_compare_raises_TypeError():
# See issue sympy/sympy#5724
assert_all_ineq_raise_TypeError(I, x)
def test_complex_compare_not_real():
# two cases which are not real
y = Symbol('y', imaginary=True, nonzero=True)
z = Symbol('z', complex=True, extended_real=False)
for a in (y, z):
assert_all_ineq_raise_TypeError(2, a)
# some cases which should remain un-evaluated
x = Symbol('x', extended_real=True)
z = Symbol('z', complex=True)
for a in (x, z, t):
assert_all_ineq_give_class_Inequality(2, a)
def test_imaginary_and_inf_compare_raises_TypeError():
# See pull request sympy/sympy#7835
y = Symbol('y', imaginary=True, nonzero=True)
assert_all_ineq_raise_TypeError(oo, y)
assert_all_ineq_raise_TypeError(-oo, y)
def test_complex_pure_imag_not_ordered():
pytest.raises(TypeError, lambda: 2*I < 3*I)
# more generally
x = Symbol('x', extended_real=True, nonzero=True)
y = Symbol('y', imaginary=True)
z = Symbol('z', complex=True)
assert_all_ineq_raise_TypeError(I, y)
t = I*x # an imaginary number, should raise errors
assert_all_ineq_raise_TypeError(2, t)
t = -I*y # a real number, so no errors
assert_all_ineq_give_class_Inequality(2, t)
t = I*z # unknown, should be unevaluated
assert_all_ineq_give_class_Inequality(2, t)
def test_x_minus_y_not_same_as_x_lt_y():
"""
A consequence of pull request sympy/sympy#7792 is that `x - y < 0` and `x < y`
are not synonymous.
"""
x = I + 2
y = I + 3
pytest.raises(TypeError, lambda: x < y)
assert x - y < 0
ineq = Lt(x, y, evaluate=False)
pytest.raises(TypeError, ineq.doit)
assert ineq.lhs - ineq.rhs < 0
t = Symbol('t', imaginary=True, nonzero=True)
x = 2 + t
y = 3 + t
ineq = Lt(x, y, evaluate=False)
pytest.raises(TypeError, ineq.doit)
assert ineq.lhs - ineq.rhs < 0
# this one should give error either way
x = I + 2
y = 2*I + 3
pytest.raises(TypeError, lambda: x < y)
pytest.raises(TypeError, lambda: x - y < 0)
def test_nan_equality_exceptions():
# See issue sympy/sympy#7774
assert Equality(nan, nan) is false
assert Unequality(nan, nan) is true
# See issue sympy/sympy#7773
A = (x, Integer(0), Rational(1, 3), pi, oo, -oo)
assert Equality(nan, random.choice(A)) is false
assert Equality(random.choice(A), nan) is false
assert Unequality(nan, random.choice(A)) is true
assert Unequality(random.choice(A), nan) is true
def test_nan_inequality_raise_errors():
# See discussion in pull request sympy/sympy#7776. We test inequalities with
# a set including examples of various classes.
for q in (x, Integer(0), Integer(10), Rational(1, 3), pi, Float(1.3), oo, -oo, nan):
assert_all_ineq_raise_TypeError(q, nan)
def test_nan_complex_inequalities():
    # Comparisons of NaN with non-real values raise errors; we're not too
    # fussy about whether it's the NaN error or the complex error.
for r in (I, zoo, Symbol('z', imaginary=True)):
assert_all_ineq_raise_TypeError(r, nan)
def test_complex_infinity_inequalities():
pytest.raises(TypeError, lambda: zoo > 0)
pytest.raises(TypeError, lambda: zoo >= 0)
pytest.raises(TypeError, lambda: zoo < 0)
pytest.raises(TypeError, lambda: zoo <= 0)
def test_inequalities_symbol_name_same():
"""Using the operator and functional forms should give same results."""
# We test all combinations from a set
# FIXME: could replace with random selection after test passes
A = (x, y, Integer(0), Rational(1, 3), pi, oo, -oo)
for a in A:
for b in A:
assert Gt(a, b) == (a > b)
assert Lt(a, b) == (a < b)
assert Ge(a, b) == (a >= b)
assert Le(a, b) == (a <= b)
for b in (y, Integer(0), Rational(1, 3), pi, oo, -oo):
assert Gt(x, b, evaluate=False) == (x > b)
assert Lt(x, b, evaluate=False) == (x < b)
assert Ge(x, b, evaluate=False) == (x >= b)
assert Le(x, b, evaluate=False) == (x <= b)
for b in (y, Integer(0), Rational(1, 3), pi, oo, -oo):
assert Gt(b, x, evaluate=False) == (b > x)
assert Lt(b, x, evaluate=False) == (b < x)
assert Ge(b, x, evaluate=False) == (b >= x)
assert Le(b, x, evaluate=False) == (b <= x)
def test_inequalities_symbol_name_same_complex():
"""Using the operator and functional forms should give same results.
With complex non-real numbers, both should raise errors.
"""
# FIXME: could replace with random selection after test passes
for a in (x, Integer(0), Rational(1, 3), pi, oo):
pytest.raises(TypeError, lambda: Gt(a, I))
pytest.raises(TypeError, lambda: a > I)
pytest.raises(TypeError, lambda: Lt(a, I))
pytest.raises(TypeError, lambda: a < I)
pytest.raises(TypeError, lambda: Ge(a, I))
pytest.raises(TypeError, lambda: a >= I)
pytest.raises(TypeError, lambda: Le(a, I))
pytest.raises(TypeError, lambda: a <= I)
def test_inequalities_cant_sympify_other():
# see issue sympy/sympy#7833
bar = 'foo'
for a in (x, Integer(0), Rational(1, 3), pi, I, zoo, oo, -oo, nan):
for op in (operator.lt, operator.gt, operator.le, operator.ge):
pytest.raises(TypeError, lambda: op(a, bar))
def test_ineq_avoid_wild_symbol_flip():
p = Wild('p')
assert Gt(x, p) == Gt(x, p, evaluate=False)
assert (x < p) == Lt(x, p, evaluate=False) # issue sympy/sympy#7951
# Previously failed as 'p > x':
e = Lt(x, y).subs({y: p})
assert e == Lt(x, p, evaluate=False)
# Previously failed as 'p <= x':
e = Ge(x, p).doit()
assert e == Ge(x, p, evaluate=False)
def test_evalf():
# issue sympy/sympy#8245
a = Rational(6506833320952669167898688709329, 5070602400912917605986812821504)
q = a.evalf(10)
assert (a == q) is True
assert (a != q) is False
assert (a > q) is false
assert (a < q) is false
assert (a >= q) is true
assert (a <= q) is true
a = sqrt(2)
r = Rational(str(a.evalf(30)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) is true
assert (r < a) is false
assert (r >= a) is true
assert (r <= a) is false
a = sqrt(2)
r = Rational(str(a.evalf(29)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) is false
assert (r < a) is true
assert (r >= a) is false
assert (r <= a) is true
def test_infinity():
# issue sympy/sympy#8449
p = Symbol('p', nonnegative=True)
assert Lt(-oo, p)
assert Ge(-oo, p) is false
assert Gt(oo, -p)
assert Le(oo, -p) is false
def test_simplify():
assert simplify(x*(y + 1) - x*y - x + 1 < x) == (x > 1)
assert simplify(Integer(1) < -x) == (x < -1)
# issue sympy/sympy#10304
d = -(3*2**pi)**(1/pi) + 2*3**(1/pi)
assert d.is_real
assert simplify(Eq(1 + I*d, 0)) is False
assert simplify(Ne(1 + I*d, 0)) is True
def test_equals():
f = Function('f')
assert Eq(x, 1).equals(1) is not True
assert Eq(x, 1).equals(Eq(x*(y + 1) - x*y - x + 1, x))
assert Eq(x, y).equals(x < y, True) is False
assert Eq(x, f(1)).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(f(1), y).equals(Eq(f(2), y), True) == f(1) - f(2)
assert Eq(x, f(1)).equals(Eq(f(2), x), True) == f(1) - f(2)
assert Eq(f(1), x).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(w, x).equals(Eq(y, z), True) is False
assert Eq(f(1), f(2)).equals(Eq(f(3), f(4)), True) == f(1) - f(3) + f(4) - f(2)
assert Eq(f(1), f(2)).equals(Eq(f(3), f(4))) is None
assert (x < y).equals(y > x, True) is True
assert (x < y).equals(y >= x, True) is False
assert (x < y).equals(z < y, True) is False
assert (x < y).equals(x < z, True) is False
assert (x < f(1)).equals(x < f(2), True) == f(1) - f(2)
assert (f(1) < x).equals(f(2) < x, True) == f(1) - f(2)
def test_reversed():
assert (x < y).reversed == (y > x)
assert (x <= y).reversed == (y >= x)
assert Eq(x, y, evaluate=False).reversed == Eq(y, x, evaluate=False)
assert Ne(x, y, evaluate=False).reversed == Ne(y, x, evaluate=False)
assert (x >= y).reversed == (y <= x)
assert (x > y).reversed == (y < x)
def test_canonical():
one = Integer(1)
def unchanged(v):
c = v.canonical
return v.is_Relational and c.is_Relational and v == c
def isreversed(v):
return v.canonical == v.reversed
assert unchanged(x < one)
assert unchanged(x <= one)
assert isreversed(Eq(one, x, evaluate=False))
assert unchanged(Eq(x, one, evaluate=False))
assert isreversed(Ne(one, x, evaluate=False))
assert unchanged(Ne(x, one, evaluate=False))
assert unchanged(x >= one)
assert unchanged(x > one)
assert unchanged(x < y)
assert unchanged(x <= y)
assert isreversed(Eq(y, x, evaluate=False))
assert unchanged(Eq(x, y, evaluate=False))
assert isreversed(Ne(y, x, evaluate=False))
assert unchanged(Ne(x, y, evaluate=False))
assert isreversed(x >= y)
assert isreversed(x > y)
assert (-x < 1).canonical == (x > -1)
assert isreversed(-x > y)
@pytest.mark.xfail
def test_sympyissue_8444():
x = Symbol('x', extended_real=True)
assert (x <= oo) == (x >= -oo) == true
x = Symbol('x', real=True)
assert x >= floor(x)
assert (x < floor(x)) is false
assert Gt(x, floor(x)) == Gt(x, floor(x), evaluate=False)
assert Ge(x, floor(x)) == Ge(x, floor(x), evaluate=False)
assert x <= ceiling(x)
assert (x > ceiling(x)) is false
assert Lt(x, ceiling(x)) == Lt(x, ceiling(x), evaluate=False)
assert Le(x, ceiling(x)) == Le(x, ceiling(x), evaluate=False)
i = Symbol('i', integer=True)
assert (i > floor(i)) is false
assert (i < ceiling(i)) is false
|
diofant/diofant
|
diofant/tests/core/test_relational.py
|
Python
|
bsd-3-clause
| 22,310 | 0.000359 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest, os, json
from subprocess import PIPE, STDOUT
from proton import Message, PENDING, ACCEPTED, REJECTED, RELEASED, SSLDomain, SSLUnavailable, Timeout
from system_test import TestCase, Qdrouterd, main_module, DIR, TIMEOUT, Process
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
# PROTON-828:
try:
from proton import MODIFIED
except ImportError:
from proton import PN_STATUS_MODIFIED as MODIFIED
class RouterTest(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
"""Start a router"""
super(RouterTest, cls).setUpClass()
def router(name, connection):
config = [
('router', {'mode': 'interior', 'id': name}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'dir': 'in', 'containerId': 'LRC'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'dir': 'out', 'containerId': 'LRC'}),
('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'dir': 'in'}),
('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'dir': 'out'}),
('address', {'prefix': 'closest', 'distribution': 'closest'}),
('address', {'prefix': 'spread', 'distribution': 'balanced'}),
('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
connection
]
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
router('A', ('listener', {'role': 'inter-router', 'port': inter_router_port}))
router('B', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port, 'verifyHostName': 'no'}))
cls.routers[0].wait_router_connected('B')
cls.routers[1].wait_router_connected('A')
def test_01_one_router_targeted_sender_no_tenant(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
"anything/addr_01",
"anything/addr_01",
self.routers[0].addresses[0],
"M0anything/addr_01")
test.run()
self.assertEqual(None, test.error)
def test_02_one_router_targeted_sender_tenant_on_sender(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"addr_02",
"0.0.0.0/addr_02",
self.routers[0].addresses[0],
"M00.0.0.0/addr_02")
test.run()
self.assertEqual(None, test.error)
def test_03_one_router_targeted_sender_tenant_on_receiver(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[0].addresses[1],
"0.0.0.0/addr_03",
"addr_03",
self.routers[0].addresses[0],
"M00.0.0.0/addr_03")
test.run()
self.assertEqual(None, test.error)
def test_04_one_router_targeted_sender_tenant_on_both(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[0].addresses[1],
"addr_04",
"addr_04",
self.routers[0].addresses[0],
"M00.0.0.0/addr_04")
test.run()
self.assertEqual(None, test.error)
def test_05_two_router_targeted_sender_no_tenant(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
"0.0.0.0/addr_05",
"0.0.0.0/addr_05",
self.routers[0].addresses[0],
"M00.0.0.0/addr_05")
test.run()
self.assertEqual(None, test.error)
def test_06_two_router_targeted_sender_tenant_on_sender(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[1].addresses[0],
"addr_06",
"0.0.0.0/addr_06",
self.routers[0].addresses[0],
"M00.0.0.0/addr_06")
test.run()
self.assertEqual(None, test.error)
def test_07_two_router_targeted_sender_tenant_on_receiver(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[1].addresses[1],
"0.0.0.0/addr_07",
"addr_07",
self.routers[0].addresses[0],
"M00.0.0.0/addr_07")
test.run()
self.assertEqual(None, test.error)
def test_08_two_router_targeted_sender_tenant_on_both(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[1].addresses[1],
"addr_08",
"addr_08",
self.routers[0].addresses[0],
"M00.0.0.0/addr_08")
test.run()
self.assertEqual(None, test.error)
def test_09_one_router_anonymous_sender_no_tenant(self):
test = MessageTransferAnonTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
"anything/addr_09",
"anything/addr_09",
self.routers[0].addresses[0],
"M0anything/addr_09")
test.run()
self.assertEqual(None, test.error)
def test_10_one_router_anonymous_sender_tenant_on_sender(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"addr_10",
"0.0.0.0/addr_10",
self.routers[0].addresses[0],
"M00.0.0.0/addr_10")
test.run()
self.assertEqual(None, test.error)
def test_11_one_router_anonymous_sender_tenant_on_receiver(self):
test = MessageTransferAnonTest(self.routers[0].addresses[0],
self.routers[0].addresses[1],
"0.0.0.0/addr_11",
"addr_11",
self.routers[0].addresses[0],
"M00.0.0.0/addr_11")
test.run()
self.assertEqual(None, test.error)
def test_12_one_router_anonymous_sender_tenant_on_both(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[0].addresses[1],
"addr_12",
"addr_12",
self.routers[0].addresses[0],
"M00.0.0.0/addr_12")
test.run()
self.assertEqual(None, test.error)
def test_13_two_router_anonymous_sender_no_tenant(self):
test = MessageTransferAnonTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
"anything/addr_13",
"anything/addr_13",
self.routers[0].addresses[0],
"M0anything/addr_13")
test.run()
self.assertEqual(None, test.error)
def test_14_two_router_anonymous_sender_tenant_on_sender(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[1].addresses[0],
"addr_14",
"0.0.0.0/addr_14",
self.routers[0].addresses[0],
"M00.0.0.0/addr_14")
test.run()
self.assertEqual(None, test.error)
def test_15_two_router_anonymous_sender_tenant_on_receiver(self):
test = MessageTransferAnonTest(self.routers[0].addresses[0],
self.routers[1].addresses[1],
"0.0.0.0/addr_15",
"addr_15",
self.routers[0].addresses[0],
"M00.0.0.0/addr_15")
test.run()
self.assertEqual(None, test.error)
def test_16_two_router_anonymous_sender_tenant_on_both(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[1].addresses[1],
"addr_16",
"addr_16",
self.routers[0].addresses[0],
"M00.0.0.0/addr_16")
test.run()
self.assertEqual(None, test.error)
def test_17_one_router_link_route_targeted(self):
test = LinkRouteTest(self.routers[0].addresses[1],
self.routers[0].addresses[2],
"link.addr_17",
"0.0.0.0/link.addr_17",
False,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_18_one_router_link_route_targeted_no_tenant(self):
test = LinkRouteTest(self.routers[0].addresses[0],
self.routers[0].addresses[2],
"0.0.0.0/link.addr_18",
"0.0.0.0/link.addr_18",
False,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_19_one_router_link_route_dynamic(self):
test = LinkRouteTest(self.routers[0].addresses[1],
self.routers[0].addresses[2],
"link.addr_19",
"0.0.0.0/link.addr_19",
True,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_20_one_router_link_route_dynamic_no_tenant(self):
test = LinkRouteTest(self.routers[0].addresses[0],
self.routers[0].addresses[2],
"0.0.0.0/link.addr_20",
"0.0.0.0/link.addr_20",
True,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_21_two_router_link_route_targeted(self):
test = LinkRouteTest(self.routers[0].addresses[1],
self.routers[1].addresses[2],
"link.addr_21",
"0.0.0.0/link.addr_21",
False,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_22_two_router_link_route_targeted_no_tenant(self):
test = LinkRouteTest(self.routers[0].addresses[0],
self.routers[1].addresses[2],
"0.0.0.0/link.addr_22",
"0.0.0.0/link.addr_22",
False,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_23_two_router_link_route_dynamic(self):
test = LinkRouteTest(self.routers[0].addresses[1],
self.routers[1].addresses[2],
"link.addr_23",
"0.0.0.0/link.addr_23",
True,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_24_two_router_link_route_dynamic_no_tenant(self):
test = LinkRouteTest(self.routers[0].addresses[0],
self.routers[1].addresses[2],
"0.0.0.0/link.addr_24",
"0.0.0.0/link.addr_24",
True,
self.routers[0].addresses[0])
test.run()
self.assertEqual(None, test.error)
def test_25_one_router_anonymous_sender_non_mobile(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"_local/addr_25",
"_local/addr_25",
self.routers[0].addresses[0],
"Laddr_25")
test.run()
self.assertEqual(None, test.error)
def test_26_one_router_targeted_sender_non_mobile(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"_local/addr_26",
"_local/addr_26",
self.routers[0].addresses[0],
"Laddr_26")
test.run()
self.assertEqual(None, test.error)
def test_27_two_router_anonymous_sender_non_mobile(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[1].addresses[0],
"_topo/0/B/addr_27",
"_local/addr_27",
self.routers[1].addresses[0],
"Laddr_27")
test.run()
self.assertEqual(None, test.error)
def test_28_two_router_targeted_sender_non_mobile(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[1].addresses[0],
"_topo/0/B/addr_28",
"_local/addr_28",
self.routers[1].addresses[0],
"Laddr_28")
test.run()
self.assertEqual(None, test.error)
def test_29_one_router_waypoint_no_tenant(self):
test = WaypointTest(self.routers[0].addresses[0],
self.routers[0].addresses[2],
"0.0.0.0/queue.waypoint",
"0.0.0.0/queue.waypoint")
test.run()
self.assertEqual(None, test.error)
def test_30_one_router_waypoint(self):
test = WaypointTest(self.routers[0].addresses[1],
self.routers[0].addresses[2],
"queue.waypoint",
"0.0.0.0/queue.waypoint")
test.run()
self.assertEqual(None, test.error)
def test_31_two_router_waypoint_no_tenant(self):
test = WaypointTest(self.routers[0].addresses[0],
self.routers[1].addresses[2],
"0.0.0.0/queue.waypoint",
"0.0.0.0/queue.waypoint")
test.run()
self.assertEqual(None, test.error)
def test_32_two_router_waypoint(self):
test = WaypointTest(self.routers[0].addresses[1],
self.routers[1].addresses[2],
"queue.waypoint",
"0.0.0.0/queue.waypoint")
test.run()
self.assertEqual(None, test.error)
class Entity(object):
def __init__(self, status_code, status_description, attrs):
self.status_code = status_code
self.status_description = status_description
self.attrs = attrs
def __getattr__(self, key):
return self.attrs[key]
class RouterProxy(object):
def __init__(self, reply_addr):
self.reply_addr = reply_addr
def response(self, msg):
ap = msg.properties
return Entity(ap['statusCode'], ap['statusDescription'], msg.body)
def read_address(self, name):
ap = {'operation': 'READ', 'type': 'org.apache.qpid.dispatch.router.address', 'name': name}
return Message(properties=ap, reply_to=self.reply_addr)
def query_addresses(self):
ap = {'operation': 'QUERY', 'type': 'org.apache.qpid.dispatch.router.address'}
return Message(properties=ap, reply_to=self.reply_addr)
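    # Usage sketch (mirrors the tests in this file): send the returned
    # Message on a sender attached to "$management" and parse the reply
    # with response().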
class Timeout(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
self.parent.timeout()
class PollTimeout(object):
def __init__(self, parent):
self.parent = parent
def on_timer_task(self, event):
self.parent.poll_timeout()
class MessageTransferTest(MessagingHandler):
def __init__(self, sender_host, receiver_host, sender_address, receiver_address, lookup_host, lookup_address):
super(MessageTransferTest, self).__init__()
self.sender_host = sender_host
self.receiver_host = receiver_host
self.sender_address = sender_address
self.receiver_address = receiver_address
self.lookup_host = lookup_host
self.lookup_address = lookup_address
self.sender_conn = None
self.receiver_conn = None
self.lookup_conn = None
self.error = None
self.sender = None
self.receiver = None
self.proxy = None
self.count = 10
self.n_sent = 0
self.n_rcvd = 0
self.n_accepted = 0
self.n_receiver_opened = 0
self.n_sender_opened = 0
def timeout(self):
self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_accepted=%d n_receiver_opened=%d n_sender_opened=%d" %\
(self.n_sent, self.n_rcvd, self.n_accepted, self.n_receiver_opened, self.n_sender_opened)
self.sender_conn.close()
self.receiver_conn.close()
self.lookup_conn.close()
def on_start(self, event):
self.timer = event.reactor.schedule(5, Timeout(self))
self.sender_conn = event.container.connect(self.sender_host)
self.receiver_conn = event.container.connect(self.receiver_host)
self.lookup_conn = event.container.connect(self.lookup_host)
self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")
def send(self):
while self.sender.credit > 0 and self.n_sent < self.count:
self.n_sent += 1
m = Message(body="Message %d of %d" % (self.n_sent, self.count))
self.sender.send(m)
def on_link_opened(self, event):
if event.receiver:
self.n_receiver_opened += 1
else:
self.n_sender_opened += 1
if event.receiver == self.reply_receiver:
self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
self.sender = event.container.create_sender(self.sender_conn, self.sender_address)
self.receiver = event.container.create_receiver(self.receiver_conn, self.receiver_address)
def on_sendable(self, event):
if event.sender == self.sender:
self.send()
def on_message(self, event):
if event.receiver == self.receiver:
self.n_rcvd += 1
if event.receiver == self.reply_receiver:
response = self.proxy.response(event.message)
if response.status_code != 200:
self.error = "Unexpected error code from agent: %d - %s" % (response.status_code, response.status_description)
if self.n_sent != self.count or self.n_rcvd != self.count:
self.error = "Unexpected counts: n_sent=%d n_rcvd=%d n_accepted=%d" % (self.n_sent, self.n_rcvd, self.n_accepted)
self.sender_conn.close()
self.receiver_conn.close()
self.lookup_conn.close()
self.timer.cancel()
def on_accepted(self, event):
if event.sender == self.sender:
self.n_accepted += 1
if self.n_accepted == self.count:
request = self.proxy.read_address(self.lookup_address)
self.agent_sender.send(request)
def run(self):
Container(self).run()
class MessageTransferAnonTest(MessagingHandler):
def __init__(self, sender_host, receiver_host, sender_address, receiver_address, lookup_host, lookup_address):
super(MessageTransferAnonTest, self).__init__()
self.sender_host = sender_host
self.receiver_host = receiver_host
self.sender_address = sender_address
self.receiver_address = receiver_address
self.lookup_host = lookup_host
self.lookup_address = lookup_address
self.sender_conn = None
self.receiver_conn = None
self.lookup_conn = None
self.error = None
self.sender = None
self.receiver = None
self.proxy = None
self.count = 10
self.n_sent = 0
self.n_rcvd = 0
self.n_accepted = 0
self.n_agent_reads = 0
self.n_receiver_opened = 0
self.n_sender_opened = 0
def timeout(self):
self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_accepted=%d n_agent_reads=%d n_receiver_opened=%d n_sender_opened=%d" %\
(self.n_sent, self.n_rcvd, self.n_accepted, self.n_agent_reads, self.n_receiver_opened, self.n_sender_opened)
self.sender_conn.close()
self.receiver_conn.close()
self.lookup_conn.close()
if self.poll_timer:
self.poll_timer.cancel()
def poll_timeout(self):
self.poll()
def on_start(self, event):
self.timer = event.reactor.schedule(5, Timeout(self))
self.poll_timer = None
self.sender_conn = event.container.connect(self.sender_host)
self.receiver_conn = event.container.connect(self.receiver_host)
self.lookup_conn = event.container.connect(self.lookup_host)
self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")
self.receiver = event.container.create_receiver(self.receiver_conn, self.receiver_address)
def send(self):
while self.sender.credit > 0 and self.n_sent < self.count:
self.n_sent += 1
m = Message(body="Message %d of %d" % (self.n_sent, self.count))
m.address = self.sender_address
self.sender.send(m)
def poll(self):
request = self.proxy.read_address(self.lookup_address)
self.agent_sender.send(request)
self.n_agent_reads += 1
def on_link_opened(self, event):
if event.receiver:
self.n_receiver_opened += 1
else:
self.n_sender_opened += 1
if event.receiver == self.reply_receiver:
self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
self.poll()
def on_sendable(self, event):
if event.sender == self.sender:
self.send()
def on_message(self, event):
if event.receiver == self.receiver:
self.n_rcvd += 1
if event.receiver == self.reply_receiver:
response = self.proxy.response(event.message)
if response.status_code == 200 and (response.remoteCount + response.subscriberCount) > 0:
self.sender = event.container.create_sender(self.sender_conn, None)
if self.poll_timer:
self.poll_timer.cancel()
self.poll_timer = None
else:
self.poll_timer = event.reactor.schedule(0.25, PollTimeout(self))
def on_accepted(self, event):
if event.sender == self.sender:
self.n_accepted += 1
if self.n_accepted == self.count:
self.sender_conn.close()
self.receiver_conn.close()
self.lookup_conn.close()
self.timer.cancel()
def run(self):
Container(self).run()
class LinkRouteTest(MessagingHandler):
def __init__(self, first_host, second_host, first_address, second_address, dynamic, lookup_host):
super(LinkRouteTest, self).__init__(prefetch=0)
self.first_host = first_host
self.second_host = second_host
self.first_address = first_address
self.second_address = second_address
self.dynamic = dynamic
self.lookup_host = lookup_host
self.first_conn = None
self.second_conn = None
self.error = None
self.first_sender = None
self.first_receiver = None
self.second_sender = None
self.second_receiver = None
self.poll_timer = None
self.count = 10
self.n_sent = 0
self.n_rcvd = 0
self.n_settled = 0
def timeout(self):
self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_settled=%d" % (self.n_sent, self.n_rcvd, self.n_settled)
self.first_conn.close()
self.second_conn.close()
self.lookup_conn.close()
if self.poll_timer:
self.poll_timer.cancel()
def poll_timeout(self):
self.poll()
def fail(self, text):
self.error = text
self.second_conn.close()
self.first_conn.close()
self.timer.cancel()
self.lookup_conn.close()
if self.poll_timer:
self.poll_timer.cancel()
def send(self):
while self.first_sender.credit > 0 and self.n_sent < self.count:
self.n_sent += 1
m = Message(body="Message %d of %d" % (self.n_sent, self.count))
self.first_sender.send(m)
def poll(self):
request = self.proxy.read_address("D0.0.0.0/link")
self.agent_sender.send(request)
def setup_first_links(self, event):
self.first_sender = event.container.create_sender(self.first_conn, self.first_address)
if self.dynamic:
self.first_receiver = event.container.create_receiver(self.first_conn,
dynamic=True,
options=DynamicNodeProperties({"x-opt-qd.address": unicode(self.first_address)}))
else:
self.first_receiver = event.container.create_receiver(self.first_conn, self.first_address)
def on_start(self, event):
self.timer = event.reactor.schedule(5, Timeout(self))
self.first_conn = event.container.connect(self.first_host)
self.second_conn = event.container.connect(self.second_host)
self.lookup_conn = event.container.connect(self.lookup_host)
self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")
def on_link_opening(self, event):
if event.sender:
self.second_sender = event.sender
if self.dynamic:
if event.sender.remote_source.dynamic:
event.sender.source.address = self.second_address
event.sender.open()
else:
self.fail("Expected dynamic source on sender")
else:
if event.sender.remote_source.address == self.second_address:
event.sender.source.address = self.second_address
event.sender.open()
else:
self.fail("Incorrect address on incoming sender: got %s, expected %s" %
(event.sender.remote_source.address, self.second_address))
elif event.receiver:
self.second_receiver = event.receiver
if event.receiver.remote_target.address == self.second_address:
event.receiver.target.address = self.second_address
event.receiver.open()
else:
self.fail("Incorrect address on incoming receiver: got %s, expected %s" %
(event.receiver.remote_target.address, self.second_address))
def on_link_opened(self, event):
if event.receiver:
event.receiver.flow(self.count)
if event.receiver == self.reply_receiver:
self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
self.poll()
def on_sendable(self, event):
if event.sender == self.first_sender:
self.send()
def on_message(self, event):
if event.receiver == self.first_receiver:
self.n_rcvd += 1
if event.receiver == self.reply_receiver:
response = self.proxy.response(event.message)
if response.status_code == 200 and (response.remoteCount + response.containerCount) > 0:
if self.poll_timer:
self.poll_timer.cancel()
self.poll_timer = None
self.setup_first_links(event)
else:
self.poll_timer = event.reactor.schedule(0.25, PollTimeout(self))
def on_settled(self, event):
if event.sender == self.first_sender:
self.n_settled += 1
if self.n_settled == self.count:
self.fail(None)
def run(self):
container = Container(self)
container.container_id = 'LRC'
container.run()
class WaypointTest(MessagingHandler):
def __init__(self, first_host, second_host, first_address, second_address):
super(WaypointTest, self).__init__()
self.first_host = first_host
self.second_host = second_host
self.first_address = first_address
self.second_address = second_address
self.first_conn = None
self.second_conn = None
self.error = None
self.first_sender = None
self.first_receiver = None
self.waypoint_sender = None
self.waypoint_receiver = None
self.waypoint_queue = []
self.count = 10
self.n_sent = 0
self.n_rcvd = 0
self.n_thru = 0
def timeout(self):
self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_thru=%d" % (self.n_sent, self.n_rcvd, self.n_thru)
self.first_conn.close()
self.second_conn.close()
def fail(self, text):
self.error = text
self.second_conn.close()
self.first_conn.close()
self.timer.cancel()
def send_client(self):
while self.first_sender.credit > 0 and self.n_sent < self.count:
self.n_sent += 1
m = Message(body="Message %d of %d" % (self.n_sent, self.count))
self.first_sender.send(m)
def send_waypoint(self):
while self.waypoint_sender.credit > 0 and len(self.waypoint_queue) > 0:
self.n_thru += 1
m = self.waypoint_queue.pop()
self.waypoint_sender.send(m)
def on_start(self, event):
self.timer = event.reactor.schedule(10, Timeout(self))
self.first_conn = event.container.connect(self.first_host)
self.second_conn = event.container.connect(self.second_host)
def on_connection_opened(self, event):
if event.connection == self.first_conn:
self.first_sender = event.container.create_sender(self.first_conn, self.first_address)
self.first_receiver = event.container.create_receiver(self.first_conn, self.first_address)
def on_link_opening(self, event):
if event.sender:
self.waypoint_sender = event.sender
if event.sender.remote_source.address == self.second_address:
event.sender.source.address = self.second_address
event.sender.open()
else:
self.fail("Incorrect address on incoming sender: got %s, expected %s" %
(event.sender.remote_source.address, self.second_address))
elif event.receiver:
self.waypoint_receiver = event.receiver
if event.receiver.remote_target.address == self.second_address:
event.receiver.target.address = self.second_address
event.receiver.open()
else:
self.fail("Incorrect address on incoming receiver: got %s, expected %s" %
(event.receiver.remote_target.address, self.second_address))
def on_sendable(self, event):
if event.sender == self.first_sender:
self.send_client()
elif event.sender == self.waypoint_sender:
self.send_waypoint()
def on_message(self, event):
if event.receiver == self.first_receiver:
self.n_rcvd += 1
if self.n_rcvd == self.count and self.n_thru == self.count:
self.fail(None)
elif event.receiver == self.waypoint_receiver:
m = Message(body=event.message.body)
self.waypoint_queue.append(m)
self.send_waypoint()
def run(self):
container = Container(self)
container.container_id = 'ALC'
container.run()
if __name__ == '__main__':
unittest.main(main_module())
|
adel-boutros/qpid-dispatch
|
tests/system_tests_multi_tenancy.py
|
Python
|
apache-2.0
| 35,894 | 0.004987 |
import numpy as N
from mplstereonet import stereonet_math
from scipy.stats import chi2
from numpy.testing import assert_array_almost_equal
from .geom.util import vector, unit_vector, dot
def quaternion(vector, angle):
"""
Unit quaternion for a vector and an angle
"""
return N.cos(angle/2)+vector*N.sin(angle/2)
def ellipse(n=1000, adaptive=False):
"""
    Get a parameterized set of n unit vectors tracing the
    unit circle. Callers scale these by major and minor
    axis lengths, so the resulting bundle has its major
    axes along the given axes.
"""
u = N.linspace(0,2*N.pi,n)
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([N.cos(u),N.sin(u)]).T
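# Shape sketch: ellipse(n=4) returns a (4, 2) array of points on the unit
# circle; scaling the columns by axis lengths yields the actual ellipse.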
def sph2cart(lat,lon):
_ = stereonet_math.sph2cart(lat,lon)
#val = N.array(_).flatten()
val = N.roll(_,-1)
val[:-1] *= -1
return val
def scale_errors(cov_axes, confidence_level=0.95):
"""
Returns major axes of error ellipse or
hyperbola, rescaled using chi2 test statistic
"""
dof = len(cov_axes)
x2t = chi2.ppf(confidence_level,dof)
return N.sqrt(x2t*cov_axes)
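# Worked sketch: for three covariance axes at the default 95% level,
# chi2.ppf(0.95, 3) is roughly 7.815, so each axis becomes
# sqrt(7.815 * cov_axis).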
def normal_errors(axes, covariance_matrix, **kwargs):
"""
Currently assumes upper hemisphere of stereonet
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = N.diagonal(covariance_matrix)
ell = ellipse(**kwargs)
if axes[2,2] < 0:
axes *= -1
# Not sure where this factor comes from but it
# seems to make things work better
c1 = 2
axis_lengths = d[:2]
f = N.linalg.norm(
ell*axis_lengths,axis=1)
e0 = -ell.T*d[2]*c1
e = N.vstack((e0,f))
_ = dot(e.T,axes).T
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],-_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def test_ellipse():
    n = 1000
    ell = ellipse(n=n)
    u = N.linspace(0, 2*N.pi, n)
    arr = N.array([N.cos(u), N.sin(u)]).T
    assert_array_almost_equal(ell, arr)
def plane_errors(axes, covariance_matrix, sheet='upper',**kwargs):
"""
kwargs:
traditional_layout boolean [True]
Lay the stereonet out traditionally, with north at the pole of
the diagram. The default is a more natural and intuitive visualization
with vertical at the pole and the compass points of strike around the equator.
Thus, longitude at the equator represents strike and latitude represents
apparent dip at that azimuth.
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = covariance_matrix
ell = ellipse(**kwargs)
bundle = dot(ell, d[:2])
res = d[2]*level*2
# Switch hemispheres if PCA is upside-down
# Normal vector is always correctly fit
#if traditional_layout:
#if axes[2,2] > 0:
if axes[2,2] > 0:
res *= -1
if sheet == 'upper':
bundle += res
elif sheet == 'lower':
bundle -= res
_ = dot(bundle,axes).T
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def iterative_normal_errors(axes, covariance_matrix, **kwargs):
"""
Currently assumes upper hemisphere of stereonet
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
n = kwargs.get('n', 100)
d = N.diagonal(covariance_matrix)
u = N.linspace(0, 2*N.pi, n)
if axes[2,2] < 0:
axes *= -1
# Not sure where this factor comes from but it
# seems to make things work better
c1 = 2
def sdot(a,b):
return sum([i*j for i,j in zip(a,b)])
def step_func(a):
e = [
-c1*d[2]*N.cos(a),
-c1*d[2]*N.sin(a),
N.linalg.norm([N.cos(a)*d[0],N.sin(a)*d[1]])
]
r = [sdot(e,i)
for i in axes.T]
if traditional_layout:
x,y,z = r[2],r[0],-r[1]
else:
x,y,z = -r[1],r[0],r[2]
r = N.sqrt(x**2 + y**2 + z**2)
lon = N.arctan2(y, x)
lat = N.arcsin(z/r)
return lon,lat
# Get a bundle of vectors defining
# a full rotation around the unit circle
vals = [step_func(i) for i in u]
return vals
def iterative_plane_errors(axes,covariance_matrix, **kwargs):
"""
An iterative version of `pca.plane_errors`,
which computes an error surface for a plane.
"""
sheet = kwargs.pop('sheet','upper')
level = kwargs.pop('level',1)
n = kwargs.pop('n',100)
cov = N.diagonal(covariance_matrix)
u = N.linspace(0, 2*N.pi, n)
scales = dict(upper=1,lower=-1,nominal=0)
c1 = scales[sheet]*2 # We double the scale of errors since they are symmetrical
c1 *= -1 # We assume upper hemisphere
if axes[2,2] < 0:
c1 *= -1
def sdot(a,b):
return sum([i*j for i,j in zip(a,b)])
def step_func(a):
e = [
N.cos(a)*cov[0],
N.sin(a)*cov[1],
c1*cov[2]]
d = [sdot(e,i)
for i in axes.T]
x,y,z = d[2],d[0],d[1]
r = N.sqrt(x**2 + y**2 + z**2)
lat = N.arcsin(z/r)
lon = N.arctan2(y, x)
return lon,lat
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([step_func(i)
for i in u])
def error_ellipse(axes, covariance_matrix, **kwargs):
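    # Error ellipse about the nominal pole of the fitted plane, returned
    # as (lon, lat) pairs on the stereonet.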
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = N.sqrt(covariance_matrix)
ell = ellipse(**kwargs)
# Bundle of vectors surrounding nominal values
bundle = dot(ell, d[:2])
res = d[2]*level
# Switch hemispheres if PCA is upside-down
# Normal vector is always correctly fit
if axes[2,2] > 0:
res *= -1
normal = vector(0,0,1)
_ = normal + bundle
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def error_coords(axes, covariance_matrix, **kwargs):
# Support for multiple levels of errors
# (not sure if this directly corresponds
# to sigma).
levels = kwargs.pop('levels',None)
do_ellipse = kwargs.pop('ellipse',True)
u = 'upper'
l = 'lower'
def _(half, level=1):
lonlat = plane_errors(axes, covariance_matrix,
half, level=level, **kwargs)
return N.degrees(lonlat).tolist()
def __(level):
data = dict(
upper=_(u, level),
lower=_(l, level))
if do_ellipse:
ell = error_ellipse(
axes, covariance_matrix,
level=level, **kwargs)
data['ellipse'] = N.degrees(ell).tolist()
return data
out = dict(nominal=_('nominal'))
if levels is None:
i = __(1)
else:
i = {l:__(l) for l in levels}
out.update(i)
return out
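
# Sketch of the structure `error_coords` builds above: a dict of lon/lat
# paths in degrees,
#   {'nominal': [...], 'upper': [...], 'lower': [...], 'ellipse': [...]}
# ('ellipse' is present only when ellipse=True). When `levels` is given,
# the upper/lower/ellipse groups are nested one level deeper, keyed by
# confidence level.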
|
davenquinn/Attitude
|
attitude/stereonet.py
|
Python
|
mit
| 7,069 | 0.014854 |
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains Melange Task API related helper modules."""
|
MatthewWilkes/mw4068-packaging
|
src/melange/src/soc/tasks/helper/__init__.py
|
Python
|
apache-2.0
| 654 | 0.004587 |
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from neutron.tests.api import base
from neutron.tests.tempest import test
class ExtraDHCPOptionsTestJSON(base.BaseNetworkTest):
"""
Tests the following operations with the Extra DHCP Options Neutron API
extension:
port create
port list
port show
port update
v2.0 of the Neutron API is assumed. It is also assumed that the Extra
DHCP Options extension is enabled in the [network-feature-enabled]
section of etc/tempest.conf
"""
@classmethod
def resource_setup(cls):
super(ExtraDHCPOptionsTestJSON, cls).resource_setup()
if not test.is_extension_enabled('extra_dhcp_opt', 'network'):
msg = "Extra DHCP Options extension not enabled."
raise cls.skipException(msg)
cls.network = cls.create_network()
cls.subnet = cls.create_subnet(cls.network)
cls.port = cls.create_port(cls.network)
cls.ip_tftp = ('123.123.123.123' if cls._ip_version == 4
else '2015::dead')
cls.ip_server = ('123.123.123.45' if cls._ip_version == 4
else '2015::badd')
cls.extra_dhcp_opts = [
{'opt_value': 'pxelinux.0', 'opt_name': 'bootfile-name'},
{'opt_value': cls.ip_tftp, 'opt_name': 'tftp-server'},
{'opt_value': cls.ip_server, 'opt_name': 'server-ip-address'}
]
@test.attr(type='smoke')
@test.idempotent_id('d2c17063-3767-4a24-be4f-a23dbfa133c9')
def test_create_list_port_with_extra_dhcp_options(self):
# Create a port with Extra DHCP Options
body = self.client.create_port(
network_id=self.network['id'],
extra_dhcp_opts=self.extra_dhcp_opts)
port_id = body['port']['id']
self.addCleanup(self.client.delete_port, port_id)
# Confirm port created has Extra DHCP Options
body = self.client.list_ports()
ports = body['ports']
port = [p for p in ports if p['id'] == port_id]
self.assertTrue(port)
self._confirm_extra_dhcp_options(port[0], self.extra_dhcp_opts)
@test.attr(type='smoke')
@test.idempotent_id('9a6aebf4-86ee-4f47-b07a-7f7232c55607')
def test_update_show_port_with_extra_dhcp_options(self):
# Update port with extra dhcp options
name = data_utils.rand_name('new-port-name')
body = self.client.update_port(
self.port['id'],
name=name,
extra_dhcp_opts=self.extra_dhcp_opts)
# Confirm extra dhcp options were added to the port
body = self.client.show_port(self.port['id'])
self._confirm_extra_dhcp_options(body['port'], self.extra_dhcp_opts)
def _confirm_extra_dhcp_options(self, port, extra_dhcp_opts):
retrieved = port['extra_dhcp_opts']
self.assertEqual(len(retrieved), len(extra_dhcp_opts))
for retrieved_option in retrieved:
for option in extra_dhcp_opts:
if (retrieved_option['opt_value'] == option['opt_value'] and
retrieved_option['opt_name'] == option['opt_name']):
break
else:
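                # for/else: reached only when the inner loop found no
                # matching option (i.e. no break occurred).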
self.fail('Extra DHCP option not found in port %s' %
str(retrieved_option))
class ExtraDHCPOptionsIpV6TestJSON(ExtraDHCPOptionsTestJSON):
_ip_version = 6
|
miyakz1192/neutron
|
neutron/tests/api/test_extra_dhcp_options.py
|
Python
|
apache-2.0
| 4,030 | 0.000248 |
# Copyright (c) 2011 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import command
import re
import os
import series
import subprocess
import sys
import terminal
import checkpatch
import settings
def CountCommitsToBranch():
"""Returns number of commits between HEAD and the tracking branch.
This looks back to the tracking branch and works out the number of commits
since then.
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--no-color', '--oneline', '--no-decorate',
'@{upstream}..'],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
def GetUpstream(git_dir, branch):
"""Returns the name of the upstream for a branch
Args:
git_dir: Git directory containing repo
branch: Name of branch
Returns:
Name of upstream branch (e.g. 'upstream/master') or None if none
"""
try:
remote = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.remote' % branch)
merge = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.merge' % branch)
except:
return None
if remote == '.':
return merge
elif remote and merge:
leaf = merge.split('/')[-1]
return '%s/%s' % (remote, leaf)
else:
raise ValueError, ("Cannot determine upstream branch for branch "
"'%s' remote='%s', merge='%s'" % (branch, remote, merge))
def GetRangeInBranch(git_dir, branch, include_upstream=False):
"""Returns an expression for the commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Expression in the form 'upstream..branch' which can be used to
access the commits. If the branch does not exist, returns None.
"""
upstream = GetUpstream(git_dir, branch)
if not upstream:
return None
return '%s%s..%s' % (upstream, '~' if include_upstream else '', branch)
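
# Hedged example: if branch 'feature' tracks 'origin/master', then
#   GetRangeInBranch(git_dir, 'feature')        -> 'origin/master..feature'
#   GetRangeInBranch(git_dir, 'feature', True)  -> 'origin/master~..feature'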
def CountCommitsInBranch(git_dir, branch, include_upstream=False):
"""Returns the number of commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Number of patches that exist on top of the branch, or None if the
branch does not exist.
"""
range_expr = GetRangeInBranch(git_dir, branch, include_upstream)
if not range_expr:
return None
pipe = [['git', '--git-dir', git_dir, 'log', '--oneline', '--no-decorate',
range_expr],
['wc', '-l']]
result = command.RunPipe(pipe, capture=True, oneline=True)
patch_count = int(result.stdout)
return patch_count
def CountCommits(commit_range):
"""Returns the number of commits in the given range.
Args:
commit_range: Range of commits to count (e.g. 'HEAD..base')
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--oneline', '--no-decorate', commit_range],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
def Checkout(commit_hash, git_dir=None, work_tree=None, force=False):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('checkout')
if force:
pipe.append('-f')
pipe.append(commit_hash)
result = command.RunPipe([pipe], capture=True, raise_on_error=False)
if result.return_code != 0:
        raise OSError('git checkout (%s): %s' % (pipe, result.stderr))
def Clone(git_dir, output_dir):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git', 'clone', git_dir, '.']
result = command.RunPipe([pipe], capture=True, cwd=output_dir)
if result.return_code != 0:
        raise OSError('git clone: %s' % result.stderr)
def Fetch(git_dir=None, work_tree=None):
"""Fetch from the origin repo
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('fetch')
result = command.RunPipe([pipe], capture=True)
if result.return_code != 0:
        raise OSError('git fetch: %s' % result.stderr)
def CreatePatches(start, count, series):
"""Create a series of patches from the top of the current branch.
The patch files are written to the current directory using
git format-patch.
Args:
start: Commit to start from: 0=HEAD, 1=next one, etc.
count: number of commits to include
Return:
Filename of cover letter
List of filenames of patch files
"""
if series.get('version'):
version = '%s ' % series['version']
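        # NOTE: 'version' is computed here but never used below.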
cmd = ['git', 'format-patch', '-M', '--signoff']
if series.get('cover'):
cmd.append('--cover-letter')
prefix = series.GetPatchPrefix()
if prefix:
cmd += ['--subject-prefix=%s' % prefix]
cmd += ['HEAD~%d..HEAD~%d' % (start + count, start)]
stdout = command.RunList(cmd)
files = stdout.splitlines()
# We have an extra file if there is a cover letter
if series.get('cover'):
return files[0], files[1:]
else:
return None, files
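
# Hedged usage sketch: with three commits on the branch and a cover letter
# requested in the series settings, something like
#   cover, patches = CreatePatches(0, 3, series)
# runs 'git format-patch -M --signoff --cover-letter HEAD~3..HEAD~0' and
# returns the cover-letter filename plus the three patch filenames.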
def ApplyPatch(verbose, fname):
"""Apply a patch with git am to test it
TODO: Convert these to use command, with stderr option
Args:
fname: filename of patch file to apply
"""
col = terminal.Color()
cmd = ['git', 'am', fname]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
    re_error = re.compile(r'^error: patch failed: (.+):(\d+)')
for line in stderr.splitlines():
if verbose:
print line
match = re_error.match(line)
if match:
print checkpatch.GetWarningMsg(col, 'warning', match.group(1),
int(match.group(2)), 'Patch failed')
return pipe.returncode == 0, stdout
def ApplyPatches(verbose, args, start_point):
"""Apply the patches with git am to make sure all is well
Args:
verbose: Print out 'git am' output verbatim
args: List of patch files to apply
start_point: Number of commits back from HEAD to start applying.
Normally this is len(args), but it can be larger if a start
offset was given.
"""
error_count = 0
col = terminal.Color()
# Figure out our current position
cmd = ['git', 'name-rev', 'HEAD', '--name-only']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
        msg = 'Could not find current commit name'
        print col.Color(col.RED, msg)
        print stdout
return False
old_head = stdout.splitlines()[0]
# Checkout the required start point
cmd = ['git', 'checkout', 'HEAD~%d' % start_point]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
        msg = 'Could not move to commit before patch series'
        print col.Color(col.RED, msg)
        print stdout, stderr
return False
# Apply all the patches
for fname in args:
ok, stdout = ApplyPatch(verbose, fname)
if not ok:
print col.Color(col.RED, 'git am returned errors for %s: will '
'skip this patch' % fname)
if verbose:
print stdout
error_count += 1
cmd = ['git', 'am', '--skip']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode != 0:
print col.Color(col.RED, 'Unable to skip patch! Aborting...')
print stdout
break
# Return to our previous position
cmd = ['git', 'checkout', old_head]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
print col.Color(col.RED, 'Could not move back to head commit')
print stdout, stderr
return error_count == 0
def BuildEmailList(in_list, tag=None, alias=None, raise_on_error=True):
"""Build a list of email addresses based on an input list.
Takes a list of email addresses and aliases, and turns this into a list
of only email address, by resolving any aliases that are present.
If the tag is given, then each email address is prepended with this
tag and a space. If the tag starts with a minus sign (indicating a
command line parameter) then the email address is quoted.
Args:
in_list: List of aliases/email addresses
tag: Text to put before each address
alias: Alias dictionary
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
Returns:
List of email addresses
>>> alias = {}
>>> alias['fred'] = ['f.bloggs@napier.co.nz']
>>> alias['john'] = ['j.bloggs@napier.co.nz']
>>> alias['mary'] = ['Mary Poppins <m.poppins@cloud.net>']
>>> alias['boys'] = ['fred', ' john']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> BuildEmailList(['john', 'mary'], None, alias)
['j.bloggs@napier.co.nz', 'Mary Poppins <m.poppins@cloud.net>']
>>> BuildEmailList(['john', 'mary'], '--to', alias)
['--to "j.bloggs@napier.co.nz"', \
'--to "Mary Poppins <m.poppins@cloud.net>"']
>>> BuildEmailList(['john', 'mary'], 'Cc', alias)
['Cc j.bloggs@napier.co.nz', 'Cc Mary Poppins <m.poppins@cloud.net>']
"""
quote = '"' if tag and tag[0] == '-' else ''
raw = []
for item in in_list:
raw += LookupEmail(item, alias, raise_on_error=raise_on_error)
result = []
for item in raw:
if not item in result:
result.append(item)
if tag:
return ['%s %s%s%s' % (tag, quote, email, quote) for email in result]
return result
def EmailPatches(series, cover_fname, args, dry_run, raise_on_error, cc_fname,
self_only=False, alias=None, in_reply_to=None):
"""Email a patch series.
Args:
series: Series object containing destination info
cover_fname: filename of cover letter
args: list of filenames of patch files
dry_run: Just return the command that would be run
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
cc_fname: Filename of Cc file for per-commit Cc
self_only: True to just email to yourself as a test
in_reply_to: If set we'll pass this to git as --in-reply-to.
Should be a message ID that this is in reply to.
Returns:
Git command that was/would be run
# For the duration of this doctest pretend that we ran patman with ./patman
>>> _old_argv0 = sys.argv[0]
>>> sys.argv[0] = './patman'
>>> alias = {}
>>> alias['fred'] = ['f.bloggs@napier.co.nz']
>>> alias['john'] = ['j.bloggs@napier.co.nz']
>>> alias['mary'] = ['m.poppins@cloud.net']
>>> alias['boys'] = ['fred', ' john']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> alias[os.getenv('USER')] = ['this-is-me@me.com']
>>> series = series.Series()
>>> series.to = ['fred']
>>> series.cc = ['mary']
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
False, alias)
'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"m.poppins@cloud.net" --cc-cmd "./patman --cc-cmd cc-fname" cover p1 p2'
>>> EmailPatches(series, None, ['p1'], True, True, 'cc-fname', False, \
alias)
'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"m.poppins@cloud.net" --cc-cmd "./patman --cc-cmd cc-fname" p1'
>>> series.cc = ['all']
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
True, alias)
'git send-email --annotate --to "this-is-me@me.com" --cc-cmd "./patman \
--cc-cmd cc-fname" cover p1 p2'
>>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
False, alias)
'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"f.bloggs@napier.co.nz" --cc "j.bloggs@napier.co.nz" --cc \
"m.poppins@cloud.net" --cc-cmd "./patman --cc-cmd cc-fname" cover p1 p2'
# Restore argv[0] since we clobbered it.
>>> sys.argv[0] = _old_argv0
"""
to = BuildEmailList(series.get('to'), '--to', alias, raise_on_error)
if not to:
print ("No recipient, please add something like this to a commit\n"
"Series-to: Fred Bloggs <f.blogs@napier.co.nz>")
return
cc = BuildEmailList(series.get('cc'), '--cc', alias, raise_on_error)
if self_only:
to = BuildEmailList([os.getenv('USER')], '--to', alias, raise_on_error)
cc = []
cmd = ['git', 'send-email', '--annotate']
if in_reply_to:
cmd.append('--in-reply-to="%s"' % in_reply_to)
cmd += to
cmd += cc
cmd += ['--cc-cmd', '"%s --cc-cmd %s"' % (sys.argv[0], cc_fname)]
if cover_fname:
cmd.append(cover_fname)
cmd += args
    cmdstr = ' '.join(cmd)
    if not dry_run:
        os.system(cmdstr)
    return cmdstr
def LookupEmail(lookup_name, alias=None, raise_on_error=True, level=0):
"""If an email address is an alias, look it up and return the full name
TODO: Why not just use git's own alias feature?
Args:
lookup_name: Alias or email address to look up
alias: Dictionary containing aliases (None to use settings default)
raise_on_error: True to raise an error when an alias fails to match,
False to just print a message.
Returns:
tuple:
list containing a list of email addresses
Raises:
OSError if a recursive alias reference was found
ValueError if an alias was not found
>>> alias = {}
>>> alias['fred'] = ['f.bloggs@napier.co.nz']
>>> alias['john'] = ['j.bloggs@napier.co.nz']
>>> alias['mary'] = ['m.poppins@cloud.net']
>>> alias['boys'] = ['fred', ' john', 'f.bloggs@napier.co.nz']
>>> alias['all'] = ['fred ', 'john', ' mary ']
>>> alias['loop'] = ['other', 'john', ' mary ']
>>> alias['other'] = ['loop', 'john', ' mary ']
>>> LookupEmail('mary', alias)
['m.poppins@cloud.net']
>>> LookupEmail('arthur.wellesley@howe.ro.uk', alias)
['arthur.wellesley@howe.ro.uk']
>>> LookupEmail('boys', alias)
['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz']
>>> LookupEmail('all', alias)
['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz', 'm.poppins@cloud.net']
>>> LookupEmail('odd', alias)
Traceback (most recent call last):
...
ValueError: Alias 'odd' not found
>>> LookupEmail('loop', alias)
Traceback (most recent call last):
...
OSError: Recursive email alias at 'other'
>>> LookupEmail('odd', alias, raise_on_error=False)
\033[1;31mAlias 'odd' not found\033[0m
[]
>>> # In this case the loop part will effectively be ignored.
>>> LookupEmail('loop', alias, raise_on_error=False)
\033[1;31mRecursive email alias at 'other'\033[0m
\033[1;31mRecursive email alias at 'john'\033[0m
\033[1;31mRecursive email alias at 'mary'\033[0m
['j.bloggs@napier.co.nz', 'm.poppins@cloud.net']
"""
if not alias:
alias = settings.alias
lookup_name = lookup_name.strip()
if '@' in lookup_name: # Perhaps a real email address
return [lookup_name]
lookup_name = lookup_name.lower()
col = terminal.Color()
out_list = []
if level > 10:
msg = "Recursive email alias at '%s'" % lookup_name
if raise_on_error:
            raise OSError(msg)
else:
print col.Color(col.RED, msg)
return out_list
if lookup_name:
if not lookup_name in alias:
msg = "Alias '%s' not found" % lookup_name
if raise_on_error:
                raise ValueError(msg)
else:
print col.Color(col.RED, msg)
return out_list
for item in alias[lookup_name]:
todo = LookupEmail(item, alias, raise_on_error, level + 1)
for new_item in todo:
if not new_item in out_list:
out_list.append(new_item)
#print "No match for alias '%s'" % lookup_name
return out_list
def GetTopLevel():
"""Return name of top-level directory for this git repo.
Returns:
Full path to git top-level directory
This test makes sure that we are running tests in the right subdir
>>> os.path.realpath(os.path.dirname(__file__)) == \
os.path.join(GetTopLevel(), 'tools', 'patman')
True
"""
return command.OutputOneLine('git', 'rev-parse', '--show-toplevel')
def GetAliasFile():
"""Gets the name of the git alias file.
Returns:
Filename of git alias file, or None if none
"""
fname = command.OutputOneLine('git', 'config', 'sendemail.aliasesfile',
raise_on_error=False)
if fname:
fname = os.path.join(GetTopLevel(), fname.strip())
return fname
def GetDefaultUserName():
"""Gets the user.name from .gitconfig file.
Returns:
User name found in .gitconfig file, or None if none
"""
uname = command.OutputOneLine('git', 'config', '--global', 'user.name')
return uname
def GetDefaultUserEmail():
"""Gets the user.email from the global .gitconfig file.
Returns:
User's email found in .gitconfig file, or None if none
"""
uemail = command.OutputOneLine('git', 'config', '--global', 'user.email')
return uemail
def Setup():
"""Set up git utils, by reading the alias files."""
# Check for a git alias file also
alias_fname = GetAliasFile()
if alias_fname:
settings.ReadGitAliases(alias_fname)
def GetHead():
"""Get the hash of the current HEAD
Returns:
Hash of HEAD
"""
return command.OutputOneLine('git', 'show', '-s', '--pretty=format:%H')
if __name__ == "__main__":
import doctest
doctest.testmod()
|
lkylei/ten_thousand
|
roms/u-boot/tools/patman/gitutil.py
|
Python
|
gpl-2.0
| 18,813 | 0.002232 |
# Copyright 2010-2012 Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Created on 31. mars 2012
@author: yngve
'''
# List of status codes used in both web prober and the batch prober
RESULTC_CH_MINLEN_NOT_TESTED = "CMNT" # Client Hello Minimum length not tested
RESULTC_CH_MINLEN_PASSED_256 = "CMP1" # Client Hello Minimum length 256 passed
RESULTC_CH_MINLEN_FAILED_256 = "CMF1" # Client Hello Minimum length 256 failed
RESULTC_CH_MINLEN_FAILED_256_30 = "CM10" # Client Hello Minimum length 256 failed SSL v3
RESULTC_CH_MINLEN_FAILED_256_31 = "CM11" # Client Hello Minimum length 256 failed TLS 1.0
RESULTC_CH_MINLEN_FAILED_256_33 = "CM13" # Client Hello Minimum length 256 failed TLS 1.2
RESULTC_HIGHER_RECV_TEST = "HRVT" # Higher record versions than TLS 1.0 tested during handshake
RESULTC_HIGHER_RECV_NOTEST = "HRVN" # Higher record versions than TLS 1.0 not tested during handshake
RESULTC_RECV_ANY_FAILED = "HRAF" # Higher Record version tested during handshake, some failed
RESULTC_RECV_ANY_PASSED = "HRAP" # Higher Record version tested during handshake, all passed
RESULTC_RECV_32_FAILED = "HR2F" # Record version TLS 1.1 tested during handshake, failed
RESULTC_RECV_32_PASSED = "HR2P" # Record version TLS 1.1 tested during handshake, passed
RESULTC_RECV_33_FAILED = "HR3F" # Record version TLS 1.2 tested during handshake, failed
RESULTC_RECV_33_PASSED = "HR3P" # Record version TLS 1.2 tested during handshake, passed
TRESULTC_VALUES = (
(RESULTC_CH_MINLEN_NOT_TESTED, "Client Hello Minimum length not tested"),
(RESULTC_CH_MINLEN_PASSED_256, "Client Hello Minimum length 256 passed"),
(RESULTC_CH_MINLEN_FAILED_256, "Client Hello Minimum length 256 failed"),
(RESULTC_CH_MINLEN_FAILED_256_30,"Client Hello Minimum length 256 failed SSL v3"),
(RESULTC_CH_MINLEN_FAILED_256_31,"Client Hello Minimum length 256 failed TLS 1.0"),
(RESULTC_CH_MINLEN_FAILED_256_33,"Client Hello Minimum length 256 failed TLS 1.2"),
(RESULTC_HIGHER_RECV_TEST, "Higher record versions than TLS 1.0 tested during handshake"),
(RESULTC_HIGHER_RECV_NOTEST, "Higher record versions than TLS 1.0 not tested during handshake"),
(RESULTC_RECV_ANY_FAILED, "Higher Record version tested during handshake, some failed"),
(RESULTC_RECV_ANY_PASSED, "Higher Record version tested during handshake, all passed"),
(RESULTC_RECV_32_FAILED, "Record version TLS 1.1 tested during handshake, failed"),
(RESULTC_RECV_32_PASSED, "Record version TLS 1.1 tested during handshake, passed"),
(RESULTC_RECV_33_FAILED, "Record version TLS 1.2 tested during handshake, failed"),
(RESULTC_RECV_33_PASSED, "Record version TLS 1.2 tested during handshake, passed"),
)
TRESULTC_VALUES_dict = dict(TRESULTC_VALUES)
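# Hedged lookup sketch: the dict maps status code -> explanation, e.g.
#   TRESULTC_VALUES_dict[RESULTC_CH_MINLEN_PASSED_256]
#   -> "Client Hello Minimum length 256 passed"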
# Check for duplicates and missing status codes
__values_set = {}
for __result_var in dir():
if not __result_var.startswith("RESULTC_") or __result_var.startswith("RESULTC_VALUES"):
continue
if eval(__result_var) not in TRESULTC_VALUES_dict:
raise Exception("Entry %s was not present in RESULTC_VALUES list" % (__result_var,))
if eval(__result_var) in __values_set:
print "Double entry in RESULTC_* enum values: ", __result_var, ". Matches ", __values_set[ eval(__result_var)]
raise Exception("Double entry in RESULTC_* enum values: " + __result_var+ ". Matches "+ __values_set[ eval(__result_var)])
__values_set[eval(__result_var)] = __result_var
if any([len([__y for __y in TRESULTC_VALUES if __x[0] == __y[0]])>1 for __x in TRESULTC_VALUES]):
print "Double entry in RESULTC_* enum values"
raise Exception("Double entry in RESULTC_* enum values")
# Note: __x != __y excludes the entry itself, so a single duplicated
# explanation yields a count of 1; test for > 0 rather than > 1.
if any([len([__y for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]]) > 0 for __x in TRESULTC_VALUES]):
    print "Double explanation entry in RESULTC_* enum values", str([__z for __z in [[(__x,__y) for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]] for __x in TRESULTC_VALUES] if len(__z) > 0])
    raise Exception("Double explanation entry in RESULTC_* enum values" + str([__z for __z in [[(__x,__y) for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]] for __x in TRESULTC_VALUES] if len(__z) > 0]))
|
operasoftware/tlscommon
|
test_results.py
|
Python
|
apache-2.0
| 4,642 | 0.019604 |
import game
import server
class tt2(game.Game):
class TT2Process(server.GameProcess):
def memory_percent_usage(self):
return 0.0
def __init__(self, server, name):
game.Game.__init__(self, server, name)
self.process_class = self.TT2Process
def get_option(self, query):
if query['misere'] == 'yes':
return 1
def respond_to_unknown_request(self, req):
if req.command == 'getOptions':
options = [{'misere': 'yes',
'number': 1,
'width': 6,
'height': 3},
{'misere': 'no',
'number': 2,
'width': 6,
'height': 3},
{}]
req.respond(self.format_parsed(
{'status': 'ok',
'response': options}))
else:
            raise NotImplementedError()
|
GamesCrafters/GamesmanClassic
|
src/py/games/tt2.py
|
Python
|
gpl-2.0
| 963 | 0.001038 |
#! /usr/bin/env python
"""Post a new MT entry"""
# username, password, blogid, publish
from settings import *
import xmlrpclib
def post(content):
"""Post an entry to a blog. Return postid on success."""
content.check()
weblogContent = { 'title' : content.getTitle(),
'description' : content.getEntry() }
server = xmlrpclib.ServerProxy(uri)
# on success, result should be an integer representing a postid
result = server.metaWeblog.newPost(blogid, username, password,
weblogContent, publish)
return result
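
# Hedged usage sketch: `content` is assumed to be any object exposing
# check(), getTitle() and getEntry() (e.g. the mail2entry content class).
#   postid = post(content)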
|
false-git/mail2entry
|
postentry.py
|
Python
|
gpl-2.0
| 621 | 0.011272 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._recoverable_databases_operations import RecoverableDatabasesOperations
from ._data_masking_policies_operations import DataMaskingPoliciesOperations
from ._data_masking_rules_operations import DataMaskingRulesOperations
from ._geo_backup_policies_operations import GeoBackupPoliciesOperations
from ._databases_operations import DatabasesOperations
from ._elastic_pools_operations import ElasticPoolsOperations
from ._replication_links_operations import ReplicationLinksOperations
from ._server_communication_links_operations import ServerCommunicationLinksOperations
from ._service_objectives_operations import ServiceObjectivesOperations
from ._elastic_pool_activities_operations import ElasticPoolActivitiesOperations
from ._elastic_pool_database_activities_operations import ElasticPoolDatabaseActivitiesOperations
from ._server_usages_operations import ServerUsagesOperations
from ._extended_database_blob_auditing_policies_operations import ExtendedDatabaseBlobAuditingPoliciesOperations
from ._extended_server_blob_auditing_policies_operations import ExtendedServerBlobAuditingPoliciesOperations
from ._server_blob_auditing_policies_operations import ServerBlobAuditingPoliciesOperations
from ._database_blob_auditing_policies_operations import DatabaseBlobAuditingPoliciesOperations
from ._database_advisors_operations import DatabaseAdvisorsOperations
from ._database_automatic_tuning_operations import DatabaseAutomaticTuningOperations
from ._database_columns_operations import DatabaseColumnsOperations
from ._database_recommended_actions_operations import DatabaseRecommendedActionsOperations
from ._database_schemas_operations import DatabaseSchemasOperations
from ._database_security_alert_policies_operations import DatabaseSecurityAlertPoliciesOperations
from ._database_tables_operations import DatabaseTablesOperations
from ._database_vulnerability_assessment_rule_baselines_operations import DatabaseVulnerabilityAssessmentRuleBaselinesOperations
from ._database_vulnerability_assessments_operations import DatabaseVulnerabilityAssessmentsOperations
from ._database_vulnerability_assessment_scans_operations import DatabaseVulnerabilityAssessmentScansOperations
from ._data_warehouse_user_activities_operations import DataWarehouseUserActivitiesOperations
from ._deleted_servers_operations import DeletedServersOperations
from ._elastic_pool_operations_operations import ElasticPoolOperationsOperations
from ._encryption_protectors_operations import EncryptionProtectorsOperations
from ._failover_groups_operations import FailoverGroupsOperations
from ._firewall_rules_operations import FirewallRulesOperations
from ._instance_failover_groups_operations import InstanceFailoverGroupsOperations
from ._instance_pools_operations import InstancePoolsOperations
from ._job_agents_operations import JobAgentsOperations
from ._job_credentials_operations import JobCredentialsOperations
from ._job_executions_operations import JobExecutionsOperations
from ._jobs_operations import JobsOperations
from ._job_step_executions_operations import JobStepExecutionsOperations
from ._job_steps_operations import JobStepsOperations
from ._job_target_executions_operations import JobTargetExecutionsOperations
from ._job_target_groups_operations import JobTargetGroupsOperations
from ._job_versions_operations import JobVersionsOperations
from ._capabilities_operations import CapabilitiesOperations
from ._long_term_retention_policies_operations import LongTermRetentionPoliciesOperations
from ._maintenance_window_options_operations import MaintenanceWindowOptionsOperations
from ._maintenance_windows_operations import MaintenanceWindowsOperations
from ._managed_backup_short_term_retention_policies_operations import ManagedBackupShortTermRetentionPoliciesOperations
from ._managed_database_columns_operations import ManagedDatabaseColumnsOperations
from ._managed_database_queries_operations import ManagedDatabaseQueriesOperations
from ._managed_database_restore_details_operations import ManagedDatabaseRestoreDetailsOperations
from ._managed_databases_operations import ManagedDatabasesOperations
from ._managed_database_schemas_operations import ManagedDatabaseSchemasOperations
from ._managed_database_security_alert_policies_operations import ManagedDatabaseSecurityAlertPoliciesOperations
from ._managed_database_security_events_operations import ManagedDatabaseSecurityEventsOperations
from ._managed_database_sensitivity_labels_operations import ManagedDatabaseSensitivityLabelsOperations
from ._managed_database_recommended_sensitivity_labels_operations import ManagedDatabaseRecommendedSensitivityLabelsOperations
from ._managed_database_tables_operations import ManagedDatabaseTablesOperations
from ._managed_database_transparent_data_encryption_operations import ManagedDatabaseTransparentDataEncryptionOperations
from ._managed_database_vulnerability_assessment_rule_baselines_operations import ManagedDatabaseVulnerabilityAssessmentRuleBaselinesOperations
from ._managed_database_vulnerability_assessments_operations import ManagedDatabaseVulnerabilityAssessmentsOperations
from ._managed_database_vulnerability_assessment_scans_operations import ManagedDatabaseVulnerabilityAssessmentScansOperations
from ._managed_instance_administrators_operations import ManagedInstanceAdministratorsOperations
from ._managed_instance_azure_ad_only_authentications_operations import ManagedInstanceAzureADOnlyAuthenticationsOperations
from ._managed_instance_encryption_protectors_operations import ManagedInstanceEncryptionProtectorsOperations
from ._managed_instance_keys_operations import ManagedInstanceKeysOperations
from ._managed_instance_long_term_retention_policies_operations import ManagedInstanceLongTermRetentionPoliciesOperations
from ._managed_instance_operations_operations import ManagedInstanceOperationsOperations
from ._managed_instance_private_endpoint_connections_operations import ManagedInstancePrivateEndpointConnectionsOperations
from ._managed_instance_private_link_resources_operations import ManagedInstancePrivateLinkResourcesOperations
from ._managed_instance_tde_certificates_operations import ManagedInstanceTdeCertificatesOperations
from ._managed_instance_vulnerability_assessments_operations import ManagedInstanceVulnerabilityAssessmentsOperations
from ._managed_restorable_dropped_database_backup_short_term_retention_policies_operations import ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations
from ._managed_server_security_alert_policies_operations import ManagedServerSecurityAlertPoliciesOperations
from ._operations import Operations
from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
from ._private_link_resources_operations import PrivateLinkResourcesOperations
from ._recoverable_managed_databases_operations import RecoverableManagedDatabasesOperations
from ._restore_points_operations import RestorePointsOperations
from ._sensitivity_labels_operations import SensitivityLabelsOperations
from ._recommended_sensitivity_labels_operations import RecommendedSensitivityLabelsOperations
from ._server_advisors_operations import ServerAdvisorsOperations
from ._server_automatic_tuning_operations import ServerAutomaticTuningOperations
from ._server_azure_ad_administrators_operations import ServerAzureADAdministratorsOperations
from ._server_azure_ad_only_authentications_operations import ServerAzureADOnlyAuthenticationsOperations
from ._server_dev_ops_audit_settings_operations import ServerDevOpsAuditSettingsOperations
from ._server_dns_aliases_operations import ServerDnsAliasesOperations
from ._server_keys_operations import ServerKeysOperations
from ._server_operations_operations import ServerOperationsOperations
from ._server_security_alert_policies_operations import ServerSecurityAlertPoliciesOperations
from ._server_trust_groups_operations import ServerTrustGroupsOperations
from ._server_vulnerability_assessments_operations import ServerVulnerabilityAssessmentsOperations
from ._sql_agent_operations import SqlAgentOperations
from ._subscription_usages_operations import SubscriptionUsagesOperations
from ._sync_agents_operations import SyncAgentsOperations
from ._sync_groups_operations import SyncGroupsOperations
from ._sync_members_operations import SyncMembersOperations
from ._tde_certificates_operations import TdeCertificatesOperations
from ._time_zones_operations import TimeZonesOperations
from ._virtual_clusters_operations import VirtualClustersOperations
from ._virtual_network_rules_operations import VirtualNetworkRulesOperations
from ._workload_classifiers_operations import WorkloadClassifiersOperations
from ._workload_groups_operations import WorkloadGroupsOperations
from ._transparent_data_encryptions_operations import TransparentDataEncryptionsOperations
from ._backup_short_term_retention_policies_operations import BackupShortTermRetentionPoliciesOperations
from ._database_extensions_operations import DatabaseExtensionsOperations
from ._database_operations_operations import DatabaseOperationsOperations
from ._database_usages_operations import DatabaseUsagesOperations
from ._ledger_digest_uploads_operations import LedgerDigestUploadsOperations
from ._outbound_firewall_rules_operations import OutboundFirewallRulesOperations
from ._servers_operations import ServersOperations
from ._usages_operations import UsagesOperations
from ._long_term_retention_backups_operations import LongTermRetentionBackupsOperations
from ._long_term_retention_managed_instance_backups_operations import LongTermRetentionManagedInstanceBackupsOperations
from ._managed_instances_operations import ManagedInstancesOperations
from ._restorable_dropped_databases_operations import RestorableDroppedDatabasesOperations
from ._restorable_dropped_managed_databases_operations import RestorableDroppedManagedDatabasesOperations
from ._server_connection_policies_operations import ServerConnectionPoliciesOperations
from ._distributed_availability_groups_operations import DistributedAvailabilityGroupsOperations
from ._server_trust_certificates_operations import ServerTrustCertificatesOperations
from ._ipv6_firewall_rules_operations import IPv6FirewallRulesOperations
__all__ = [
'RecoverableDatabasesOperations',
'DataMaskingPoliciesOperations',
'DataMaskingRulesOperations',
'GeoBackupPoliciesOperations',
'DatabasesOperations',
'ElasticPoolsOperations',
'ReplicationLinksOperations',
'ServerCommunicationLinksOperations',
'ServiceObjectivesOperations',
'ElasticPoolActivitiesOperations',
'ElasticPoolDatabaseActivitiesOperations',
'ServerUsagesOperations',
'ExtendedDatabaseBlobAuditingPoliciesOperations',
'ExtendedServerBlobAuditingPoliciesOperations',
'ServerBlobAuditingPoliciesOperations',
'DatabaseBlobAuditingPoliciesOperations',
'DatabaseAdvisorsOperations',
'DatabaseAutomaticTuningOperations',
'DatabaseColumnsOperations',
'DatabaseRecommendedActionsOperations',
'DatabaseSchemasOperations',
'DatabaseSecurityAlertPoliciesOperations',
'DatabaseTablesOperations',
'DatabaseVulnerabilityAssessmentRuleBaselinesOperations',
'DatabaseVulnerabilityAssessmentsOperations',
'DatabaseVulnerabilityAssessmentScansOperations',
'DataWarehouseUserActivitiesOperations',
'DeletedServersOperations',
'ElasticPoolOperationsOperations',
'EncryptionProtectorsOperations',
'FailoverGroupsOperations',
'FirewallRulesOperations',
'InstanceFailoverGroupsOperations',
'InstancePoolsOperations',
'JobAgentsOperations',
'JobCredentialsOperations',
'JobExecutionsOperations',
'JobsOperations',
'JobStepExecutionsOperations',
'JobStepsOperations',
'JobTargetExecutionsOperations',
'JobTargetGroupsOperations',
'JobVersionsOperations',
'CapabilitiesOperations',
'LongTermRetentionPoliciesOperations',
'MaintenanceWindowOptionsOperations',
'MaintenanceWindowsOperations',
'ManagedBackupShortTermRetentionPoliciesOperations',
'ManagedDatabaseColumnsOperations',
'ManagedDatabaseQueriesOperations',
'ManagedDatabaseRestoreDetailsOperations',
'ManagedDatabasesOperations',
'ManagedDatabaseSchemasOperations',
'ManagedDatabaseSecurityAlertPoliciesOperations',
'ManagedDatabaseSecurityEventsOperations',
'ManagedDatabaseSensitivityLabelsOperations',
'ManagedDatabaseRecommendedSensitivityLabelsOperations',
'ManagedDatabaseTablesOperations',
'ManagedDatabaseTransparentDataEncryptionOperations',
'ManagedDatabaseVulnerabilityAssessmentRuleBaselinesOperations',
'ManagedDatabaseVulnerabilityAssessmentsOperations',
'ManagedDatabaseVulnerabilityAssessmentScansOperations',
'ManagedInstanceAdministratorsOperations',
'ManagedInstanceAzureADOnlyAuthenticationsOperations',
'ManagedInstanceEncryptionProtectorsOperations',
'ManagedInstanceKeysOperations',
'ManagedInstanceLongTermRetentionPoliciesOperations',
'ManagedInstanceOperationsOperations',
'ManagedInstancePrivateEndpointConnectionsOperations',
'ManagedInstancePrivateLinkResourcesOperations',
'ManagedInstanceTdeCertificatesOperations',
'ManagedInstanceVulnerabilityAssessmentsOperations',
'ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations',
'ManagedServerSecurityAlertPoliciesOperations',
'Operations',
'PrivateEndpointConnectionsOperations',
'PrivateLinkResourcesOperations',
'RecoverableManagedDatabasesOperations',
'RestorePointsOperations',
'SensitivityLabelsOperations',
'RecommendedSensitivityLabelsOperations',
'ServerAdvisorsOperations',
'ServerAutomaticTuningOperations',
'ServerAzureADAdministratorsOperations',
'ServerAzureADOnlyAuthenticationsOperations',
'ServerDevOpsAuditSettingsOperations',
'ServerDnsAliasesOperations',
'ServerKeysOperations',
'ServerOperationsOperations',
'ServerSecurityAlertPoliciesOperations',
'ServerTrustGroupsOperations',
'ServerVulnerabilityAssessmentsOperations',
'SqlAgentOperations',
'SubscriptionUsagesOperations',
'SyncAgentsOperations',
'SyncGroupsOperations',
'SyncMembersOperations',
'TdeCertificatesOperations',
'TimeZonesOperations',
'VirtualClustersOperations',
'VirtualNetworkRulesOperations',
'WorkloadClassifiersOperations',
'WorkloadGroupsOperations',
'TransparentDataEncryptionsOperations',
'BackupShortTermRetentionPoliciesOperations',
'DatabaseExtensionsOperations',
'DatabaseOperationsOperations',
'DatabaseUsagesOperations',
'LedgerDigestUploadsOperations',
'OutboundFirewallRulesOperations',
'ServersOperations',
'UsagesOperations',
'LongTermRetentionBackupsOperations',
'LongTermRetentionManagedInstanceBackupsOperations',
'ManagedInstancesOperations',
'RestorableDroppedDatabasesOperations',
'RestorableDroppedManagedDatabasesOperations',
'ServerConnectionPoliciesOperations',
'DistributedAvailabilityGroupsOperations',
'ServerTrustCertificatesOperations',
'IPv6FirewallRulesOperations',
]
|
Azure/azure-sdk-for-python
|
sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py
|
Python
|
mit
| 15,615 | 0.004163 |
# -*- coding: utf-8 -*-
##########################################################################
#
#    Copyright 2015 Vauxoo
#    Copyright (C) 2009-2011 Akretion, Emmanuel Samyn, Benoît Guillot
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##########################################################################
from openerp import fields, models
class ReturnInstruction(models.Model):
_name = "return.instruction"
_description = "Instructions for product return"
name = fields.Char('Title', required=True)
instructions = fields.Text(
'Instructions',
help="Instructions for product return")
is_default = fields.Boolean('Is default',
help="If is default, will be use "
"to set the default value in "
"supplier infos. Be careful to "
"have only one default")
|
damdam-s/rma
|
product_warranty/models/return_instruction.py
|
Python
|
agpl-3.0
| 2,163 | 0 |
import ast
from frontend.context import Context
from frontend.stubs.stubs_paths import libraries
class ImportHandler:
"""Handler for importing other modules during the type inference"""
@staticmethod
def get_ast(path, module_name):
"""Get the AST of a python module
:param path: the path to the python module
:param module_name: the name of the python module
"""
        try:
            with open(path) as r:
                tree = ast.parse(r.read())
        except FileNotFoundError:
            raise ImportError("No module named {}.".format(module_name))
        return tree
@staticmethod
def get_module_ast(module_name, base_folder):
"""Get the AST of a python module
:param module_name: the name of the python module
:param base_folder: the base folder containing the python module
"""
return ImportHandler.get_ast("{}/{}.py".format(base_folder, module_name), module_name)
@staticmethod
def get_builtin_ast(module_name):
"""Return the AST of a built-in module"""
return ImportHandler.get_ast(libraries[module_name], module_name)
@staticmethod
def infer_import(module_name, base_folder, infer_func, solver):
"""Infer the types of a python module"""
context = Context()
if ImportHandler.is_builtin(module_name):
solver.stubs_handler.infer_builtin_lib(module_name, context, solver,
solver.config.used_names, infer_func)
else:
t = ImportHandler.get_module_ast(module_name, base_folder)
solver.infer_stubs(context, infer_func)
for stmt in t.body:
infer_func(stmt, context, solver)
return context
@staticmethod
def is_builtin(module_name):
"""Check if the imported python module is builtin"""
return module_name in libraries
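
# Hedged usage sketch (module and folder names are made up):
#   if ImportHandler.is_builtin('math'):
#       tree = ImportHandler.get_builtin_ast('math')
#   else:
#       tree = ImportHandler.get_module_ast('mymodule', 'tests/imports')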
|
gitsimon/spadup-lyra
|
frontend/import_handler.py
|
Python
|
mpl-2.0
| 1,952 | 0.002049 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from tests.testing_utils import testing
import highend
class TestLogin(testing.AppengineTestCase):
app_module = highend.app
def test_login(self):
response = self.test_app.get('/login')
self.assertEquals(302, response.status_code)
self.assertEquals(
('https://www.google.com/accounts/Login?' +
'continue=http%3A//testbed.example.com/'),
response.location)
|
nicko96/Chrome-Infra
|
appengine/chromium_cq_status/tests/login_test.py
|
Python
|
bsd-3-clause
| 563 | 0.003552 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
jsfenfen/django-calaccess-raw-data
|
example/manage.py
|
Python
|
mit
| 304 | 0 |
import verifytree.VerifyTree as P
import pytest
import os
import logging
import smtplib
from mock import Mock
from mock import patch, call
from mock import MagicMock
from mock import PropertyMock
class Testverifytree:
def setup(self):
self.p = P.VerifyTree()
|
virantha/verifytree
|
test/test_verifytree.py
|
Python
|
apache-2.0
| 275 | 0 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'LikedComment'
db.delete_table('forum_likedcomment')
# Deleting model 'Comment'
db.delete_table(u'comment')
# Adding field 'Node.abs_parent'
db.add_column('forum_node', 'abs_parent', self.gf('django.db.models.fields.related.ForeignKey')(related_name='all_children', null=True, to=orm['forum.Node']), keep_default=False)
# Changing field 'Question.last_activity_by'
db.alter_column(u'question', 'last_activity_by_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['forum.User']))
def backwards(self, orm):
# Adding model 'LikedComment'
db.create_table('forum_likedcomment', (
('comment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.Comment'])),
('canceled', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.User'])),
('added_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('forum', ['LikedComment'])
# Adding model 'Comment'
db.create_table(u'comment', (
('comment', self.gf('django.db.models.fields.CharField')(max_length=300)),
('node', self.gf('django.db.models.fields.related.ForeignKey')(related_name='comments', null=True, to=orm['forum.Node'])),
('deleted', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('added_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('deleted_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='deleted_comments', null=True, to=orm['forum.User'], blank=True)),
('score', self.gf('django.db.models.fields.IntegerField')(default=0)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='comments', to=orm['forum.User'])),
('deleted_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('forum', ['Comment'])
# Deleting field 'Node.abs_parent'
db.delete_column('forum_node', 'abs_parent_id')
# Changing field 'Question.last_activity_by'
db.alter_column(u'question', 'last_activity_by_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.User']))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'forum.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']"})
},
'forum.anonymousnode': {
'Meta': {'object_name': 'AnonymousNode', '_ormbases': ['forum.Node']},
'convertible_to': ('django.db.models.fields.CharField', [], {'default': "'node'", 'max_length': '16'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['forum.Node']", 'unique': 'True', 'primary_key': 'True'}),
'validation_hash': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_content'", 'to': "orm['forum.Node']"})
},
'forum.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'accepted_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']", 'null': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['forum.Node']", 'unique': 'True', 'primary_key': 'True'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.authkeyuserassociation': {
'Meta': {'object_name': 'AuthKeyUserAssociation'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_keys'", 'to': "orm['forum.User']"})
},
'forum.award': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user', 'badge'),)", 'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['forum.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'awards'", 'to': "orm['forum.User']"})
},
'forum.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'through': "'Award'", 'to': "orm['forum.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'forum.favoritequestion': {
'Meta': {'unique_together': "(('question', 'user'),)", 'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['forum.User']"})
},
'forum.flaggeditem': {
'Meta': {'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'canceled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'null': 'True', 'to': "orm['forum.Node']"}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['forum.User']"})
},
'forum.keyvalue': {
'Meta': {'object_name': 'KeyValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'value': ('forum.models.utils.PickledObjectField', [], {})
},
'forum.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['forum.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['forum.User']"})
},
'forum.node': {
'Meta': {'object_name': 'Node'},
'abs_parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_children'", 'null': 'True', 'to': "orm['forum.Node']"}),
'active_revision': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'active'", 'unique': 'True', 'null': 'True', 'to': "orm['forum.NodeRevision']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'nodes'", 'to': "orm['forum.User']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_nodes'", 'null': 'True', 'to': "orm['forum.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_nodes'", 'null': 'True', 'to': "orm['forum.User']"}),
'node_type': ('django.db.models.fields.CharField', [], {'default': "'node'", 'max_length': '16'}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'null': 'True', 'to': "orm['forum.Node']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'nodes'", 'to': "orm['forum.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'forum.noderevision': {
'Meta': {'unique_together': "(('node', 'revision'),)", 'object_name': 'NodeRevision'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'noderevisions'", 'to': "orm['forum.User']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Node']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'forum.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'accepted_answer': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'question_accepting'", 'unique': 'True', 'null': 'True', 'to': "orm['forum.Answer']"}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['forum.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'through': "'FavoriteQuestion'", 'to': "orm['forum.User']"}),
'favourite_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'null': 'True', 'to': "orm['forum.User']"}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['forum.Node']", 'unique': 'True'}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'subscriptions'", 'through': "'QuestionSubscription'", 'to': "orm['forum.User']"}),
'view_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.questionsubscription': {
'Meta': {'object_name': 'QuestionSubscription'},
'auto_subscription': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_view': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 4, 17, 1, 11, 40, 975000)'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']"})
},
'forum.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'canceled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputes'", 'null': 'True', 'to': "orm['forum.Node']"}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputes'", 'to': "orm['forum.User']"}),
'user_previous_rep': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'forum.subscriptionsettings': {
'Meta': {'object_name': 'SubscriptionSettings'},
'all_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'all_questions_watched_tags': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'enable_notifications': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member_joins': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'new_question': ('django.db.models.fields.CharField', [], {'default': "'d'", 'max_length': '1'}),
'new_question_watched_tags': ('django.db.models.fields.CharField', [], {'default': "'i'", 'max_length': '1'}),
'notify_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'notify_answers': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'notify_comments': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'notify_comments_own_post': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'notify_reply_to_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'questions_answered': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'questions_asked': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'questions_commented': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'questions_viewed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'subscribed_questions': ('django.db.models.fields.CharField', [], {'default': "'i'", 'max_length': '1'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'subscription_settings'", 'unique': 'True', 'to': "orm['forum.User']"})
},
'forum.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['forum.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['forum.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marked_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'marked_tags'", 'through': "'MarkedTag'", 'to': "orm['forum.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'forum.user': {
'Meta': {'object_name': 'User', '_ormbases': ['auth.User']},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'forum.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 4, 18, 1, 11, 41, 269000)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']"})
},
'forum.vote': {
'Meta': {'object_name': 'Vote', 'db_table': "u'vote'"},
'canceled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'null': 'True', 'to': "orm['forum.Node']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['forum.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
}
}
complete_apps = ['forum']
|
CLLKazan/iCQA
|
qa-engine/forum/migrations/0019_auto__del_likedcomment__del_comment__add_field_node_abs_parent__chg_fi.py
|
Python
|
gpl-3.0
| 27,368 | 0.008112 |
#!/usr/bin/env python
#
# This is used to verify that all the dependent libraries of a Mac OS X executable
# are present and that they are backwards compatible with at least 10.5.
# Run with an executable as parameter
# Will return 0 if the executable and all libraries are OK
# Returns != 0 and prints some textual description on error
#
# Author: Marius Kintel <marius@kintel.net>
#
# This script lives here:
# https://github.com/kintel/MacOSX-tools
#
import sys
import os
import subprocess
import re
DEBUG = False
def usage():
print >> sys.stderr, "Usage: " + sys.argv[0] + " <executable>"
sys.exit(1)
# Try to find the given library by searching in the typical locations
# Returns the full path to the library or None if the library is not found.
def lookup_library(file):
found = None
if not re.match("/", file):
if re.search("@executable_path", file):
abs = re.sub("^@executable_path", executable_path, file)
if os.path.exists(abs): found = abs
if DEBUG: print "Lib in @executable_path found: " + found
elif re.search("\.app/", file):
found = file
if DEBUG: print "App found: " + found
elif re.search("\.framework/", file):
found = os.path.join("/Library/Frameworks", file)
if DEBUG: print "Framework found: " + found
else:
            for path in os.getenv("DYLD_LIBRARY_PATH", "").split(':'):
abs = os.path.join(path, file)
if os.path.exists(abs): found = abs
if DEBUG: print "Library found: " + found
else:
found = file
return found
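# Illustrative sketch of lookup_library() (the paths below are hypothetical):
#   lookup_library("QtCore.framework/Versions/4/QtCore")
#       -> "/Library/Frameworks/QtCore.framework/Versions/4/QtCore"
#   lookup_library("libfoo.dylib") with DYLD_LIBRARY_PATH=/opt/lib
#       -> "/opt/lib/libfoo.dylib" (if that file exists)
#   lookup_library("/usr/lib/libz.dylib")
#       -> "/usr/lib/libz.dylib" (absolute paths pass through unchanged)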
# Returns a list of dependent libraries, excluding system libs
def find_dependencies(file):
libs = []
args = ["otool", "-L", file]
if DEBUG: print "Executing " + " ".join(args)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output,err = p.communicate()
if p.returncode != 0:
print "Failed with return code " + str(p.returncode) + ":"
print err
return None
deps = output.split('\n')
for dep in deps:
# print dep
dep = re.sub(".*:$", "", dep) # Take away header line
dep = re.sub("^\t", "", dep) # Remove initial tabs
dep = re.sub(" \(.*\)$", "", dep) # Remove trailing parentheses
if len(dep) > 0 and not re.search("/System/Library", dep) and not re.search("/usr/lib", dep):
libs.append(dep)
return libs
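# Rough sketch of the parsing above, for hypothetical otool -L output:
#   /tmp/MyApp.app/Contents/MacOS/MyApp:
#       @executable_path/../Frameworks/libfoo.dylib (compatibility version 1.0.0, current version 1.0.0)
#       /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 125.0.0)
# would return ["@executable_path/../Frameworks/libfoo.dylib"]; entries under
# /usr/lib and /System/Library are treated as system libraries and skipped.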
def validate_lib(lib):
p = subprocess.Popen(["otool", "-l", lib], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0: return False
if re.search("LC_DYLD_INFO_ONLY", output):
print "Error: Requires Snow Leopard: " + lib
return False
p = subprocess.Popen(["lipo", lib, "-verify_arch", "x86_64"], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0:
print "Error: x86_64 architecture not supported: " + lib
return False
p = subprocess.Popen(["lipo", lib, "-verify_arch", "i386"], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0:
print "Error: i386 architecture not supported: " + lib
return False
return True
if __name__ == '__main__':
error = False
if len(sys.argv) != 2: usage()
executable = sys.argv[1]
if DEBUG: print "Processing " + executable
executable_path = os.path.dirname(executable)
    # processed is a dict {libname : [parents]} - each parent is dependent on libname
processed = {}
pending = [executable]
processed[executable] = []
while len(pending) > 0:
dep = pending.pop()
if DEBUG: print "Evaluating " + dep
deps = find_dependencies(dep)
assert(deps)
        for d in deps:
            absfile = lookup_library(d)
            # check for a missing library first: lookup_library() returns None
            # when nothing is found, and re.match() would fail on None
            if absfile == None:
                print "Not found: " + d
                print " ..required by " + str(processed[dep])
                error = True
                continue
            if not re.match(executable_path, absfile):
                print "Error: External dependency " + d
                sys.exit(1)
if absfile in processed:
processed[absfile].append(dep)
else:
processed[absfile] = [dep]
if DEBUG: print "Pending: " + absfile
pending.append(absfile)
for dep in processed:
if DEBUG: print "Validating: " + dep
# print " " + str(processed[dep])
if not validate_lib(dep):
print "..required by " + str(processed[dep])
error = True
if error: sys.exit(1)
else: sys.exit(0)
|
battlesnake/OpenSCAD
|
scripts/macosx-sanity-check.py
|
Python
|
gpl-2.0
| 4,696 | 0.010009 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
import time
import datetime
import inspect
import simplejson as json
def outnode(data):
"""Outnode"""
a = int(data[2])
b = int(data[3])
c = int(data[4])
d = int(data[5])
e = int(data[6])
f = int(data[7])
g = int(data[8])
h = int(data[9])
i = int(data[10])
j = int(data[11])
    name = inspect.stack()[0][3] # taken from the function name
timestamp = int(time.mktime(datetime.datetime.now().timetuple())) #unix time
template = ({
'name':name,
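        # sign-extend the 10-bit temperature reading (low byte in e, top two
        # bits in f): XOR with 512 then subtracting 512 maps bit 9 to the sign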
'temp': str(((256 * (f&3) + e) ^ 512) - 512),
'batvol':str((256 * j) + i),
'timestamp':timestamp
})
return dict((k,v) for (k,v) in template.iteritems())
|
artekw/sensmon
|
sensnode/decoders/outnode.py
|
Python
|
mit
| 761 | 0.015769 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
# pylint: disable=C0103
"""
BibEdit CLI tool.
Usage: bibedit [options]
General options::
-h, --help print this help
-V, --version print version number
Options to inspect record history::
--list-revisions [recid] list all revisions of a record
--list-revisions-details [recid] list detailed revisions of a record
--get-revision [recid.revdate] print MARCXML of given record revision
--diff-revisions [recidA.revdateB] [recidC.revdateD] print MARCXML difference between
record A dated B and record C dated D
--revert-to-revision [recid.revdate] submit given record revision to
become current revision
--check-revisions [recid] check if revisions are not corrupted
(* stands for all records)
--fix-revisions [recid] fix revisions that are corrupted
(* stands for all records)
--clean-revisions [recid] clean duplicate revisions
(* stands for all records)
"""
__revision__ = "$Id$"
import sys
import zlib
from invenio.legacy.dbquery import run_sql
from intbitset import intbitset
from invenio.legacy.bibedit.utils import get_marcxml_of_revision_id, \
get_record_revision_ids, get_xml_comparison, record_locked_by_other_user, \
record_locked_by_queue, revision_format_valid_p, save_xml_record, \
split_revid, get_info_of_revision_id, get_record_revisions
from invenio.legacy.bibrecord import create_record, records_identical
def print_usage():
"""Print help."""
print(__doc__)
def print_version():
"""Print version information."""
print(__revision__)
def cli_clean_revisions(recid, dry_run=True, verbose=True):
"""Clean revisions of the given recid, by removing duplicate revisions
that do not change the content of the record."""
if recid == '*':
recids = intbitset(run_sql("SELECT DISTINCT id_bibrec FROM hstRECORD"))
else:
try:
recids = [int(recid)]
except ValueError:
print('ERROR: record ID must be integer, not %s.' % recid)
sys.exit(1)
for recid in recids:
all_revisions = run_sql("SELECT marcxml, job_id, job_name, job_person, job_date FROM hstRECORD WHERE id_bibrec=%s ORDER BY job_date ASC", (recid,))
previous_rec = {}
deleted_revisions = 0
for marcxml, job_id, job_name, job_person, job_date in all_revisions:
try:
current_rec = create_record(zlib.decompress(marcxml))[0]
except Exception:
print("ERROR: corrupted revisions found. Please run %s --fix-revisions '*'" % sys.argv[0], file=sys.stderr)
sys.exit(1)
if records_identical(current_rec, previous_rec):
deleted_revisions += 1
if not dry_run:
run_sql("DELETE FROM hstRECORD WHERE id_bibrec=%s AND job_id=%s AND job_name=%s AND job_person=%s AND job_date=%s", (recid, job_id, job_name, job_person, job_date))
previous_rec = current_rec
if verbose and deleted_revisions:
print("record %s: deleted %s duplicate revisions out of %s" % (recid, deleted_revisions, len(all_revisions)))
if verbose:
print("DONE")
def cli_list_revisions(recid, details=False):
"""Print list of all known record revisions (=RECID.REVDATE) for record
RECID.
"""
try:
recid = int(recid)
except ValueError:
print('ERROR: record ID must be integer, not %s.' % recid)
sys.exit(1)
record_rev_list = get_record_revision_ids(recid)
if not details:
out = '\n'.join(record_rev_list)
else:
out = "%s %s %s %s\n" % ("# Revision".ljust(22), "# Task ID".ljust(15),
"# Author".ljust(15), "# Job Details")
out += '\n'.join([get_info_of_revision_id(revid) for revid in record_rev_list])
if out:
print(out)
else:
print('ERROR: Record %s not found.' % recid)
def cli_get_revision(revid):
"""Return MARCXML for record revision REVID (=RECID.REVDATE) of a record."""
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
out = get_marcxml_of_revision_id(revid)
if out:
print(out)
else:
print('ERROR: Revision %s not found.' % revid)
def cli_diff_revisions(revid1, revid2):
"""Return diffs of MARCXML for record revisions REVID1, REVID2."""
for revid in [revid1, revid2]:
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
xml1 = get_marcxml_of_revision_id(revid1)
if not xml1:
print('ERROR: Revision %s not found. ' % revid1)
sys.exit(1)
xml2 = get_marcxml_of_revision_id(revid2)
if not xml2:
print('ERROR: Revision %s not found. ' % revid2)
sys.exit(1)
print(get_xml_comparison(revid1, revid2, xml1, xml2))
def cli_revert_to_revision(revid):
"""Submit specified record revision REVID upload, to replace current
version.
"""
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
xml_record = get_marcxml_of_revision_id(revid)
if xml_record == '':
print('ERROR: Revision %s does not exist. ' % revid)
sys.exit(1)
recid = split_revid(revid)[0]
if record_locked_by_other_user(recid, -1):
print('The record is currently being edited. ' \
'Please try again in a few minutes.')
sys.exit(1)
if record_locked_by_queue(recid):
print('The record is locked because of unfinished upload tasks. ' \
'Please try again in a few minutes.')
sys.exit(1)
save_xml_record(recid, 0, xml_record)
print('Your modifications have now been submitted. They will be ' \
'processed as soon as the task queue is empty.')
def check_rev(recid, verbose=True, fix=False):
revisions = get_record_revisions(recid)
for recid, job_date in revisions:
rev = '%s.%s' % (recid, job_date)
try:
get_marcxml_of_revision_id(rev)
if verbose:
print('%s: ok' % rev)
except zlib.error:
print('%s: invalid' % rev)
if fix:
fix_rev(recid, job_date, verbose)
def fix_rev(recid, job_date, verbose=True):
    sql = 'DELETE FROM hstRECORD WHERE id_bibrec = %s AND job_date = %s'
run_sql(sql, (recid, job_date))
def cli_check_revisions(recid):
if recid == '*':
print('Checking all records')
recids = intbitset(run_sql("SELECT id FROM bibrec ORDER BY id"))
for index, rec in enumerate(recids):
if index % 1000 == 0 and index:
print(index, 'records processed')
check_rev(rec, verbose=False)
else:
check_rev(recid)
def cli_fix_revisions(recid):
if recid == '*':
print('Fixing all records')
recids = intbitset(run_sql("SELECT id FROM bibrec ORDER BY id"))
for index, rec in enumerate(recids):
if index % 1000 == 0 and index:
print(index, 'records processed')
check_rev(rec, verbose=False, fix=True)
else:
check_rev(recid, fix=True)
def main():
"""Main entry point."""
if '--help' in sys.argv or \
'-h' in sys.argv:
print_usage()
elif '--version' in sys.argv or \
'-V' in sys.argv:
print_version()
else:
try:
cmd = sys.argv[1]
opts = sys.argv[2:]
if not opts:
raise IndexError
except IndexError:
print_usage()
sys.exit(1)
if cmd == '--list-revisions':
try:
recid = opts[0]
except IndexError:
print_usage()
sys.exit(1)
cli_list_revisions(recid, details=False)
elif cmd == '--list-revisions-details':
try:
recid = opts[0]
except IndexError:
print_usage()
sys.exit(1)
cli_list_revisions(recid, details=True)
elif cmd == '--get-revision':
try:
revid = opts[0]
except IndexError:
print_usage()
sys.exit(1)
cli_get_revision(revid)
elif cmd == '--diff-revisions':
try:
revid1 = opts[0]
revid2 = opts[1]
except IndexError:
print_usage()
sys.exit(1)
cli_diff_revisions(revid1, revid2)
elif cmd == '--revert-to-revision':
try:
revid = opts[0]
except IndexError:
print_usage()
sys.exit(1)
cli_revert_to_revision(revid)
elif cmd == '--check-revisions':
try:
recid = opts[0]
except IndexError:
recid = '*'
cli_check_revisions(recid)
elif cmd == '--fix-revisions':
try:
recid = opts[0]
except IndexError:
recid = '*'
cli_fix_revisions(recid)
elif cmd == '--clean-revisions':
try:
recid = opts[0]
except IndexError:
recid = '*'
cli_clean_revisions(recid, dry_run=False)
else:
print("ERROR: Please specify a command. Please see '--help'.")
sys.exit(1)
if __name__ == '__main__':
main()
|
Lilykos/invenio
|
invenio/legacy/bibedit/cli.py
|
Python
|
gpl-2.0
| 10,826 | 0.002402 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import with_statement
import time
from collections import OrderedDict
import numpy as np
from pyqtgraph.WidgetGroup import WidgetGroup
from acq4.devices.DAQGeneric import DAQGeneric, DAQGenericTask, DAQGenericTaskGui, DataMapping
from acq4.util import Qt
from acq4.util.Mutex import Mutex
from acq4.util.debug import printExc
Ui_devGui = Qt.importTemplate('.devGuiTemplate')
class AP200DataMapping(DataMapping):
def __init__(self, dev, ivModes, chans=None, mode=None):
## mode can be provided:
## - during __init__
## - explicitly when calling map functions
## - implicitly when calling map functions (uses device's current mode)
self.dev = dev
self.mode = mode
self.ivModes = ivModes
self.gainSwitch = self.dev.getGainSwitchValue()
def setMode(self, mode):
self.mode = mode
def getGain(self, chan, mode, switch=None):
if switch == None:
switch = self.gainSwitch
if mode is None:
if self.mode is None:
mode = self.dev.getMode()
else:
mode = self.mode
if chan != 'command':
return self.dev.interpretGainSwitchValue(switch, mode)
else:
#global ivModes
ivMode = self.ivModes[mode]
if ivMode == 'vc':
return 50.0 # in VC mode, sensitivity is 20mV/V; scale is 1/20e-3 = 50
else:
return 5e8 # in IC mode, sensitivity is 2nA/V; scale is 1/2e-9 = 5e8
def mapToDaq(self, chan, data, mode=None):
gain = self.getGain(chan, mode)
return data * gain
def mapFromDaq(self, chan, data, mode=None):
gain = self.getGain(chan, mode)
return data / gain
class AxoPatch200(DAQGeneric):
sigShowModeDialog = Qt.Signal(object)
sigHideModeDialog = Qt.Signal()
#sigHoldingChanged = Qt.Signal(object) ## provided by DAQGeneric
sigModeChanged = Qt.Signal(object)
def __init__(self, dm, config, name):
# Generate config to use for DAQ
daqConfig = {}
for ch in ['GainChannel', 'LPFChannel', 'ModeChannel']:
if ch not in config:
continue
daqConfig[ch] = config[ch].copy()
#if 'GainChannel' in config:
# daqConfig['gain'] = {'type': 'ai', 'channel': config['GainChannel']}
#if 'LPFChannel' in config:
# daqConfig['LPF'] = {'type': 'ai', 'channel': config['LPFChannel'], 'units': 'Hz'}
if 'ScaledSignal' in config:
#daqConfig['primary'] = {'type': 'ai', 'channel': config['ScaledSignal']}
daqConfig['primary'] = config['ScaledSignal']
if config['ScaledSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: ScaledSignal configuration must have type:'ai'")
if 'Command' in config:
#daqConfig['command'] = {'type': 'ao', 'channel': config['Command']}
daqConfig['command'] = config['Command']
if config['Command'].get('type', None) != 'ao':
raise Exception("AxoPatch200: ScaledSignal configuration must have type:'ao'")
## Note that both of these channels can be present, but we will only ever record from one at a time.
## Usually, we'll record from "I OUTPUT" in current clamp and "10 Vm OUTPUT" in voltage clamp.
if 'SecondaryVCSignal' in config:
self.hasSecondaryChannel = True
#daqConfig['secondary'] = {'type': 'ai', 'channel': config['SecondaryVCSignal']}
daqConfig['secondary'] = config['SecondaryVCSignal']
if config['SecondaryVCSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: SecondaryVCSignal configuration must have type:'ai'")
elif 'SecondaryICSignal' in config:
self.hasSecondaryChannel = True
#daqConfig['secondary'] = {'type': 'ai', 'channel': config['SecondaryICSignal']}
daqConfig['secondary'] = config['SecondaryICSignal']
if config['SecondaryICSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: SecondaryICSignal configuration must have type:'ai'")
else:
self.hasSecondaryChannel = False
self.version = config.get('version', '200B')
# Axopatch gain telegraph
# telegraph should not read below 2 V in CC mode
self.gain_tel = np.array([0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5])
        self.gain_vm = np.array([0.5, 0.5, 0.5, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500]) * 1e9 ## values in mV/pA, scaled to V/A
self.gain_im = np.array([0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500]) ## values in mV/mV
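        # Illustrative sketch: a telegraph reading near 2.0 V selects index 3
        # via argmin(|gain_tel - reading|), i.e. a scale of 0.5e9 V/A in VC
        # mode (gain_vm) or 0.5 mV/mV in IC mode (gain_im).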
# Axopatch Lowpass Bessel Filter
self.lpf_tel = np.array([2.0, 4.0, 6.0, 8.0, 10.0])
        self.lpf_freq = np.array([1.0, 2.0, 5.0, 10.0, 50.0]) ## cutoff frequencies in kHz
if self.version == '200':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 2])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (1, 'Track'), (2, 'I-Clamp')])
self.ivModes = {'V-Clamp':'vc', 'Track':'ic', 'I-Clamp':'ic', 'vc':'vc', 'ic':'ic'}
self.modeAliases = {'ic': 'I-Clamp', 'i=0': 'Track', 'vc': 'V-Clamp'}
elif self.version == '200A':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 2, 1])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (1, 'Track'), (2, 'I-Clamp Normal'), (3, 'I-Clamp Fast'), ])
self.ivModes = {'V-Clamp':'vc', 'Track':'vc', 'I-Clamp Fast':'ic', 'I-Clamp Normal':'ic', 'vc':'vc', 'ic':'ic'}
self.modeAliases = {'ic': 'I-Clamp Fast', 'i=0': 'Track', 'vc': 'V-Clamp'}
elif self.version == '200B':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 3, 2, 1])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (2, 'I=0'), (4, 'I-Clamp Fast'), (3, 'I-Clamp Normal'), (1, 'Track'), ])
self.ivModes = {'V-Clamp':'vc', 'Track':'vc', 'I=0':'ic', 'I-Clamp Fast':'ic', 'I-Clamp Normal':'ic', 'vc':'vc', 'ic':'ic'}
self.modeAliases = {'ic': 'I-Clamp Fast', 'i=0': 'I=0', 'vc': 'V-Clamp'}
self.lpf_freq[-1] = 100.0 # 200B's highest LPF value is 100kHz instead of 50.
else:
raise Exception("AxoPatch200: version must be '200', '200A' or '200B' (got %r)" % self.version)
self.holding = {
'vc': config.get('vcHolding', -0.05),
'ic': config.get('icHolding', 0.0)
}
self.config = config
self.modeLock = Mutex(Mutex.Recursive) ## protects self.mdCanceled
self.devLock = Mutex(Mutex.Recursive) ## protects self.holding, possibly self.config, ..others, perhaps?
self.mdCanceled = False
DAQGeneric.__init__(self, dm, daqConfig, name)
self.modeDialog = Qt.QMessageBox()
self.modeDialog.hide()
self.modeDialog.setModal(False)
self.modeDialog.setWindowTitle("Mode Switch Request")
self.modeDialog.addButton(self.modeDialog.Cancel)
self.modeDialog.buttonClicked.connect(self.modeDialogClicked)
self.sigShowModeDialog.connect(self.showModeDialog)
self.sigHideModeDialog.connect(self.hideModeDialog)
try:
self.setHolding()
except:
printExc("Error while setting holding value:")
dm.declareInterface(name, ['clamp'], self)
def createTask(self, cmd, parentTask):
return AxoPatch200Task(self, cmd, parentTask)
def taskInterface(self, taskRunner):
return AxoPatchTaskGui(self, taskRunner, self.ivModes)
def deviceInterface(self, win):
return AxoPatchDevGui(self)
def getMapping(self, chans=None, mode=None):
return AP200DataMapping(self, self.ivModes, chans, mode )
def setHolding(self, mode=None, value=None, force=False):
#print "setHolding", mode, value
#global ivModes
with self.devLock:
currentMode = self.getMode()
if mode is None:
mode = currentMode
ivMode = self.ivModes[mode] ## determine vc/ic
if value is None:
value = self.holding[ivMode]
else:
self.holding[ivMode] = value
if ivMode == self.ivModes[currentMode] or force:
mapping = self.getMapping(mode=mode)
## override the scale since getChanScale won't necessarily give the correct value
## (we may be about to switch modes)
DAQGeneric.setChanHolding(self, 'command', value, mapping=mapping)
self.sigHoldingChanged.emit('primary', self.holding.copy())
def setChanHolding(self, chan, value=None):
if chan == 'command':
self.setHolding(value=value)
def getHolding(self, mode=None):
#global ivModes
with self.devLock:
if mode is None:
mode = self.getMode()
ivMode = self.ivModes[mode] ## determine vc/ic
return self.holding[ivMode]
def listModes(self):
#global modeNames
return list(self.modeNames.values())
def setMode(self, mode):
"""Set the mode of the AxoPatch (by requesting user intervention). Takes care of switching holding levels in I=0 mode if needed."""
#global modeAliases
startMode = self.getMode()
if mode in self.modeAliases:
mode = self.modeAliases[mode]
if startMode == mode:
return
startIvMode = self.ivModes[startMode]
ivMode = self.ivModes[mode]
if (startIvMode == 'vc' and ivMode == 'ic') or (startIvMode == 'ic' and ivMode == 'vc'):
## switch to I=0 first
self.requestModeSwitch(self.modeAliases['i=0'])
self.setHolding(ivMode, force=True) ## we're in I=0 mode now, so it's ok to force the holding value.
## TODO:
## If mode switches back the wrong direction, we need to reset the holding value and cancel.
self.requestModeSwitch(mode)
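    # Illustrative sequence (sketch): switching 'vc' -> 'ic' first requests the
    # I=0 position, re-applies the ic holding value while parked there, and
    # only then requests the final 'ic' mode switch.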
def requestModeSwitch(self, mode):
"""Pop up a dialog asking the user to switch the amplifier mode, wait for change. This function is thread-safe."""
#global modeNames
with self.modeLock:
self.mdCanceled = False
app = Qt.QApplication.instance()
msg = 'Please set %s mode switch to %s' % (self.name(), mode)
self.sigShowModeDialog.emit(msg)
#print "Set mode:", mode
## Wait for the mode to change to the one we're waiting for, or for a cancel
while True:
if Qt.QThread.currentThread() == app.thread():
app.processEvents()
else:
Qt.QThread.yieldCurrentThread()
if self.modeDialogCanceled():
#print " Caught user cancel"
raise CancelException('User canceled mode switch request')
currentMode = self.getMode()
if currentMode == mode:
break
if currentMode is None:
#print " Can't determine mode"
raise Exception("Can not determine mode of AxoPatch!")
time.sleep(0.01)
time.sleep(0.2)
#print " ..current:", currentMode
#print " got mode"
self.sigHideModeDialog.emit()
self.sigModeChanged.emit(mode)
def showModeDialog(self, msg):
with self.modeLock:
self.mdCanceled = False
self.modeDialog.setText(msg)
self.modeDialog.show()
self.modeDialog.activateWindow()
def hideModeDialog(self):
self.modeDialog.hide()
def modeDialogCanceled(self):
with self.modeLock:
return self.mdCanceled
def modeDialogClicked(self):
## called when user clicks 'cancel' on the mode dialog
self.mdCanceled = True
self.modeDialog.hide()
def getMode(self):
#print "getMode"
with self.devLock:
#print " got lock"
#global mode_tel, modeNames
m = self.readChannel('ModeChannel', raw=True)
#print " read value", m
if m is None:
return None
mode = self.modeNames[np.argmin(np.abs(self.mode_tel-m))]
return mode
def getLPF(self):
with self.devLock:
#global lpf_tel, lpf_freq
f = self.readChannel('LPFChannel')
if f is None:
return None
return self.lpf_freq[np.argmin(np.abs(self.lpf_tel-f))]
def getGain(self):
with self.devLock:
mode = self.getMode()
if mode is None:
return None
g = self.getGainSwitchValue()
return self.interpretGainSwitchValue(g, mode)
def interpretGainSwitchValue(self, val, mode):
## convert a gain-switch-position integer (as returned from getGainSwitchValue)
## into an actual gain value
#global gain_vm, gain_im, ivModes
if val is None:
return None
if self.ivModes[mode] == 'vc':
return self.gain_vm[val]
else:
return self.gain_im[val]
def getGainSwitchValue(self):
## return the integer value corresponding to the current position of the output gain switch
#global gain_tel
g = self.readChannel('GainChannel', raw=True)
if g is None:
return None
return np.argmin(np.abs(self.gain_tel-g))
def getCmdGain(self, mode=None):
with self.devLock:
if mode is None:
mode = self.getMode()
#global ivModes
ivMode = self.ivModes[mode]
if ivMode == 'vc':
return 50.0 # in VC mode, sensitivity is 20mV/V; scale is 1/20e-3 = 50
else:
return 5e8 # in IC mode, sensitivity is 2nA/V; scale is 1/2e-9 = 5e8
#def getChanScale(self, chan):
#if chan == 'command':
#return self.getCmdGain()
#elif chan == 'primary':
#return self.getGain()
#else:
#return DAQGeneric.getChanScale(self, chan)
#raise Exception("No scale for channel %s" % chan)
def getChanUnits(self, chan):
#global ivModes
iv = self.ivModes[self.getMode()]
if iv == 'vc':
units = ['V', 'A']
else:
units = ['A', 'V']
if chan == 'command':
return units[0]
elif chan == 'secondary':
return units[0]
elif chan == 'primary':
return units[1]
def readChannel(self, ch, **opts):
## this should go away.
return self.getChannelValue(ch, **opts)
#if ch in self.config:
#chOpts = self.config[ch]
#dev = self.dm.getDevice(chOpts['device'])
#return dev.getChannelValue(chOpts['channel'], chOpts.get('mode', None))
#else:
#return None
def reconfigureSecondaryChannel(self, mode):
## Secondary channel changes depending on which mode we're in.
if self.ivModes[mode] == 'vc':
if 'SecondaryVCSignal' in self.config:
self.reconfigureChannel('secondary', self.config['SecondaryVCSignal'])
else:
if 'SecondaryICSignal' in self.config:
self.reconfigureChannel('secondary', self.config['SecondaryICSignal'])
class AxoPatch200Task(DAQGenericTask):
def __init__(self, dev, cmd, parentTask):
## make a few changes for compatibility with multiclamp
if 'daqProtocol' not in cmd:
cmd['daqProtocol'] = {}
if 'command' in cmd:
if 'holding' in cmd:
cmd['daqProtocol']['command'] = {'command': cmd['command'], 'holding': cmd['holding']}
else:
cmd['daqProtocol']['command'] = {'command': cmd['command']}
## Make sure we're recording from the correct secondary channel
if dev.hasSecondaryChannel:
if 'mode' in cmd:
mode = cmd['mode']
else:
mode = dev.getMode()
dev.reconfigureSecondaryChannel(mode)
cmd['daqProtocol']['secondary'] = {'record': True}
cmd['daqProtocol']['primary'] = {'record': True}
DAQGenericTask.__init__(self, dev, cmd['daqProtocol'], parentTask)
self.cmd = cmd
def configure(self):
## Record initial state or set initial value
#if 'holding' in self.cmd:
# self.dev.setHolding(self.cmd['mode'], self.cmd['holding'])
if 'mode' in self.cmd:
self.dev.setMode(self.cmd['mode'])
self.ampState = {'mode': self.dev.getMode(), 'LPF': self.dev.getLPF(), 'gain': self.dev.getGain()}
## Do not configure daq until mode is set. Otherwise, holding values may be incorrect.
DAQGenericTask.configure(self)
self.mapping.setMode(self.ampState['mode'])
#def getChanScale(self, chan):
#print "AxoPatch200Task.getChanScale called."
#if chan == 'primary':
#return self.ampState['gain']
#elif chan == 'command':
#return self.dev.getCmdGain(self.ampState['mode'])
#elif chan == 'secondary':
#return self.dev.getChanScale('secondary')
#else:
#raise Exception("No scale for channel %s" % chan)
def storeResult(self, dirHandle):
#DAQGenericTask.storeResult(self, dirHandle)
#dirHandle.setInfo(self.ampState)
result = self.getResult()
result._info[-1]['ClampState'] = self.ampState
dirHandle.writeFile(result, self.dev.name())
class AxoPatchTaskGui(DAQGenericTaskGui):
def __init__(self, dev, taskRunner, ivModes):
DAQGenericTaskGui.__init__(self, dev, taskRunner, ownUi=False)
self.ivModes = ivModes
self.layout = Qt.QGridLayout()
self.layout.setContentsMargins(0,0,0,0)
self.setLayout(self.layout)
self.splitter1 = Qt.QSplitter()
self.splitter1.setOrientation(Qt.Qt.Horizontal)
self.layout.addWidget(self.splitter1)
self.splitter2 = Qt.QSplitter()
self.splitter2.setOrientation(Qt.Qt.Vertical)
self.modeCombo = Qt.QComboBox()
self.splitter2.addWidget(self.modeCombo)
self.modeCombo.addItems(self.dev.listModes())
self.splitter3 = Qt.QSplitter()
self.splitter3.setOrientation(Qt.Qt.Vertical)
(w1, p1) = self.createChannelWidget('primary')
(w2, p2) = self.createChannelWidget('command')
self.cmdWidget = w2
self.inputWidget = w1
self.cmdPlot = p2
self.inputPlot = p1
#self.ctrlWidget = Qt.QWidget()
#self.ctrl = Ui_protoCtrl()
#self.ctrl.setupUi(self.ctrlWidget)
#self.splitter2.addWidget(self.ctrlWidget)
self.splitter1.addWidget(self.splitter2)
self.splitter1.addWidget(self.splitter3)
self.splitter2.addWidget(w1)
self.splitter2.addWidget(w2)
self.splitter3.addWidget(p1)
self.splitter3.addWidget(p2)
self.splitter1.setSizes([100, 500])
self.stateGroup = WidgetGroup([
(self.splitter1, 'splitter1'),
(self.splitter2, 'splitter2'),
(self.splitter3, 'splitter3'),
])
self.modeCombo.currentIndexChanged.connect(self.modeChanged)
self.modeChanged()
def saveState(self):
"""Return a dictionary representing the current state of the widget."""
state = {}
state['daqState'] = DAQGenericTaskGui.saveState(self)
state['mode'] = self.getMode()
#state['holdingEnabled'] = self.ctrl.holdingCheck.isChecked()
#state['holding'] = self.ctrl.holdingSpin.value()
return state
def restoreState(self, state):
"""Restore the state of the widget from a dictionary previously generated using saveState"""
self.modeCombo.setCurrentIndex(self.modeCombo.findText(state['mode']))
#self.ctrl.holdingCheck.setChecked(state['holdingEnabled'])
#if state['holdingEnabled']:
# self.ctrl.holdingSpin.setValue(state['holding'])
return DAQGenericTaskGui.restoreState(self, state['daqState'])
def generateTask(self, params=None):
daqTask = DAQGenericTaskGui.generateTask(self, params)
task = {
'mode': self.getMode(),
'daqProtocol': daqTask
}
return task
def modeChanged(self):
#global ivModes
ivm = self.ivModes[self.getMode()]
w = self.cmdWidget
if ivm == 'vc':
scale = 1e-3
cmdUnits = 'V'
inpUnits = 'A'
else:
scale = 1e-12
cmdUnits = 'A'
inpUnits = 'V'
self.inputWidget.setUnits(inpUnits)
self.cmdWidget.setUnits(cmdUnits)
self.cmdWidget.setMeta('y', minStep=scale, step=scale*10, value=0.)
self.inputPlot.setLabel('left', units=inpUnits)
self.cmdPlot.setLabel('left', units=cmdUnits)
#w.setScale(scale)
for s in w.getSpins():
s.setOpts(minStep=scale)
self.cmdWidget.updateHolding()
def getMode(self):
return str(self.modeCombo.currentText())
def getChanHolding(self, chan):
if chan == 'command':
return self.dev.getHolding(self.getMode())
else:
raise Exception("Can't get holding value for channel %s" % chan)
class AxoPatchDevGui(Qt.QWidget):
def __init__(self, dev):
Qt.QWidget.__init__(self)
self.dev = dev
self.ui = Ui_devGui()
self.ui.setupUi(self)
self.ui.vcHoldingSpin.setOpts(step=1, minStep=1e-3, dec=True, suffix='V', siPrefix=True)
self.ui.icHoldingSpin.setOpts(step=1, minStep=1e-12, dec=True, suffix='A', siPrefix=True)
for name in dev.modeNames.values():
self.ui.modeCombo.addItem(name)
self.updateStatus()
self.ui.modeCombo.currentIndexChanged.connect(self.modeComboChanged)
self.ui.vcHoldingSpin.valueChanged.connect(self.vcHoldingChanged)
self.ui.icHoldingSpin.valueChanged.connect(self.icHoldingChanged)
self.dev.sigHoldingChanged.connect(self.devHoldingChanged)
self.dev.sigModeChanged.connect(self.devModeChanged)
def updateStatus(self):
#global modeNames
mode = self.dev.getMode()
if mode is None:
return
vcHold = self.dev.getHolding('vc')
icHold = self.dev.getHolding('ic')
self.ui.modeCombo.setCurrentIndex(self.ui.modeCombo.findText(mode))
self.ui.vcHoldingSpin.setValue(vcHold)
self.ui.icHoldingSpin.setValue(icHold)
def devHoldingChanged(self, chan, hval):
if isinstance(hval, dict):
self.ui.vcHoldingSpin.blockSignals(True)
self.ui.icHoldingSpin.blockSignals(True)
self.ui.vcHoldingSpin.setValue(hval['vc'])
self.ui.icHoldingSpin.setValue(hval['ic'])
self.ui.vcHoldingSpin.blockSignals(False)
self.ui.icHoldingSpin.blockSignals(False)
def devModeChanged(self, mode):
self.ui.modeCombo.blockSignals(True)
self.ui.modeCombo.setCurrentIndex(self.ui.modeCombo.findText(mode))
self.ui.modeCombo.blockSignals(False)
def vcHoldingChanged(self):
self.dev.setHolding('vc', self.ui.vcHoldingSpin.value())
def icHoldingChanged(self):
self.dev.setHolding('ic', self.ui.icHoldingSpin.value())
def modeComboChanged(self, m):
try:
self.dev.setMode(str(self.ui.modeCombo.itemText(m)))
except CancelException:
self.updateStatus()
class CancelException(Exception):
pass
|
acq4/acq4
|
acq4/devices/AxoPatch200/AxoPatch200.py
|
Python
|
mit
| 25,005 | 0.012478 |
'''
Copyright 2013 Jonathan Morgan
This file is part of http://github.com/jonathanmorgan/conv2wp.
conv2wp is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
conv2wp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with http://github.com/jonathanmorgan/conv2wp. If not, see
<http://www.gnu.org/licenses/>.
'''
#===============================================================================#
# Imports.
#===============================================================================#
# base python imports
import datetime
import numbers
import re
# external modules
import MySQLdb
import bs4 # Beautiful Soup HTML parsing.
# conv2wp imports
from conv2wp.models import Author
from conv2wp.models import Batch
from conv2wp.models import Category
from conv2wp.models import Channel
from conv2wp.models import Comment
from conv2wp.models import Item
# python_utils imports
from python_utilities.strings.string_helper import StringHelper
#===============================================================================#
# Class definitions.
#===============================================================================#
class B2E_Importer():
#---------------------------------------------------------------------------#
# CONSTANTs-ish
#---------------------------------------------------------------------------#
STATUS_SUCCESS = "Success!"
STATUS_PREFIX_ERROR = "ERROR - "
B2E_POST_STATUS_PUBLISHED = "published"
B2E_POST_STATUS_DEPRECATED = "deprecated"
B2E_POST_STATUS_DRAFT = "draft"
RSS_DATE_STRFTIME_FORMAT = "%a, %d %b %Y %H:%M:%S"
# values that can be in user_idmode field
B2E_USER_DISPLAY_TYPE_NICKNAME = "nickname" # WHEN 'nickname' THEN u.user_nickname
B2E_USER_DISPLAY_TYPE_LOGIN = "login" # WHEN 'login' THEN u.user_login
B2E_USER_DISPLAY_TYPE_NAMEFL = "namefl" # WHEN 'namefl' THEN CONCAT(u.user_firstname, ' ', u.user_lastname)
B2E_USER_DISPLAY_TYPE_NAMELF = "namelf" # WHEN 'namelf' THEN CONCAT(u.user_lastname, ' ', u.user_firstname)
B2E_USER_DISPLAY_TYPE_FIRSTNAME = "firstname" # WHEN 'firstname' THEN u.user_firstname
B2E_USER_DISPLAY_TYPE_LASTNAME = "lastname" # WHEN 'lastname' THEN u.user_lastname
# replacing old URLs with new URLs.
BLOG_URL_OLD_HOST_AND_PATH_1 = "http://community.detroitnews.com/blogs/index.php/neighborhood/"
BLOG_URL_OLD_HOST_AND_PATH_2 = "http://community.detnews.com/blogs/index.php/neighborhood/"
BLOG_URL_NEW_HOST_AND_PATH = "http://blogs.detroitnews.com/goinghome/"
#---------------------------------------------------------------------------#
# instance variables
#---------------------------------------------------------------------------#
# database information
db_server = "localhost"
db_port = ""
db_database = ""
db_username = ""
db_password = ""
db_table_name_prefix = "evo_"
db_connection = None
db_cursor = None
# channel variables
channel_title = ""
channel_description = ""
channel_wxr_version = "1.2"
channel_generator = ""
channel_base_site_url = ""
channel_base_blog_url = ""
# configuration variables
time_zone = "-0500"
time_zone_offset = -5
store_excerpt = False
#---------------------------------------------------------------------------#
# class methods
#---------------------------------------------------------------------------#
@classmethod
def get_testing_instance( cls, password_IN = "" ):
# return reference
instance_OUT = None
# declare variables
status_message = ""
# create instance
b2e_importer = cls()
# initialize database
b2e_importer.db_database = "b2"
b2e_importer.db_username = "django_user"
b2e_importer.db_password = password_IN
# initialize channel information
b2e_importer.channel_title = "Going home: A journal on Detroit's neighborhoods"
b2e_importer.channel_description = "A Detroit News journal of the city's neighborhoods, starting with the Dobel St. area on the east side, just south of McNichols and east of Van Dyke. "
b2e_importer.channel_generator = "https://github.com/jonathanmorgan/conv2wp"
b2e_importer.channel_base_site_url = "http://detroitnews.com"
b2e_importer.channel_base_blog_url = "http://community.detroitnews.com/blogs/index.php/neighborhood"
# initialize time zone.
b2e_importer.time_zone = "-0500"
b2e_importer.time_zone_offset = -5
instance_OUT = b2e_importer
return instance_OUT
#-- END method get_testing_instance() --#
@classmethod
def test_class( cls, password_IN = "", slug_IN = "" ):
# return reference
instance_OUT = None
# declare variables
status_message = ""
# create instance
b2e_importer = cls.get_testing_instance( password_IN )
# run import for blog 14
status_message = b2e_importer.import_b2e( slug_IN, 14 )
# print the message
print( status_message )
# return instance
instance_OUT = b2e_importer
return instance_OUT
#-- END class method test_class() --#
@classmethod
def find_bad_characters( cls, password_IN = "", blog_id_IN = -1, *args, **kwargs ):
'''
# get posts - if we have a blog ID, limit to that blog.
# For each post:
# - create Item, load with information from post.
# - get author user, add it to Authors.
# - get comments for post, store them in Comments, asociated to Item.
# - get categories for post, look up and associate them.
'''
# return reference
status_OUT = cls.STATUS_SUCCESS
# declare variables
b2e_importer = None
my_db_cursor = None
table_name_prefix = ""
sql_select_posts = ""
post_query_results = None
current_post = None
current_title = ""
current_body = ""
current_fail = False
fail_list = []
fail_count = 0
# create instance
b2e_importer = cls.get_testing_instance( password_IN )
# retrieve database cursor.
my_db_cursor = b2e_importer.get_database_cursor()
# get table prefix
table_name_prefix = b2e_importer.db_table_name_prefix
# create query to retrieve posts and author information.
sql_select_posts = "SELECT * FROM " + table_name_prefix + "posts ep"
sql_select_posts += " INNER JOIN " + table_name_prefix + "categories ec"
sql_select_posts += " ON ec.cat_ID = ep.post_main_cat_ID"
# got a blog ID?
if ( ( blog_id_IN ) and ( blog_id_IN != None ) and ( blog_id_IN != "" ) and ( isinstance( blog_id_IN, numbers.Integral ) == True ) and ( blog_id_IN > 0 ) ):
# we do - add where clause.
sql_select_posts += " WHERE ec.cat_blog_ID IN ( " + str( blog_id_IN ) + " )"
#-- END check to see if ID passed in. --#
# then, ORDER_BY.
sql_select_posts += " ORDER BY ep.post_datecreated ASC;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_posts )
query_results = my_db_cursor.fetchall()
# loop over categories.
for current_post in query_results:
# initialize variables
current_fail = False
# get title and body.
current_id = current_post[ "post_ID" ]
current_title = current_post[ "post_title" ]
                current_body = current_post[ "post_content" ]
# look in title
try:
                    # decode as UTF-8; raises UnicodeDecodeError on invalid bytes
current_title.decode( 'utf-8' )
except Exception, e:
current_fail = True
fail_count += 1
print( "post " + str( current_id ) + ": Title failed." )
#-- END decoding title --#
try:
                    # decode as UTF-8; raises UnicodeDecodeError on invalid bytes
current_body.decode( 'utf-8' )
except Exception, e:
current_fail = True
fail_count += 1
print( "post " + str( current_id ) + ": Body failed." )
#-- END decoding title --#
if ( current_fail == True ):
fail_list.append( current_id )
#-- END check to see if fail --#
#-- END loop over posts. --#
except Exception, e:
status_OUT = cls.STATUS_PREFIX_ERROR + "Exception message: " + str( e )
#-- END try/except around query --#
print( "fail count: " + str( fail_count ) )
if ( len( fail_list) > 0 ):
status_OUT = cls.STATUS_PREFIX_ERROR + "Failure list: " + str( fail_list )
#-- END check for failures --#
return status_OUT
#-- END method find_bad_characters() --#
#---------------------------------------------------------------------------#
# instance methods
#---------------------------------------------------------------------------#
def clean_blog_URL( self, content_IN, find_IN, replace_IN, *args, **kwargs ):
# return reference
value_OUT = ""
# declare variables
soup = None
old_host_and_path = ""
new_host_and_path = ""
re_string = ""
re_compiled = None
anchor_list = None
current_anchor = None
old_href_text = None
new_href_text = None
# store content in return variable.
value_OUT = content_IN
# make sure we have content before we do anything.
if ( ( value_OUT ) and ( value_OUT != None ) and ( value_OUT != "" ) ):
# get BeautifulSoup instance that contains content.
soup = bs4.BeautifulSoup( value_OUT )
#--------------------------------------------------------------------
# look for http://community.detroitnews.com/blogs/index.php/neighborhood/
#--------------------------------------------------------------------
# prepare filtering regular expression
re_string = "^" + find_IN
# compile the regular expression.
re_compiled = re.compile( re_string )
# look for all <a> tags whose href matches that regular expression.
anchor_list = soup.findAll( 'a', attrs = { 'href' : re_compiled } )
# matches?
if ( len( anchor_list ) > 0 ):
# loop over all
for current_anchor in anchor_list:
# get old href text.
old_href_text = current_anchor.get( "href" )
# change old host and path to new host and path.
new_href_text = old_href_text.replace( find_IN, replace_IN )
# change "_" to "-"
new_href_text = new_href_text.replace( "_", "-" )
# then, in original post text, replace old URL with new URL
value_OUT = value_OUT.replace( old_href_text, new_href_text )
#-- END loop over anchor list. --#
#-- END check to see if any matches. --#
#-- END check to see if we need to do anything. --#
return value_OUT
    #-- END method clean_blog_URL() --#
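    # Worked example for clean_blog_URL() (replace_IN value is hypothetical):
    #     find_IN    = "http://community.detroitnews.com/blogs/index.php/neighborhood/"
    #     replace_IN = "http://blogs.detroitnews.com/neighborhood/"
    # an anchor href of ".../index.php/neighborhood/2013/03/28/another_blessing"
    # is rewritten to ".../neighborhood/2013/03/28/another-blessing" - host and
    # path swapped first, then underscores converted to hyphens - and every
    # occurrence of the old href in the post body is replaced with the new one.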
def clean_blog_URLs( self, content_IN, *args, **kwargs ):
# return reference
value_OUT = ""
# declare variables
soup = None
old_host_and_path = ""
new_host_and_path = ""
re_string = ""
re_compiled = None
anchor_list = None
current_anchor = None
old_href_text = None
new_href_text = None
# store content in return variable.
value_OUT = content_IN
# make sure we have content before we do anything.
if ( ( value_OUT ) and ( value_OUT != None ) and ( value_OUT != "" ) ):
# store the host and path we'll switch to
new_host_and_path = self.BLOG_URL_NEW_HOST_AND_PATH
#--------------------------------------------------------------------
# look for http://community.detroitnews.com/blogs/index.php/neighborhood/
#--------------------------------------------------------------------
# what we'll be replacing.
old_host_and_path = self.BLOG_URL_OLD_HOST_AND_PATH_1
# replace.
value_OUT = self.clean_blog_URL( value_OUT, old_host_and_path, new_host_and_path )
#--------------------------------------------------------------------
# look for http://community.detnews.com/blogs/index.php/neighborhood/
#--------------------------------------------------------------------
old_host_and_path = self.BLOG_URL_OLD_HOST_AND_PATH_2
# replace.
value_OUT = self.clean_blog_URL( value_OUT, old_host_and_path, new_host_and_path )
#-- END check to see if we need to do anything. --#
return value_OUT
    #-- END method clean_blog_URLs() --#
def clean_content( self, content_IN, *args, **kwargs ):
# return reference
value_OUT = ""
# store content in return variable.
value_OUT = content_IN
# make sure we have content before we do anything.
if ( ( value_OUT ) and ( value_OUT != None ) and ( value_OUT != "" ) ):
# first, run the unicode escape method.
value_OUT = StringHelper.unicode_escape( value_OUT )
            # un-escape certain HTML entities.
            # "&lt;" ==> "<"
            value_OUT = value_OUT.replace( "&lt;", "<" )
            # "&gt;" ==> ">"
            value_OUT = value_OUT.replace( "&gt;", ">" )
            # "&quot;" ==> '"'
            value_OUT = value_OUT.replace( "&quot;", "\"" )
            # "&amp;" ==> "&"
            value_OUT = value_OUT.replace( "&amp;", "&" )
# clean old blog URLs in links.
value_OUT = self.clean_blog_URLs( value_OUT )
#-- END check to see if we need to do anything. --#
return value_OUT
    #-- END method clean_content() --#
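    # Worked example for clean_content(): after unicode escaping, the four
    # entity replacements above turn
    #     '&lt;a href=&quot;http://x&quot;&gt;link&lt;/a&gt; &amp; more'
    # into
    #     '<a href="http://x">link</a> & more'
    # before clean_blog_URLs() rewrites any old blog links.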
def clean_body_content( self, content_IN, *args, **kwargs ):
# return reference
value_OUT = ""
# store content in return variable.
value_OUT = content_IN
# make sure we have content before we do anything.
if ( ( value_OUT ) and ( value_OUT != None ) and ( value_OUT != "" ) ):
# first, run the shared clean code.
value_OUT = self.clean_content( value_OUT )
#-- END check to see if we need to do anything. --#
return value_OUT
    #-- END method clean_body_content() --#
def clean_comment_content( self, content_IN, *args, **kwargs ):
# return reference
value_OUT = ""
# store content in return variable.
value_OUT = content_IN
# make sure we have content before we do anything.
if ( ( value_OUT ) and ( value_OUT != None ) and ( value_OUT != "" ) ):
# first, run the shared clean code.
value_OUT = self.clean_content( value_OUT )
#-- END check to see if we need to do anything. --#
return value_OUT
    #-- END method clean_comment_content() --#
def close_db_connection( self ):
# got a connection?
if ( ( self.db_connection ) and ( self.db_connection != None ) ):
# yes. Close connection, None out the cursor variable.
self.db_connection.close()
self.db_connection = None
self.db_cursor = None
#-- END check to see if database connection. --#
    #-- END method close_db_connection() --#
def connect_to_database( self, *args, **kwargs ):
'''
Uses database parameters contained in this instance to connect to
database.
Preconditions: at least db_database, db_username, and db_password must be
populated in this instance.
Postconditions: Stores connection in instance. Returns status message.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
my_db_server = ""
my_db_database = ""
my_db_username = ""
my_db_password = ""
my_db_connection = None
# get database information.
my_db_server = self.db_server
my_db_database = self.db_database
my_db_username = self.db_username
my_db_password = self.db_password
# got what we need?
if ( ( my_db_server ) and ( my_db_server != None ) and ( my_db_server != "" ) ):
if ( ( my_db_database ) and ( my_db_database != None ) and ( my_db_database != "" ) ):
if ( ( my_db_username ) and ( my_db_username != None ) and ( my_db_username != "" ) ):
if ( ( my_db_password ) and ( my_db_password != None ) and ( my_db_password != "" ) ):
# connect. No try/except - if this fails, program
# should die.
my_db_connection = MySQLdb.connect( my_db_server, my_db_username, my_db_password, my_db_database )
# If we get here, things are OK. Store connection.
self.db_connection = my_db_connection
else:
status_OUT = self.STATUS_PREFIX_ERROR + "No database password specified, so can't connect."
#-- END check to see if password --#
else:
status_OUT = self.STATUS_PREFIX_ERROR + "No database username specified, so can't connect."
#-- END check to see if username --#
else:
status_OUT = self.STATUS_PREFIX_ERROR + "No database specified, so can't connect."
#-- END check to see if database name --#
else:
            status_OUT = self.STATUS_PREFIX_ERROR + "No database server specified, so can't connect."
#-- END check to see if server name --#
return status_OUT
#-- END method connect_to_database() --#
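    # Usage sketch for connect_to_database() (illustrative values; the field
    # names are the ones read above):
    #
    #     importer = B2E_Importer()
    #     importer.db_server = "localhost"
    #     importer.db_database = "b2evolution"
    #     importer.db_username = "b2e_user"
    #     importer.db_password = "secret"
    #     status = importer.connect_to_database()
    #     # status == importer.STATUS_SUCCESS on success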
def get_conv2wp_author( self, author_b2e_id_IN, *args, **kwargs ):
'''
Accepts author ID (B2E user ID). First, checks to see if an author
exists for this User ID. If yes, retrieves it. If no, pulls author
information from B2E, uses it to create an Author instance, saves the
instance, then returns it.
        Post-conditions: If no Author exists for the B2E user ID passed in, creates
Author instance and stores it in database.
'''
# return reference
instance_OUT = None
# declare variables
sql_select_user = ""
my_db_cursor = None
result_count = -1
query_result = None
table_name_prefix = ""
author_user_id = ""
author_user_login = ""
author_user_email = ""
author_user_first_name = ""
author_user_last_name = ""
author_user_nickname = ""
author_user_idmode = ""
        my_display_name = ""
        # try to find author by their B2E ID.
try:
# Try to get Author.
instance_OUT = Author.objects.all().get( original_user_id = author_b2e_id_IN )
except Exception, e:
# not found - retrieve author information from B2E database.
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
# create query to retrieve posts and author information.
sql_select_user = "SELECT * FROM " + table_name_prefix + "users WHERE user_ID = " + str( author_b2e_id_IN ) + ";"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_user )
# got something?
result_count = my_db_cursor.rowcount
if ( result_count > 0 ):
# got something. Got one?
if ( result_count > 1 ):
# more than one match. Error.
print( self.STATUS_PREFIX_ERROR + "More than one user matches ID " + str( author_b2e_id_IN ) + ". That should be impossible..." )
#-- END sanity check. --#
# get single row (assume we won't have multiple)
query_result = my_db_cursor.fetchone()
# create and populate Author instance.
instance_OUT = Author()
# retrieve Author values we will use.
author_user_id = query_result[ "user_ID" ]
author_user_login = query_result[ "user_login" ]
author_user_email = query_result[ "user_email" ]
author_user_first_name = query_result[ "user_firstname" ]
author_user_last_name = query_result[ "user_lastname" ]
author_user_nickname = query_result[ "user_nickname" ]
author_user_idmode = query_result[ "user_idmode" ]
# ==> original_user_id = models.IntegerField()
instance_OUT.original_user_id = author_user_id
# ==> login = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.login = author_user_login
# ==> email = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.email = author_user_email
# ==> first_name = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.first_name = author_user_first_name
# ==> last_name = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.last_name = author_user_last_name
# ==> display_name = models.CharField( max_length = 255, blank = True, null = True )
# set display name based on idmode value
if ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_NICKNAME ):
# set display name to nickname
my_display_name = author_user_nickname
elif ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_LOGIN ):
# set display name to login name
my_display_name = author_user_login
elif ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_NAMEFL ):
# set display name to first name, last name
my_display_name = author_user_first_name + " " + author_user_last_name
elif ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_NAMELF ):
# set display name to last name, first name
my_display_name = author_user_last_name + " " + author_user_first_name
elif ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_FIRSTNAME ):
# set display name to first name
my_display_name = author_user_first_name
elif ( author_user_idmode == self.B2E_USER_DISPLAY_TYPE_LASTNAME ):
# set display name to last name
my_display_name = author_user_last_name
else:
# if nothing set, use nickname.
my_display_name = author_user_nickname
#-- END check to see what we use as display name. --#
# set display name
instance_OUT.display_name = my_display_name
# Fields we aren't populating.
# - middle_name = models.CharField( max_length = 255, blank = True, null = True )
# - suffix = models.CharField( max_length = 255, blank = True, null = True )
# - description = models.TextField( blank = True, null = True )
# - notes = models.TextField( blank = True, null = True )
# - create_date_time = models.DateTimeField( auto_now_add = True )
# - update_date_time = models.DateTimeField( auto_now = True )
# - last_export_date_time = models.DateTimeField( blank = True, null = True )
# save it.
instance_OUT.save()
else:
# No match - return None
print( self.STATUS_PREFIX_ERROR + "No user matches ID " + str( author_b2e_id_IN ) + "." )
instance_OUT = None
#-- END check to see if query found B2E user. --#
except Exception, e:
# Database exception. Output error message, return None.
print( self.STATUS_PREFIX_ERROR + "Database error looking for B2E user " + str( author_b2e_id_IN ) + " - Exception message: " + str( e ) )
instance_OUT = None
#-- END try/except around author query --#
#-- END try/except around retrieving Author --#
return instance_OUT
#-- END method get_conv2wp_author() --#
def get_conv2wp_batch( self, slug_IN, blog_id_IN = -1, *args, **kwargs ):
'''
Accepts required slug (label for this batch, no spaces, please), blog ID
if there is one. Looks for existing batch with slug. If found,
returns it. If not, Creates, saves, and returns batch for this
conversion, based on values contained within this instance.
Postconditions: Batch is stored in database before it is returned. You
must pass in a non-empty slug. If no slug passed in, Exception is
thrown.
'''
# return reference
instance_OUT = None
# look for Batch instance with slug passed in.
try:
# try to get batch object.
instance_OUT = Batch.objects.all().get( slug = slug_IN )
print( " - found existing." )
except Exception, e:
# not found - create new instance.
instance_OUT = Batch()
print( " - created new ( " + str( e ) + " )." )
#-- END check to see if item already is stored. --#
# populate fields.
# slug
instance_OUT.slug = slug_IN
# title
instance_OUT.title = slug_IN + " - Converting B2E to WordPress"
if ( blog_id_IN > 0 ):
# we are just converting a specific blog. Output that ID.
instance_OUT.title += ", blog ID = " + str( blog_id_IN )
#-- END check to see if blog ID. --#
# save
instance_OUT.save()
return instance_OUT
#-- END method get_conv2wp_batch() --#
def get_conv2wp_category( self, category_b2e_id_IN, *args, **kwargs ):
'''
Accepts B2E category ID. First, checks to see if a Category
exists for this ID. If yes, retrieves it. If no, pulls category
information from B2E, uses it to create a Category instance, saves the
instance, then returns it.
        Post-conditions: If no Category exists for the B2E ID passed in, creates
Category instance and stores it in database.
'''
# return reference
instance_OUT = None
# declare variables
me = "get_conv2wp_category"
sql_select_category = ""
my_db_cursor = None
result_count = -1
query_result = None
table_name_prefix = ""
current_cat_name = ""
current_cat_ID = -1
current_cat_parent_ID = -1
current_cat_blog_ID = -1
current_cat_description = ""
# try to find Category by its B2E ID.
try:
# Try to get Category.
instance_OUT = Category.objects.all().get( term_id = category_b2e_id_IN )
print( " - found existing." )
except Exception, e:
# not found - retrieve category information from B2E database.
print( " - created new ( " + str( e ) + " )." )
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
# create query to retrieve category information.
sql_select_category = "SELECT * FROM " + table_name_prefix + "categories WHERE cat_ID = " + str( category_b2e_id_IN ) + ";"
#SELECT cat_name, cat_ID, cat_parent_ID, cat_blog_ID, cat_description FROM " + table_name_prefix + "categories
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_category )
# got something?
result_count = my_db_cursor.rowcount
if ( result_count > 0 ):
# got something. Got one?
if ( result_count > 1 ):
# more than one match. Error.
print( self.STATUS_PREFIX_ERROR + "More than one category matches ID " + str( category_b2e_id_IN ) + ". That should be impossible..." )
#-- END sanity check. --#
# get single row (assume we won't have multiple)
query_result = my_db_cursor.fetchone()
# create and populate Category instance.
instance_OUT = Category()
# load values we need.
current_cat_name = query_result[ "cat_name" ]
current_cat_ID = query_result[ "cat_ID" ]
current_cat_parent_ID = query_result[ "cat_parent_ID" ]
current_cat_blog_ID = query_result[ "cat_blog_ID" ]
current_cat_description = query_result[ "cat_description" ]
# output.
print( "- current category: " + str( current_cat_ID ) + " - " + current_cat_name + " - " + str( current_cat_parent_ID ) )
# set values.
instance_OUT.term_id = current_cat_ID
instance_OUT.name = current_cat_name
instance_OUT.description = current_cat_description
# nice name - all lower-case, spaces and underscores replaced
# by hyphens.
current_cat_nicename = current_cat_name.strip()
current_cat_nicename = current_cat_nicename.lower()
current_cat_nicename = current_cat_nicename.replace( " ", "-" )
current_cat_nicename = current_cat_nicename.replace( "_", "-" )
instance_OUT.nice_name = current_cat_nicename
# parent category ID?
if ( ( current_cat_parent_ID ) and ( current_cat_parent_ID != None ) and ( current_cat_parent_ID != "" ) and ( isinstance( current_cat_parent_ID, numbers.Integral ) == True ) and ( current_cat_parent_ID > 0 ) ):
# get parent category
parent_cat_model = self.get_conv2wp_category( current_cat_parent_ID )
# store parent if one present
instance_OUT.parent_category = parent_cat_model
#-- END check to see if parent category ID. --#
# save category.
instance_OUT.save()
else:
# No match - return None
print( self.STATUS_PREFIX_ERROR + "In " + me + "(): No category matches ID " + str( category_b2e_id_IN ) + "." )
instance_OUT = None
            #-- END check to see if query found B2E category. --#
except Exception, e:
# Database exception. Output error message, return None.
print( self.STATUS_PREFIX_ERROR + "In " + me + "(): Database error looking for B2E category " + str( category_b2e_id_IN ) + " - Exception message: " + str( e ) )
instance_OUT = None
#-- END try/except around category query --#
#-- END try/except around retrieving Category --#
return instance_OUT
#-- END method get_conv2wp_category() --#
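    # Note: the parent-category branch above makes get_conv2wp_category()
    # recursive - requesting a leaf category transparently creates and saves
    # its whole ancestor chain the first time it is encountered.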
def get_conv2wp_channel( self, batch_IN, blog_id_IN = -1, *args, **kwargs ):
'''
Accepts required batch, blog ID if there is one. Looks for channel for
batch (should only be one, for now). If finds one, returns it. If
not, creates one, saves, associates it with the batch, then returns
channel instance for this conversion, based on values contained within
this instance.
Postconditions: Channel is stored in database before it is returned, and
is associated with Batch passed in. You must pass in a batch. If no
batch passed in, Exception is thrown.
'''
# return reference
instance_OUT = None
# declare variables
pub_date = None
# look for Batch instance with slug passed in.
try:
# try to get batch object.
instance_OUT = Channel.objects.all().get( batch = batch_IN )
print( " - found existing." )
except Exception, e:
# not found - create new instance.
instance_OUT = Channel()
print( " - created new ( " + str( e ) + " )." )
#-- END check to see if item already is stored. --#
# populate fields.
# ==> batch = models.ForeignKey( Batch )
instance_OUT.batch = batch_IN
# ==> title = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.title = self.channel_title
# ==> link = models.URLField( max_length = 255, blank = True, null = True )
instance_OUT.link = self.channel_base_blog_url
# ==> description = models.TextField( blank = True, null = True )
instance_OUT.description = self.channel_description
# ==> pubdate = models.CharField( max_length = 255, blank = True, null = True )
# ==> pub_date_time = models.DateTimeField( blank = True, null = True )
pub_date = datetime.datetime.now()
instance_OUT.pubdate = pub_date.strftime( self.RSS_DATE_STRFTIME_FORMAT + " " + self.time_zone )
instance_OUT.pub_date_time = pub_date
# ==> generator = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.generator = self.channel_generator
# ==> wxr_version = models.CharField( max_length = 255, blank = True, null = True )
instance_OUT.wxr_version = self.channel_wxr_version
# ==> base_site_URL = models.URLField( max_length = 255, blank = True, null = True )
instance_OUT.base_site_URL = self.channel_base_site_url
# ==> base_blog_URL = models.URLField( max_length = 255, blank = True, null = True )
instance_OUT.base_blog_URL = self.channel_base_blog_url
# related authors, categories, tags, and terms will be added as posts
# are processed. Need methods on Channel for adding each (look up,
# see if associated, if not, add association).
# ==> authors = models.ManyToManyField( Author, blank = True, null = True )
# ==> categories = models.ManyToManyField( Category, blank = True, null = True )
# ==> tags = models.ManyToManyField( Tag, blank = True, null = True )
# ==> terms = models.ManyToManyField( Term, blank = True, null = True )
# Not setting, or leaving set to default:
# --> cloud tag - example <cloud domain='capitalnewsservice.wordpress.com' port='80' path='/?rsscloud=notify' registerProcedure='' protocol='http-post' />
# - cloud_domain = models.CharField( max_length = 255, blank = True, null = True )
# - cloud_port = models.IntegerField( blank = True, null = True )
# - cloud_path = models.CharField( max_length = 255, blank = True, null = True )
# - cloud_register_procedure = models.CharField( max_length = 255, blank = True, null = True )
# - cloud_protocol = models.CharField( max_length = 255, blank = True, null = True )
# --> blog image
# - blog_image_url = models.URLField( max_length = 255, blank = True, null = True )
# - blog_image_title = models.CharField( max_length = 255, blank = True, null = True )
# - blog_image_link = models.URLField( max_length = 255, blank = True, null = True )
# --> blog open search atom link: <atom:link rel="search" type="application/opensearchdescription+xml" href="http://capitalnewsservice.wordpress.com/osd.xml" title="Capital News Service" />
# - atom_open_search_rel = models.CharField( max_length = 255, blank = True, null = True )
# - atom_open_search_type = models.CharField( max_length = 255, blank = True, null = True )
# - atom_open_search_href = models.URLField( max_length = 255, blank = True, null = True )
# - atom_open_search_title = models.CharField( max_length = 255, blank = True, null = True )
# --> blog hub atom link: <atom:link rel='hub' href='http://capitalnewsservice.wordpress.com/?pushpress=hub'/>
# - atom_blog_hub_rel = models.CharField( max_length = 255, blank = True, null = True )
# - atom_blog_hub_type = models.CharField( max_length = 255, blank = True, null = True )
# - atom_blog_hub_href = models.URLField( max_length = 255, blank = True, null = True )
# - atom_blog_hub_title = models.CharField( max_length = 255, blank = True, null = True )
# - create_date_time = models.DateTimeField( auto_now_add = True )
# - last_export_date_time = models.DateTimeField( blank = True, null = True )
# save
instance_OUT.save()
return instance_OUT
#-- END method get_conv2wp_channel() --#
def get_database_cursor( self, *args, **kwargs ):
'''
If cursor present in instance, returns it. If not, if connection
present, uses it to create, store, and return cursor. If no
connection, creates database connection using nested database
information, then uses it to create, store, and return cursor.
Postconditions: Cursor is stored in instance before it is returned.
'''
# return reference
cursor_OUT = None
# declare variables
my_db_cursor = None
my_db_connection = None
connect_status = None
# got a cursor?
my_db_cursor = self.db_cursor
if ( ( my_db_cursor ) and ( my_db_cursor != None ) ):
# yes - return it.
cursor_OUT = my_db_cursor
else:
# no cursor. Got a connection?
my_db_connection = self.db_connection
if ( ( my_db_connection ) and ( my_db_connection != None ) ):
# yes. Use it to create and store cursor.
# create cursor.
my_db_cursor = my_db_connection.cursor( MySQLdb.cursors.DictCursor )
# store it.
self.db_cursor = my_db_cursor
# return it.
cursor_OUT = self.db_cursor
else:
# no. Create connection, store it, then create cursor, store
# that, then return cursor.
connect_status = self.connect_to_database()
# retrieve connection.
my_db_connection = self.db_connection
# create cursor.
my_db_cursor = my_db_connection.cursor( MySQLdb.cursors.DictCursor )
# store it.
self.db_cursor = my_db_cursor
# return it.
cursor_OUT = self.db_cursor
#-- END check to see if we have a database connection. --#
#-- END check to see if we have a database cursor already. --#
return cursor_OUT
#-- END method get_database_cursor() --#
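    # Note: get_database_cursor() is a lazy-initialization accessor - the
    # first call creates and caches both the MySQLdb connection and a
    # DictCursor (rows keyed by column name, which the queries in this class
    # rely on); later calls reuse the cached cursor until
    # close_db_connection() resets both to None.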
def import_b2e( self, slug_IN, blog_id_IN = -1, *args, **kwargs ):
'''
Accepts an optional blog ID, imports all authors, posts, and
comments from B2E blog into the conv2wp database tables, so we
can then render them into a WXR file, for importing into
Wordpress.
Preconditions: Must place database information inside this instance.
Postconditions: Categories, authors, blog posts, and comments will be
added to the conv2wp tables, so they can be included in a WXR file.
The B2E database tables will not be changed.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
my_batch = None
my_channel = None
current_status = ""
# get a batch instance
my_batch = self.get_conv2wp_batch( slug_IN, blog_id_IN )
# create a channel - my_channel
my_channel = self.get_conv2wp_channel( my_batch, blog_id_IN )
# get and save categories.
current_status = self.process_categories( my_channel, blog_id_IN )
# check status
if ( current_status == self.STATUS_SUCCESS ):
# process posts.
status_OUT = self.process_posts( blog_id_IN, my_channel )
else:
# error processing categories. Return message.
status_OUT = current_status
#-- END check status of processing categories. --#
# close database connection
self.close_db_connection()
return status_OUT
    #-- END method import_b2e() --#
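    # End-to-end usage sketch (hedged - values are illustrative):
    #
    #     importer = B2E_Importer()
    #     importer.db_server = "localhost"
    #     importer.db_database = "b2evolution"
    #     importer.db_username = "b2e_user"
    #     importer.db_password = "secret"
    #     # pull blog 12 into the conv2wp tables under slug "detnews-neighborhood"
    #     status = importer.import_b2e( "detnews-neighborhood", 12 )
    #     print( status )
    #
    # import_b2e() opens its own connection via get_database_cursor(), so no
    # explicit connect_to_database() call is needed beforehand.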
def process_categories( self, channel_IN, blog_id_IN = -1, *args, **kwargs ):
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
my_db_cursor = None
table_name_prefix = ""
sql_select_categories = ""
query_results = None
current_category = None
current_cat_ID = ""
current_cat_model = None
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
# create query to retrieve categories.
sql_select_categories = "SELECT cat_name, cat_ID, cat_parent_ID, cat_blog_ID, cat_description FROM " + table_name_prefix + "categories"
# got an ID?
if ( ( blog_id_IN ) and ( blog_id_IN != None ) and ( blog_id_IN != "" ) and ( isinstance( blog_id_IN, numbers.Integral ) == True ) and ( blog_id_IN > 0 ) ):
# we do - add where clause.
sql_select_categories += " WHERE cat_blog_ID IN ( " + str( blog_id_IN ) + " )"
#-- END check to see if ID passed in. --#
# then, ORDER_BY.
sql_select_categories += " ORDER BY cat_blog_ID, cat_parent_id, cat_ID;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_categories )
query_results = my_db_cursor.fetchall()
# loop over categories.
for current_category in query_results:
# retrieve category values
current_cat_ID = current_category[ "cat_ID" ]
# get category instance.
current_cat_model = self.get_conv2wp_category( current_cat_ID )
# add category to channel.
channel_IN.add_category( current_cat_model )
#-- END loop over categories. --#
except Exception, e:
status_OUT = self.STATUS_PREFIX_ERROR + "Exception message: " + str( e )
#-- END try/except around query --#
return status_OUT
#-- END method process_categories() --#
def process_post( self, current_post_row_IN, channel_IN, *args, **kwargs ):
'''
Accepts a channel and a post row (result of querying B2E database for
posts we want to migrate, in the form where the row is keyed by
column name, not index of column). Both are required.
Then, does the following:
- creates Item, load with information from post.
- get author user, add it to Authors.
        - get comments for post, store them in Comments, associated to Item.
- get categories for post, look up and associate them.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# things we retrieve from post.
current_post = None
current_post_id = ""
current_post_creator_user_id = ""
current_post_datestart = ""
current_post_datecreated = ""
current_post_status = ""
current_post_locale = ""
current_post_content = ""
current_post_title = ""
current_post_urltitle = ""
current_post_main_cat_id = -1
current_post_views = -1
current_post_wordcount = -1
current_post_url = ""
current_post_guid = ""
current_post_cleaned_content = ""
# variables to hold pieces of pub date.
pub_date = None
my_tz_offset = None
my_tz_offset_seconds = None
timedelta_time_zone_offset = None
pub_date_GMT = None
pub_date_year = -1
pub_date_month = -1
pub_date_day = -1
# model for storing in new database.
current_item_model = None
# variables for parsing body.
more_index = -1
content_before_more = ""
content_after_more = ""
do_store_excerpt = False
# variables for authors
author_status = ""
# variables for categories
category_status = ""
# variables for comments
comment_status = ""
# place row passed in into current_post
current_post = current_post_row_IN
# retrieve post values
current_post_id = current_post[ "post_ID" ]
current_post_creator_user_id = current_post[ "post_creator_user_ID" ]
current_post_datestart = current_post[ "post_datestart" ] # this is the date the post was published.
current_post_datecreated = current_post[ "post_datecreated" ]
current_post_status = current_post[ "post_status" ]
current_post_locale = current_post[ "post_locale" ]
current_post_content = current_post[ "post_content" ]
current_post_title = current_post[ "post_title" ]
current_post_urltitle = current_post[ "post_urltitle" ]
current_post_main_cat_id = current_post[ "post_main_cat_ID" ]
current_post_views = current_post[ "post_views" ]
current_post_wordcount = current_post[ "post_wordcount" ]
# for now, just output.
print( "- current post: " + str( current_post_id ) + " - " + current_post_title + " - " + current_post_urltitle + " - " + str( current_post_datestart ) )
# check if item already exists.
try:
# try to get item object.
current_item_model = Item.objects.all().get( post_id = current_post_id )
print( " - found existing." )
except Exception, e:
# not found - create new instance.
current_item_model = Item()
print( " - created new ( " + str( e ) + " )." )
#-- END check to see if item already is stored. --#
#---------------------------------------------------------------#
# set values.
#---------------------------------------------------------------#
# ===> channel = models.ForeignKey( Channel )
current_item_model.channel = channel_IN
# ==> post_id = models.IntegerField( blank = True, null = True )
current_item_model.post_id = current_post_id
current_item_model.comment_status = Item.INTERACTION_STATUS_CLOSED
# ==> post_status = models.CharField( max_length = 255, blank = True, null = True, default = POST_STATUS_PUBLISH )
# is post published?
if ( current_post_status == self.B2E_POST_STATUS_PUBLISHED ):
# yes - set it to publish in WordPress
current_item_model.status = Item.POST_STATUS_PUBLISH
else:
# no - set it to draft in WordPress
current_item_model.status = Item.POST_STATUS_DRAFT
#-- END check of status of post. --#
# ==> post_name = models.CharField( max_length = 255, blank = True, null = True )
# slug - convert underscores in the current_post_urltitle to hyphens.
current_item_model.post_name = current_post_urltitle.replace( "_", "-" )
# ==> post_date_time = models.DateTimeField( blank = True, null = True )
# get and parse publication date.
pub_date = current_post_datestart
current_item_model.post_date_time = pub_date
# ==> pubdate = models.CharField( max_length = 255, blank = True, null = True )
# ==> pub_date_time = models.DateTimeField( blank = True, null = True )
# convert post date to the following format: Sun, 01 Aug 2010 16:42:26 +0000 - could use either date, just get offset right (+-HHMM - GMT = +0000, EST = -0500)
# RSS spec: http://cyber.law.harvard.edu/rss/rss.html#optionalChannelElements
# RSS date format = http://asg.web.cmu.edu/rfc/rfc822.html#sec-5
current_item_model.pubdate = pub_date.strftime( self.RSS_DATE_STRFTIME_FORMAT + " " + self.time_zone )
current_item_model.pub_date_time = pub_date
# ==> post_date_time_gmt = models.DateTimeField( blank = True, null = True )
        # apply the configured time zone offset to convert local time to GMT.
my_tz_offset = self.time_zone_offset
# convert to seconds
my_tz_offset_seconds = my_tz_offset * 3600
# invert, since we are converting from local to GMT, not the
# other way around.
my_tz_offset_seconds = my_tz_offset_seconds * -1
# create timedelta for offset.
timedelta_time_zone_offset = datetime.timedelta( 0, my_tz_offset_seconds )
# convert pub date to GMT
pub_date_GMT = pub_date + timedelta_time_zone_offset
# store it.
current_item_model.post_date_time_gmt = pub_date_GMT
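        # Worked example: with time_zone_offset = -5 (EST, per the comment
        # above), my_tz_offset_seconds is -18000, inverted to +18000, so a
        # local post date of 2013-03-28 10:00 becomes 2013-03-28 15:00 GMT.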
# parse
pub_date_year = pub_date.strftime( "%Y" )
pub_date_month = pub_date.strftime( "%m" )
pub_date_day = pub_date.strftime( "%d" )
# Link and URL
# sample - http://community.detroitnews.com/blogs/index.php/neighborhood/2013/03/28/another_blessing
current_post_url = "http://community.detroitnews.com/blogs/index.php/neighborhood/" + pub_date_year + "/" + pub_date_month + "/" + pub_date_day + "/" + current_post_urltitle
# ==> link = models.URLField( max_length = 255, blank = True, null = True )
current_item_model.link = current_post_url
# ==> guid = models.CharField( max_length = 255, blank = True, null = True )
# sample - http://community.detroitnews.com/blogs/index.php?title=another_blessing
current_post_guid = "http://community.detroitnews.com/blogs/index.php?title=" + current_post_urltitle
current_item_model.guid = current_post_guid
# ==> title = models.CharField( max_length = 255, blank = True, null = True )
current_item_model.title = StringHelper.unicode_escape( current_post_title )
# ==> content_encoded = models.TextField( blank = True, null = True )
# ==> excerpt_encoded = models.TextField( blank = True, null = True, default = "" )
do_store_excerpt = self.store_excerpt
# locate "<!--more-->" in post.
more_index = current_post_content.lower().find( "<!--more-->" )
# did we find it?
if ( more_index > -1 ):
# get content before and after more
content_before_more = current_post_content[ 0 : more_index ]
content_after_more = current_post_content[ more_index + 11 : len( current_post_content ) ]
# store off excerpt?
if ( do_store_excerpt == True ):
# take everything before "<!--more-->" and place it in excerpt.
content_before_more = self.clean_body_content( content_before_more )
current_item_model.excerpt_encoded = content_before_more
#-- END check to see if we store off excerpt --#
# then, remove "<!--more-->" from complete post, store result in content_encoded.
current_post_cleaned_content = content_before_more + "\n" + content_after_more
else:
# no <!--more--> - just store the content.
current_post_cleaned_content = current_post_content
#-- END check to see if we found "<!--more-->" --#
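        # Worked example: for content "Intro text<!--more-->Full story",
        # content_before_more is "Intro text" (stored as the excerpt when
        # store_excerpt is set) and content_after_more is "Full story"; the
        # recombined content is "Intro text\nFull story" - the "<!--more-->"
        # marker itself is dropped.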
# escape unicode crap.
current_post_cleaned_content = self.clean_body_content( current_post_cleaned_content )
# set content encoded.
current_item_model.content_encoded = current_post_cleaned_content
# save item.
current_item_model.save()
#---------------------------------------------------------------#
# Author
#---------------------------------------------------------------#
# ==> creators = models.ManyToManyField( Author, blank = True, null = True )
author_status = self.process_post_author( current_post_creator_user_id, current_item_model )
print( " - Author status: " + author_status )
#---------------------------------------------------------------#
# Categories (main, other)
#---------------------------------------------------------------#
# ==> categories = models.ManyToManyField( Category, blank = True, null = True )
category_status = self.process_post_categories( current_post_main_cat_id, current_item_model )
print( " - Category status: " + category_status )
#---------------------------------------------------------------#
# Comments
#---------------------------------------------------------------#
comment_status = self.process_post_comments( current_item_model )
print( " - Comment status: " + comment_status )
# Fields we aren't setting (or are leaving set to default):
# - guid_is_permalink = models.BooleanField( 'Is permalink?', default = False )
# - description = models.TextField( blank = True, null = True, default = "" )
# - comment_status = models.CharField( max_length = 255, choices = INTERACTION_STATUSES, blank = True, default = INTERACTION_STATUS_CLOSED )
# - ping_status = models.CharField( max_length = 255, choices = INTERACTION_STATUSES, blank = True, default = INTERACTION_STATUS_CLOSED )
# - post_parent = models.IntegerField( blank = True, null = True, default = 0 )
# - menu_order = models.IntegerField( blank = True, null = True, default = 0 )
# - post_type = models.CharField( max_length = 255, blank = True, null = True, default = ITEM_TYPE_POST )
# - post_password = models.CharField( max_length = 255, blank = True, null = True )
# - is_sticky = models.BooleanField( default = False )
# - attachment_URL = models.URLField( max_length = 255, blank = True, null = True )
# - tags = models.ManyToManyField( Tag, blank = True, null = True )
# save item?
#current_item_model.save()
return status_OUT
#-- END method process_post() --#
def process_post_author( self, author_b2e_id_IN, item_IN, *args, **kwargs ):
'''
Accepts author ID (B2E user ID), and item for B2E blog post that has been
initialized to contain all information from the post (including post
ID). First, checks to see if an author exists for this User ID. If
        yes, retrieves it. If no, pulls author information from B2E, uses it to
create an Author instance. Then, associates Author with item (and
channel - item method should do that) and we're done.
Post-conditions: creates Author instance and stores it in database if
needed. Also updates item and that item's related channel so Author
is associated with each if it wasn't before.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
author_model = None
sql_select_user = ""
my_db_cursor = None
query_results = None
table_name_prefix = ""
# try to find author by their B2E ID.
author_model = self.get_conv2wp_author( author_b2e_id_IN )
# got one?
if ( ( author_model ) and ( author_model != None ) ):
# got one. Associate it with item.
item_IN.add_author( author_model )
else:
# could not find author. Output error message.
status_OUT = self.STATUS_PREFIX_ERROR + "Could not find B2E user for ID " + str( author_b2e_id_IN ) + ", so cannot process."
#-- END check to see if we got an author for user ID. --#
return status_OUT
#-- END method process_post_author() --#
def process_post_categories( self, post_main_cat_id_IN, item_IN, *args, **kwargs ):
'''
Accepts post's main category ID and the item that contains the rest of
the details on this post. First retrieves category instance for main
category and adds it to item. Then, looks up all other categories
and adds them as well.
Post-conditions: If categories are first encountered by this process, it
will create them and add them to the database. It also creates the
ties between the categories and the item and channel in which the item
resides in the database, as well.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
me = "process_post_categories"
main_cat_model = None
post_id = -1
# database connection variables.
my_db_cursor = None
table_name_prefix = ""
sql_select_categories = ""
query_results = None
current_category = None
current_cat_ID = ""
current_cat_model = None
# get post ID.
post_id = item_IN.post_id
# main category?
if ( ( post_main_cat_id_IN ) and ( post_main_cat_id_IN != None ) and ( post_main_cat_id_IN != "" ) and ( isinstance( post_main_cat_id_IN, numbers.Integral ) == True ) and ( post_main_cat_id_IN > 0 ) ):
# check if main category already exists (it should).
main_cat_model = self.get_conv2wp_category( post_main_cat_id_IN )
# got main category?
if ( ( main_cat_model ) and ( main_cat_model != None ) ):
# add category to item.
item_IN.add_category( main_cat_model )
print( " - In " + me + ": adding category " + str( post_main_cat_id_IN ) + ": " + str( main_cat_model ) + " to item: " + str( post_id ) )
else:
print( " - In " + me + ": no category found for ID " + str( post_main_cat_id_IN ) + "." )
#-- END check to see if we have a main category --#
else:
print( " - In " + me + ": no main category ID (value passed in: " + str( post_main_cat_id_IN ) + ")." )
#-- END check to see if parent category ID. --#
# check for other categories!
# find categories for this post in B2E evo_postcats table.
# SELECT * FROM evo_postcats WHERE postcat_post_ID = <post_id>;
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
# create query to retrieve categories.
sql_select_categories = "SELECT * FROM " + table_name_prefix + "postcats WHERE postcat_post_ID = " + str( post_id ) + " ORDER BY postcat_post_ID, postcat_cat_ID;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_categories )
query_results = my_db_cursor.fetchall()
# loop over categories.
for current_category in query_results:
# retrieve category values
current_cat_ID = current_category[ "postcat_cat_ID" ]
# get category instance.
current_cat_model = self.get_conv2wp_category( current_cat_ID )
# add category to item.
item_IN.add_category( current_cat_model )
#-- END loop over categories. --#
except Exception, e:
status_OUT = self.STATUS_PREFIX_ERROR + " - In " + me + ": Exception message: " + str( e )
#-- END try/except around query --#
return status_OUT
#-- END method process_post_categories() --#
def process_post_comments( self, item_IN, *args, **kwargs ):
'''
Accepts the item that contains the rest of the details on a given post.
Retrieves all comments for the post, creates Comment instances for
them, then stores comments in the database, associated with the item.
Pre-conditions: Only pulls in published comments. Others are discarded.
Post-conditions: Creates comment records in the database and adds them
to the Item. Does look to see if comment was already added based on
comment ID. If so, does not add again.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
me = "process_post_comments"
post_id = -1
# database connection variables.
my_db_cursor = None
table_name_prefix = ""
sql_select_comments = ""
query_results = None
current_comment = None
current_comment_id = -1
duplicate_check_rs = None
current_comment_model = None
current_comment_author_id = -1
author_model = None
author_display_name = ""
author_email = ""
        current_comment_date = None
        current_comment_content = ""
        current_comment_url = ""
        current_comment_ip_address = ""
comment_date_GMT = None
current_comment_cleaned = ""
# variables to hold pieces of pub date.
my_tz_offset = None
my_tz_offset_seconds = None
timedelta_time_zone_offset = None
pub_date_GMT = None
pub_date_year = -1
pub_date_month = -1
pub_date_day = -1
# get post ID.
post_id = item_IN.post_id
# find comments for this post in B2E evo_comments table.
'''
Reproducing this logic, but not all in SQL:
SELECT
CASE
WHEN u.user_ID IS NULL THEN c.comment_author
ELSE
CASE u.user_idmode
WHEN 'nickname' THEN u.user_nickname
WHEN 'login' THEN u.user_login
WHEN 'namefl' THEN CONCAT(u.user_firstname, ' ', u.user_lastname)
WHEN 'namelf' THEN CONCAT(u.user_lastname, ' ', u.user_firstname)
WHEN 'firstname' THEN u.user_firstname
WHEN 'lastname' THEN u.user_lastname
ELSE u.user_nickname
END
END AS 'author',
CASE WHEN u.user_ID IS NULL THEN c.comment_author_email ELSE u.user_email END AS 'author_email',
CASE WHEN u.user_ID IS NULL THEN c.comment_author_url ELSE u.user_url END AS 'author_url',
comment_id, comment_status, comment_author_IP, comment_content, comment_post_ID, comment_date, comment_karma, comment_type, comment_author_ID
FROM evo_comments as c
LEFT JOIN evo_users as u
ON u.user_ID = c.comment_author_id
WHERE comment_status = 'published'
AND comment_post_ID = <post_id>;
'''
# SELECT * FROM evo_comments WHERE comment_status = 'published' AND comment_post_ID = <post_id>;
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
        # create query to retrieve comments.
sql_select_comments = "SELECT * FROM " + table_name_prefix + "comments WHERE comment_status = 'published' AND comment_post_ID = " + str( post_id ) + " ORDER BY comment_date ASC;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_comments )
query_results = my_db_cursor.fetchall()
# loop over comments.
for current_comment in query_results:
# retrieve comment ID
current_comment_id = current_comment[ "comment_ID" ]
# see if it has already been processed.
duplicate_check_rs = item_IN.comment_set.filter( comment_id = current_comment_id )
if ( duplicate_check_rs.count() <= 0 ):
# no matching comment. Process it. Get values.
current_comment_author_id = current_comment[ "comment_author_ID" ]
current_comment_date = current_comment[ "comment_date" ]
current_comment_content = current_comment[ "comment_content" ]
current_comment_url = current_comment[ "comment_author_url" ]
current_comment_ip_address = current_comment[ "comment_author_IP" ]
# create model instance.
current_comment_model = Comment()
# Set values.
# ==> item = models.ForeignKey( Item )
current_comment_model.item = item_IN
# ==> comment_id = models.IntegerField( blank = True, null = True )
current_comment_model.comment_id = current_comment_id
# ==> author_name = models.CharField( max_length = 255, blank = True, null = True )
# ==> author_email = models.CharField( max_length = 255, blank = True, null = True )
# for author information, pull in author, use it.
author_model = self.get_conv2wp_author( current_comment_author_id )
if ( ( author_model ) and ( author_model != None ) ):
author_display_name = author_model.display_name
author_email = author_model.email
#-- END check for author information --#
current_comment_model.author_name = author_display_name
current_comment_model.author_email = author_email
# ==> author_url = models.CharField( max_length = 255, blank = True, null = True )
current_comment_model.author_url = current_comment_url
# ==> author_ip = models.CharField( max_length = 255, blank = True, null = True )
current_comment_model.author_ip = current_comment_ip_address
# ==> comment_date_time = models.DateTimeField( blank = True, null = True )
current_comment_model.comment_date_time = current_comment_date
# ==> comment_date_time_gmt = models.DateTimeField( blank = True, null = True )
# add offset.
my_tz_offset = self.time_zone_offset
# convert to seconds
my_tz_offset_seconds = my_tz_offset * 3600
# invert, since we are converting from local to GMT, not the
# other way around.
my_tz_offset_seconds = my_tz_offset_seconds * -1
# create timedelta for offset.
timedelta_time_zone_offset = datetime.timedelta( 0, my_tz_offset_seconds )
# convert pub date to GMT
comment_date_GMT = current_comment_date + timedelta_time_zone_offset
# store it.
current_comment_model.comment_date_time_gmt = comment_date_GMT
# ==> content_encoded = models.TextField( blank = True, null = True )
# escape unicode crap.
current_comment_cleaned = self.clean_comment_content( current_comment_content )
# set content encoded.
current_comment_model.content_encoded = current_comment_cleaned
# ==> approved = models.BooleanField( default = False )
current_comment_model.approved = True
# Fields we aren't setting.
# - author_url = models.CharField( max_length = 255, blank = True, null = True )
# - comment_type = models.CharField( max_length = 255, blank = True, null = True )
# - parent_comment = models.ForeignKey( 'self', blank = True, null = True )
# - comment_author = models.ForeignKey( Author, blank = True, null = True )
# save the comment.
current_comment_model.save()
print( " - In " + me + "(): Adding comment " + str( current_comment_model ) )
#-- END check to make sure we aren't duplicating comments. --#
            #-- END loop over comments. --#
except Exception, e:
status_OUT = self.STATUS_PREFIX_ERROR + " - In " + me + ": Exception message: " + str( e )
#-- END try/except around query --#
return status_OUT
#-- END method process_post_comments() --#
def process_posts( self, blog_id_IN = -1, channel_IN = None, *args, **kwargs ):
'''
# get posts - if we have a blog ID, limit to that blog.
# For each post:
# - create Item, load with information from post.
# - get author user, add it to Authors.
        # - get comments for post, store them in Comments, associated to Item.
# - get categories for post, look up and associate them.
'''
# return reference
status_OUT = self.STATUS_SUCCESS
# declare variables
my_db_cursor = None
table_name_prefix = ""
sql_select_posts = ""
        query_results = None
current_post = None
# retrieve database cursor.
my_db_cursor = self.get_database_cursor()
# get table prefix
table_name_prefix = self.db_table_name_prefix
        # create query to retrieve posts, joined to their main category.
sql_select_posts = "SELECT * FROM " + table_name_prefix + "posts ep"
sql_select_posts += " INNER JOIN " + table_name_prefix + "categories ec"
sql_select_posts += " ON ec.cat_ID = ep.post_main_cat_ID"
# got a blog ID?
if ( ( blog_id_IN ) and ( blog_id_IN != None ) and ( blog_id_IN != "" ) and ( isinstance( blog_id_IN, numbers.Integral ) == True ) and ( blog_id_IN > 0 ) ):
# we do - add where clause.
sql_select_posts += " WHERE ec.cat_blog_ID IN ( " + str( blog_id_IN ) + " )"
#-- END check to see if ID passed in. --#
# then, ORDER_BY.
sql_select_posts += " ORDER BY ep.post_datecreated ASC;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_posts )
query_results = my_db_cursor.fetchall()
            # loop over posts.
for current_post in query_results:
# process post
self.process_post( current_post, channel_IN )
#-- END loop over posts. --#
except Exception, e:
status_OUT = self.STATUS_PREFIX_ERROR + "Exception message: " + str( e )
#-- END try/except around query --#
return status_OUT
#-- END method process_posts() --#
def __del__( self ):
# close database connection.
self.close_db_connection()
#-- END method __del__() --#
def __unicode__( self ):
# return reference
string_OUT = ""
string_OUT = "b2e_importer - server: " + self.db_server + "; database: " + self.db_database + "; username: " + self.db_username
return string_OUT
#-- END __unicode()__ method --#
#-- END B2E_Importer class --#
# ==> source file above: jonathanmorgan/conv2wp / b2e/b2e_importer.py (Python, gpl-3.0)
__author__ = 'asdf2014'
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
def modified_huber_loss(y_true, y_pred):
z = y_pred * y_true
loss = -4 * z
loss[z >= -1] = (1 - z[z >= -1]) ** 2
loss[z >= 1.] = 0
return loss
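# Sanity check on modified_huber_loss (y_true = 1, so z = y_pred):
#   z = 2.0  -> 0               (past the margin, zero loss)
#   z = 0.0  -> (1 - 0)**2 = 1  (quadratic regime, -1 <= z < 1)
#   z = -2.0 -> -4 * -2 = 8     (linear regime, z < -1)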
xmin, xmax = -4, 4
xx = np.linspace(xmin, xmax, 100)
plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
label="Zero-one loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
label="Hinge loss")
plt.plot(xx, -np.minimum(xx, 0), 'm-',
label="Perceptron loss")
plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
label="Log loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
label="Squared hinge loss")
plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
label="Modified Huber loss")
plt.ylim((0, 8))
plt.legend(loc="upper right")
plt.xlabel(r"Decision function $f(x)$")
plt.ylabel("$L(y, f(x))$")
plt.show()
# ==> source file above: MasteringSpark/FirstStep / src/main/python/scikit/loss_function.py (Python, apache-2.0)
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import proxy
class source:
def __init__(self):
self.priority = 0
self.language = ['en']
self.domains = ['watchfree.to']
self.base_link = 'http://www.watchfree.ac'
self.base_link = 'https://watchfree.unblockall.xyz'
self.base_link = 'https://watchfree.unblocker.win'
self.moviesearch_link = '/?keyword=%s&search_section=1'
self.tvsearch_link = '/?keyword=%s&search_section=2'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
title = 'watch' + cleantitle.get(title)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
query = self.tvsearch_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
tvshowtitle = 'watch' + cleantitle.get(tvshowtitle)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if tvshowtitle == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'tv_episode_item')
result = client.parseDOM(result, 'div', attrs = {'class': 'tv_episode_item'})
title = cleantitle.get(title)
premiered = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(premiered)[0]
premiered = '%s %01d %s' % (premiered[1].replace('01','January').replace('02','February').replace('03','March').replace('04','April').replace('05','May').replace('06','June').replace('07','July').replace('08','August').replace('09','September').replace('10','October').replace('11','November').replace('12','December'), int(premiered[2]), premiered[0])
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'span', attrs = {'class': 'tv_episode_name'}), client.parseDOM(i, 'span', attrs = {'class': 'tv_num_versions'})) for i in result]
result = [(i[0], i[1][0], i[2]) for i in result if len(i[1]) > 0] + [(i[0], None, i[2]) for i in result if len(i[1]) == 0]
result = [(i[0], i[1], i[2][0]) for i in result if len(i[2]) > 0] + [(i[0], i[1], None) for i in result if len(i[2]) == 0]
result = [(i[0][0], i[1], i[2]) for i in result if len(i[0]) > 0]
url = [i for i in result if title == cleantitle.get(i[1]) and premiered == i[2]][:1]
if len(url) == 0: url = [i for i in result if premiered == i[2]]
if len(url) == 0 or len(url) > 1: url = [i for i in result if 'season-%01d-episode-%01d' % (int(season), int(episode)) in i[0]]
url = url[0][0]
url = proxy.parse(url)
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'link_ite')
links = client.parseDOM(result, 'table', attrs = {'class': 'link_ite.+?'})
for i in links:
try:
url = client.parseDOM(i, 'a', ret='href')
url = [x for x in url if 'gtfo' in x][-1]
url = proxy.parse(url)
url = urlparse.parse_qs(urlparse.urlparse(url).query)['gtfo'][0]
url = base64.b64decode(url)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = host.encode('utf-8')
quality = client.parseDOM(i, 'div', attrs = {'class': 'quality'})
if any(x in ['[CAM]', '[TS]'] for x in quality): quality = 'CAM'
else: quality = 'SD'
quality = quality.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
except:
pass
return sources
except:
return sources
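    # Note on the 'gtfo' handling above: the site wraps outbound links in a
    # proxied URL whose 'gtfo' query parameter is the base64-encoded target.
    # A hypothetical value decodes like so:
    #
    #     base64.b64decode('aHR0cDovL2V4YW1wbGUuY29tL3ZpZGVv')
    #     # -> 'http://example.com/video'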
def resolve(self, url):
return url
# ==> source file above: felipenaselva/felipe.repository / plugin.video.streamhub/resources/lib/sources/en/watchfree.py (Python, gpl-2.0)
import logging
import logging.handlers
import os
import sys
LOG_FORMAT = '[%(asctime)s] %(levelname)s: %(message)s'
MAX_LOG_SIZE = 1024 * 1024 * 10
LOG_BACKUP_COUNT = 5
LOG_PATH = log_path = os.path.join(os.path.expanduser('~'), '.tomviz', 'logs')
LOG_PATHS = {
'stderr': '%s/stderr.log' % LOG_PATH,
'stdout': '%s/stdout.log' % LOG_PATH,
'debug': '%s/debug.log' % LOG_PATH
}
try:
os.makedirs(LOG_PATH)
except os.error:
pass
class LoggerWriter:
def __init__(self, logger, level):
self._logger = logger
self._level = level
def write(self, message):
if message != '\n':
self._logger.log(self._level, message.rstrip('\n'))
def flush(self):
pass
def setup_std_loggers():
stdout_logger = logging.getLogger('stdout')
stdout_logger.setLevel(logging.INFO)
stderr_logger = logging.getLogger('stderr')
stderr_logger.setLevel(logging.ERROR)
stderr_log_writer = LoggerWriter(stderr_logger, logging.ERROR)
stdout_log_writer = LoggerWriter(stdout_logger, logging.INFO)
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['stderr'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
formatter = logging.Formatter(LOG_FORMAT)
file_handler.setFormatter(formatter)
stderr_logger.addHandler(file_handler)
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['stdout'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
file_handler.setFormatter(formatter)
stdout_logger.addHandler(file_handler)
sys.stderr = stderr_log_writer
sys.stdout = stdout_log_writer
def setup_loggers(debug=False):
logger = logging.getLogger('tomviz')
logger.setLevel(logging.DEBUG if debug else logging.INFO)
stream_handler = logging.StreamHandler()
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['debug'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
formatter = logging.Formatter(LOG_FORMAT)
stream_handler.setFormatter(formatter)
file_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.addHandler(file_handler)
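# Hedged usage sketch (illustrative; not part of the original module): install
# the stdout/stderr writers first, then configure the package loggers.
if __name__ == '__main__':
    setup_std_loggers()
    setup_loggers(debug=True)
    logging.getLogger('tomviz').debug('logging configured')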
|
cryos/tomviz
|
acquisition/tomviz/__init__.py
|
Python
|
bsd-3-clause
| 2,177 | 0 |
from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
from warnings import warn
from .numbers import BUILTIN_FORMATS, BUILTIN_FORMATS_REVERSE
from .proxy import StyleProxy
from .cell_style import StyleArray
class StyleDescriptor(object):
def __init__(self, collection, key):
self.collection = collection
self.key = key
def __set__(self, instance, value):
coll = getattr(instance.parent.parent, self.collection)
if not getattr(instance, "_style"):
instance._style = StyleArray()
setattr(instance._style, self.key, coll.add(value))
def __get__(self, instance, cls):
coll = getattr(instance.parent.parent, self.collection)
if not getattr(instance, "_style"):
instance._style = StyleArray()
idx = getattr(instance._style, self.key)
return StyleProxy(coll[idx])
class NumberFormatDescriptor(object):
key = "numFmtId"
collection = '_number_formats'
def __set__(self, instance, value):
coll = getattr(instance.parent.parent, self.collection)
if value in BUILTIN_FORMATS_REVERSE:
idx = BUILTIN_FORMATS_REVERSE[value]
else:
idx = coll.add(value) + 164
if not getattr(instance, "_style"):
instance._style = StyleArray()
setattr(instance._style, self.key, idx)
def __get__(self, instance, cls):
if not getattr(instance, "_style"):
instance._style = StyleArray()
idx = getattr(instance._style, self.key)
if idx < 164:
return BUILTIN_FORMATS.get(idx, "General")
coll = getattr(instance.parent.parent, self.collection)
return coll[idx - 164]
class StyleableObject(object):
"""
Base class for styleable objects implementing proxy and lookup functions
"""
font = StyleDescriptor('_fonts', "fontId")
fill = StyleDescriptor('_fills', "fillId")
border = StyleDescriptor('_borders', "borderId")
number_format = NumberFormatDescriptor()
protection = StyleDescriptor('_protections', "protectionId")
alignment = StyleDescriptor('_alignments', "alignmentId")
__slots__ = ('parent', '_style')
def __init__(self, sheet, style_array=None):
self.parent = sheet
if style_array is not None:
style_array = StyleArray(style_array)
self._style = style_array
@property
def style_id(self):
if self._style is None:
self._style = StyleArray()
return self.parent.parent._cell_styles.add(self._style)
@property
def has_style(self):
if self._style is None:
return False
return any(self._style)
@property
def pivotButton(self):
if self._style is None:
return False
return bool(self._style[6])
@property
def quotePrefix(self):
if self._style is None:
return False
return bool(self._style[7])
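# Hedged sketch (runs only where the relative imports above resolve, e.g. via
# `python -m openpyxl.styles.styleable`; the stand-in classes are illustrative,
# not openpyxl API): the descriptors only need instance.parent.parent to expose
# an indexable collection with an add(value) -> index method.
if __name__ == '__main__':
    class _Collection(list):
        def add(self, value):
            self.append(value)
            return len(self) - 1

    class _Book(object):
        _number_formats = _Collection()

    class _Sheet(object):
        parent = _Book()

    obj = StyleableObject(_Sheet())
    obj.number_format = '0.000'   # not a builtin format -> stored at index + 164
    print(obj.number_format)      # -> '0.000'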
|
aragos/tichu-tournament
|
python/openpyxl/styles/styleable.py
|
Python
|
mit
| 2,984 | 0.002011 |
# AMDG
import unittest
from datetime import datetime
from balance import BasicLoader, RepayLoader
from base_test import BaseTest
class LoaderTests(BaseTest, unittest.TestCase):
def test_basic_loader(self):
loader = BasicLoader('tests/data/basic_loader')
entries, errors = loader.load(return_errors=True)
self.assertEquals(1, len(entries))
entry = entries[0]
self.assertEquals(-5.00, entry.amount)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))
self.assertEquals(errors[1]['entry'], 'this is a bad line:\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
def test_repay_loader(self):
loader = RepayLoader('tests/data/repay_loader')
entries, errors = loader.load(return_errors=True)
self.assertEquals(4, len(entries))
entry = entries.pop()
self.assertEquals(-11.00, entry.amount)
self.assertEquals('repay', entry.category)
self.assertEquals('#2', entry.description)
self.assertEquals('Joe', entry.vendor)
self.assertEquals('cash', entry.method)
self.assertEquals(datetime(2014,10,3), entry.date)
for e in entries:
self.assertTrue(e.method in RepayLoader.methods)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '#hello\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))
self.assertEquals(errors[1]['entry'], 'bad line\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
if __name__ == '__main__':
unittest.main()
|
pilliq/balance
|
tests/test_loaders.py
|
Python
|
mit
| 1,764 | 0.004535 |
import re
import json
import itertools
from .common import InfoExtractor
from .subtitles import SubtitlesInfoExtractor
from ..utils import (
compat_urllib_request,
compat_str,
get_element_by_attribute,
get_element_by_id,
orderedSet,
ExtractorError,
)
class DailymotionBaseInfoExtractor(InfoExtractor):
@staticmethod
def _build_request(url):
"""Build a request with the family filter disabled"""
request = compat_urllib_request.Request(url)
request.add_header('Cookie', 'family_filter=off')
request.add_header('Cookie', 'ff=off')
return request
class DailymotionIE(DailymotionBaseInfoExtractor, SubtitlesInfoExtractor):
"""Information Extractor for Dailymotion"""
_VALID_URL = r'(?i)(?:https?://)?(?:www\.)?dailymotion\.[a-z]{2,3}/(?:embed/)?video/([^/]+)'
IE_NAME = u'dailymotion'
_FORMATS = [
(u'stream_h264_ld_url', u'ld'),
(u'stream_h264_url', u'standard'),
(u'stream_h264_hq_url', u'hq'),
(u'stream_h264_hd_url', u'hd'),
(u'stream_h264_hd1080_url', u'hd1080'),
]
_TESTS = [
{
u'url': u'http://www.dailymotion.com/video/x33vw9_tutoriel-de-youtubeur-dl-des-video_tech',
u'file': u'x33vw9.mp4',
u'md5': u'392c4b85a60a90dc4792da41ce3144eb',
u'info_dict': {
u"uploader": u"Amphora Alex and Van .",
u"title": u"Tutoriel de Youtubeur\"DL DES VIDEO DE YOUTUBE\""
}
},
# Vevo video
{
u'url': u'http://www.dailymotion.com/video/x149uew_katy-perry-roar-official_musi',
u'file': u'USUV71301934.mp4',
u'info_dict': {
u'title': u'Roar (Official)',
u'uploader': u'Katy Perry',
u'upload_date': u'20130905',
},
u'params': {
u'skip_download': True,
},
u'skip': u'VEVO is only available in some countries',
},
# age-restricted video
{
u'url': u'http://www.dailymotion.com/video/xyh2zz_leanna-decker-cyber-girl-of-the-year-desires-nude-playboy-plus_redband',
u'file': u'xyh2zz.mp4',
u'md5': u'0d667a7b9cebecc3c89ee93099c4159d',
u'info_dict': {
u'title': 'Leanna Decker - Cyber Girl Of The Year Desires Nude [Playboy Plus]',
u'uploader': 'HotWaves1012',
u'age_limit': 18,
}
}
]
def _real_extract(self, url):
# Extract id and simplified title from URL
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group(1).split('_')[0].split('?')[0]
url = 'http://www.dailymotion.com/video/%s' % video_id
# Retrieve video webpage to extract further information
request = self._build_request(url)
webpage = self._download_webpage(request, video_id)
# Extract URL, uploader and title from webpage
self.report_extraction(video_id)
# It may just embed a vevo video:
m_vevo = re.search(
r'<link rel="video_src" href="[^"]*?vevo.com[^"]*?videoId=(?P<id>[\w]*)',
webpage)
if m_vevo is not None:
vevo_id = m_vevo.group('id')
self.to_screen(u'Vevo video detected: %s' % vevo_id)
return self.url_result(u'vevo:%s' % vevo_id, ie='Vevo')
video_uploader = self._search_regex([r'(?im)<span class="owner[^\"]+?">[^<]+?<a [^>]+?>([^<]+?)</a>',
# Looking for official user
r'<(?:span|a) .*?rel="author".*?>([^<]+?)</'],
webpage, 'video uploader', fatal=False)
age_limit = self._rta_search(webpage)
video_upload_date = None
mobj = re.search(r'<div class="[^"]*uploaded_cont[^"]*" title="[^"]*">([0-9]{2})-([0-9]{2})-([0-9]{4})</div>', webpage)
if mobj is not None:
video_upload_date = mobj.group(3) + mobj.group(2) + mobj.group(1)
embed_url = 'http://www.dailymotion.com/embed/video/%s' % video_id
embed_page = self._download_webpage(embed_url, video_id,
u'Downloading embed page')
info = self._search_regex(r'var info = ({.*?}),$', embed_page,
'video info', flags=re.MULTILINE)
info = json.loads(info)
if info.get('error') is not None:
msg = 'Couldn\'t get video, Dailymotion says: %s' % info['error']['title']
raise ExtractorError(msg, expected=True)
formats = []
for (key, format_id) in self._FORMATS:
video_url = info.get(key)
if video_url is not None:
m_size = re.search(r'H264-(\d+)x(\d+)', video_url)
if m_size is not None:
width, height = m_size.group(1), m_size.group(2)
else:
width, height = None, None
formats.append({
'url': video_url,
'ext': 'mp4',
'format_id': format_id,
'width': width,
'height': height,
})
if not formats:
raise ExtractorError(u'Unable to extract video URL')
# subtitles
video_subtitles = self.extract_subtitles(video_id, webpage)
if self._downloader.params.get('listsubtitles', False):
self._list_available_subtitles(video_id, webpage)
return
return {
'id': video_id,
'formats': formats,
'uploader': video_uploader,
'upload_date': video_upload_date,
'title': self._og_search_title(webpage),
'subtitles': video_subtitles,
'thumbnail': info['thumbnail_url'],
'age_limit': age_limit,
}
def _get_available_subtitles(self, video_id, webpage):
try:
sub_list = self._download_webpage(
'https://api.dailymotion.com/video/%s/subtitles?fields=id,language,url' % video_id,
video_id, note=False)
except ExtractorError as err:
self._downloader.report_warning(u'unable to download video subtitles: %s' % compat_str(err))
return {}
info = json.loads(sub_list)
if (info['total'] > 0):
sub_lang_list = dict((l['language'], l['url']) for l in info['list'])
return sub_lang_list
self._downloader.report_warning(u'video doesn\'t have subtitles')
return {}
class DailymotionPlaylistIE(DailymotionBaseInfoExtractor):
IE_NAME = u'dailymotion:playlist'
_VALID_URL = r'(?:https?://)?(?:www\.)?dailymotion\.[a-z]{2,3}/playlist/(?P<id>.+?)/'
_MORE_PAGES_INDICATOR = r'<div class="next">.*?<a.*?href="/playlist/.+?".*?>.*?</a>.*?</div>'
_PAGE_TEMPLATE = 'https://www.dailymotion.com/playlist/%s/%s'
def _extract_entries(self, id):
video_ids = []
for pagenum in itertools.count(1):
request = self._build_request(self._PAGE_TEMPLATE % (id, pagenum))
webpage = self._download_webpage(request,
id, u'Downloading page %s' % pagenum)
playlist_el = get_element_by_attribute(u'class', u'row video_list', webpage)
video_ids.extend(re.findall(r'data-id="(.+?)"', playlist_el))
if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
break
return [self.url_result('http://www.dailymotion.com/video/%s' % video_id, 'Dailymotion')
for video_id in orderedSet(video_ids)]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
playlist_id = mobj.group('id')
webpage = self._download_webpage(url, playlist_id)
return {'_type': 'playlist',
'id': playlist_id,
'title': get_element_by_id(u'playlist_name', webpage),
'entries': self._extract_entries(playlist_id),
}
class DailymotionUserIE(DailymotionPlaylistIE):
IE_NAME = u'dailymotion:user'
_VALID_URL = r'(?:https?://)?(?:www\.)?dailymotion\.[a-z]{2,3}/user/(?P<user>[^/]+)'
_MORE_PAGES_INDICATOR = r'<div class="next">.*?<a.*?href="/user/.+?".*?>.*?</a>.*?</div>'
_PAGE_TEMPLATE = 'http://www.dailymotion.com/user/%s/%s'
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
user = mobj.group('user')
webpage = self._download_webpage(url, user)
full_user = self._html_search_regex(
r'<a class="label" href="/%s".*?>(.*?)</' % re.escape(user),
webpage, u'user', flags=re.DOTALL)
return {
'_type': 'playlist',
'id': user,
'title': full_user,
'entries': self._extract_entries(user),
}
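# Hedged offline sketch (assumes the youtube_dl package context, e.g. run via
# `python -m youtube_dl.extractor.dailymotion`): the _VALID_URL patterns can be
# exercised directly to sanity-check video-id extraction without any network.
if __name__ == '__main__':
    m = re.match(DailymotionIE._VALID_URL,
                 u'http://www.dailymotion.com/video/x33vw9_tutoriel-de-youtubeur')
    print(m.group(1).split('_')[0].split('?')[0])  # -> x33vw9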
|
ashutosh-mishra/youtube-dl
|
youtube_dl/extractor/dailymotion.py
|
Python
|
unlicense
| 8,993 | 0.003114 |
#==================================
#Author Bjorn Burr Nyberg
#University of Bergen
#Contact bjorn.nyberg@uni.no
#Copyright 2013
#==================================
'''This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.'''
#==================================
#Definition of inputs and outputs
#==================================
##[SAFARI]=group
##Polygon=vector
##Precision=number 2
##Output=output vector
#Algorithm body
#==================================
from qgis.core import *
from PyQt4.QtCore import *
from itertools import chain
import processing as st
keepNodes = set([])
layer = st.getobject(Polygon)
fields = QgsFields()
fields.append( QgsField( "id", QVariant.Int ))
crs = layer.crs()
# QGis.WKBPoint (== 1) makes the intent explicit: the writer emits point geometries
writer = QgsVectorFileWriter(Output, "CP1250", fields, QGis.WKBPoint, crs, "ESRI Shapefile")
fet = QgsFeature()
Total = layer.featureCount()
progress.setText('Extracting Nodes')
for enum,feature in enumerate(layer.getFeatures()):
progress.setPercentage(int((100 * enum)/Total))
geomType = feature.geometry()
if geomType.type() == QGis.Polygon:
if geomType.isMultipart():
geom = geomType.asMultiPolygon()
geom = list(chain(*chain(*geom)))
else:
geom = geomType.asPolygon()
geom = list(chain(*geom))
elif geomType.type() == QGis.Line:
if geomType.isMultipart():
geom = geomType.asMultiPolyline()
geom = list(chain(*geom))
else:
geom = geomType.asPolyline()
for points in geom:
if (round(points.x(),Precision),round(points.y(),Precision)) not in keepNodes:
pnt = QgsGeometry.fromPoint(QgsPoint(points.x(),points.y()))
fet.setGeometry(pnt)
writer.addFeature(fet)
keepNodes.update([(round(points.x(),Precision),round(points.y(),Precision))])
del writer
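# Hedged aside (plain Python, no QGIS required): the rounded-tuple set above is
# what collapses nearly coincident vertices into a single output node.
if __name__ == '__main__':
    seen = set()
    for x, y in [(1.001, 2.004), (1.004, 2.001), (5.0, 5.0)]:
        seen.add((round(x, 2), round(y, 2)))
    print(len(seen))  # -> 2: the first two vertices collapse at Precision=2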
|
BJEBN/Geometric-Analysis
|
Scripts/Old Scripts/Extract_Non_Duplicate_Nodes.py
|
Python
|
gpl-3.0
| 2,434 | 0.013969 |
from . import test_l10n_br_hr
from . import test_hr_employee_dependent
|
OCA/l10n-brazil
|
l10n_br_hr/tests/__init__.py
|
Python
|
agpl-3.0
| 71 | 0 |
#!/usr/bin/env python
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import string
import subprocess
import sys
import re
from cli_test_parameters import CLITestParameters
class CLITest:
def __init__(self):
pass
@staticmethod
def check_description(test_case, cli):
parameters = CLITestParameters()
test_case.assertEqual(parameters.get_value(cli.__class__.__name__, 'description'), cli.get_description())
@staticmethod
def check_curl(test_case, cli, output):
parameters = CLITestParameters()
p = re.compile(r'-u ".*?"\s')
a = p.findall(output)
output = output.replace(a[0], '')
test_case.assertEqual(parameters.get_value(cli.__class__.__name__, 'curl').encode('utf-8'), output.encode('utf-8'))
@staticmethod
def get_cli_name_from_class(i):
name = i.__class__.__name__
m = re.findall("([A-Z][a-z]+)", name)
m = [a.lower() for a in m]
cli_name = str.join('-', m)
return cli_name
@staticmethod
def check_cli_help(test_case, cli):
parameters = CLITestParameters()
name = cli.__class__.__name__
expected_output = parameters.get_cli_help(name)
m = re.findall("([A-Z][a-z]+)", name)
m = [a.lower() for a in m]
command = str.join('-', m)
try:
output = subprocess.check_output([command, '-h'])
test_case.assertEqual(expected_output, output)
except subprocess.CalledProcessError as e:
sys.stderr.write("{0}: {1}\n".format(e.output, e.returncode))
@staticmethod
def get_cli_output(cli, args):
output = None
try:
command = CLITest.get_cli_name_from_class(cli)
args.insert(0, command)
output = subprocess.check_output(args=args)
except subprocess.CalledProcessError as e:
sys.stderr.write("{0}: {1}\n".format(e.output, e.returncode))
return output
@staticmethod
def random_string(n):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(n))
@staticmethod
def is_int(s):
try:
int(s)
return True
except ValueError:
return False
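# Hedged sketch (assumes cli_test_parameters is importable so this module
# loads): the static helpers can be exercised on their own.
if __name__ == '__main__':
    print(CLITest.random_string(8))                     # e.g. 'K3QZ0X7P'
    print(CLITest.is_int('42'), CLITest.is_int('4.2'))  # True False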
|
jdgwartney/boundary-api-cli
|
tests/unit/boundary/cli_test.py
|
Python
|
apache-2.0
| 2,812 | 0.001067 |
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_maps_ext(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
policy = ET.SubElement(output, "policy")
policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rbridgeid = ET.SubElement(rules, "rbridgeid")
rbridgeid.text = kwargs.pop('rbridgeid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rulename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rulename = ET.SubElement(rules, "rulename")
rulename.text = kwargs.pop('rulename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_groupname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
groupname = ET.SubElement(rules, "groupname")
groupname.text = kwargs.pop('groupname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_monitor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
monitor = ET.SubElement(rules, "monitor")
monitor.text = kwargs.pop('monitor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_op(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
op = ET.SubElement(rules, "op")
op.text = kwargs.pop('op')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
value = ET.SubElement(rules, "value")
value.text = kwargs.pop('value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
action = ET.SubElement(rules, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
timebase = ET.SubElement(rules, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
policy = ET.SubElement(output, "policy")
policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rbridgeid = ET.SubElement(rules, "rbridgeid")
rbridgeid.text = kwargs.pop('rbridgeid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rulename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rulename = ET.SubElement(rules, "rulename")
rulename.text = kwargs.pop('rulename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_groupname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
groupname = ET.SubElement(rules, "groupname")
groupname.text = kwargs.pop('groupname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_monitor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
monitor = ET.SubElement(rules, "monitor")
monitor.text = kwargs.pop('monitor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_op(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
op = ET.SubElement(rules, "op")
op.text = kwargs.pop('op')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
value = ET.SubElement(rules, "value")
value.text = kwargs.pop('value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
action = ET.SubElement(rules, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
timebase = ET.SubElement(rules, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
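# Hedged usage sketch (stdlib only): each generated method hands the assembled
# ElementTree element to the callback, so a serialising callback exposes the
# XML without any device session.
if __name__ == '__main__':
    ext = brocade_maps_ext(callback=ET.tostring)
    print(ext.maps_get_all_policy_input_rbridge_id(rbridge_id='1'))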
|
BRCDcomm/pynos
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_maps_ext.py
|
Python
|
apache-2.0
| 12,855 | 0.0021 |
##############################################################################
#
# Copyright (c) 2008-2012 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import osv
from osv import fields
class report_print_by_action(osv.osv_memory):
_name = 'aeroo.print_by_action'
def to_print(self, cr, uid, ids, context=None):
this = self.browse(cr, uid, ids[0], context=context)
report_xml = self.pool.get(context['active_model']).browse(cr, uid, context['active_id'], context=context)
print_ids = eval("[%s]" % this.object_ids, {})
data = {'model':report_xml.model, 'ids':print_ids, 'id':print_ids[0], 'report_type': 'aeroo'}
return {
'type': 'ir.actions.report.xml',
'report_name': report_xml.report_name,
'datas': data,
'context':context
}
_columns = {
'name':fields.text('Object Model', readonly=True),
'object_ids':fields.char('Object IDs', size=250, required=True, help="Comma separated records ID"),
}
def _get_model(self, cr, uid, context):
return self.pool.get(context['active_model']).read(cr, uid, context['active_id'], ['model'], context=context)['model']
_defaults = {
'name': _get_model,
}
report_print_by_action()
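# Hedged aside (plain Python, no OpenERP server needed): to_print() turns the
# comma separated object_ids string into a list of record ids with eval.
if __name__ == '__main__':
    print(eval("[%s]" % "1,2,3", {}))  # -> [1, 2, 3]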
|
iw3hxn/LibrERP
|
report_aeroo/wizard/report_print_by_action.py
|
Python
|
agpl-3.0
| 2,640 | 0.006061 |
# Patchless XMLRPC Service for Django
# Kind of hacky, and stolen from Crast on irc.freenode.net:#django
# Self documents as well, so if you call it from outside of an XML-RPC Client
# it tells you about itself and its methods
#
# Brendan W. McAdams <brendan.mcadams@thewintergrp.com>
# SimpleXMLRPCDispatcher lets us register xml-rpc calls w/o
# running a full XMLRPC Server. It's up to us to dispatch data
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from buildfarm.models import Package, Queue
from repository.models import Repository, PisiPackage
from source.models import SourcePackage
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import xmlrpclib
from django.template.loader import render_to_string
from django.utils import simplejson
from django.template import Context, Template
from django import forms
from django.utils import simplejson
from django.db import transaction
from django.shortcuts import redirect
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from buildfarm.tasks import build_all_in_queue
class NewQueueForm (forms.ModelForm):
class Meta:
model = Queue
fields = ( 'name', 'builder', 'source_repo', 'binman', 'sandboxed')
def site_index (request):
queues = Queue.objects.all ()
context = { 'queues': queues, 'navhint': 'queue', 'not_reload': 'true', 'form' : NewQueueForm() }
return render (request, "buildfarm/site_index.html", context)
def package_progress_json (request, queue_id):
rdict = {}
q = Queue.objects.get(id=queue_id)
pct = float(q.current) / q.length * 100 if q.length else 0
rdict = { 'percent' : pct, 'total': q.length, 'current': q.current, 'name_current': q.current_package_name }
json = simplejson.dumps(rdict, ensure_ascii=False)
return HttpResponse( json, content_type='application/json')
@staff_member_required
def delete_from_queue (request, package_id):
pkg = get_object_or_404 (Package, id=package_id)
q_id = pkg.queue.id
pkg.delete ()
return redirect ('/buildfarm/queue/%d/' % q_id)
@staff_member_required
def delete_queue (request, queue_id):
queue = get_object_or_404 (Queue, id=queue_id)
queue.delete ()
return redirect ('/manage/')
@staff_member_required
def new_queue (request):
if request.method == 'POST':
# New submission
form = NewQueueForm (request.POST)
rdict = { 'html': "<b>Fail</b>", 'tags': 'fail' }
context = Context ({'form': form})
if form.is_valid ():
rdict = { 'html': "The new queue has been set up", 'tags': 'success' }
model = form.save (commit=False)
model.current = 0
model.length = 0
model.current_package_name = ""
model.save ()
else:
html = render_to_string ('buildfarm/new_queue.html', {'form_queue': form})
rdict = { 'html': html, 'tags': 'fail' }
json = simplejson.dumps(rdict, ensure_ascii=False)
print json
# And send it off.
return HttpResponse( json, content_type='application/json')
else:
form = NewQueueForm ()
context = {'form': form }
return render (request, 'buildfarm/new_queue.html', context)
def queue_index(request, queue_id=None):
q = get_object_or_404 (Queue, id=queue_id)
packages = Package.objects.filter(queue=q).order_by('build_status')
paginator = Paginator (packages, 15)
pkg_count = q.length
if pkg_count > 0:
    pct = float(q.current) / q.length * 100
else:
    pct = 0
page = request.GET.get("page")
try:
packages = paginator.page(page)
except PageNotAnInteger:
packages = paginator.page (1)
except EmptyPage:
packages = paginator.page (paginator.num_pages)
context = {'navhint': 'queue', 'queue': q, 'package_list': packages, 'total_packages': q.length, 'current_package': q.current, 'total_pct': pct, 'current_package_name': q.current_package_name}
return render (request, "buildfarm/index.html", context)
@staff_member_required
def build_queue (request, queue_id):
queue = Queue.objects.get (id=queue_id)
messages.info (request, "Starting build of \"%s\" queue" % queue.name)
build_all_in_queue.delay (queue_id)
return redirect ('/manage/')
@staff_member_required
def populate_queue (request, queue_id):
q = Queue.objects.get(id=queue_id)
packages = SourcePackage.objects.filter (repository=q.source_repo)
failList = list ()
for package in packages:
binaries = PisiPackage.objects.filter(source_name=package.name)
if len(binaries) == 0:
# We have no binaries
print "New package for source: %s" % (package.name)
failList.append (package)
else:
for package2 in binaries:
if package2.release != package.release:
print "Newer release for: %s" % package2.name
failList.append (package)
break
try:
binary = Package.objects.get(queue=q, name=package.name)
failList.remove (package)
except:
pass
with transaction.commit_on_success():
for fail in failList:
pkg = Package ()
pkg.name = fail.name
pkg.version = fail.version
pkg.build_status = "pending"
pkg.queue = q
pkg.spec_uri = fail.source_uri
pkg.save ()
return redirect ("/buildfarm/queue/%d" % q.id)
|
SolusOS-discontinued/RepoHub
|
buildfarm/views.py
|
Python
|
mit
| 5,488 | 0.030977 |
import tkinter as tk
from tkinter import *
import spotipy
import webbrowser
from PIL import Image, ImageTk
import os
from twitter import *
from io import BytesIO
import urllib.request
import urllib.parse
import PIL.Image
from PIL import ImageTk
import simplejson
song1 = "spotify:artist:58lV9VcRSjABbAbfWS6skp"
song2 = 'spotify:artist:0PFtn5NtBbbUNbU9EAmIWF'
song3 = 'spotify:artist:5INjqkS1o8h1imAzPqGZBb'
song4 = 'spotify:artist:1HwM5zlC5qNWhJtM00yXzG'
song5 = 'spotify:artist:4tZwfgrHOc3mvqYlEYSvVi'
song6 = 'spotify:artist:3AA28KZvwAUcZuOKwyblJQ'
song7 = 'spotify:artist:5T0MSzX9RC5NA6gAI6irSn'
song8 = 'spotify:artist:0SwO7SWeDHJijQ3XNS7xEE'
song9 = 'spotify:artist:1dWEYMPtNmvSVaDNLgB6NV'
# Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('705153959368007680-F5OUf8pvmOlXku1b7gpJPSAToqzV4Fb', 'bEGLkUJBziLc17EuKLTAMio8ChmFxP9aHYADwRXnxDsoC',
'gYDgR8lcTGcVZS9ucuEIYsMuj', '1dwHsLDN2go3aleQ8Q2vcKRfLETc51ipsP8310ayizL2p3Ycii'))
numberOfTweets = 3
class SetUp(tk.Tk): #inheriting
def __init__(self, *args, **kwargs): #method, initialising
tk.Tk.__init__(self, *args, **kwargs)
tk.Tk.wm_iconbitmap(self, default="favicon.ico")
container = tk.Frame(self) #container for holding everything
container.pack(side = "top", fill = None, expand = False)
container.pack_propagate(0) # don't shrink
container.grid_rowconfigure(0, weight = 1)
container.grid_columnconfigure(0, weight = 1)
self.frames = {} #dictionary of frames
for F in (StartPage, RadioPage, MapPage, DataPage, InvPage, StatsPage): #loop through the number of pages
frame = F(container, self)
self.frames[F] = frame
frame.grid(row = 0, column = 0, sticky = "nsew") #alignment plus stretch
self.show_frame(StartPage)
def show_frame(self, cont):
frame = self.frames[cont]
frame.tkraise() #raised to the front
def music(self, uri):
spotify = spotipy.Spotify()
results = spotify.artist_top_tracks(uri)
#getting the track and audio link to top song
for track in results['tracks'][:1]:
text2 = track['preview_url']
return text2
def showTweets(self, x, num):
# display a number of new tweets and usernames
for i in range(0, num):
line1 = (x[i]['user']['screen_name'])
line2 = (x[i]['text'])
#w = Label(self, text=line1 + "\n" + line2 + "\n\n")
#w.pack()
self.label = Label(self, text=line1 + "\n" + line2 + "\n\n", width = 100)
self.label.pack()  # use a single geometry manager; a preceding place() would be overridden by pack()
def getTweets(self):
x = t.statuses.home_timeline(screen_name="AndrewKLeech")
return x
def tweet(self):
text = entryWidget.get().strip()
if text == "":
print("Empty")
else:
t.statuses.update(status=text)
entryWidget.delete(0,END)
print("working")
def get_map(self,lat,lng):
latString = str(lat)
lngString = str(lng)
#Map url from google maps, has marker and colors included
url = ("https://maps.googleapis.com/maps/api/staticmap?center="+latString+","+lngString+"&size=450x250&zoom=16&style=feature:road.local%7Celement:geometry%7Ccolor:0x00ff00%7Cweight:1%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:administrative%7Celement:labels%7Cweight:3.9%7Cvisibility:on%7Cinverse_lightness:true&style=feature:poi%7Cvisibility:simplified&markers=color:blue%7Clabel:H%7C"+latString+","+lngString+"&markers=size:tiny%7Ccolor:green%7CDelta+Junction,AK\&sensor=false")
buffer = BytesIO(urllib.request.urlopen(url).read())
pil_image = PIL.Image.open(buffer)
tk_image = ImageTk.PhotoImage(pil_image)
# put the image in program
mapLabel = Label(image=tk_image)
mapLabel.pack()
mainloop()
def get_coordinates(self,from_sensor=False):
if entryWidget2.get().strip() == "":
print("Empty")
mapLabel.pack_forget()
else:
query=entryWidget2.get().strip()
print("working")
query = query.encode('utf-8')
params = {
'address': query,
'sensor': "true" if from_sensor else "false"
}
#url used for google geocodeing api
googleGeocodeUrl = 'http://maps.googleapis.com/maps/api/geocode/json?'
url = googleGeocodeUrl + urllib.parse.urlencode(params)
json_response = urllib.request.urlopen(url)
response = simplejson.loads(json_response.read())
if response['results']:
location = response['results'][0]['geometry']['location']
latitude, longitude = location['lat'], location['lng']
print(query, latitude, longitude)
else:
latitude, longitude = None, None
print(query, "<no results>")
self.get_map(latitude, longitude)
def game(self):
w, h = 500, 500
# Pack pygame in `embed`.
root = tk.Tk()
embed = tk.Frame(root, width=w, height=h)
embed.pack()
# Tell pygame's SDL window which window ID to use
os.environ['SDL_WINDOWID'] = str(embed.winfo_id())
# Show the window so it's assigned an ID.
root.update()
# Game for Pip-Boy
# Imports
import pygame
import random
# Initialise PyGame
pygame.init()
# Set display width and height
display_width = 500
display_height = 500
# Create a gameDisplay using display_width and display_height
gameDisplay = pygame.display.set_mode((display_width, display_height))
# Set the caption of the window to Tank War!
pygame.display.set_caption('Tank War!')
# Create colours using RGB values
black = (0, 0, 0)
green = (0, 150, 0)
lightGreen = (0, 255, 0)
# Create fonts
smallFont = pygame.font.SysFont(None, 25)
mediumFont = pygame.font.SysFont(None, 50)
largeFont = pygame.font.SysFont(None, 75)
# Initialise the clock for FPS
clock = pygame.time.Clock()
# Tank part dimensions
tankWidth = 40
tankHeight = 20
turretWidth = 5
wheelWidth = 5
# Ground height
ground = .85 * display_height
# Load sounds
fireSound = pygame.mixer.Sound("fireSound.wav")
cannon = pygame.mixer.Sound("cannon.wav")
def text_objects(text, color, size="smallFont"): # Function returns text for blitting
if size == "smallFont":
textSurface = smallFont.render(text, True, color)
if size == "mediumFont":
textSurface = mediumFont.render(text, True, color)
if size == "largeFont":
textSurface = largeFont.render(text, True, color)
return textSurface, textSurface.get_rect()
def text_to_button(msg, color, buttonx, buttony, buttonwidth, buttonheight,
size="smallFont"): # Blits text to button
textSurface, textRect = text_objects(msg, color, size)
textRect.center = ((buttonx + buttonwidth / 2), buttony + (buttonheight / 2))
gameDisplay.blit(textSurface, textRect)
def message_to_screen(msg, color, y_displace=0, size="smallFont"): # Blits the text returned from text_objects
textSurface, textRect = text_objects(msg, color, size)
textRect.center = (int(display_width / 2), int(display_height / 2) + y_displace)
gameDisplay.blit(textSurface, textRect)
def tank(x, y, turretPosition): # Draws the tank and turret
# Casting x and y to be ints
x = int(x)
y = int(y)
# Set possible turret positions
turrets = [(x - 27, y - 2),
(x - 26, y - 5),
(x - 25, y - 8),
(x - 23, y - 12),
(x - 20, y - 14),
(x - 18, y - 15),
(x - 15, y - 17),
(x - 13, y - 19),
(x - 11, y - 21)]
# Draw the tank
pygame.draw.circle(gameDisplay, green, (int(x), int(y)), 10)
pygame.draw.rect(gameDisplay, green, (x - tankHeight, y, tankWidth, tankHeight))
pygame.draw.line(gameDisplay, green, (x, y), turrets[turretPosition], turretWidth)
# Draw the wheels
pygame.draw.circle(gameDisplay, green, (x - 15, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 0, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 15, y + 20), wheelWidth)
# Return the turret position
return turrets[turretPosition]
def enemyTank(x, y, turretPosition): # Draws the tank and turret
# Casting x and y to be ints
x = int(x)
y = int(y)
# Set possible turret positions
turrets = [(x + 27, y - 2),
(x + 26, y - 5),
(x + 25, y - 8),
(x + 23, y - 12),
(x + 20, y - 14),
(x + 18, y - 15),
(x + 15, y - 17),
(x + 13, y - 19),
(x + 11, y - 21)]
# Draw the tank
pygame.draw.circle(gameDisplay, green, (int(x), int(y)), 10)
pygame.draw.rect(gameDisplay, green, (x - tankHeight, y, tankWidth, tankHeight))
pygame.draw.line(gameDisplay, green, (x, y), turrets[turretPosition], turretWidth)
pygame.draw.circle(gameDisplay, green, (x - 15, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 0, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 15, y + 20), wheelWidth)
return turrets[turretPosition]
def explosion(x, y): # Draws an explosion on screen
# Play a sound
pygame.mixer.Sound.play(fireSound)
explode = True
while explode:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
choices = [green, lightGreen]
magnitude = 1
while magnitude < 50:
explodeBitX = x + random.randrange(-1 * magnitude, magnitude)
explodeBitY = y + random.randrange(-1 * magnitude, magnitude)
if explodeBitY > ground + 13:
pygame.draw.circle(gameDisplay, black, (explodeBitX, explodeBitY), random.randrange(1, 5))
else:
pygame.draw.circle(gameDisplay, choices[random.randrange(0, 2)], (explodeBitX, explodeBitY),
random.randrange(1, 5))
magnitude += 1
pygame.display.update()
clock.tick(100)
explode = False
def fire(pos, turretPos, gunPower, enemyTankX,
enemyTankY): # Function for shooting and controlling bullet physics
# Play a sound
pygame.mixer.Sound.play(cannon)
damage = 0
fire = True
startingPos = list(pos)
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
pygame.draw.circle(gameDisplay, green, (startingPos[0], startingPos[1]), 5)
startingPos[0] -= (10 - turretPos) * 2
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
# If the explosion hits the tank
# Various damages for how close it was
if enemyTankX + 10 > hitX > enemyTankX - 10:
damage = 25
elif enemyTankX + 15 > hitX > enemyTankX - 15:
damage = 20
elif enemyTankX + 20 > hitX > enemyTankX - 20:
damage = 15
elif enemyTankX + 30 > hitX > enemyTankX - 30:
damage = 5
explosion(hitX, hitY)
fire = False
pygame.display.update()
clock.tick(60)
return damage
def enemyFire(pos, turretPos, gunPower, playerX,
playerY): # Function for shooting and controlling bullet physics
# Play a sound
pygame.mixer.Sound.play(cannon)
damage = 0
currentPower = 1
powerFound = False
# How the AI decides what power to uses
while not powerFound:
currentPower += 1
if currentPower > 100:
powerFound = True
fire = True
startingPos = list(pos)
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
startingPos[0] += (10 - turretPos) * 2
# Make currentPower random between 80% and 120% of the chosen power
gunPower = random.randrange(int(currentPower * .8), int(currentPower * 1.2))
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
if playerX + 15 > hitX > playerX - 15:
powerFound = True
fire = False
fire = True
startingPos = list(pos)
# When the power is decided, it shoots
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
pygame.draw.circle(gameDisplay, green, (startingPos[0], startingPos[1]), 5)
startingPos[0] += (10 - turretPos) * 2
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
# If the explosion hits the tank
# Various damages for how close it was
if playerX + 10 > hitX > playerX - 10:
damage = 25
elif playerX + 15 > hitX > playerX - 15:
damage = 20
elif playerX + 20 > hitX > playerX - 20:
damage = 15
elif playerX + 30 > hitX > playerX - 30:
damage = 5
explosion(hitX, hitY)
fire = False
pygame.display.update()
clock.tick(60)
return damage
def power(level): # Blits the power level
text = smallFont.render("Power: " + str(level) + "%", True, green)
gameDisplay.blit(text, [display_width * .75, 10])
def game_controls(): # Function for controls screen
controls = True
while controls:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Controls!", green, -100, size="largeFont")
message_to_screen("Left and right arrow keys to move the tank!", green, 10, size="smallFont")
message_to_screen("Up and down arrow keys to move the tank's turret!", green, 40, size="smallFont")
message_to_screen("A and D keys change the turret's power!", green, 70, size="smallFont")
message_to_screen("P to pause the game!", green, 100, size="smallFont")
# Buttons
button("Play", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def button(text, x, y, width, height, colour, active_colour,
action): # Creates the button, both active and inactive
cursor = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if x + width > cursor[0] > x and y + height > cursor[1] > y:
pygame.draw.rect(gameDisplay, active_colour, (x, y, width, height))
if click[0] == 1 and action != None:
if action == "play":
gameLoop()
if action == "controls":
game_controls()
if action == "quit":
pygame.quit()
quit()
else:
pygame.draw.rect(gameDisplay, colour, (x, y, width, height))
text_to_button(text, black, x, y, width, height)
def pause(): # Pauses the game
paused = True
message_to_screen("Paused", green, -225, size="largeFont")
message_to_screen("C to continue playing", green, -175, size="smallFont")
message_to_screen("Q to quit", green, -150, size="smallFont")
pygame.display.update()
while paused:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_c:
paused = False
elif event.key == pygame.K_q:
pygame.quit()
quit()
clock.tick(5)
def game_intro(): # Function for game introduction screen
intro = True
while intro:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Tank War!", green, -200, size="largeFont")
message_to_screen("Kill the enemy tank before it kills you!", green, -50, size="smallFont")
message_to_screen("Press play to play!", green, 0, size="smallFont")
message_to_screen("Press controls to view the game's controls!", green, 50, size="smallFont")
message_to_screen("Press quit to exit the game!", green, 100, size="smallFont")
# Text on the buttons
button("Play", 25, 400, 100, 50, green, lightGreen, action="play")
button("Controls", 200, 400, 100, 50, green, lightGreen, action="controls")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def gameWin(): # Function for game introduction screen
win = True
while win:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("You won!", green, -100, size="largeFont")
message_to_screen("Your enemy's tank was destroyed!", green, 0, size="smallFont")
message_to_screen("Replay to replay or quit to quit!", green, 100, size="smallFont")
# Text on the buttons
button("Replay", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def over(): # Function for game introduction screen
over = True
while over:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Game over!", green, -100, size="largeFont")
message_to_screen("Your tank was destroyed!", green, 0, size="smallFont")
message_to_screen("Replay to replay or quit to quit!", green, 100, size="smallFont")
# Text on the buttons
button("Replay", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def health(playerHealth, enemyHealth, pX, eX): # Health bars
# Player health
if playerHealth > 50:
playerColour = lightGreen
else:
playerColour = green
# Enemy health
if enemyHealth > 50:
enemyColour = lightGreen
else:
enemyColour = green
# Draw the health bars
pygame.draw.rect(gameDisplay, playerColour, (pX - 100, display_height * .7, playerHealth, 10))
pygame.draw.rect(gameDisplay, enemyColour, (eX, display_height * .7, enemyHealth, 10))
def gameLoop(): # Main game loop
gameExit = False
gameOver = False
FPS = 15
# Tank positioning
mainTankX = display_width * .8
mainTankY = display_height * .8
tankMove = 0
curTurretPosition = 0
changeTurretPosition = 0
# Fire power
firePower = 50
change = 0
# enemyTank positioning
enemyTankX = display_width * .2
enemyTankY = display_height * .8
tankMove = 0
# Health
playerHealth = 100
enemyHealth = 100
while not gameExit:
if gameOver == True:
pygame.display.update()
while gameOver == True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameExit = True
gameOver = False
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameExit = True
# Movement for tank
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
tankMove = -5
elif event.key == pygame.K_RIGHT:
tankMove = 5
elif event.key == pygame.K_UP:
changeTurretPosition = 1
elif event.key == pygame.K_DOWN:
changeTurretPosition = -1
elif event.key == pygame.K_p:
pause()
elif event.key == pygame.K_SPACE:
# Player's shot
damage = fire(bullet, curTurretPosition, firePower, enemyTankX, enemyTankY)
enemyHealth -= damage
# Enemy moves
movements = ['f', 'b']
move = random.randrange(0, 2)
for x in range(random.randrange(0, 10)):
if display_width * .33 > enemyTankX > display_width * .05:
if movements[move] == "f":
enemyTankX += 5
elif movements[move] == "b":
enemyTankX -= 5
# If the tank moves, re draw the screen
gameDisplay.fill(black)
health(playerHealth, enemyHealth, pX, eX)
bullet = tank(mainTankX, mainTankY, curTurretPosition)
enemyBullet = enemyTank(enemyTankX, enemyTankY, 8)
pygame.draw.rect(gameDisplay, green, (0, ground, display_width, 10))
pygame.display.update()
clock.tick(FPS)
# Enemy's shot
damage = enemyFire(enemyBullet, 8, 33, mainTankX, mainTankY)
playerHealth -= damage
elif event.key == pygame.K_a:
change = -1
elif event.key == pygame.K_d:
change = 1
# If user stops pressing the button, stop moving the tank
elif event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
tankMove = 0
if event.key == pygame.K_UP or event.key == pygame.K_DOWN:
changeTurretPosition = 0
if event.key == pygame.K_a or event.key == pygame.K_d:
change = 0
# Draw the game screen
mainTankX += tankMove
pX = mainTankX
eX = enemyTankX
gameDisplay.fill(black)
health(playerHealth, enemyHealth, pX, eX)
bullet = tank(mainTankX, mainTankY, curTurretPosition)
enemyBullet = enemyTank(enemyTankX, enemyTankY, 8)
pygame.draw.rect(gameDisplay, green, (0, ground, display_width, 10))
# Change power of the bullet
firePower += change
if firePower <= 1:
firePower = 1
if firePower >= 100:
firePower = 100
power(firePower)
# Check if gameOver or gameWin
if playerHealth < 1:
over()
elif enemyHealth < 1:
gameWin()
# Turret positioning
curTurretPosition += changeTurretPosition
if curTurretPosition > 8:
curTurretPosition = 8
elif curTurretPosition < 0:
curTurretPosition = 0
# Avoid tank and walls collision
if mainTankX > display_width:
mainTankX -= 5
if mainTankX < display_width * .66:
mainTankX += 5
pygame.display.update()
clock.tick(FPS)
pygame.quit()
quit()
game_intro()
gameLoop()
class StartPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
image = Image.open("Pip Boy Images\mrPip.gif")
photo = ImageTk.PhotoImage(image)
label = tk.Label(self, image = photo, bg = "black", fg = "white", height = 40, width = 40)
        label.image = photo  # keep a reference so Tkinter doesn't garbage-collect the image
label.pack(side = BOTTOM, padx = 10, pady = 10)
        # spacer label to pad out the frame width for now
label = tk.Label(self, width = 60, bg = "black")
label.pack(side = BOTTOM, pady = 120)
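# Note on the `label.image = photo` pattern used throughout these pages:
# Tkinter widgets do not hold a Python-level reference to their PhotoImage,
# so if the only reference goes out of scope the image is garbage-collected
# and the widget renders blank. Minimal sketch of the idiom:
#
#     photo = ImageTk.PhotoImage(Image.open(path))
#     label = tk.Label(parent, image=photo)
#     label.image = photo  # keep a reference so the image survives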
class RadioPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
        # opening cover art images for the buttons
bonjovi1 = Image.open("coverart\Bonjovi.gif")
bonjovi = ImageTk.PhotoImage(bonjovi1)
toto1 = Image.open("coverart\Toto.gif")
toto = ImageTk.PhotoImage(toto1)
tameimpala1 = Image.open("coverart\Tameimpala.gif")
tameimpala = ImageTk.PhotoImage(tameimpala1)
dmx1 = Image.open("coverart\Dmx.gif")
dmx = ImageTk.PhotoImage(dmx1)
daftpunk1 = Image.open("coverart\Daftpunk.gif")
daftpunk = ImageTk.PhotoImage(daftpunk1)
gorrillaz1 = Image.open("coverart\Gorrillaz.gif")
gorrillaz = ImageTk.PhotoImage(gorrillaz1)
estelle1 = Image.open("coverart\estelle.gif")
estelle = ImageTk.PhotoImage(estelle1)
mgmt1 = Image.open("coverart\Mgmt.gif")
mgmt = ImageTk.PhotoImage(mgmt1)
saintmotel1 = Image.open("coverart\Saintmotel.gif")
saintmotel = ImageTk.PhotoImage(saintmotel1)
music1 = tk.Button(self, image = bonjovi, fg = "white", bg = "black", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song1)))
        music1.image = bonjovi  # keeping a reference
music1.place(x = 70, y = 70)
music2 = tk.Button(self, image = toto, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song2)))
music2.image = toto
music2.place(x = 70, y = 145)
music3 = tk.Button(self, image = tameimpala, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song3)))
music3.image = tameimpala
music3.place(x = 70, y = 220)
music4 = tk.Button(self, image = dmx, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song4)))
music4.image = dmx
music4.place(x = 175 , y = 70)
music5 = tk.Button(self, image = daftpunk, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song5)))
music5.image = daftpunk
music5.place( x = 175 , y = 145)
music6 = tk.Button(self, image = gorrillaz, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song6)))
music6.image = gorrillaz
music6.place(x = 175, y = 220)
music7 = tk.Button(self, image = estelle, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song7)))
music7.image = estelle
music7.place(x = 280, y = 70)
music8 = tk.Button(self, image = mgmt, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song8)))
music8.image = mgmt
music8.place(x = 280, y = 145)
music9 = tk.Button(self, image = saintmotel, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song9)))
music9.image = saintmotel
music9.place(x = 280, y = 220)
class MapPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
label = tk.Label(self, text = "map functionality", bg = "black", fg = "white")
label.pack(side = BOTTOM)
global entryWidget2
global mapLabel
        # Create a text frame; the Label and Entry below are packed directly into the page
textFrame = Frame(self)
        # Create a Label prompting for the user's location
entryLabel = Label(self)
entryLabel["text"] = "Where are you?"
entryLabel.pack(side=LEFT)
        # Create an Entry widget for the location text
entryWidget2 = Entry(self)
entryWidget2["width"] = 50
entryWidget2.pack(side=LEFT)
textFrame.pack()
mapLabel = Label(self)
button = Button(self, text="Submit", command=controller.get_coordinates)
button.pack(side=BOTTOM)
class DataPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
global entryWidget
        # Create a Label prompting for a new tweet
#controller.showTweets(controller.getTweets(), numberOfTweets)
entryLabel = Label(self)
entryLabel["text"] = "Make a new Tweet:"
entryLabel.pack(side = LEFT)
        # Create an Entry widget for the tweet text
entryWidget = Entry(self)
entryWidget["width"] = 50
entryWidget.pack(side=LEFT)
buttonGet = Button(self, text="Get Tweets", command = lambda: controller.showTweets(controller.getTweets(), numberOfTweets))
buttonGet.pack()
button = Button(self, text="Submit", command = controller.tweet)
button.pack()
class InvPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
class StatsPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
        # stat buttons
strength = tk.Button(self, text ="STRENGTH", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Strength.gif"))
strength.place(x = 35, y = 50)
perception = tk.Button(self, text ="PERCEPTION", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Perception.gif"))
perception.place(x = 35, y = 75)
endurance = tk.Button(self, text ="ENDURANCE", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Endurance.gif"))
endurance.place(x = 35, y = 100)
charisma = tk.Button(self, text ="CHARISMA", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Charisma.gif"))
charisma.place(x = 35, y = 125)
intelligence = tk.Button(self, text ="INTELLIGENCE", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Intelligence.gif"))
intelligence.place(x = 35, y = 150)
agility = tk.Button(self, text ="AGILITY", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Agility.gif"))
agility.place(x = 35, y = 175)
luck = tk.Button(self, text ="LUCK", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Luck.gif"))
luck.place(x = 35, y = 200)
def ImageShow(self, path):
label = tk.Label(self, bg = "black", width = 40, height = 40)
label.place(x = 215, y = 75)
image = Image.open(path)
photo = ImageTk.PhotoImage(image)
label = tk.Label(self, image = photo, bg = "black", fg = "white")
        label.image = photo  # keeping a reference
label.place(x = 200, y = 75)
app = SetUp()
app.mainloop()
|
AndrewKLeech/Pip-Boy
|
Game.py
|
Python
|
mit
| 42,161 | 0.0347 |
from collections import deque
from hiku.denormalize.graphql import DenormalizeGraphQL
class DenormalizeEntityGraphQL(DenormalizeGraphQL):
def __init__(self, graph, result, root_type_name):
super().__init__(graph, result, root_type_name)
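        # Re-seed the inherited type stack so denormalization starts from the
        # federation entity's concrete type rather than the default query root.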
self._type = deque([graph.__types__[root_type_name]])
|
vmagamedov/hiku
|
hiku/federation/denormalize.py
|
Python
|
bsd-3-clause
| 314 | 0 |
def test():
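    # Repeatedly builds a 10k-character string via list append + ''.join,
    # which is the string-array-concat pattern this perf test measures.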
for i in xrange(int(5e3)):
t = []
for j in xrange(int(1e4)):
#t[j] = 'x'
t.append('x')
t = ''.join(t)
test()
|
svaarala/duktape
|
tests/perf/test-string-array-concat.py
|
Python
|
mit
| 174 | 0.011494 |
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Model, NOT_PROVIDED, DateTimeField
from django.utils import timezone
from django.utils.encoding import smart_text
def track_field(field):
"""
Returns whether the given field should be tracked by Auditlog.
Untracked fields are many-to-many relations and relations to the Auditlog LogEntry model.
:param field: The field to check.
:type field: Field
:return: Whether the given field should be tracked.
:rtype: bool
"""
from auditlog.models import LogEntry
# Do not track many to many relations
if field.many_to_many:
return False
# Do not track relations to LogEntry
if getattr(field, 'remote_field', None) is not None and field.remote_field.model == LogEntry:
return False
    # Django 1.8 fallback check (uses the older field.rel API)
elif getattr(field, 'rel', None) is not None and field.rel.to == LogEntry:
return False
return True
def get_fields_in_model(instance):
"""
Returns the list of fields in the given model instance. Checks whether to use the official _meta API or use the raw
data. This method excludes many to many fields.
:param instance: The model instance to get the fields for
:type instance: Model
:return: The list of fields for the given model (instance)
:rtype: list
"""
assert isinstance(instance, Model)
# Check if the Django 1.8 _meta API is available
use_api = hasattr(instance._meta, 'get_fields') and callable(instance._meta.get_fields)
if use_api:
return [f for f in instance._meta.get_fields() if track_field(f)]
return instance._meta.fields
def get_field_value(obj, field):
"""
Gets the value of a given model instance field.
:param obj: The model instance.
:type obj: Model
:param field: The field you want to find the value of.
:type field: Any
:return: The value of the field as a string.
:rtype: str
"""
if isinstance(field, DateTimeField):
# DateTimeFields are timezone-aware, so we need to convert the field
        # to its naive form before we can accurately compare them for changes.
try:
value = field.to_python(getattr(obj, field.name, None))
if value is not None and settings.USE_TZ and not timezone.is_naive(value):
value = timezone.make_naive(value, timezone=timezone.utc)
except ObjectDoesNotExist:
value = field.default if field.default is not NOT_PROVIDED else None
else:
try:
value = smart_text(getattr(obj, field.name, None))
except ObjectDoesNotExist:
value = field.default if field.default is not NOT_PROVIDED else None
return value
def model_instance_diff(old, new):
"""
Calculates the differences between two model instances. One of the instances may be ``None`` (i.e., a newly
created model or deleted model). This will cause all fields with a value to have changed (from ``None``).
:param old: The old state of the model instance.
:type old: Model
:param new: The new state of the model instance.
:type new: Model
:return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values
as value.
:rtype: dict
"""
from auditlog.registry import auditlog
if not(old is None or isinstance(old, Model)):
raise TypeError("The supplied old instance is not a valid model instance.")
if not(new is None or isinstance(new, Model)):
raise TypeError("The supplied new instance is not a valid model instance.")
diff = {}
if old is not None and new is not None:
fields = set(old._meta.fields + new._meta.fields)
model_fields = auditlog.get_model_fields(new._meta.model)
elif old is not None:
fields = set(get_fields_in_model(old))
model_fields = auditlog.get_model_fields(old._meta.model)
elif new is not None:
fields = set(get_fields_in_model(new))
model_fields = auditlog.get_model_fields(new._meta.model)
else:
fields = set()
model_fields = None
# Check if fields must be filtered
if model_fields and (model_fields['include_fields'] or model_fields['exclude_fields']) and fields:
filtered_fields = []
if model_fields['include_fields']:
filtered_fields = [field for field in fields
if field.name in model_fields['include_fields']]
else:
filtered_fields = fields
if model_fields['exclude_fields']:
filtered_fields = [field for field in filtered_fields
if field.name not in model_fields['exclude_fields']]
fields = filtered_fields
for field in fields:
old_value = get_field_value(old, field)
new_value = get_field_value(new, field)
if old_value != new_value:
diff[field.name] = (smart_text(old_value), smart_text(new_value))
if len(diff) == 0:
diff = None
return diff
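# Hedged usage sketch (model and field names are hypothetical):
#
#     old = Article.objects.get(pk=1)
#     new = Article.objects.get(pk=1)
#     new.title = "Updated"
#     model_instance_diff(old, new)
#     # -> {'title': (u'Original title', u'Updated')}
#
# Passing None for one side models creation or deletion, in which case every
# tracked field with a value is reported as changed (from or to None).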
|
kbussell/django-auditlog
|
src/auditlog/diff.py
|
Python
|
mit
| 5,123 | 0.002733 |
"""
Middleware that checks user standing for the purpose of keeping users with
disabled accounts from accessing the site.
"""
from django.conf import settings
from django.http import HttpResponseForbidden
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext as _
from openedx.core.djangolib.markup import HTML, Text
from student.models import UserStanding
class UserStandingMiddleware(MiddlewareMixin):
"""
Checks a user's standing on request. Returns a 403 if the user's
status is 'disabled'.
"""
def process_request(self, request):
user = request.user
try:
user_account = UserStanding.objects.get(user=user.id)
# because user is a unique field in UserStanding, there will either be
# one or zero user_accounts associated with a UserStanding
except UserStanding.DoesNotExist:
pass
else:
if user_account.account_status == UserStanding.ACCOUNT_DISABLED:
msg = Text(_(
'Your account has been disabled. If you believe '
'this was done in error, please contact us at '
'{support_email}'
)).format(
support_email=HTML(u'<a href="mailto:{address}?subject={subject_line}">{address}</a>').format(
address=settings.DEFAULT_FEEDBACK_EMAIL,
subject_line=_('Disabled Account'),
),
)
return HttpResponseForbidden(msg)
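# Hedged wiring sketch: the middleware is enabled by adding its dotted path to
# the MIDDLEWARE setting (the path below matches this module's location):
#
#     MIDDLEWARE = [
#         ...
#         'student.middleware.UserStandingMiddleware',
#     ]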
|
cpennington/edx-platform
|
common/djangoapps/student/middleware.py
|
Python
|
agpl-3.0
| 1,580 | 0.001266 |
from __future__ import print_function, division
from .str import StrPrinter
from sympy.utilities import default_sort_key
class LambdaPrinter(StrPrinter):
"""
This printer converts expressions into strings that can be used by
lambdify.
"""
def _print_MatrixBase(self, expr):
return "%s(%s)" % (expr.__class__.__name__,
self._print((expr.tolist())))
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
_print_MatrixBase
def _print_Piecewise(self, expr):
result = []
i = 0
for arg in expr.args:
e = arg.expr
c = arg.cond
result.append('((')
result.append(self._print(e))
result.append(') if (')
result.append(self._print(c))
result.append(') else (')
i += 1
result = result[:-1]
result.append(') else None)')
result.append(')'*(2*i - 2))
return ''.join(result)
def _print_Sum(self, expr):
loops = (
'for {i} in range({a}, {b}+1)'.format(
i=self._print(i),
a=self._print(a),
b=self._print(b))
for i, a, b in expr.limits)
return '(builtins.sum({function} {loops}))'.format(
function=self._print(expr.function),
loops=' '.join(loops))
def _print_And(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' and ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Or(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' or ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Not(self, expr):
result = ['(', 'not (', self._print(expr.args[0]), '))']
return ''.join(result)
def _print_BooleanTrue(self, expr):
return "True"
def _print_BooleanFalse(self, expr):
return "False"
def _print_ITE(self, expr):
result = [
'((', self._print(expr.args[1]),
') if (', self._print(expr.args[0]),
') else (', self._print(expr.args[2]), '))'
]
return ''.join(result)
class NumPyPrinter(LambdaPrinter):
"""
Numpy printer which handles vectorized piecewise functions,
logical operators, etc.
"""
_default_settings = {
"order": "none",
"full_prec": "auto",
}
def _print_seq(self, seq, delimiter=', '):
"General sequence printer: converts to tuple"
# Print tuples here instead of lists because numba supports
# tuples in nopython mode.
return '({},)'.format(delimiter.join(self._print(item) for item in seq))
def _print_MatMul(self, expr):
"Matrix multiplication printer"
return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))
def _print_Piecewise(self, expr):
"Piecewise function printer"
exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))
conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))
# If [default_value, True] is a (expr, cond) sequence in a Piecewise object
# it will behave the same as passing the 'default' kwarg to select()
# *as long as* it is the last element in expr.args.
# If this is not the case, it may be triggered prematurely.
return 'select({0}, {1}, default=nan)'.format(conds, exprs)
def _print_Relational(self, expr):
"Relational printer for Equality and Unequality"
op = {
'==' :'equal',
'!=' :'not_equal',
'<' :'less',
'<=' :'less_equal',
'>' :'greater',
'>=' :'greater_equal',
}
if expr.rel_op in op:
lhs = self._print(expr.lhs)
rhs = self._print(expr.rhs)
return '{op}({lhs}, {rhs})'.format(op=op[expr.rel_op],
lhs=lhs,
rhs=rhs)
return super(NumPyPrinter, self)._print_Relational(expr)
def _print_And(self, expr):
"Logical And printer"
# We have to override LambdaPrinter because it uses Python 'and' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_and', ','.join(self._print(i) for i in expr.args))
def _print_Or(self, expr):
"Logical Or printer"
# We have to override LambdaPrinter because it uses Python 'or' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_or', ','.join(self._print(i) for i in expr.args))
def _print_Not(self, expr):
"Logical Not printer"
# We have to override LambdaPrinter because it uses Python 'not' keyword.
# If LambdaPrinter didn't define it, we would still have to define our
# own because StrPrinter doesn't define it.
return '{0}({1})'.format('logical_not', ','.join(self._print(i) for i in expr.args))
# numexpr works by altering the string passed to numexpr.evaluate
# rather than by populating a namespace. Thus a special printer...
class NumExprPrinter(LambdaPrinter):
# key, value pairs correspond to sympy name and numexpr name
# functions not appearing in this dict will raise a TypeError
_numexpr_functions = {
'sin' : 'sin',
'cos' : 'cos',
'tan' : 'tan',
'asin': 'arcsin',
'acos': 'arccos',
'atan': 'arctan',
'atan2' : 'arctan2',
'sinh' : 'sinh',
'cosh' : 'cosh',
'tanh' : 'tanh',
'asinh': 'arcsinh',
'acosh': 'arccosh',
'atanh': 'arctanh',
'ln' : 'log',
'log': 'log',
'exp': 'exp',
'sqrt' : 'sqrt',
'Abs' : 'abs',
'conjugate' : 'conj',
'im' : 'imag',
're' : 'real',
'where' : 'where',
'complex' : 'complex',
'contains' : 'contains',
}
def _print_ImaginaryUnit(self, expr):
return '1j'
def _print_seq(self, seq, delimiter=', '):
# simplified _print_seq taken from pretty.py
s = [self._print(item) for item in seq]
if s:
return delimiter.join(s)
else:
return ""
def _print_Function(self, e):
func_name = e.func.__name__
nstr = self._numexpr_functions.get(func_name, None)
if nstr is None:
# check for implemented_function
if hasattr(e, '_imp_'):
return "(%s)" % self._print(e._imp_(*e.args))
else:
raise TypeError("numexpr does not support function '%s'" %
func_name)
return "%s(%s)" % (nstr, self._print_seq(e.args))
def blacklisted(self, expr):
raise TypeError("numexpr cannot be used with %s" %
expr.__class__.__name__)
# blacklist all Matrix printing
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
blacklisted
# blacklist some python expressions
_print_list = \
_print_tuple = \
_print_Tuple = \
_print_dict = \
_print_Dict = \
blacklisted
def doprint(self, expr):
lstr = super(NumExprPrinter, self).doprint(expr)
return "evaluate('%s')" % lstr
def lambdarepr(expr, **settings):
"""
Returns a string usable for lambdifying.
"""
return LambdaPrinter(settings).doprint(expr)
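# Hedged usage sketch (the exact parenthesization is approximate):
#
#     >>> from sympy import Piecewise
#     >>> from sympy.abc import x, y
#     >>> lambdarepr(Piecewise((x, x > 0), (y, True)))
#     '((x) if (x > 0) else (((y) if (True) else None)))'
#
# Each (expr, cond) pair becomes a nested conditional expression, with None
# as the final fallback, mirroring LambdaPrinter._print_Piecewise above.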
|
Shaswat27/sympy
|
sympy/printing/lambdarepr.py
|
Python
|
bsd-3-clause
| 8,389 | 0.006556 |
import logging
import time
import types
from autotest.client.shared import error
from virttest import utils_misc, utils_test, aexpect
def run(test, params, env):
"""
KVM migration test:
1) Get a live VM and clone it.
2) Verify that the source VM supports migration. If it does, proceed with
the test.
3) Send a migration command to the source VM and wait until it's finished.
4) Kill off the source VM.
3) Log into the destination VM after the migration is finished.
4) Compare the output of a reference command executed on the source with
the output of the same command on the destination machine.
:param test: QEMU test object.
:param params: Dictionary with test parameters.
:param env: Dictionary with the test environment.
"""
def guest_stress_start(guest_stress_test):
"""
        Start a stress test in the guest. Could be 'iozone', 'dd' or 'stress'.
        :param guest_stress_test: type of stress test.
"""
from tests import autotest_control
timeout = 0
if guest_stress_test == "autotest":
test_type = params.get("test_type")
func = autotest_control.run_autotest_control
new_params = params.copy()
new_params["test_control_file"] = "%s.control" % test_type
args = (test, new_params, env)
timeout = 60
elif guest_stress_test == "dd":
vm = env.get_vm(env, params.get("main_vm"))
vm.verify_alive()
session = vm.wait_for_login(timeout=login_timeout)
func = session.cmd_output
args = ("for((;;)) do dd if=/dev/zero of=/tmp/test bs=5M "
"count=100; rm -f /tmp/test; done",
login_timeout, logging.info)
logging.info("Start %s test in guest", guest_stress_test)
bg = utils_test.BackgroundTest(func, args)
params["guest_stress_test_pid"] = bg
bg.start()
if timeout:
logging.info("sleep %ds waiting guest test start.", timeout)
time.sleep(timeout)
if not bg.is_alive():
raise error.TestFail("Failed to start guest test!")
def guest_stress_deamon():
"""
        This daemon keeps watching the status of the stress test in the guest.
        If the stress program finishes before migration, it is restarted.
"""
while True:
bg = params.get("guest_stress_test_pid")
action = params.get("action")
if action == "run":
logging.debug("Check if guest stress is still running")
guest_stress_test = params.get("guest_stress_test")
if bg and not bg.is_alive():
logging.debug("Stress process finished, restart it")
guest_stress_start(guest_stress_test)
time.sleep(30)
else:
logging.debug("Stress still on")
else:
if bg and bg.is_alive():
try:
stress_stop_cmd = params.get("stress_stop_cmd")
vm = env.get_vm(env, params.get("main_vm"))
vm.verify_alive()
session = vm.wait_for_login()
if stress_stop_cmd:
logging.warn("Killing background stress process "
"with cmd '%s', you would see some "
"error message in client test result,"
"it's harmless.", stress_stop_cmd)
session.cmd(stress_stop_cmd)
bg.join(10)
except Exception:
pass
break
time.sleep(10)
def get_functions(func_names, locals_dict):
"""
Find sub function(s) in this function with the given name(s).
"""
if not func_names:
return []
funcs = []
for f in func_names.split():
f = locals_dict.get(f)
if isinstance(f, types.FunctionType):
funcs.append(f)
return funcs
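    # e.g. get_functions("mig_set_speed", locals()) -> [mig_set_speed]; this
    # is how the pre_migrate/post_migrate hook names are resolved below.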
def mig_set_speed():
mig_speed = params.get("mig_speed", "1G")
return vm.monitor.migrate_set_speed(mig_speed)
login_timeout = int(params.get("login_timeout", 360))
mig_timeout = float(params.get("mig_timeout", "3600"))
mig_protocol = params.get("migration_protocol", "tcp")
mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2
mig_exec_cmd_src = params.get("migration_exec_cmd_src")
mig_exec_cmd_dst = params.get("migration_exec_cmd_dst")
if mig_exec_cmd_src and "gzip" in mig_exec_cmd_src:
mig_exec_file = params.get("migration_exec_file", "/var/tmp/exec")
mig_exec_file += "-%s" % utils_misc.generate_random_string(8)
mig_exec_cmd_src = mig_exec_cmd_src % mig_exec_file
mig_exec_cmd_dst = mig_exec_cmd_dst % mig_exec_file
offline = params.get("offline", "no") == "yes"
check = params.get("vmstate_check", "no") == "yes"
living_guest_os = params.get("migration_living_guest", "yes") == "yes"
deamon_thread = None
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
if living_guest_os:
session = vm.wait_for_login(timeout=login_timeout)
# Get the output of migration_test_command
test_command = params.get("migration_test_command")
reference_output = session.cmd_output(test_command)
# Start some process in the background (and leave the session open)
background_command = params.get("migration_bg_command", "")
session.sendline(background_command)
time.sleep(5)
# Start another session with the guest and make sure the background
# process is running
session2 = vm.wait_for_login(timeout=login_timeout)
try:
check_command = params.get("migration_bg_check_command", "")
session2.cmd(check_command, timeout=30)
session2.close()
            # Run some functions before the migration starts.
pre_migrate = get_functions(params.get("pre_migrate"), locals())
for func in pre_migrate:
func()
# Start stress test in guest.
guest_stress_test = params.get("guest_stress_test")
if guest_stress_test:
guest_stress_start(guest_stress_test)
params["action"] = "run"
deamon_thread = utils_test.BackgroundTest(
guest_stress_deamon, ())
deamon_thread.start()
# Migrate the VM
ping_pong = params.get("ping_pong", 1)
for i in xrange(int(ping_pong)):
if i % 2 == 0:
logging.info("Round %s ping..." % str(i / 2))
else:
logging.info("Round %s pong..." % str(i / 2))
vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay,
offline, check,
migration_exec_cmd_src=mig_exec_cmd_src,
migration_exec_cmd_dst=mig_exec_cmd_dst)
            # Set daemon thread action to stop after migration
params["action"] = "stop"
            # Run some functions after the migration finishes.
post_migrate = get_functions(params.get("post_migrate"), locals())
for func in post_migrate:
func()
# Log into the guest again
logging.info("Logging into guest after migration...")
session2 = vm.wait_for_login(timeout=30)
logging.info("Logged in after migration")
# Make sure the background process is still running
session2.cmd(check_command, timeout=30)
# Get the output of migration_test_command
output = session2.cmd_output(test_command)
# Compare output to reference output
if output != reference_output:
logging.info("Command output before migration differs from "
"command output after migration")
logging.info("Command: %s", test_command)
logging.info("Output before:" +
utils_misc.format_str_for_message(reference_output))
logging.info("Output after:" +
utils_misc.format_str_for_message(output))
raise error.TestFail("Command '%s' produced different output "
"before and after migration" % test_command)
finally:
# Kill the background process
if session2 and session2.is_alive():
bg_kill_cmd = params.get("migration_bg_kill_command", None)
if bg_kill_cmd is not None:
try:
session2.cmd(bg_kill_cmd)
except aexpect.ShellTimeoutError:
logging.debug("Remote session not responsive, "
"shutting down VM %s", vm.name)
vm.destroy(gracefully=True)
if deamon_thread is not None:
            # Set daemon thread action to stop after migration
params["action"] = "stop"
deamon_thread.join()
else:
# Just migrate without depending on a living guest OS
vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay, offline,
check, migration_exec_cmd_src=mig_exec_cmd_src,
migration_exec_cmd_dst=mig_exec_cmd_dst)
|
spcui/tp-qemu
|
qemu/tests/migration.py
|
Python
|
gpl-2.0
| 9,706 | 0.000309 |
from __future__ import unicode_literals
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "AWS CloudFormation Sample Template to create a KMS Key. The Fn::GetAtt is used to retrieve the ARN",
"Resources": {
"myKey": {
"Type": "AWS::KMS::Key",
"Properties": {
"Description": "Sample KmsKey",
"EnableKeyRotation": False,
"Enabled": True,
"KeyPolicy": {
"Version": "2012-10-17",
"Id": "key-default-1",
"Statement": [
{
"Sid": "Enable IAM User Permissions",
"Effect": "Allow",
"Principal": {
"AWS": {
"Fn::Join": [
"",
[
"arn:aws:iam::",
{"Ref": "AWS::AccountId"},
":root",
],
]
}
},
"Action": "kms:*",
"Resource": "*",
}
],
},
},
}
},
"Outputs": {
"KeyArn": {
"Description": "Generated Key Arn",
"Value": {"Fn::GetAtt": ["myKey", "Arn"]},
}
},
}
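# Hedged usage sketch (stack name is hypothetical; assumes boto3 against a
# mocked or real CloudFormation endpoint):
#
#     import json
#     import boto3
#
#     cf = boto3.client("cloudformation", region_name="us-east-1")
#     cf.create_stack(StackName="test-kms", TemplateBody=json.dumps(template))
#     # The "KeyArn" output then resolves via Fn::GetAtt ["myKey", "Arn"].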
|
william-richard/moto
|
tests/test_cloudformation/fixtures/kms_key.py
|
Python
|
apache-2.0
| 1,645 | 0.000608 |
"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
import re
from urllib.parse import parse_qsl
from cmdlr.analyzer import BaseAnalyzer
from cmdlr.autil import fetch
class Analyzer(BaseAnalyzer):
"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
entry_patterns = [
re.compile(
r'^https?://(?:www.)?cartoonmad.com/comic/(\d+)(?:\.html)?$'
),
]
def entry_normalizer(self, url):
"""Normalize all possible entry url to single one form."""
match = self.entry_patterns[0].search(url)
id = match.group(1)
return 'https://www.cartoonmad.com/comic/{}.html'.format(id)
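    # e.g. entry_normalizer('http://www.cartoonmad.com/comic/5640')
    #      -> 'https://www.cartoonmad.com/comic/5640.html'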
@staticmethod
def __extract_name(fetch_result):
return fetch_result.soup.title.string.split(' - ')[0]
@staticmethod
def __extract_volumes(fetch_result):
a_tags = (fetch_result.soup
.find('legend', string=re.compile('漫畫線上觀看'))
.parent
.find_all(href=re.compile(r'^/comic/')))
return {a.string: fetch_result.absurl(a.get('href'))
for a in a_tags}
@staticmethod
def __extract_finished(fetch_result):
return (True
if fetch_result.soup.find('img', src='/image/chap9.gif')
else False)
@staticmethod
def __extract_description(fetch_result):
return (fetch_result.soup
.find('fieldset', id='info').td.get_text().strip())
@staticmethod
def __extract_authors(fetch_result):
return [fetch_result.soup
.find(string=re.compile('作者:'))
.string.split(':')[1].strip()]
async def get_comic_info(self, url, request, **unused):
"""Get comic info."""
fetch_result = await fetch(url, request, encoding='big5')
return {
'name': self.__extract_name(fetch_result),
'volumes': self.__extract_volumes(fetch_result),
'description': self.__extract_description(fetch_result),
'authors': self.__extract_authors(fetch_result),
'finished': self.__extract_finished(fetch_result),
}
@staticmethod
def __get_imgurl_func(soup, absurl):
# print(soup.find('img', src=re.compile('comicpic.asp')))
src = soup.find('img', src=re.compile(r'comicpic.asp'))['src']
abspath, qs_string = absurl(src).split('?', maxsplit=1)
qs = dict(parse_qsl(qs_string))
file_parts = qs['file'].split('/')
file_parts[-1] = '{:0>3}'
qs['file'] = '/'.join(file_parts)
qs_tpl = '&'.join(['{}={}'.format(key, value)
for key, value in qs.items()])
abspath_tpl = '{}?{}'.format(abspath, qs_tpl)
def get_imgurl(page_number):
return abspath_tpl.format(page_number)
return get_imgurl
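    # e.g. if the original src ends in file=/comic/123/001, the template
    # becomes file=/comic/123/{:0>3}, so get_imgurl(2) yields .../002
    # (page numbers are zero-padded to three digits).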
async def save_volume_images(self, url, request, save_image, **unused):
"""Get all images in one volume."""
soup, absurl = await fetch(url, request, encoding='big5')
get_img_url = self.__get_imgurl_func(soup, absurl)
page_count = len(soup.find_all('option', value=True))
for page_num in range(1, page_count + 1):
save_image(
page_num,
url=get_img_url(page_num),
headers={'Referer': url},
)
|
civalin/cmdlr
|
src/cmdlr/analyzers/cartoonmad.py
|
Python
|
mit
| 3,587 | 0.000561 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# TODO prog_base.py - A starting template for Python scripts
#
# Copyright 2013 Robert B. Hawkins
#
"""
SYNOPSIS
TODO prog_base [-h,--help] [-v,--verbose] [--version]
DESCRIPTION
TODO This describes how to use this script. This docstring
will be printed by the script if there is an error or
if the user requests help (-h or --help).
EXAMPLES
TODO: Show some examples of how to use this script.
EXIT STATUS
TODO: List exit codes
AUTHOR
Rob Hawkins <webwords@txhawkins.net>
LICENSE
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
VERSION
1.0.0
"""
__author__ = "Rob Hawkins <webwords@txhawkins.net>"
__version__ = "1.0.0"
__date__ = "2013.12.01"
# Version Date Notes
# ------- ---------- -------------------------------------------------------
# 1.0.0 2013.12.01 Starting script template
#
import sys, os, traceback, argparse
import time
import re
#from pexpect import run, spawn
def test ():
global options, args
# TODO: Do something more interesting here...
print 'Hello from the test() function!'
def main ():
global options, args
# TODO: Do something more interesting here...
print 'Hello world!'
if __name__ == '__main__':
try:
start_time = time.time()
#parser = argparse.ArgumentParser(description="This is the program description", usage=globals()['__doc__'])
parser = argparse.ArgumentParser(description='This is the program description')
parser.add_argument('--version', action='version', version='%(prog)s v'+__version__)
parser.add_argument ('-v', '--verbose', action='store_true', help='produce verbose output')
parser.add_argument ('-t', '--test', action='store_true', help='run test suite')
args = parser.parse_args()
#if len(args) < 1:
# parser.error ('missing argument')
if args.verbose: print time.asctime()
if args.test:
test()
else:
main()
if args.verbose: print time.asctime()
if args.verbose: print 'TOTAL TIME IN MINUTES:',
if args.verbose: print (time.time() - start_time) / 60.0
sys.exit(0)
except KeyboardInterrupt, e: # Ctrl-C
raise e
except SystemExit, e: # sys.exit()
raise e
except Exception, e:
print 'ERROR, UNEXPECTED EXCEPTION'
print str(e)
traceback.print_exc()
os._exit(1)
|
braynebuddy/PyBrayne
|
act_twitter.py
|
Python
|
gpl-3.0
| 3,157 | 0.009819 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('invite', '0001_initial'),
]
operations = [
migrations.DeleteModel('PasswordResetInvitation'),
migrations.CreateModel(
name='PasswordResetInvitation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_code', models.CharField(default=uuid.uuid4, help_text=b'unique id, generated on email submission', unique=True, max_length=36, editable=False)),
('first_name', models.CharField(max_length=36)),
('last_name', models.CharField(max_length=36)),
('username', models.CharField(max_length=36)),
('email', models.EmailField(help_text=b"the potential member's email address", max_length=41)),
('custom_msg', models.TextField(blank=True)),
('date_invited', models.DateField(help_text=b'the day on which the superuser invited the potential member', auto_now=True)),
('is_super_user', models.BooleanField(default=False)),
('groups', models.ManyToManyField(to='auth.Group')),
('permissions', models.ManyToManyField(to='auth.Permission')),
],
options={
'ordering': ['date_invited'],
'abstract': False,
},
bases=(models.Model,),
),
]
|
unt-libraries/django-invite
|
invite/migrations/0002_abstract_invitation.py
|
Python
|
bsd-3-clause
| 1,634 | 0.002448 |
import os
import webapp2
from app import routes
webapp2_config = {'webapp2_extras.sessions':
{'secret_key': 'hfgskahjfgd736987qygukr3279rtigu',
'webapp2_extras.jinja2': {'template_path': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'templates')}}}
application = webapp2.WSGIApplication(debug=True, config=webapp2_config)
routes.add_routes(application)
|
Terhands/saskdance
|
app/main.py
|
Python
|
gpl-3.0
| 373 | 0.010724 |
from mimetypes import guess_type
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from django.http import Http404
from django.template import TemplateDoesNotExist, RequestContext, NodeList
from ella.core.models import Listing, Category
from ella.core.conf import core_settings
from ella.core.managers import ListingHandler
from ella.photos.models import Format, FormatedPhoto
class RSSTopCategoryListings(Feed):
format_name = None
def __init__(self, *args, **kwargs):
super(RSSTopCategoryListings, self).__init__(*args, **kwargs)
if core_settings.RSS_ENCLOSURE_PHOTO_FORMAT:
self.format_name = core_settings.RSS_ENCLOSURE_PHOTO_FORMAT
if self.format_name is not None:
self.format = Format.objects.get_for_name(self.format_name)
else:
self.format = None
def get_object(self, request, category=''):
bits = category.split('/')
try:
cat = Category.objects.get_by_tree_path(u'/'.join(bits))
except Category.DoesNotExist:
raise Http404()
self.box_context = RequestContext(request)
return cat
def items(self, obj):
qset = Listing.objects.get_queryset_wrapper(category=obj, children=ListingHandler.ALL)
return qset.get_listings(count=core_settings.RSS_NUM_IN_FEED)
# Feed metadata
###########################################################################
def title(self, obj):
return obj.app_data.get('syndication', {}).get('title', obj.title)
def link(self, obj):
return obj.get_absolute_url()
def description(self, obj):
return obj.app_data.get('syndication', {}).get('description', obj.description)
# Item metadata
###########################################################################
def item_guid(self, item):
return str(item.publishable.pk)
def item_pubdate(self, item):
return item.publish_from
def item_title(self, item):
return item.publishable.title
def item_link(self, item):
return item.get_absolute_url()
def item_description(self, item):
if not core_settings.RSS_DESCRIPTION_BOX_TYPE:
return item.publishable.description
p = item.publishable
box = p.box_class(p, core_settings.RSS_DESCRIPTION_BOX_TYPE, NodeList())
try:
desc = box.render(self.box_context)
except TemplateDoesNotExist:
desc = None
if not desc:
desc = item.publishable.description
return desc
def item_author_name(self, item):
return ', '.join(map(unicode, item.publishable.authors.all()))
# Enclosure - Photo
###########################################################################
def item_enclosure_url(self, item):
if not hasattr(item, '__enclosure_url'):
if hasattr(item.publishable, 'feed_enclosure'):
item.__enclosure_url = item.publishable.feed_enclosure()['url']
elif self.format is not None and item.publishable.photo_id:
item.__enclosure_url = FormatedPhoto.objects.get_photo_in_format(item.publishable.photo_id, self.format)['url']
else:
item.__enclosure_url = None
return item.__enclosure_url
def item_enclosure_mime_type(self, item):
enc_url = self.item_enclosure_url(item)
if enc_url:
return guess_type(enc_url)[0]
def item_enclosure_length(self, item):
# make sure get_photo_in_format was called
if hasattr(item.publishable, 'feed_enclosure'):
return item.publishable.feed_enclosure()['size']
elif self.format:
fp, created = FormatedPhoto.objects.get_or_create(photo=item.publishable.photo_id, format=self.format)
return fp.image.size
class AtomTopCategoryListings(RSSTopCategoryListings):
feed_type = Atom1Feed
subtitle = RSSTopCategoryListings.description
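# Hedged wiring sketch (URL patterns are hypothetical): the feeds are hooked
# up as ordinary Django syndication views, e.g. in urls.py:
#
#     from ella.core.feeds import RSSTopCategoryListings, AtomTopCategoryListings
#
#     urlpatterns += patterns('',
#         url(r'^feeds/rss/(?P<category>.*)/$', RSSTopCategoryListings()),
#         url(r'^feeds/atom/(?P<category>.*)/$', AtomTopCategoryListings()),
#     )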
|
petrlosa/ella
|
ella/core/feeds.py
|
Python
|
bsd-3-clause
| 4,034 | 0.001735 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import binascii
from smtplib import SMTPException
from django.db import models
from django.dispatch import receiver
from django.conf import settings
from django.contrib.auth.signals import user_logged_in
from django.db.models.signals import post_save, post_migrate
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Group, User, Permission
from django.utils import translation as django_translation
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives, get_connection
from django.utils.translation import LANGUAGE_SESSION_KEY
from social.apps.django_app.default.models import UserSocialAuth
from weblate.lang.models import Language
from weblate.trans.site import get_site_url, get_site_domain
from weblate.accounts.avatar import get_user_display
from weblate.trans.util import report_error
from weblate.trans.signals import user_pre_delete
from weblate import VERSION
from weblate.logger import LOGGER
from weblate.appsettings import ANONYMOUS_USER_NAME, SITE_TITLE
def send_mails(mails):
"""Sends multiple mails in single connection."""
try:
connection = get_connection()
connection.send_messages(mails)
except SMTPException as error:
LOGGER.error('Failed to send email: %s', error)
report_error(error, sys.exc_info())
def get_author_name(user, email=True):
"""Returns formatted author name with email."""
# Get full name from database
full_name = user.first_name
# Use username if full name is empty
if full_name == '':
full_name = user.username
# Add email if we are asked for it
if not email:
return full_name
return '%s <%s>' % (full_name, user.email)
def notify_merge_failure(subproject, error, status):
'''
Notification on merge failure.
'''
subscriptions = Profile.objects.subscribed_merge_failure(
subproject.project,
)
users = set()
mails = []
for subscription in subscriptions:
mails.append(
subscription.notify_merge_failure(subproject, error, status)
)
users.add(subscription.user_id)
for owner in subproject.project.owners.all():
mails.append(
owner.profile.notify_merge_failure(
subproject, error, status
)
)
# Notify admins
mails.append(
get_notification_email(
'en',
'ADMINS',
'merge_failure',
subproject,
{
'subproject': subproject,
'status': status,
'error': error,
}
)
)
send_mails(mails)
def notify_new_string(translation):
'''
Notification on new string to translate.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_string(
translation.subproject.project, translation.language
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_string(translation)
)
send_mails(mails)
def notify_new_language(subproject, language, user):
'''
Notify subscribed users about new language requests
'''
mails = []
subscriptions = Profile.objects.subscribed_new_language(
subproject.project,
user
)
users = set()
for subscription in subscriptions:
mails.append(
subscription.notify_new_language(subproject, language, user)
)
users.add(subscription.user_id)
for owner in subproject.project.owners.all():
mails.append(
owner.profile.notify_new_language(
subproject, language, user
)
)
# Notify admins
mails.append(
get_notification_email(
'en',
'ADMINS',
'new_language',
subproject,
{
'language': language,
'user': user,
},
user=user,
)
)
send_mails(mails)
def notify_new_translation(unit, oldunit, user):
'''
Notify subscribed users about new translation
'''
mails = []
subscriptions = Profile.objects.subscribed_any_translation(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_any_translation(unit, oldunit)
)
send_mails(mails)
def notify_new_contributor(unit, user):
'''
Notify about new contributor.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_contributor(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_contributor(
unit.translation, user
)
)
send_mails(mails)
def notify_new_suggestion(unit, suggestion, user):
'''
Notify about new suggestion.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_suggestion(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_suggestion(
unit.translation,
suggestion,
unit
)
)
send_mails(mails)
def notify_new_comment(unit, comment, user, report_source_bugs):
'''
Notify about new comment.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_comment(
unit.translation.subproject.project,
comment.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_comment(unit, comment, user)
)
# Notify upstream
if comment.language is None and report_source_bugs != '':
send_notification_email(
'en',
report_source_bugs,
'new_comment',
unit.translation,
{
'unit': unit,
'comment': comment,
'subproject': unit.translation.subproject,
},
user=user,
)
send_mails(mails)
def get_notification_email(language, email, notification,
translation_obj=None, context=None, headers=None,
user=None, info=None):
'''
Renders notification email.
'''
cur_language = django_translation.get_language()
context = context or {}
headers = headers or {}
references = None
if 'unit' in context:
unit = context['unit']
references = '{0}/{1}/{2}/{3}'.format(
unit.translation.subproject.project.slug,
unit.translation.subproject.slug,
unit.translation.language.code,
unit.id
)
if references is not None:
references = '<{0}@{1}>'.format(references, get_site_domain())
headers['In-Reply-To'] = references
headers['References'] = references
try:
if info is None:
info = translation_obj.__unicode__()
LOGGER.info(
'sending notification %s on %s to %s',
notification,
info,
email
)
# Load user language
if language is not None:
django_translation.activate(language)
# Template name
context['subject_template'] = 'mail/{}_subject.txt'.format(
notification
)
# Adjust context
context['current_site_url'] = get_site_url()
if translation_obj is not None:
context['translation'] = translation_obj
context['translation_url'] = get_site_url(
translation_obj.get_absolute_url()
)
context['site_title'] = SITE_TITLE
# Render subject
subject = render_to_string(
context['subject_template'],
context
).strip()
# Render body
body = render_to_string(
'mail/{}.txt'.format(notification),
context
)
html_body = render_to_string(
'mail/{}.html'.format(notification),
context
)
# Define headers
headers['Auto-Submitted'] = 'auto-generated'
headers['X-AutoGenerated'] = 'yes'
headers['Precedence'] = 'bulk'
headers['X-Mailer'] = 'Weblate {}'.format(VERSION)
# Reply to header
if user is not None:
headers['Reply-To'] = user.email
# List of recipients
if email == 'ADMINS':
emails = [a[1] for a in settings.ADMINS]
else:
emails = [email]
# Create message
email = EmailMultiAlternatives(
settings.EMAIL_SUBJECT_PREFIX + subject,
body,
to=emails,
headers=headers,
)
email.attach_alternative(
html_body,
'text/html'
)
# Return the mail
return email
finally:
django_translation.activate(cur_language)
def send_notification_email(language, email, notification,
translation_obj=None, context=None, headers=None,
user=None, info=None):
'''
Renders and sends notification email.
'''
email = get_notification_email(
language, email, notification, translation_obj, context, headers,
user, info
)
send_mails([email])
class VerifiedEmail(models.Model):
'''
Storage for verified emails from auth backends.
'''
social = models.ForeignKey(UserSocialAuth)
email = models.EmailField(max_length=254)
def __unicode__(self):
return u'{0} - {1}'.format(
self.social.user.username,
self.email
)
class ProfileManager(models.Manager):
'''
Manager providing shortcuts for subscription queries.
'''
# pylint: disable=W0232
def subscribed_any_translation(self, project, language, user):
return self.filter(
subscribe_any_translation=True,
subscriptions=project,
languages=language
).exclude(
user=user
)
def subscribed_new_language(self, project, user):
return self.filter(
subscribe_new_language=True,
subscriptions=project,
).exclude(
user=user
)
def subscribed_new_string(self, project, language):
return self.filter(
subscribe_new_string=True,
subscriptions=project,
languages=language
)
def subscribed_new_suggestion(self, project, language, user):
ret = self.filter(
subscribe_new_suggestion=True,
subscriptions=project,
languages=language
)
        # We don't want to filter out the anonymous user
if user is not None and user.is_authenticated():
ret = ret.exclude(user=user)
return ret
def subscribed_new_contributor(self, project, language, user):
return self.filter(
subscribe_new_contributor=True,
subscriptions=project,
languages=language
).exclude(
user=user
)
def subscribed_new_comment(self, project, language, user):
ret = self.filter(
subscribe_new_comment=True,
subscriptions=project
).exclude(
user=user
)
# Source comments go to every subscriber
if language is not None:
ret = ret.filter(languages=language)
return ret
def subscribed_merge_failure(self, project):
return self.filter(subscribe_merge_failure=True, subscriptions=project)
class Profile(models.Model):
'''
User profiles storage.
'''
user = models.OneToOneField(User, unique=True, editable=False)
language = models.CharField(
verbose_name=_(u"Interface Language"),
max_length=10,
choices=settings.LANGUAGES
)
languages = models.ManyToManyField(
Language,
verbose_name=_('Translated languages'),
blank=True,
help_text=_('Choose languages to which you can translate.')
)
secondary_languages = models.ManyToManyField(
Language,
verbose_name=_('Secondary languages'),
related_name='secondary_profile_set',
blank=True,
)
suggested = models.IntegerField(default=0, db_index=True)
translated = models.IntegerField(default=0, db_index=True)
hide_completed = models.BooleanField(
verbose_name=_('Hide completed translations on dashboard'),
default=False
)
secondary_in_zen = models.BooleanField(
verbose_name=_('Show secondary translations in zen mode'),
default=True
)
hide_source_secondary = models.BooleanField(
verbose_name=_('Hide source if there is secondary language'),
default=False
)
subscriptions = models.ManyToManyField(
'trans.Project',
verbose_name=_('Subscribed projects'),
blank=True,
)
subscribe_any_translation = models.BooleanField(
verbose_name=_('Notification on any translation'),
default=False
)
subscribe_new_string = models.BooleanField(
verbose_name=_('Notification on new string to translate'),
default=False
)
subscribe_new_suggestion = models.BooleanField(
verbose_name=_('Notification on new suggestion'),
default=False
)
subscribe_new_contributor = models.BooleanField(
verbose_name=_('Notification on new contributor'),
default=False
)
subscribe_new_comment = models.BooleanField(
verbose_name=_('Notification on new comment'),
default=False
)
subscribe_merge_failure = models.BooleanField(
verbose_name=_('Notification on merge failure'),
default=False
)
subscribe_new_language = models.BooleanField(
verbose_name=_('Notification on new language request'),
default=False
)
SUBSCRIPTION_FIELDS = (
'subscribe_any_translation',
'subscribe_new_string',
'subscribe_new_suggestion',
'subscribe_new_contributor',
'subscribe_new_comment',
'subscribe_merge_failure',
'subscribe_new_language',
)
objects = ProfileManager()
def __unicode__(self):
return self.user.username
def get_user_display(self):
return get_user_display(self.user)
def get_user_display_link(self):
return get_user_display(self.user, True, True)
def get_user_name(self):
return get_user_display(self.user, False)
@models.permalink
def get_absolute_url(self):
return ('user_page', (), {
'user': self.user.username
})
@property
def last_change(self):
'''
        Returns the date of the last change the user made in Weblate.
'''
try:
return self.user.change_set.all()[0].timestamp
except IndexError:
return None
def notify_user(self, notification, translation_obj,
context=None, headers=None, user=None):
'''
Wrapper for sending notifications to user.
'''
if context is None:
context = {}
if headers is None:
headers = {}
# Check whether user is still allowed to access this project
if not translation_obj.has_acl(self.user):
return
# Generate notification
return get_notification_email(
self.language,
self.user.email,
notification,
translation_obj,
context,
headers,
user=user
)
def notify_any_translation(self, unit, oldunit):
'''
Sends notification on translation.
'''
if oldunit.translated:
template = 'changed_translation'
else:
template = 'new_translation'
return self.notify_user(
template,
unit.translation,
{
'unit': unit,
'oldunit': oldunit,
}
)
def notify_new_language(self, subproject, language, user):
'''
Sends notification on new language request.
'''
return self.notify_user(
'new_language',
subproject,
{
'language': language,
'user': user,
},
user=user
)
def notify_new_string(self, translation):
'''
Sends notification on new strings to translate.
'''
return self.notify_user(
'new_string',
translation,
)
def notify_new_suggestion(self, translation, suggestion, unit):
'''
Sends notification on new suggestion.
'''
return self.notify_user(
'new_suggestion',
translation,
{
'suggestion': suggestion,
'unit': unit,
}
)
def notify_new_contributor(self, translation, user):
'''
Sends notification on new contributor.
'''
return self.notify_user(
'new_contributor',
translation,
{
'user': user,
}
)
def notify_new_comment(self, unit, comment, user):
'''
Sends notification about new comment.
'''
return self.notify_user(
'new_comment',
unit.translation,
{
'unit': unit,
'comment': comment,
'subproject': unit.translation.subproject,
},
user=user,
)
def notify_merge_failure(self, subproject, error, status):
'''
Sends notification on merge failure.
'''
return self.notify_user(
'merge_failure',
subproject,
{
'subproject': subproject,
'error': error,
'status': status,
}
)
    @property
    def full_name(self):
        '''
        Returns user's full name.
        '''
        # Weblate keeps the full name in the standard first_name field
        return self.user.first_name
def set_lang(request, profile):
"""
Sets session language based on user preferences.
"""
request.session[LANGUAGE_SESSION_KEY] = profile.language
@receiver(user_logged_in)
def post_login_handler(sender, request, user, **kwargs):
'''
Signal handler for setting user language and
migrating profile if needed.
'''
# Warning about setting password
if (getattr(user, 'backend', '').endswith('.EmailAuth') and
not user.has_usable_password()):
request.session['show_set_password'] = True
# Ensure user has a profile
profile = Profile.objects.get_or_create(user=user)[0]
# Migrate django-registration based verification to python-social-auth
if (user.has_usable_password() and
not user.social_auth.filter(provider='email').exists()):
social = user.social_auth.create(
provider='email',
uid=user.email,
)
VerifiedEmail.objects.create(
social=social,
email=user.email,
)
# Set language for session based on preferences
set_lang(request, profile)
def create_groups(update):
'''
Creates standard groups and gives them permissions.
'''
guest_group, created = Group.objects.get_or_create(name='Guests')
if created or update:
guest_group.permissions.add(
Permission.objects.get(codename='can_see_git_repository'),
Permission.objects.get(codename='add_suggestion'),
)
group, created = Group.objects.get_or_create(name='Users')
if created or update:
group.permissions.add(
Permission.objects.get(codename='upload_translation'),
Permission.objects.get(codename='overwrite_translation'),
Permission.objects.get(codename='save_translation'),
Permission.objects.get(codename='save_template'),
Permission.objects.get(codename='accept_suggestion'),
Permission.objects.get(codename='delete_suggestion'),
Permission.objects.get(codename='vote_suggestion'),
Permission.objects.get(codename='ignore_check'),
Permission.objects.get(codename='upload_dictionary'),
Permission.objects.get(codename='add_dictionary'),
Permission.objects.get(codename='change_dictionary'),
Permission.objects.get(codename='delete_dictionary'),
Permission.objects.get(codename='lock_translation'),
Permission.objects.get(codename='can_see_git_repository'),
Permission.objects.get(codename='add_comment'),
Permission.objects.get(codename='add_suggestion'),
Permission.objects.get(codename='use_mt'),
)
owner_permissions = (
Permission.objects.get(codename='author_translation'),
Permission.objects.get(codename='upload_translation'),
Permission.objects.get(codename='overwrite_translation'),
Permission.objects.get(codename='commit_translation'),
Permission.objects.get(codename='update_translation'),
Permission.objects.get(codename='push_translation'),
Permission.objects.get(codename='automatic_translation'),
Permission.objects.get(codename='save_translation'),
Permission.objects.get(codename='save_template'),
Permission.objects.get(codename='accept_suggestion'),
Permission.objects.get(codename='vote_suggestion'),
Permission.objects.get(codename='override_suggestion'),
Permission.objects.get(codename='delete_comment'),
Permission.objects.get(codename='delete_suggestion'),
Permission.objects.get(codename='ignore_check'),
Permission.objects.get(codename='upload_dictionary'),
Permission.objects.get(codename='add_dictionary'),
Permission.objects.get(codename='change_dictionary'),
Permission.objects.get(codename='delete_dictionary'),
Permission.objects.get(codename='lock_subproject'),
Permission.objects.get(codename='reset_translation'),
Permission.objects.get(codename='lock_translation'),
Permission.objects.get(codename='can_see_git_repository'),
Permission.objects.get(codename='add_comment'),
Permission.objects.get(codename='delete_comment'),
Permission.objects.get(codename='add_suggestion'),
Permission.objects.get(codename='use_mt'),
Permission.objects.get(codename='edit_priority'),
Permission.objects.get(codename='edit_flags'),
Permission.objects.get(codename='manage_acl'),
Permission.objects.get(codename='download_changes'),
Permission.objects.get(codename='view_reports'),
)
group, created = Group.objects.get_or_create(name='Managers')
if created or update:
group.permissions.add(*owner_permissions)
group, created = Group.objects.get_or_create(name='Owners')
if created or update:
group.permissions.add(*owner_permissions)
created = True
try:
anon_user = User.objects.get(
username=ANONYMOUS_USER_NAME,
)
created = False
if anon_user.is_active:
raise ValueError(
'Anonymous user ({}) already exists and enabled, '
'please change ANONYMOUS_USER_NAME setting.'.format(
ANONYMOUS_USER_NAME,
)
)
except User.DoesNotExist:
anon_user = User.objects.create(
username=ANONYMOUS_USER_NAME,
is_active=False,
)
if created or update:
anon_user.set_unusable_password()
anon_user.groups.clear()
anon_user.groups.add(guest_group)
def move_users():
'''
Moves users to default group.
'''
group = Group.objects.get(name='Users')
for user in User.objects.all():
user.groups.add(group)
def remove_user(user):
'''
Removes user account.
'''
# Send signal (to commit any pending changes)
user_pre_delete.send(instance=user, sender=user.__class__)
# Change username
user.username = 'deleted-{0}'.format(user.pk)
while User.objects.filter(username=user.username).exists():
user.username = 'deleted-{0}-{1}'.format(
user.pk,
binascii.b2a_hex(os.urandom(5))
)
# Remove user information
user.first_name = 'Deleted User'
user.last_name = ''
user.email = 'noreply@weblate.org'
# Disable the user
user.is_active = False
user.set_unusable_password()
user.save()
# Remove all social auth associations
user.social_auth.all().delete()
@receiver(post_migrate)
def sync_create_groups(sender, **kwargs):
'''
Create groups on syncdb.
'''
if sender.label == 'accounts':
create_groups(False)
@receiver(post_save, sender=User)
def create_profile_callback(sender, instance, created=False, **kwargs):
'''
Automatically adds user to Users group.
'''
if created:
# Add user to Users group if it exists
try:
group = Group.objects.get(name='Users')
instance.groups.add(group)
except Group.DoesNotExist:
pass
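# Illustrative sketch, not part of the original module: the SUBSCRIPTION_FIELDS
# tuple on Profile makes bulk preference changes a simple loop. Assumes a saved
# Profile instance; the helper name is hypothetical.
def enable_all_subscriptions(profile):
    '''Hypothetical helper: switch on every subscription flag at once.'''
    for field_name in Profile.SUBSCRIPTION_FIELDS:
        setattr(profile, field_name, True)
    profile.save()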
|
quinox/weblate
|
weblate/accounts/models.py
|
Python
|
gpl-3.0
| 26,496 | 0 |
# Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import re
import netaddr
from oslo.config import cfg
from akanda.rug.openstack.common import jsonutils
LOG = logging.getLogger(__name__)
DEFAULT_AS = 64512
OPTIONS = [
cfg.StrOpt('provider_rules_path'),
cfg.IntOpt('asn', default=DEFAULT_AS),
cfg.IntOpt('neighbor_asn', default=DEFAULT_AS),
]
cfg.CONF.register_opts(OPTIONS)
EXTERNAL_NET = 'external'
INTERNAL_NET = 'internal'
MANAGEMENT_NET = 'management'
SERVICE_STATIC = 'static'
SERVICE_DHCP = 'dhcp'
SERVICE_RA = 'ra'
def build_config(client, router, interfaces):
provider_rules = load_provider_rules(cfg.CONF.provider_rules_path)
networks = generate_network_config(client, router, interfaces)
gateway = get_default_v4_gateway(client, router, networks)
return {
'asn': cfg.CONF.asn,
'neighbor_asn': cfg.CONF.neighbor_asn,
'default_v4_gateway': gateway,
'networks': networks,
'labels': provider_rules.get('labels', {}),
'floating_ips': generate_floating_config(router),
'tenant_id': router.tenant_id,
'hostname': router.name
}
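# Shape sketch (illustrative values only) of the dict build_config() assembles:
#     {'asn': 64512, 'neighbor_asn': 64512, 'default_v4_gateway': '192.0.2.1',
#      'networks': [...], 'labels': {}, 'floating_ips': [...],
#      'tenant_id': 'abc123', 'hostname': 'router1'}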
def get_default_v4_gateway(client, router, networks):
"""Find the IPv4 default gateway for the router.
"""
LOG.debug('networks = %r', networks)
LOG.debug('external interface = %s', router.external_port.mac_address)
# Now find the subnet that our external IP is on, and return its
# gateway.
for n in networks:
if n['network_type'] == EXTERNAL_NET:
v4_addresses = [
addr
for addr in (netaddr.IPAddress(ip.partition('/')[0])
for ip in n['interface']['addresses'])
if addr.version == 4
]
for s in n['subnets']:
subnet = netaddr.IPNetwork(s['cidr'])
if subnet.version != 4:
continue
LOG.debug(
'%s: checking if subnet %s should have the default route',
router.id, s['cidr'])
for addr in v4_addresses:
if addr in subnet:
LOG.debug(
'%s: found gateway %s for subnet %s on network %s',
router.id,
s['gateway_ip'],
s['cidr'],
n['network_id'],
)
return s['gateway_ip']
# Sometimes we are asked to build a configuration for the server
# when the external interface is still marked as "down". We can
# report that case, but we don't treat it as an error here because
# we'll be asked to do it again when the interface comes up.
LOG.info('%s: no default gateway was found', router.id)
return ''
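# Illustrative sketch (hypothetical addresses): the lookup above hinges on
# netaddr's containment test, where an IPAddress can be tested for membership
# in an IPNetwork directly.
def _containment_example():
    subnet = netaddr.IPNetwork('192.0.2.0/24')
    return netaddr.IPAddress('192.0.2.10') in subnet  # True: gateway candidate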
def load_provider_rules(path):
    try:
        return jsonutils.load(open(path))
    except:  # pragma nocover
        LOG.exception('unable to open provider rules: %s' % path)
        # fall back to an empty rule set so build_config() can still call .get()
        return {}
def generate_network_config(client, router, interfaces):
iface_map = dict((i['lladdr'], i['ifname']) for i in interfaces)
retval = [
_network_config(
client,
router.external_port,
iface_map[router.external_port.mac_address],
EXTERNAL_NET),
_management_network_config(
router.management_port,
iface_map[router.management_port.mac_address],
interfaces,
)]
retval.extend(
_network_config(
client,
p,
iface_map[p.mac_address],
INTERNAL_NET,
client.get_network_ports(p.network_id))
for p in router.internal_ports)
return retval
def _management_network_config(port, ifname, interfaces):
for iface in interfaces:
if iface['ifname'] == ifname:
return _make_network_config_dict(
iface, MANAGEMENT_NET, port.network_id)
def _network_config(client, port, ifname, network_type, network_ports=[]):
subnets = client.get_network_subnets(port.network_id)
subnets_dict = dict((s.id, s) for s in subnets)
return _make_network_config_dict(
_interface_config(ifname, port, subnets_dict),
network_type,
port.network_id,
subnets_dict=subnets_dict,
network_ports=network_ports)
def _make_network_config_dict(interface, network_type, network_id,
v4_conf=SERVICE_STATIC, v6_conf=SERVICE_STATIC,
subnets_dict={}, network_ports=[]):
return {'interface': interface,
'network_id': network_id,
'v4_conf_service': v4_conf,
'v6_conf_service': v6_conf,
'network_type': network_type,
'subnets': [_subnet_config(s) for s in subnets_dict.values()],
'allocations': _allocation_config(network_ports, subnets_dict)}
def _interface_config(ifname, port, subnets_dict):
def fmt(fixed):
return '%s/%s' % (fixed.ip_address,
subnets_dict[fixed.subnet_id].cidr.prefixlen)
return {'ifname': ifname,
'addresses': [fmt(fixed) for fixed in port.fixed_ips]}
def _subnet_config(subnet):
return {
'cidr': str(subnet.cidr),
'dhcp_enabled': subnet.enable_dhcp and subnet.ipv6_ra_mode != 'slaac',
'dns_nameservers': subnet.dns_nameservers,
'host_routes': subnet.host_routes,
'gateway_ip': (str(subnet.gateway_ip)
if subnet.gateway_ip is not None
else ''),
}
def _allocation_config(ports, subnets_dict):
r = re.compile('[:.]')
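    # substitutes ':' and '.' so an address like '10.0.0.5' becomes the
    # hostname-safe '10-0-0-5' below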
allocations = []
for port in ports:
addrs = {
str(fixed.ip_address): subnets_dict[fixed.subnet_id].enable_dhcp
for fixed in port.fixed_ips
}
if not addrs:
continue
allocations.append(
{
'ip_addresses': addrs,
'device_id': port.device_id,
'hostname': '%s.local' % r.sub('-', sorted(addrs.keys())[0]),
'mac_address': port.mac_address
}
)
return allocations
def generate_floating_config(router):
return [
{'floating_ip': str(fip.floating_ip), 'fixed_ip': str(fip.fixed_ip)}
for fip in router.floating_ips
]
|
markmcclain/astara
|
akanda/rug/api/configuration.py
|
Python
|
apache-2.0
| 6,984 | 0.000143 |
import io
import json
class Stream(object):
    """
    This is a base class that should be inherited when implementing
    different stream types. Should only be created by plugins.
    """
    __shortname__ = "stream"
def __init__(self, session):
self.session = session
def __repr__(self):
return "<Stream()>"
def __json__(self):
return dict(type=type(self).shortname())
def open(self):
"""
Attempts to open a connection to the stream.
Returns a file-like object that can be used to read the stream data.
Raises :exc:`StreamError` on failure.
"""
raise NotImplementedError
@property
def json(self):
obj = self.__json__()
return json.dumps(obj)
@classmethod
def shortname(cls):
return cls.__shortname__
class StreamIO(io.IOBase):
pass
__all__ = ["Stream", "StreamIO"]
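# Minimal subclass sketch (illustrative only; FileStream is hypothetical and
# not part of streamlink's API): a plugin-defined stream type overrides open()
# and returns a file-like object.
class FileStream(Stream):
    __shortname__ = "file"
    def __init__(self, session, path):
        Stream.__init__(self, session)
        self.path = path
    def open(self):
        return open(self.path, "rb")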
|
ethanhlc/streamlink
|
src/streamlink/stream/stream.py
|
Python
|
bsd-2-clause
| 915 | 0 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network.common.utils import to_list, ComplexList
from ansible.module_utils.connection import Connection, ConnectionError
_DEVICE_CONFIGS = {}
ios_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
'timeout': dict(type='int')
}
ios_argument_spec = {
'provider': dict(type='dict', options=ios_provider_spec),
}
ios_top_spec = {
'host': dict(removed_in_version=2.9),
'port': dict(removed_in_version=2.9, type='int'),
'username': dict(removed_in_version=2.9),
'password': dict(removed_in_version=2.9, no_log=True),
'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
'auth_pass': dict(removed_in_version=2.9, no_log=True),
'timeout': dict(removed_in_version=2.9, type='int')
}
ios_argument_spec.update(ios_top_spec)
def get_provider_argspec():
return ios_provider_spec
def get_connection(module):
if hasattr(module, '_ios_connection'):
return module._ios_connection
capabilities = get_capabilities(module)
network_api = capabilities.get('network_api')
if network_api == 'cliconf':
module._ios_connection = Connection(module._socket_path)
else:
module.fail_json(msg='Invalid connection type %s' % network_api)
return module._ios_connection
def get_capabilities(module):
if hasattr(module, '_ios_capabilities'):
return module._ios_capabilities
capabilities = Connection(module._socket_path).get_capabilities()
module._ios_capabilities = json.loads(capabilities)
return module._ios_capabilities
def check_args(module, warnings):
pass
def get_defaults_flag(module):
connection = get_connection(module)
out = connection.get('show running-config ?')
out = to_text(out, errors='surrogate_then_replace')
commands = set()
for line in out.splitlines():
if line.strip():
commands.add(line.strip().split()[0])
if 'all' in commands:
return ['all']
else:
return ['full']
def get_config(module, flags=None):
flag_str = ' '.join(to_list(flags))
try:
return _DEVICE_CONFIGS[flag_str]
except KeyError:
connection = get_connection(module)
out = connection.get_config(filter=flags)
cfg = to_text(out, errors='surrogate_then_replace').strip()
_DEVICE_CONFIGS[flag_str] = cfg
return cfg
def to_commands(module, commands):
spec = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict()
}
transform = ComplexList(spec, module)
return transform(commands)
def run_commands(module, commands, check_rc=True):
responses = list()
connection = get_connection(module)
for cmd in to_list(commands):
if isinstance(cmd, dict):
command = cmd['command']
prompt = cmd['prompt']
answer = cmd['answer']
else:
command = cmd
prompt = None
answer = None
try:
out = connection.get(command, prompt, answer)
except ConnectionError as exc:
if check_rc:
module.fail_json(msg=to_text(exc))
else:
out = exc
try:
out = to_text(out, errors='surrogate_or_strict')
except UnicodeError:
module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))
responses.append(out)
return responses
def load_config(module, commands):
connection = get_connection(module)
try:
return connection.edit_config(commands)
except ConnectionError as exc:
module.fail_json(msg=to_text(exc))
|
Nitaco/ansible
|
lib/ansible/module_utils/network/ios/ios.py
|
Python
|
gpl-3.0
| 5,949 | 0.002858 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_export_request(
vault_name: str,
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport')
path_format_arguments = {
"vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class JobsOperations(object):
"""JobsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.activestamp.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def export(
self,
vault_name: str,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> None:
"""Triggers export of jobs specified by filters and returns an OperationID to track.
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param filter: OData filter options.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_export_request(
vault_name=vault_name,
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.export.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport'} # type: ignore
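# Usage sketch (illustrative; the vault/resource-group names and the OData
# filter value are placeholders): with a configured
# RecoveryServicesBackupClient, the operation above is reached through its
# `jobs` operation group.
def trigger_jobs_export(client, vault_name, resource_group_name):
    client.jobs.export(vault_name, resource_group_name, filter="status eq 'Completed'")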
|
Azure/azure-sdk-for-python
|
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/activestamp/operations/_jobs_operations.py
|
Python
|
mit
| 5,627 | 0.004087 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from elftools.elf.elffile import ELFFile
from elftools.common.exceptions import ELFError
from elftools.elf.segments import NoteSegment
class ReadELF(object):
def __init__(self, file):
self.elffile = ELFFile(file)
def get_build(self):
for segment in self.elffile.iter_segments():
if isinstance(segment, NoteSegment):
for note in segment.iter_notes():
print note
def main():
if(len(sys.argv) < 2):
print "Missing argument"
sys.exit(1)
with open(sys.argv[1], 'rb') as file:
try:
readelf = ReadELF(file)
readelf.get_build()
except ELFError as err:
sys.stderr.write('ELF error: %s\n' % err)
sys.exit(1)
if __name__ == '__main__':
main()
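# Usage note (illustrative): run against any ELF binary, e.g.
#     python elf.py /bin/ls
# to print the notes found in the file's NOTE segments.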
|
somat/samber
|
elf.py
|
Python
|
mit
| 861 | 0.003484 |
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
def quote_value(self, value):
return self.connection.escape(value)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
|
cyaninc/django-mysql-pymysql
|
src/mysql_pymysql/schema.py
|
Python
|
bsd-3-clause
| 2,073 | 0.00193 |
from django.db import models
from customer.models import Customer
from product.models import Product
from django.utils import timezone
# Create your models here.
class OrderStatus:
IN_BASKET = 0
PAYED = 1
class Order(models.Model):
customer = models.ForeignKey(Customer)
product = models.ForeignKey(Product)
quantity = models.IntegerField(default=1)
status = models.SmallIntegerField()
created_at = models.DateTimeField()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if self.pk is None:
self.created_at = timezone.now()
super().save(force_insert, force_update, using, update_fields)
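# Usage sketch (illustrative; assumes existing Customer and Product rows).
# created_at is stamped automatically on the first save by the override above.
def add_to_basket(customer, product, quantity=1):
    return Order.objects.create(
        customer=customer,
        product=product,
        quantity=quantity,
        status=OrderStatus.IN_BASKET,
    )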
|
MahdiZareie/PyShop
|
shop/models.py
|
Python
|
mit
| 691 | 0.001447 |
# -*- coding: utf-8 -*-
"""
Go to Google Bookmarks: https://www.google.com/bookmarks/
On the bottom left, click "Export bookmarks": https://www.google.com/bookmarks/bookmarks.html?hl=en
After downloading the html file, run this script on it to get the addresses
This script is based on https://gist.github.com/endolith/3896948
"""
import sys
try:
from lxml.html import document_fromstring
except ImportError:
print "You need to install lxml.html"
sys.exit()
try:
from geopy.geocoders import Nominatim
except ImportError:
print "You need to install geopy"
sys.exit()
try:
    import simplekml
except ImportError:
    print "You need to install simplekml"
    sys.exit()
# json, urllib2, re and time ship with Python 2's standard library,
# so no install guard is needed for them.
import json
import re
import time
from urllib2 import urlopen
filename = r'GoogleBookmarks.html'
def main():
with open(filename) as bookmarks_file:
data = bookmarks_file.read()
geolocator = Nominatim()
kml = simplekml.Kml()
lst = list()
# Hacky and doesn't work for all of the stars:
lat_re = re.compile('markers:[^\]]*latlng[^}]*lat:([^,]*)')
lon_re = re.compile('markers:[^\]]*latlng[^}]*lng:([^}]*)')
coords_in_url = re.compile('\?q=(-?\d{,3}\.\d*),\s*(-?\d{,3}\.\d*)')
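    # e.g. '...maps?q=40.7484, -73.9857' yields groups ('40.7484', '-73.9857')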
doc = document_fromstring(data)
for element, attribute, url, pos in doc.body.iterlinks():
if 'maps.google' in url:
description = element.text or ''
print description.encode('UTF8')
if coords_in_url.search(url):
# Coordinates are in URL itself
latitude = coords_in_url.search(url).groups()[0]
longitude = coords_in_url.search(url).groups()[1]
else:
# Load map and find coordinates in source of page
try:
url = url.encode('ascii', 'xmlcharrefreplace')
sock = urlopen(url.replace(' ','+').encode('UTF8'))
except Exception, e:
print 'Connection problem:'
print repr(e)
print 'Waiting 3 minutes and trying again'
time.sleep(180)
sock = urlopen(url.replace(' ','+').encode('UTF8'))
content = sock.read()
sock.close()
time.sleep(5) # Don't annoy server
try:
latitude = lat_re.findall(content)[0]
longitude = lon_re.findall(content)[0]
except IndexError:
latitude = ""
longitude = ""
try:
lines = content.split('\n') # --> ['Line 1', 'Line 2', 'Line 3']
for line in lines:
if re.search('cacheResponse\(', line):
splitline = line.split('(')[1].split(')')[0] + '"]'
null = None
values = eval(splitline)
print values[8][0][1]
longitude = str(values[0][0][1])
latitude = str(values[0][0][2])
continue
if latitude == "":
# let's try something different....
for line in lines:
if re.search('APP_INITIALIZATION_STATE', line):
splitline = line.split('[')[-1].split(']')[0].split(',')
longitude = str(splitline[1])
latitude = str(splitline[2])
continue
except IndexError:
print '[Coordinates not found]'
continue
print
print latitude, longitude
            location = None
            try:
                if latitude != "":
                    location = geolocator.reverse(latitude+", "+longitude)
                    print(location.address)
                else:
                    print '[Invalid coordinates]'
            except ValueError:
                print '[Invalid coordinates]'
print
if latitude != "":
kml.newpoint(name=description, coords=[(float(longitude), float(latitude))])
else:
kml.newpoint(name=description)
lst.append({'latitude': latitude,
'longitude': longitude,
'name': description,
'url': url.encode(encoding='utf-8', errors='replace'),
'address': location.address.encode(encoding='utf-8', errors='replace') if location else 'error'})
            # this is here because there's a tendency for this script to fail part way through...
# so at least you can get a partial result
kml.save("GoogleBookmarks.kml")
with open('GoogleBookmarks.json', mode='w') as listdump:
listdump.write(json.dumps(lst))
sys.stdout.flush()
kml.save("GoogleBookmarks.kml")
with open('GoogleBookmarks.json', mode='w') as listdump:
listdump.write(json.dumps(lst))
if __name__ == '__main__':
main()
|
marado/stars-to-addresses
|
stars-to-addresses.py
|
Python
|
gpl-3.0
| 5,571 | 0.009514 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Weapon()
result.template = "object/weapon/melee/sword/crafted_saber/shared_sword_lightsaber_s11_training.iff"
result.attribute_template_id = 10
result.stfName("weapon_name","sword_lightsaber_type11")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
obi-two/Rebelion
|
data/scripts/templates/object/weapon/melee/sword/crafted_saber/shared_sword_lightsaber_s11_training.py
|
Python
|
mit
| 490 | 0.044898 |
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
from mock import Mock
from . import tools_tags as tools
from .azure_common import BaseTest
from c7n_azure.filters import TagActionFilter
from c7n_azure.utils import now
from c7n.filters.offhours import Time
class TagsTest(BaseTest):
def test_tag_schema_validate(self):
self.assertTrue(
self.load_policy(
tools.get_policy(filters=[
{'type': 'marked-for-op', 'op': 'delete', 'tag': 'custom'},
]), validate=True))
def _get_filter(self, data):
return TagActionFilter(data=data, manager=Mock)
def _test_filter_scenario(self, resources, expected_count, filter_definition={'op': 'stop'}):
f = self._get_filter(filter_definition)
result = f.process(resources)
self.assertEqual(expected_count, len(result))
def test_tag_filter(self):
date = now().strftime('%Y-%m-%d')
date_future = (now() + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)}),
tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date_future)})]
self._test_filter_scenario(resources, 1)
def test_custom_tag_filter(self):
date = now().strftime('%Y-%m-%d')
resources = [tools.get_resource({'custom_status': 'TTL: stop@{0}'.format(date)})]
filter_definition = {'op': 'stop', 'tag': 'custom_status'}
self._test_filter_scenario(resources, 1, filter_definition)
def test_improper_tag_format(self):
resources = [tools.get_resource({'custodian_status': 'missingcolon}'}),
tools.get_resource({'custodian_status': 'missing: atsign'})]
self._test_filter_scenario(resources, 0)
def test_different_op_returns_no_resource(self):
date = now().strftime('%Y-%m-%d')
resources = [tools.get_resource({'custodian_status': 'TTL: delete@{0}'.format(date)})]
self._test_filter_scenario(resources, 0)
def test_misformatted_date_string(self):
date = "notadate"
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)})]
self._test_filter_scenario(resources, 0)
def test_timezone_in_datestring(self):
tz = Time.get_tz('America/Santiago')
date = (now(tz) - datetime.timedelta(hours=1)).strftime('%Y/%m/%d %H%M %Z')
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)})]
self._test_filter_scenario(resources, 1)
|
Sutto/cloud-custodian
|
tools/c7n_azure/tests_azure/test_filters_marked_for_op.py
|
Python
|
apache-2.0
| 3,258 | 0.002762 |
#!/usr/bin/python3
import os
import sys
import subprocess
import unittest
def testequal(a,b):
if (a==b):
print ("SUCCESS")
else:
print ("FAIL")
def getPortal():
    # decode() so the slicing and string comparisons below work on Python 3
    output=subprocess.check_output(["iscsi-ls","iscsi://localhost:3260"]).decode()
    print (output)
    target=output[7:-25]
    #Test if iSCSI portal is created (last part is uid)
    return target
def getLun(target,lun):
    command=["iscsi-inq",
            "iscsi://localhost:3260/%s/%d" % (target,lun)]
    try:
        output=subprocess.check_output(command,stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output=e.output
    # print (output)
    return output.decode()
def getLunCapacity(target,lun):
    command=["iscsi-readcapacity16",
            "iscsi://localhost:3260/%s/%d" % (target,lun)]
    try:
        output=subprocess.check_output(command,stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        # print e
        output=e.output
    # print output
    Size=0
    for line in output.decode().split("\n"):
        # print line
        if (line[:11]=="Total size:"):
            Size=int(line[11:])
    return Size
class iSCSI_luns(unittest.TestCase):
def test_portal(self):
self.assertEqual(target[:-12],"iqn.2003-01.org.linux-iscsi.testingclient-hostname:sn.")
def test_lun0(self):
self.assertEqual(getLunCapacity(target,0),51200)
def test_lun1(self):
self.assertEqual(getLunCapacity(target,1),0)
def test_lun2(self):
self.assertEqual(getLunCapacity(target,2),66560)
if __name__ == '__main__':
global target
target=getPortal()
# getLun(target,0)
# getLun(target,1)
# getLun(target,2)
unittest.main()
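# Usage note (illustrative): run with `python3 test-iscsi.py` against a target
# exported on localhost:3260; the expected LUN sizes are fixture-specific.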
|
raqet/acquisition-client
|
testing/test-iscsi.py
|
Python
|
gpl-3.0
| 1,527 | 0.061559 |
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from rdmo.core.exports import XMLResponse
from rdmo.core.permissions import HasModelPermission
from rdmo.core.views import ChoicesViewSet
from rdmo.core.viewsets import CopyModelMixin
from .models import Condition
from .renderers import ConditionRenderer
from .serializers.export import ConditionExportSerializer
from .serializers.v1 import ConditionIndexSerializer, ConditionSerializer
class ConditionViewSet(CopyModelMixin, ModelViewSet):
permission_classes = (HasModelPermission, )
queryset = Condition.objects.select_related('source', 'target_option') \
.prefetch_related('optionsets', 'questionsets', 'questions', 'tasks')
serializer_class = ConditionSerializer
filter_backends = (DjangoFilterBackend,)
filterset_fields = (
'uri',
'key',
'source',
'relation',
'target_text',
'target_option'
)
@action(detail=False)
def index(self, request):
queryset = Condition.objects.select_related('source', 'target_option')
serializer = ConditionIndexSerializer(queryset, many=True)
return Response(serializer.data)
@action(detail=False, permission_classes=[HasModelPermission])
def export(self, request):
serializer = ConditionExportSerializer(self.get_queryset(), many=True)
xml = ConditionRenderer().render(serializer.data)
return XMLResponse(xml, name='conditions')
@action(detail=True, url_path='export', permission_classes=[HasModelPermission])
def detail_export(self, request, pk=None):
serializer = ConditionExportSerializer(self.get_object())
xml = ConditionRenderer().render([serializer.data])
return XMLResponse(xml, name=self.get_object().key)
class RelationViewSet(ChoicesViewSet):
permission_classes = (IsAuthenticated, )
queryset = Condition.RELATION_CHOICES
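# Routing sketch (illustrative only; rdmo's real URL wiring lives in its urls
# module). It shows how the two viewsets above could be exposed via a router.
from rest_framework.routers import DefaultRouter
example_router = DefaultRouter()
example_router.register(r'conditions', ConditionViewSet, basename='condition')
example_router.register(r'relations', RelationViewSet, basename='relation')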
|
rdmorganiser/rdmo
|
rdmo/conditions/viewsets.py
|
Python
|
apache-2.0
| 2,141 | 0.000934 |
from yowsup.layers.protocol_contacts.protocolentities.iq_sync_get import GetSyncIqProtocolEntity
from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_contacts.protocolentities.test_iq_sync import SyncIqProtocolEntityTest
class GetSyncIqProtocolEntityTest(SyncIqProtocolEntityTest):
def setUp(self):
super(GetSyncIqProtocolEntityTest, self).setUp()
self.ProtocolEntity = GetSyncIqProtocolEntity
users = [
ProtocolTreeNode("user", data = "abc"),
ProtocolTreeNode("user", data = "xyz")
]
syncNode = self.node.getChild("sync")
syncNode.setAttribute("mode", GetSyncIqProtocolEntity.MODE_DELTA)
syncNode.setAttribute("context", GetSyncIqProtocolEntity.CONTEXT_INTERACTIVE)
syncNode.addChildren(users)
|
felix-dumit/campusbot
|
yowsup2/yowsup/layers/protocol_contacts/protocolentities/test_iq_sync_get.py
|
Python
|
mit
| 810 | 0.011111 |
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
url(r'^$', views.fuse_index),
url(r'^/(?P<dir_name>[^/]+)$', views.fuse_artist),
)
|
MechanisM/musicdb
|
musicdb/classical/fuse_urls.py
|
Python
|
agpl-3.0
| 173 | 0.00578 |
from oioioi.base.permissions import make_request_condition
from oioioi.base.utils import request_cached
from oioioi.problems.models import Problem
from oioioi.testrun.utils import testrun_problem_instances
from oioioi.zeus.models import ZeusProblemData
def is_zeus_problem(problem):
try:
return bool(problem.zeusproblemdata)
except ZeusProblemData.DoesNotExist:
return False
def filter_zeus_problem_instances(problem_instances):
    # Not returning a new queryset because `problem_instances` may carry cached data
problems = frozenset(Problem.objects
.filter(pk__in=[p.problem.pk for p in problem_instances])
.exclude(zeusproblemdata=None))
return [pi for pi in problem_instances if pi.problem in problems]
@request_cached
def zeus_testrun_problem_instances(request):
return filter_zeus_problem_instances(testrun_problem_instances(request))
@make_request_condition
@request_cached
def has_any_zeus_testrun_problem(request):
return len(zeus_testrun_problem_instances(request)) > 0
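# Usage note (illustrative): because of make_request_condition, the check above
# works both as a plain callable on a request and as a condition object that
# can be combined with oioioi's other request conditions.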
|
papedaniel/oioioi
|
oioioi/zeus/utils.py
|
Python
|
gpl-3.0
| 1,047 | 0.000955 |
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
import sys, os
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
def _get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
# This function was copied from Lib/distutils/msvccompiler.py
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
def find_msvcrt():
"""Return the name of the VC runtime dll"""
version = _get_build_version()
if version is None:
# better be safe than sorry
return None
if version <= 6:
clibname = 'msvcrt'
else:
clibname = 'msvcr%d' % (version * 10)
# If python was built with in debug mode
import imp
if imp.get_suffixes()[0][0] == '_d.pyd':
clibname += 'd'
return clibname+'.dll'
def find_library(name):
if name in ('c', 'm'):
return find_msvcrt()
# See MSDN for the REAL search order.
for directory in os.environ['PATH'].split(os.pathsep):
fname = os.path.join(directory, name)
if os.path.isfile(fname):
return fname
if fname.lower().endswith(".dll"):
continue
fname = fname + ".dll"
if os.path.isfile(fname):
return fname
return None
if os.name == "ce":
# search path according to MSDN:
# - absolute path specified by filename
# - The .exe launch directory
# - the Windows directory
# - ROM dll files (where are they?)
# - OEM specified search path: HKLM\Loader\SystemPath
def find_library(name):
return name
if os.name == "posix" and sys.platform == "darwin":
from ctypes.macholib.dyld import dyld_find as _dyld_find
def find_library(name):
possible = ['lib%s.dylib' % name,
'%s.dylib' % name,
'%s.framework/%s' % (name, name)]
for name in possible:
try:
return _dyld_find(name)
except ValueError:
continue
return None
elif sys.platform == "cygwin":
def find_library(name):
for libdir in ['/usr/lib', '/usr/local/lib']:
for libext in ['lib%s.dll.a' % name, 'lib%s.a' % name]:
implib = os.path.join(libdir, libext)
if not os.path.exists(implib):
continue
cmd = "dlltool -I " + implib + " 2>/dev/null"
res = os.popen(cmd).read().replace("\n","")
if not res:
continue
return res
return None
elif os.name == "posix":
# Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
import re, tempfile, errno
def _findLib_gcc(name):
expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
fdout, ccout = tempfile.mkstemp()
os.close(fdout)
cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \
'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
try:
f = os.popen(cmd)
try:
trace = f.read()
finally:
rv = f.close()
finally:
try:
os.unlink(ccout)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if rv == 10:
raise OSError, 'gcc or cc command not found'
res = re.search(expr, trace)
if not res:
return None
return res.group(0)
if sys.platform == "sunos5":
# use /usr/ccs/bin/dump on solaris
def _get_soname(f):
if not f:
return None
cmd = "/usr/ccs/bin/dump -Lpv 2>/dev/null " + f
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\[.*\]\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
else:
def _get_soname(f):
# assuming GNU binutils / ELF
if not f:
return None
cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
"objdump -p -j .dynamic 2>/dev/null " + f
f = os.popen(cmd)
dump = f.read()
rv = f.close()
if rv == 10:
raise OSError, 'objdump command not found'
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
if (sys.platform.startswith("freebsd")
or sys.platform.startswith("openbsd")
or sys.platform.startswith("dragonfly")):
def _num_version(libname):
# "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
parts = libname.split(".")
nums = []
try:
while parts:
nums.insert(0, int(parts.pop()))
except ValueError:
pass
return nums or [ sys.maxint ]
def find_library(name):
ename = re.escape(name)
expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
f = os.popen('/sbin/ldconfig -r 2>/dev/null')
try:
data = f.read()
finally:
f.close()
res = re.findall(expr, data)
if not res:
return _get_soname(_findLib_gcc(name))
res.sort(key=_num_version)
return res[-1]
elif sys.platform == "sunos5":
def _findLib_crle(name, is64):
if not os.path.exists('/usr/bin/crle'):
return None
if is64:
cmd = 'env LC_ALL=C /usr/bin/crle -64 2>/dev/null'
else:
cmd = 'env LC_ALL=C /usr/bin/crle 2>/dev/null'
for line in os.popen(cmd).readlines():
line = line.strip()
if line.startswith('Default Library Path (ELF):'):
paths = line.split()[4]
if not paths:
return None
for dir in paths.split(":"):
libfile = os.path.join(dir, "lib%s.so" % name)
if os.path.exists(libfile):
return libfile
return None
def find_library(name, is64 = False):
return _get_soname(_findLib_crle(name, is64) or _findLib_gcc(name))
else:
def _findSoname_ldconfig(name):
import struct
if struct.calcsize('l') == 4:
machine = os.uname()[4] + '-32'
else:
machine = os.uname()[4] + '-64'
mach_map = {
'x86_64-64': 'libc6,x86-64',
'ppc64-64': 'libc6,64bit',
'sparc64-64': 'libc6,64bit',
's390x-64': 'libc6,64bit',
'ia64-64': 'libc6,IA-64',
}
abi_type = mach_map.get(machine, 'libc6')
# XXX assuming GLIBC's ldconfig (with option -p)
expr = r'\s+(lib%s\.[^\s]+)\s+\(%s' % (re.escape(name), abi_type)
f = os.popen('/sbin/ldconfig -p 2>/dev/null')
try:
data = f.read()
finally:
f.close()
res = re.search(expr, data)
if not res:
return None
return res.group(1)
def find_library(name):
return _findSoname_ldconfig(name) or _get_soname(_findLib_gcc(name))
################################################################
# test code
def test():
from ctypes import cdll
if os.name == "nt":
print cdll.msvcrt
print cdll.load("msvcrt")
print find_library("msvcrt")
if os.name == "posix":
# find and load_version
print find_library("m")
print find_library("c")
print find_library("bz2")
# getattr
## print cdll.m
## print cdll.bz2
# load
if sys.platform == "darwin":
print cdll.LoadLibrary("libm.dylib")
print cdll.LoadLibrary("libcrypto.dylib")
print cdll.LoadLibrary("libSystem.dylib")
print cdll.LoadLibrary("System.framework/System")
elif sys.platform == "cygwin":
print cdll.LoadLibrary("cygbz2-1.dll")
print find_library("crypt")
print cdll.LoadLibrary("cygcrypt-0.dll")
else:
print cdll.LoadLibrary("libm.so")
print cdll.LoadLibrary("libcrypt.so")
print find_library("crypt")
if __name__ == "__main__":
test()
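# Interactive examples (results vary by platform):
#     >>> from ctypes.util import find_library
#     >>> find_library("m")    # e.g. 'libm.so.6' on a glibc-based Linux
#     >>> find_library("c")    # e.g. 'libc.so.6'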
|
justathoughtor2/atomicApe
|
cygwin/lib/python2.7/ctypes/util.py
|
Python
|
gpl-3.0
| 9,752 | 0.002051 |
import asyncio
from unittest import mock
import pytest
from waterbutler.core import utils
class TestAsyncRetry:
@pytest.mark.asyncio
async def test_returns_success(self):
mock_func = mock.Mock(return_value='Foo')
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 1
@pytest.mark.asyncio
async def test_retries_until(self):
mock_func = mock.Mock(side_effect=[Exception(), 'Foo'])
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 2
@pytest.mark.asyncio
async def test_retries_then_raises(self):
mock_func = mock.Mock(side_effect=Exception('Foo'))
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
with pytest.raises(Exception) as e:
            await retryable()
assert e.type == Exception
assert e.value.args == ('Foo',)
assert mock_func.call_count == 6
@pytest.mark.asyncio
async def test_retries_by_its_self(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 9
    @pytest.mark.asyncio
    async def test_docstring_survives(self):
async def mytest():
'''This is a docstring'''
pass
retryable = utils.async_retry(8, 0, raven=None)(mytest)
assert retryable.__doc__ == '''This is a docstring'''
@pytest.mark.asyncio
async def test_kwargs_work(self):
async def mytest(mack, *args, **kwargs):
mack()
assert args == ('test', 'Foo')
assert kwargs == {'test': 'Foo', 'baz': 'bam'}
return True
retryable = utils.async_retry(8, 0, raven=None)(mytest)
merk = mock.Mock(side_effect=[Exception(''), 5])
fut = retryable(merk, 'test', 'Foo', test='Foo', baz='bam')
assert await fut
assert merk.call_count == 2
@pytest.mark.asyncio
async def test_all_retry(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 18
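# Usage sketch (illustrative): the decorator is applied as
# utils.async_retry(retries, delay, raven=None), matching the tests above:
#     @utils.async_retry(5, 0, raven=None)
#     async def flaky_fetch():
#         ...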
|
TomBaxter/waterbutler
|
tests/core/test_utils.py
|
Python
|
apache-2.0
| 2,451 | 0 |
##
# Copyright (c) 2015-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from pycalendar.datetime import DateTime
from twext.enterprise.jobs.jobitem import JobItem
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.python.filepath import FilePath
from twistedcaldav.config import config
from twistedcaldav.ical import Component
from txdav.caldav.datastore.scheduling.ischedule.delivery import IScheduleRequest
from txdav.caldav.datastore.scheduling.ischedule.resource import IScheduleInboxResource
from txdav.caldav.datastore.scheduling.work import allScheduleWork
from txdav.caldav.datastore.test.common import CaptureProtocol
from txdav.common.datastore.podding.migration.home_sync import CrossPodHomeSync
from txdav.common.datastore.podding.migration.sync_metadata import CalendarMigrationRecord, \
AttachmentMigrationRecord, CalendarObjectMigrationRecord
from txdav.common.datastore.podding.migration.work import HomeCleanupWork, MigratedHomeCleanupWork, MigrationCleanupWork
from txdav.common.datastore.podding.test.util import MultiStoreConduitTest
from txdav.common.datastore.sql_directory import DelegateRecord,\
DelegateGroupsRecord, ExternalDelegateGroupsRecord
from txdav.common.datastore.sql_tables import _BIND_MODE_READ, \
_HOME_STATUS_DISABLED, _HOME_STATUS_NORMAL, _HOME_STATUS_EXTERNAL, \
_HOME_STATUS_MIGRATING
from txdav.common.datastore.test.util import populateCalendarsFrom
from txdav.who.delegates import Delegates
from txweb2.dav.test.util import SimpleRequest
from txweb2.http_headers import MimeType
from txweb2.stream import MemoryStream
class TestCompleteMigrationCycle(MultiStoreConduitTest):
"""
Test that a full migration cycle using L{CrossPodHomeSync} works.
"""
def __init__(self, methodName='runTest'):
super(TestCompleteMigrationCycle, self).__init__(methodName)
self.stash = {}
@inlineCallbacks
def setUp(self):
@inlineCallbacks
def _fakeSubmitRequest(iself, ssl, host, port, request):
pod = (port - 8008) / 100
inbox = IScheduleInboxResource(self.site.resource, self.theStoreUnderTest(pod), podding=True)
response = yield inbox.http_POST(SimpleRequest(
self.site,
"POST",
"http://{host}:{port}/podding".format(host=host, port=port),
request.headers,
request.stream.mem,
))
returnValue(response)
self.patch(IScheduleRequest, "_submitRequest", _fakeSubmitRequest)
self.accounts = FilePath(__file__).sibling("accounts").child("groupAccounts.xml")
self.augments = FilePath(__file__).sibling("accounts").child("augments.xml")
yield super(TestCompleteMigrationCycle, self).setUp()
yield self.populate()
# Speed up work
self.patch(MigrationCleanupWork, "notBeforeDelay", 1)
self.patch(HomeCleanupWork, "notBeforeDelay", 1)
self.patch(MigratedHomeCleanupWork, "notBeforeDelay", 1)
def configure(self):
super(TestCompleteMigrationCycle, self).configure()
config.GroupAttendees.Enabled = True
config.GroupAttendees.ReconciliationDelaySeconds = 0
config.GroupAttendees.AutoUpdateSecondsFromNow = 0
config.AccountingCategories.migration = True
config.AccountingPrincipals = ["*"]
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements0, self.theStoreUnderTest(0))
yield populateCalendarsFrom(self.requirements1, self.theStoreUnderTest(1))
requirements0 = {
"user01": None,
"user02": None,
"user03": None,
"user04": None,
"user05": None,
"user06": None,
"user07": None,
"user08": None,
"user09": None,
"user10": None,
}
requirements1 = {
"puser01": None,
"puser02": None,
"puser03": None,
"puser04": None,
"puser05": None,
"puser06": None,
"puser07": None,
"puser08": None,
"puser09": None,
"puser10": None,
}
@inlineCallbacks
def _createShare(self, shareFrom, shareTo, accept=True):
# Invite
txnindex = 1 if shareFrom[0] == "p" else 0
home = yield self.homeUnderTest(txn=self.theTransactionUnderTest(txnindex), name=shareFrom, create=True)
calendar = yield home.childWithName("calendar")
shareeView = yield calendar.inviteUIDToShare(shareTo, _BIND_MODE_READ, "summary")
yield self.commitTransaction(txnindex)
# Accept
if accept:
inviteUID = shareeView.shareUID()
txnindex = 1 if shareTo[0] == "p" else 0
shareeHome = yield self.homeUnderTest(txn=self.theTransactionUnderTest(txnindex), name=shareTo)
shareeView = yield shareeHome.acceptShare(inviteUID)
sharedName = shareeView.name()
yield self.commitTransaction(txnindex)
else:
sharedName = None
returnValue(sharedName)
def attachmentToString(self, attachment):
"""
Convenience to convert an L{IAttachment} to a string.
@param attachment: an L{IAttachment} provider to convert into a string.
@return: a L{Deferred} that fires with the contents of the attachment.
@rtype: L{Deferred} firing C{bytes}
"""
capture = CaptureProtocol()
attachment.retrieve(capture)
return capture.deferred
now = {
"now": DateTime.getToday().getYear(),
"now1": DateTime.getToday().getYear() + 1,
}
data01_1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_1
DTSTART:{now1:04d}0102T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data01_1
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_1_changed = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_1
DTSTART:{now1:04d}0102T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data01_1_changed
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_2
DTSTART:{now1:04d}0102T160000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data01_2
ORGANIZER:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:puser02@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_3 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_3
DTSTART:{now1:04d}0102T180000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data01_3
ORGANIZER:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:group02@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data02_1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data02_1
DTSTART:{now1:04d}0103T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data02_1
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data02_2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data02_2
DTSTART:{now1:04d}0103T160000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data02_2
ORGANIZER:mailto:user02@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:puser02@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data02_3 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data02_3
DTSTART:{now1:04d}0103T180000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data02_3
ORGANIZER:mailto:user02@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:group01@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
datap02_1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_datap02_1
DTSTART:{now1:04d}0103T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:datap02_1
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
datap02_2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_datap02_2
DTSTART:{now1:04d}0103T160000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:datap02_2
ORGANIZER:mailto:puser02@example.com
ATTENDEE:mailto:puser02@example.com
ATTENDEE:mailto:user01@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
datap02_3 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_datap02_3
DTSTART:{now1:04d}0103T180000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:datap02_3
ORGANIZER:mailto:puser02@example.com
ATTENDEE:mailto:puser02@example.com
ATTENDEE:mailto:group01@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
@inlineCallbacks
def preCheck(self):
"""
Checks prior to starting any tests
"""
for i in range(self.numberOfStores):
txn = self.theTransactionUnderTest(i)
record = yield txn.directoryService().recordWithUID(u"user01")
self.assertEqual(record.serviceNodeUID, "A")
self.assertEqual(record.thisServer(), i == 0)
record = yield txn.directoryService().recordWithUID(u"user02")
self.assertEqual(record.serviceNodeUID, "A")
self.assertEqual(record.thisServer(), i == 0)
record = yield txn.directoryService().recordWithUID(u"puser02")
self.assertEqual(record.serviceNodeUID, "B")
self.assertEqual(record.thisServer(), i == 1)
yield self.commitTransaction(i)
@inlineCallbacks
def initialState(self):
"""
Set up the server with an initial set of data
user01 - migrating user
user02 - has a calendar shared with user01
user03 - shared to by user01
puser01 - user on other pod
puser02 - has a calendar shared with user01
puser03 - shared to by user01
"""
# Data for user01
home = yield self.homeUnderTest(txn=self.theTransactionUnderTest(0), name="user01", create=True)
self.stash["user01_pod0_home_id"] = home.id()
calendar = yield home.childWithName("calendar")
yield calendar.createCalendarObjectWithName("01_1.ics", Component.fromString(self.data01_1))
yield calendar.createCalendarObjectWithName("01_2.ics", Component.fromString(self.data01_2))
obj3 = yield calendar.createCalendarObjectWithName("01_3.ics", Component.fromString(self.data01_3))
attachment, _ignore_location = yield obj3.addAttachment(None, MimeType.fromString("text/plain"), "test.txt", MemoryStream("Here is some text #1."))
self.stash["user01_attachment_id"] = attachment.id()
self.stash["user01_attachment_md5"] = attachment.md5()
self.stash["user01_attachment_mid"] = attachment.managedID()
yield self.commitTransaction(0)
# Data for user02
home = yield self.homeUnderTest(txn=self.theTransactionUnderTest(0), name="user02", create=True)
calendar = yield home.childWithName("calendar")
yield calendar.createCalendarObjectWithName("02_1.ics", Component.fromString(self.data02_1))
yield calendar.createCalendarObjectWithName("02_2.ics", Component.fromString(self.data02_2))
yield calendar.createCalendarObjectWithName("02_3.ics", Component.fromString(self.data02_3))
yield self.commitTransaction(0)
# Data for puser02
home = yield self.homeUnderTest(txn=self.theTransactionUnderTest(1), name="puser02", create=True)
calendar = yield home.childWithName("calendar")
yield calendar.createCalendarObjectWithName("p02_1.ics", Component.fromString(self.datap02_1))
yield calendar.createCalendarObjectWithName("p02_2.ics", Component.fromString(self.datap02_2))
yield calendar.createCalendarObjectWithName("p02_3.ics", Component.fromString(self.datap02_3))
yield self.commitTransaction(1)
# Share calendars
self.stash["sharename_user01_to_user03"] = yield self._createShare("user01", "user03")
self.stash["sharename_user01_to_puser03"] = yield self._createShare("user01", "puser03")
self.stash["sharename_user02_to_user01"] = yield self._createShare("user02", "user01")
self.stash["sharename_puser02_to_user01"] = yield self._createShare("puser02", "user01")
# Add some delegates
txn = self.theTransactionUnderTest(0)
record01 = yield txn.directoryService().recordWithUID(u"user01")
record02 = yield txn.directoryService().recordWithUID(u"user02")
record03 = yield txn.directoryService().recordWithUID(u"user03")
precord01 = yield txn.directoryService().recordWithUID(u"puser01")
group02 = yield txn.directoryService().recordWithUID(u"group02")
group03 = yield txn.directoryService().recordWithUID(u"group03")
# Add user02 and user03 as individual delegates
yield Delegates.addDelegate(txn, record01, record02, True)
yield Delegates.addDelegate(txn, record01, record03, False)
yield Delegates.addDelegate(txn, record01, precord01, False)
# Add group delegates
yield Delegates.addDelegate(txn, record01, group02, True)
yield Delegates.addDelegate(txn, record01, group03, False)
# Add external delegates
yield txn.assignExternalDelegates(u"user02", None, None, u"external1", u"external2")
yield self.commitTransaction(0)
yield self.waitAllEmpty()
@inlineCallbacks
def secondState(self):
"""
Set up the server with data changes appearing after the first sync
"""
txn = self.theTransactionUnderTest(0)
obj = yield self.calendarObjectUnderTest(txn, name="01_1.ics", calendar_name="calendar", home="user01")
yield obj.setComponent(self.data01_1_changed)
obj = yield self.calendarObjectUnderTest(txn, name="02_2.ics", calendar_name="calendar", home="user02")
attachment, _ignore_location = yield obj.addAttachment(None, MimeType.fromString("text/plain"), "test_02.txt", MemoryStream("Here is some text #02."))
self.stash["user02_attachment_id"] = attachment.id()
self.stash["user02_attachment_md5"] = attachment.md5()
self.stash["user02_attachment_mid"] = attachment.managedID()
yield self.commitTransaction(0)
yield self.waitAllEmpty()
@inlineCallbacks
def finalState(self):
"""
Set up the server with data changes appearing before the final sync
"""
txn = self.theTransactionUnderTest(1)
obj = yield self.calendarObjectUnderTest(txn, name="p02_2.ics", calendar_name="calendar", home="puser02")
attachment, _ignore_location = yield obj.addAttachment(None, MimeType.fromString("text/plain"), "test_p02.txt", MemoryStream("Here is some text #p02."))
self.stash["puser02_attachment_id"] = attachment.id()
self.stash["puser02_attachment_mid"] = attachment.managedID()
self.stash["puser02_attachment_md5"] = attachment.md5()
yield self.commitTransaction(1)
yield self.waitAllEmpty()
@inlineCallbacks
def switchAccounts(self):
"""
Switch the migrated user accounts to point to the new pod
"""
for i in range(self.numberOfStores):
txn = self.theTransactionUnderTest(i)
record = yield txn.directoryService().recordWithUID(u"user01")
yield self.changeRecord(record, txn.directoryService().fieldName.serviceNodeUID, u"B", directory=txn.directoryService())
yield self.commitTransaction(i)
for i in range(self.numberOfStores):
txn = self.theTransactionUnderTest(i)
record = yield txn.directoryService().recordWithUID(u"user01")
self.assertEqual(record.serviceNodeUID, "B")
self.assertEqual(record.thisServer(), i == 1)
record = yield txn.directoryService().recordWithUID(u"user02")
self.assertEqual(record.serviceNodeUID, "A")
self.assertEqual(record.thisServer(), i == 0)
record = yield txn.directoryService().recordWithUID(u"puser02")
self.assertEqual(record.serviceNodeUID, "B")
self.assertEqual(record.thisServer(), i == 1)
yield self.commitTransaction(i)
@inlineCallbacks
def postCheck(self):
"""
Checks after migration is done
"""
# Check that the home has been moved
home = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01")
self.assertTrue(home.external())
home = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_NORMAL)
self.assertTrue(home is None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_EXTERNAL)
self.assertTrue(home is not None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(home is not None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_MIGRATING)
self.assertTrue(home is None)
yield self.commitTransaction(0)
home = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01")
self.assertTrue(home.normal())
home = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_NORMAL)
self.assertTrue(home is not None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_EXTERNAL)
self.assertTrue(home is None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(home is not None)
home = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_MIGRATING)
self.assertTrue(home is None)
yield self.commitTransaction(1)
# Check that the notifications have been moved
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_NORMAL)
self.assertTrue(notifications is None)
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_EXTERNAL)
self.assertTrue(notifications is None)
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(0), name="user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(notifications is not None)
yield self.commitTransaction(0)
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_NORMAL)
self.assertTrue(notifications is not None)
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_EXTERNAL)
self.assertTrue(notifications is None)
notifications = yield self.notificationCollectionUnderTest(self.theTransactionUnderTest(1), name="user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(notifications is not None)
yield self.commitTransaction(1)
# New pod data
homes = {}
homes["user01"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user01")
homes["user02"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user02")
self.assertTrue(homes["user02"].external())
homes["user03"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="user03")
self.assertTrue(homes["user03"].external())
homes["puser01"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="puser01")
self.assertTrue(homes["puser01"].normal())
homes["puser02"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="puser02")
self.assertTrue(homes["puser02"].normal())
homes["puser03"] = yield self.homeUnderTest(self.theTransactionUnderTest(1), name="puser03")
self.assertTrue(homes["puser03"].normal())
# Check calendar data on new pod
calendars = yield homes["user01"].loadChildren()
calnames = dict([(calendar.name(), calendar) for calendar in calendars])
self.assertEqual(
set(calnames.keys()),
set(("calendar", "tasks", "inbox", self.stash["sharename_user02_to_user01"], self.stash["sharename_puser02_to_user01"],))
)
# Check shared-by user01 on new pod
shared = calnames["calendar"]
invitations = yield shared.sharingInvites()
by_sharee = dict([(invitation.shareeUID, invitation) for invitation in invitations])
self.assertEqual(len(invitations), 2)
self.assertEqual(set(by_sharee.keys()), set(("user03", "puser03",)))
self.assertEqual(by_sharee["user03"].shareeHomeID, homes["user03"].id())
self.assertEqual(by_sharee["puser03"].shareeHomeID, homes["puser03"].id())
# Check shared-to user01 on new pod
shared = calnames[self.stash["sharename_user02_to_user01"]]
self.assertEqual(shared.ownerHome().uid(), "user02")
self.assertEqual(shared.ownerHome().id(), homes["user02"].id())
shared = calnames[self.stash["sharename_puser02_to_user01"]]
self.assertEqual(shared.ownerHome().uid(), "puser02")
self.assertEqual(shared.ownerHome().id(), homes["puser02"].id())
shared = yield homes["puser02"].calendarWithName("calendar")
invitations = yield shared.sharingInvites()
self.assertEqual(len(invitations), 1)
self.assertEqual(invitations[0].shareeHomeID, homes["user01"].id())
yield self.commitTransaction(1)
# Old pod data
homes = {}
homes["user01"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user01")
homes["user02"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user02")
self.assertTrue(homes["user02"].normal())
homes["user03"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="user03")
self.assertTrue(homes["user03"].normal())
homes["puser01"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="puser01")
self.assertTrue(homes["puser01"] is None)
homes["puser02"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="puser02")
self.assertTrue(homes["puser02"].external())
homes["puser03"] = yield self.homeUnderTest(self.theTransactionUnderTest(0), name="puser03")
self.assertTrue(homes["puser03"].external())
# Check shared-by user01 on old pod
shared = yield homes["user03"].calendarWithName(self.stash["sharename_user01_to_user03"])
self.assertEqual(shared.ownerHome().uid(), "user01")
self.assertEqual(shared.ownerHome().id(), homes["user01"].id())
# Check shared-to user01 on old pod
shared = yield homes["user02"].calendarWithName("calendar")
invitations = yield shared.sharingInvites()
self.assertEqual(len(invitations), 1)
self.assertEqual(invitations[0].shareeHomeID, homes["user01"].id())
yield self.commitTransaction(0)
# Delegates on each pod
for pod in range(self.numberOfStores):
txn = self.theTransactionUnderTest(pod)
records = {}
for ctr in range(10):
uid = u"user{:02d}".format(ctr + 1)
records[uid] = yield txn.directoryService().recordWithUID(uid)
for ctr in range(10):
uid = u"puser{:02d}".format(ctr + 1)
records[uid] = yield txn.directoryService().recordWithUID(uid)
for ctr in range(10):
uid = u"group{:02d}".format(ctr + 1)
records[uid] = yield txn.directoryService().recordWithUID(uid)
delegates = yield Delegates.delegatesOf(txn, records["user01"], True, False)
self.assertTrue(records["user02"] in delegates)
self.assertTrue(records["group02"] in delegates)
delegates = yield Delegates.delegatesOf(txn, records["user01"], True, True)
self.assertTrue(records["user02"] in delegates)
self.assertTrue(records["user06"] in delegates)
self.assertTrue(records["user07"] in delegates)
self.assertTrue(records["user08"] in delegates)
delegates = yield Delegates.delegatesOf(txn, records["user01"], False, False)
self.assertTrue(records["user03"] in delegates)
self.assertTrue(records["group03"] in delegates)
self.assertTrue(records["puser01"] in delegates)
delegates = yield Delegates.delegatesOf(txn, records["user01"], False, True)
self.assertTrue(records["user03"] in delegates)
self.assertTrue(records["user07"] in delegates)
self.assertTrue(records["user08"] in delegates)
self.assertTrue(records["user09"] in delegates)
self.assertTrue(records["puser01"] in delegates)
# Attachments
obj = yield self.calendarObjectUnderTest(txn=self.theTransactionUnderTest(1), name="01_3.ics", calendar_name="calendar", home="user01")
attachment = yield obj.attachmentWithManagedID(self.stash["user01_attachment_mid"])
self.assertTrue(attachment is not None)
self.assertEqual(attachment.md5(), self.stash["user01_attachment_md5"])
data = yield self.attachmentToString(attachment)
self.assertEqual(data, "Here is some text #1.")
# Check removal of data from new pod
# Make sure all jobs are done
yield JobItem.waitEmpty(self.theStoreUnderTest(1).newTransaction, reactor, 60)
# No migration state data left
txn = self.theTransactionUnderTest(1)
for migrationType in (CalendarMigrationRecord, CalendarObjectMigrationRecord, AttachmentMigrationRecord,):
records = yield migrationType.all(txn)
self.assertEqual(len(records), 0, msg=migrationType.__name__)
yield self.commitTransaction(1)
# No homes
txn = self.theTransactionUnderTest(1)
oldhome = yield txn.calendarHomeWithUID("user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(oldhome is None)
oldhome = yield txn.notificationsWithUID("user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(oldhome is None)
# Check removal of data from old pod
# Make sure all jobs are done
yield JobItem.waitEmpty(self.theStoreUnderTest(0).newTransaction, reactor, 60)
# No homes
txn = self.theTransactionUnderTest(0)
oldhome = yield txn.calendarHomeWithUID("user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(oldhome is None)
oldhome = yield txn.notificationsWithUID("user01", status=_HOME_STATUS_DISABLED)
self.assertTrue(oldhome is None)
# No delegates
for delegateType in (DelegateRecord, DelegateGroupsRecord, ExternalDelegateGroupsRecord):
records = yield delegateType.query(txn, delegateType.delegator == "user01")
self.assertEqual(len(records), 0, msg=delegateType.__name__)
# No work items
for workType in allScheduleWork:
records = yield workType.query(txn, workType.homeResourceID == self.stash["user01_pod0_home_id"])
self.assertEqual(len(records), 0, msg=workType.__name__)
@inlineCallbacks
def test_migration(self):
"""
Full migration cycle.
"""
yield self.preCheck()
# Step 1. Live full sync
yield self.initialState()
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01")
yield syncer.sync()
# Step 2. Live incremental sync
yield self.secondState()
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01")
yield syncer.sync()
# Step 3. Disable home after final changes
yield self.finalState()
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01")
yield syncer.disableRemoteHome()
# Step 4. Final incremental sync
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01", final=True)
yield syncer.sync()
# Step 5. Final reconcile sync
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01", final=True)
yield syncer.finalSync()
# Step 6. Enable new home
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01", final=True)
yield syncer.enableLocalHome()
# Step 7. Remove old home
syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01", final=True)
yield syncer.removeRemoteHome()
yield self.switchAccounts()
yield self.postCheck()
|
macosforge/ccs-calendarserver
|
txdav/common/datastore/podding/migration/test/test_migration.py
|
Python
|
apache-2.0
| 30,577 | 0.002943 |
# record.py
# -------------------------
# Fall 2012; Alex Safatli
# -------------------------
# Software package for handling
# the recording and calculating
# of player scores for the Eclipse
# board game, along with keeping
# track of individual matches.
# Imports
import os, cPickle, datetime
# Match class. Encapsulates a match.
class match():
def __init__(self,participants,results):
self.participants = participants # All players that participated and their roles.
self.results = results # Final VP counts.
self.timestamp = datetime.datetime.now()
self.date = self.timestamp.strftime("%Y-%m-%d")
self.maxvp = max(results.values())
self.changes = {}
def __str__(self):
plyrs = ', '.join(self.participants.keys())
return '[%s] %s' % (self.date, plyrs)
# Player class. Encapsulates a player.
class player():
def __init__(self,name,score=1200):
self.name = name
self.score = score
def __str__(self):
return self.name
# Score processing.
class scores():
def __init__(self,playerdb,matchdb):
def loadData(fn):
fh = open(fn,'r')
dt = cPickle.load(fh)
fh.close()
return dt
self.playerdb = playerdb
self.matchdb = matchdb
self.players = {}
self.matches = []
if os.path.isfile(playerdb):
self.players = loadData(playerdb)
if os.path.isfile(matchdb):
self.matches = loadData(matchdb)
def update(self):
def dumpData(fn,db):
fh = open(fn,'w')
cPickle.dump(db,fh)
fh.close()
# Update both databases.
dumpData(self.playerdb,self.players)
dumpData(self.matchdb,self.matches)
def numGames(self,player):
# Count the number of games for player.
num = 0
if player not in self.players:
return 0
for m in self.matches:
if player in m.participants:
num += 1
return num
def processMatch(self,match):
maxvp = match.maxvp
for player in match.participants:
# See how much of a score increase.
vp = match.results[player]
modifier = 1.0 - 0.2*((maxvp-vp)/(maxvp/10.0))
c = self.changeScore(player,modifier)
match.changes[player] = c
self.matches.append(match)
self.update()
def changeScore(self,player,modifier):
if player not in self.players:
# Default player score.
self.players[player] = 100
numgames = self.numGames(player)
incre = int(11*(1-(numgames+1)/1000.0))
if incre < 1:
incre = 1
change = int(incre*modifier)
self.players[player] += change
self.update()
return change
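# --- Usage sketch (not part of the original module) ---
# A minimal, hypothetical example of recording one match with the classes
# above; the database file names, player names, roles and VP counts are
# assumptions made purely for illustration.
if __name__ == '__main__':
    s = scores('players.db', 'matches.db')          # load or create both databases
    m = match({'Alex': 'Terrans', 'Sam': 'Orion'},  # participants and their roles
              {'Alex': 38, 'Sam': 31})              # final VP counts
    s.processMatch(m)                               # apply score changes and persist
    print(s.players)                                # e.g. {'Alex': 110, 'Sam': 106}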
|
AlexSafatli/EclipseBoardGame
|
record.py
|
Python
|
gpl-2.0
| 2,974 | 0.014459 |
# -*- coding:utf8 -*-
# File : tfutils.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 1/31/17
#
# This file is part of TensorArtist.
import re
import tensorflow as tf
class TArtGraphKeys:
PLACEHOLDERS = 'placeholders'
TART_VARIABLES = 'tart_variables'
INFERENCE_SUMMARIES = 'inference_summaries'
SCALAR_VARIABLES = 'scalar_variables'
OPTIMIZER_VARIABLES = 'optimizer_variables'
# DEPRECATED: (2017-12-02)
TART_OPERATORS = 'tart_operators'
def clean_name(tensor, suffix=':0'):
name = tensor.name
if name.endswith(suffix):
name = name[:-len(suffix)]
return name
def escape_name(tensor):
name = tensor.name
return re.sub(':|/', '_', name)
def clean_summary_suffix(name):
return re.sub(r'_\d+$', '', name)
def remove_tower_name(name):
return re.sub(r'^tower/\d+/', '', name)
def format_summary_name(name):
name = clean_summary_suffix(name)
name = remove_tower_name(name)
if 'train/' in name:
name = name.replace('train/', '')
name = 'train/' + name
return name
def assign_variable(var, value, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
session.run(var.assign(value, use_locking=use_locking))
def fetch_variable(var, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var)
except tf.errors.FailedPreconditionError:
session.run(var.initializer)
return session.run(var)
def fetch_variables(var_list, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var_list)
except tf.errors.FailedPreconditionError as e:
raise ValueError('Uninitialized variable(s) encountered in fetch_variables') from e
def assign_variables(var_list_or_dict, value_list=None, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
assigns = []
if isinstance(var_list_or_dict, dict):
iterator = var_list_or_dict.items()
else:
iterator = zip(var_list_or_dict, value_list)
for var, value in iterator:
assigns.append(tf.assign(var, value, use_locking=use_locking, name='assign_{}'.format(escape_name(var))))
session.run(tf.group(*assigns))
def extend_collection_list(base, *others):
if base is None:
return others
if type(base) is str:
return (base, ) + others
assert isinstance(base, (tuple, list))
return tuple(base) + others
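# --- Usage sketch (not part of the original module) ---
# An assumed illustration of the name helpers above on a typical tower-scoped
# TensorFlow tensor name; the expected outputs are inferred from the regexes:
#
#   t = tf.add(a, b, name='tower/0/train/loss')   # t.name == 'tower/0/train/loss:0'
#   clean_name(t)                   # -> 'tower/0/train/loss'   (':0' suffix stripped)
#   escape_name(t)                  # -> 'tower_0_train_loss_0' (':' and '/' replaced)
#   format_summary_name('tower/0/train/loss')
#                                   # -> 'train/loss' (tower prefix removed,
#                                   #    'train/' hoisted to the front)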
|
vacancy/TensorArtist
|
tartist/nn/tfutils.py
|
Python
|
mit
| 2,691 | 0.00223 |
#!/usr/bin/env python
# cardinal_pythonlib/sqlalchemy/dump.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Functions to help with large-scale dumping of data from SQLAlchemy systems.**
"""
import datetime
import decimal
import sys
from typing import Any, Callable, Dict, TextIO, Type, Union
import pendulum
# noinspection PyProtectedMember
from sqlalchemy.engine import Connectable, create_engine
from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.default import DefaultDialect # for type hints
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.inspection import inspect
from sqlalchemy.orm.query import Query
from sqlalchemy.sql.base import Executable
from sqlalchemy.sql.elements import BindParameter
from sqlalchemy.sql.expression import select
from sqlalchemy.sql.schema import MetaData, Table
from sqlalchemy.sql.sqltypes import DateTime, NullType, String
from cardinal_pythonlib.file_io import writeline_nl, writelines_nl
from cardinal_pythonlib.logs import get_brace_style_log_with_null_handler
from cardinal_pythonlib.sql.literals import sql_comment
from cardinal_pythonlib.sqlalchemy.dialect import SqlaDialectName
from cardinal_pythonlib.sqlalchemy.orm_inspect import walk_orm_tree
from cardinal_pythonlib.sqlalchemy.schema import get_table_names
log = get_brace_style_log_with_null_handler(__name__)
SEP1 = sql_comment("=" * 76)
SEP2 = sql_comment("-" * 76)
# =============================================================================
# Dump functions: get DDL and/or data as SQL commands
# =============================================================================
def dump_connection_info(engine: Engine, fileobj: TextIO = sys.stdout) -> None:
"""
Dumps some connection info, as an SQL comment. Obscures passwords.
Args:
engine: the SQLAlchemy :class:`Engine` to dump metadata information
from
fileobj: the file-like object (default ``sys.stdout``) to write
information to
"""
meta = MetaData(bind=engine)
writeline_nl(fileobj, sql_comment(f'Database info: {meta}'))
def dump_ddl(metadata: MetaData,
dialect_name: str,
fileobj: TextIO = sys.stdout,
checkfirst: bool = True) -> None:
"""
Sends schema-creating DDL from the metadata to the dump engine.
This makes ``CREATE TABLE`` statements.
Args:
metadata: SQLAlchemy :class:`MetaData`
dialect_name: string name of SQL dialect to generate DDL in
fileobj: file-like object to send DDL to
checkfirst: if ``True``, use ``CREATE TABLE IF NOT EXISTS`` or
equivalent.
"""
# http://docs.sqlalchemy.org/en/rel_0_8/faq.html#how-can-i-get-the-create-table-drop-table-output-as-a-string # noqa
# https://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy # noqa
# https://github.com/plq/scripts/blob/master/pg_dump.py
# noinspection PyUnusedLocal
def dump(querysql, *multiparams, **params):
compsql = querysql.compile(dialect=engine.dialect)
writeline_nl(fileobj, f"{compsql};")
writeline_nl(fileobj,
sql_comment(f"Schema (for dialect {dialect_name}):"))
engine = create_engine(f"{dialect_name}://",
strategy="mock", executor=dump)
metadata.create_all(engine, checkfirst=checkfirst)
# ... checkfirst doesn't seem to be working for the mock strategy...
# http://docs.sqlalchemy.org/en/latest/core/metadata.html
# ... does it implement a *real* check (impossible here), rather than
# issuing CREATE ... IF NOT EXISTS?
def quick_mapper(table: Table) -> Type[DeclarativeMeta]:
"""
Makes a new SQLAlchemy mapper for an existing table.
See
https://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/
Args:
table: SQLAlchemy :class:`Table` object
Returns:
a :class:`DeclarativeMeta` class
""" # noqa
# noinspection PyPep8Naming
Base = declarative_base()
class GenericMapper(Base):
__table__ = table
# noinspection PyTypeChecker
return GenericMapper
class StringLiteral(String):
"""
Teach SQLAlchemy how to literalize various things.
See
https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
"""
def literal_processor(self,
dialect: DefaultDialect) -> Callable[[Any], str]:
super_processor = super().literal_processor(dialect)
def process(value: Any) -> str:
log.debug("process: {!r}", value)
if isinstance(value, int):
return str(value)
if not isinstance(value, str):
value = str(value)
result = super_processor(value)
if isinstance(result, bytes):
result = result.decode(dialect.encoding)
return result
return process
# noinspection PyPep8Naming
def make_literal_query_fn(dialect: DefaultDialect) -> Callable[[str], str]:
DialectClass = dialect.__class__
# noinspection PyClassHasNoInit,PyAbstractClass
class LiteralDialect(DialectClass):
# https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa
colspecs = {
# prevent various encoding explosions
String: StringLiteral,
# teach SA about how to literalize a datetime
DateTime: StringLiteral,
# don't format py2 long integers to NULL
NullType: StringLiteral,
}
def literal_query(statement: str) -> str:
"""
NOTE: This is entirely insecure. DO NOT execute the resulting
strings.
"""
# https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa
if isinstance(statement, Query):
statement = statement.statement
return statement.compile(
dialect=LiteralDialect(),
compile_kwargs={'literal_binds': True},
).string + ";"
return literal_query
# noinspection PyProtectedMember
def get_literal_query(statement: Union[Query, Executable],
bind: Connectable = None) -> str:
"""
Takes an SQLAlchemy statement and produces a literal SQL version, with
values filled in.
As per
https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
Notes:
- for debugging purposes *only*
- insecure; you should always separate queries from their values
- please also note that this function is quite slow
Args:
statement: the SQL statement (a SQLAlchemy object) to use
bind: if the statement is unbound, you will need to specify an object
here that supports SQL execution
Returns:
a string literal version of the query.
""" # noqa
# log.debug("statement: {!r}", statement)
# log.debug("statement.bind: {!r}", statement.bind)
if isinstance(statement, Query):
if bind is None:
bind = statement.session.get_bind(statement._mapper_zero_or_none())
statement = statement.statement
elif bind is None:
bind = statement.bind
if bind is None: # despite all that
raise ValueError("Attempt to call get_literal_query with an unbound "
"statement and no 'bind' parameter")
# noinspection PyUnresolvedReferences
dialect = bind.dialect
compiler = statement._compiler(dialect)
class LiteralCompiler(compiler.__class__):
# noinspection PyMethodMayBeStatic
def visit_bindparam(self,
bindparam: BindParameter,
within_columns_clause: bool = False,
literal_binds: bool = False,
**kwargs) -> str:
return super().render_literal_bindparam(
bindparam,
within_columns_clause=within_columns_clause,
literal_binds=literal_binds,
**kwargs
)
# noinspection PyUnusedLocal
def render_literal_value(self, value: Any, type_) -> str:
"""Render the value of a bind parameter as a quoted literal.
This is used for statement sections that do not accept bind
parameters on the target driver/database.
This should be implemented by subclasses using the quoting services
of the DBAPI.
"""
if isinstance(value, str):
value = value.replace("'", "''")
return "'%s'" % value
elif value is None:
return "NULL"
elif isinstance(value, (float, int)):
return repr(value)
elif isinstance(value, decimal.Decimal):
return str(value)
elif (isinstance(value, datetime.datetime) or
isinstance(value, datetime.date) or
isinstance(value, datetime.time) or
isinstance(value, pendulum.DateTime) or
isinstance(value, pendulum.Date) or
isinstance(value, pendulum.Time)):
# All have an isoformat() method.
return f"'{value.isoformat()}'"
# return (
# "TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')"
# % value.strftime("%Y-%m-%d %H:%M:%S")
# )
else:
raise NotImplementedError(
"Don't know how to literal-quote value %r" % value)
compiler = LiteralCompiler(dialect, statement)
return compiler.process(statement) + ";"
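# Example (assumed): rendering a simple bound SELECT with its value inlined,
# for debugging only; the table and engine names are hypothetical and the
# output shown is approximate:
#
#   q = select([person.c.name]).where(person.c.id == 3)
#   get_literal_query(q, bind=engine)
#   # -> "SELECT person.name FROM person WHERE person.id = 3;"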
def dump_table_as_insert_sql(engine: Engine,
table_name: str,
fileobj: TextIO,
wheredict: Dict[str, Any] = None,
include_ddl: bool = False,
multirow: bool = False) -> None:
"""
Reads a table from the database, and writes SQL to replicate the table's
data to the output ``fileobj``.
Args:
engine: SQLAlchemy :class:`Engine`
table_name: name of the table
fileobj: file-like object to write to
wheredict: optional dictionary of ``{column_name: value}`` to use as
``WHERE`` filters
include_ddl: if ``True``, include the DDL to create the table as well
multirow: write multi-row ``INSERT`` statements
"""
# https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa
# http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html
# http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ # noqa
# https://github.com/plq/scripts/blob/master/pg_dump.py
log.info("dump_data_as_insert_sql: table_name={}", table_name)
writelines_nl(fileobj, [
SEP1,
sql_comment(f"Data for table: {table_name}"),
SEP2,
sql_comment(f"Filters: {wheredict}"),
])
dialect = engine.dialect
if not dialect.supports_multivalues_insert:
multirow = False
if multirow:
log.warning("dump_data_as_insert_sql: multirow parameter substitution "
"not working yet")
multirow = False
# literal_query = make_literal_query_fn(dialect)
meta = MetaData(bind=engine)
log.debug("... retrieving schema")
table = Table(table_name, meta, autoload=True)
if include_ddl:
log.debug("... producing DDL")
dump_ddl(table.metadata, dialect_name=engine.dialect.name,
fileobj=fileobj)
# NewRecord = quick_mapper(table)
# columns = table.columns.keys()
log.debug("... fetching records")
# log.debug("meta: {}", meta) # obscures password
# log.debug("table: {}", table)
# log.debug("table.columns: {!r}", table.columns)
# log.debug("multirow: {}", multirow)
query = select(table.columns)
if wheredict:
for k, v in wheredict.items():
col = table.columns.get(k)
query = query.where(col == v)
# log.debug("query: {}", query)
cursor = engine.execute(query)
if multirow:
row_dict_list = []
for r in cursor:
row_dict_list.append(dict(r))
# log.debug("row_dict_list: {}", row_dict_list)
if row_dict_list:
statement = table.insert().values(row_dict_list)
# log.debug("statement: {!r}", statement)
# insert_str = literal_query(statement)
insert_str = get_literal_query(statement, bind=engine)
# NOT WORKING FOR MULTIROW INSERTS. ONLY SUBSTITUTES FIRST ROW.
writeline_nl(fileobj, insert_str)
else:
writeline_nl(fileobj, sql_comment("No data!"))
else:
found_one = False
for r in cursor:
found_one = True
row_dict = dict(r)
statement = table.insert(values=row_dict)
# insert_str = literal_query(statement)
insert_str = get_literal_query(statement, bind=engine)
# log.debug("row_dict: {}", row_dict)
# log.debug("insert_str: {}", insert_str)
writeline_nl(fileobj, insert_str)
if not found_one:
writeline_nl(fileobj, sql_comment("No data!"))
writeline_nl(fileobj, SEP2)
log.debug("... done")
def dump_database_as_insert_sql(engine: Engine,
fileobj: TextIO = sys.stdout,
include_ddl: bool = False,
multirow: bool = False) -> None:
"""
Reads an entire database and writes SQL to replicate it to the output
file-like object.
Args:
engine: SQLAlchemy :class:`Engine`
fileobj: file-like object to write to
include_ddl: if ``True``, include the DDL to create the table as well
multirow: write multi-row ``INSERT`` statements
"""
for tablename in get_table_names(engine):
dump_table_as_insert_sql(
engine=engine,
table_name=tablename,
fileobj=fileobj,
include_ddl=include_ddl,
multirow=multirow
)
def dump_orm_object_as_insert_sql(engine: Engine,
obj: object,
fileobj: TextIO) -> None:
"""
Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate it
to the output file-like object.
Args:
engine: SQLAlchemy :class:`Engine`
obj: SQLAlchemy ORM object to write
fileobj: file-like object to write to
"""
# literal_query = make_literal_query_fn(engine.dialect)
insp = inspect(obj)
# insp: an InstanceState
# http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState # noqa
# insp.mapper: a Mapper
# http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper # noqa
# Don't do this:
# table = insp.mapper.mapped_table
# Do this instead. The method above gives you fancy data types like list
# and Arrow on the Python side. We want the bog-standard datatypes drawn
# from the database itself.
meta = MetaData(bind=engine)
table_name = insp.mapper.mapped_table.name
# log.debug("table_name: {}", table_name)
table = Table(table_name, meta, autoload=True)
# log.debug("table: {}", table)
# NewRecord = quick_mapper(table)
# columns = table.columns.keys()
query = select(table.columns)
# log.debug("query: {}", query)
for orm_pkcol in insp.mapper.primary_key:
core_pkcol = table.columns.get(orm_pkcol.name)
pkval = getattr(obj, orm_pkcol.name)
query = query.where(core_pkcol == pkval)
# log.debug("query: {}", query)
cursor = engine.execute(query)
row = cursor.fetchone() # should only be one...
row_dict = dict(row)
# log.debug("obj: {}", obj)
# log.debug("row_dict: {}", row_dict)
statement = table.insert(values=row_dict)
# insert_str = literal_query(statement)
insert_str = get_literal_query(statement, bind=engine)
writeline_nl(fileobj, insert_str)
def bulk_insert_extras(dialect_name: str,
fileobj: TextIO,
start: bool) -> None:
"""
Writes bulk ``INSERT`` preamble (start=True) or end (start=False).
For MySQL, this temporarily switches off autocommit behaviour and index/FK
checks, for speed, then re-enables them at the end and commits.
Args:
dialect_name: SQLAlchemy dialect name (see :class:`SqlaDialectName`)
fileobj: file-like object to write to
start: if ``True``, write preamble; if ``False``, write end
"""
lines = []
if dialect_name == SqlaDialectName.MYSQL:
if start:
lines = [
"SET autocommit=0;",
"SET unique_checks=0;",
"SET foreign_key_checks=0;",
]
else:
lines = [
"SET foreign_key_checks=1;",
"SET unique_checks=1;",
"COMMIT;",
]
writelines_nl(fileobj, lines)
def dump_orm_tree_as_insert_sql(engine: Engine,
baseobj: object,
fileobj: TextIO) -> None:
"""
Sends an object, and all its relations (discovered via "relationship"
links) as ``INSERT`` commands in SQL, to ``fileobj``.
Args:
engine: SQLAlchemy :class:`Engine`
baseobj: starting SQLAlchemy ORM object
fileobj: file-like object to write to
Problem: foreign key constraints.
- MySQL/InnoDB doesn't wait to the end of a transaction to check FK
integrity (which it should):
https://stackoverflow.com/questions/5014700/in-mysql-can-i-defer-referential-integrity-checks-until-commit # noqa
- PostgreSQL can.
- Anyway, slightly ugly hacks...
https://dev.mysql.com/doc/refman/5.5/en/optimizing-innodb-bulk-data-loading.html
- Not so obvious how we can iterate through the list of ORM objects and
guarantee correct insertion order with respect to all FKs.
""" # noqa
writeline_nl(
fileobj,
sql_comment("Data for all objects related to the first below:"))
bulk_insert_extras(engine.dialect.name, fileobj, start=True)
for part in walk_orm_tree(baseobj):
dump_orm_object_as_insert_sql(engine, part, fileobj)
bulk_insert_extras(engine.dialect.name, fileobj, start=False)
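# --- Usage sketch (not part of the original module) ---
# A minimal, assumed example of dumping an in-memory SQLite database as DDL
# plus literal INSERT statements, using the SQLAlchemy 1.3-era idioms that
# the functions above rely on.
if __name__ == "__main__":
    from sqlalchemy.sql.schema import Column
    from sqlalchemy.sql.sqltypes import Integer, Text

    demo_meta = MetaData()
    person = Table(
        "person", demo_meta,
        Column("id", Integer, primary_key=True),
        Column("name", Text),
    )
    demo_engine = create_engine("sqlite://")
    demo_meta.create_all(demo_engine)
    demo_engine.execute(person.insert().values(name="Alice"))
    # Emits CREATE TABLE followed by literal INSERT statements on stdout.
    dump_database_as_insert_sql(demo_engine, fileobj=sys.stdout, include_ddl=True)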
|
RudolfCardinal/pythonlib
|
cardinal_pythonlib/sqlalchemy/dump.py
|
Python
|
apache-2.0
| 19,483 | 0 |
# -*- coding: utf-8 -*-
"""
MediaProvider
A device centric multimedia solution
----------------------------------------------------------------------------
(C) direct Netware Group - All rights reserved
https://www.direct-netware.de/redirect?mp;core
The following license agreement remains valid unless any additions or
changes are being made by direct Netware Group in a written form.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
----------------------------------------------------------------------------
https://www.direct-netware.de/redirect?licenses;gpl
----------------------------------------------------------------------------
#echo(mpCoreVersion)#
#echo(__FILEPATH__)#
"""
from dNG.data.upnp.resources.mp_entry import MpEntry
from dNG.database.condition_definition import ConditionDefinition
from dNG.database.connection import Connection
from dNG.database.sort_definition import SortDefinition
from dNG.plugins.hook import Hook
from dNG.runtime.value_exception import ValueException
from .abstract_segment import AbstractSegment
from .criteria_definition import CriteriaDefinition
class CommonMpEntrySegment(AbstractSegment):
"""
"CommonMpEntrySegment" provides UPnP searches for "MpEntry" instances.
:author: direct Netware Group et al.
:copyright: direct Netware Group - All rights reserved
:package: mp
:subpackage: core
:since: v0.2.00
:license: https://www.direct-netware.de/redirect?licenses;gpl
GNU General Public License 2
"""
def __init__(self):
"""
Constructor __init__(CommonMpEntrySegment)
:since: v0.2.00
"""
AbstractSegment.__init__(self)
self.condition_definition = None
"""
Database query condition definition
"""
self.pre_condition_failed = False
"""
True if a pre-condition fails
"""
#
def _ensure_condition_definition(self):
"""
Checks and sets the database query condition definition based on the defined
UPnP criteria definition specified.
:since: v0.2.00
"""
if ((not self.pre_condition_failed) and self.condition_definition is None):
self.condition_definition = self._rewrite_criteria_definition_walker(self.criteria_definition)
#
#
def get_count(self):
"""
Returns the total number of matches in this UPnP search segment.
:return: (int) Number of matches
:since: v0.2.00
"""
self._ensure_condition_definition()
return (0
if (self.pre_condition_failed) else
MpEntry.get_entries_count_with_condition(self.condition_definition)
)
#
def get_list(self):
"""
Returns the list of UPnP resource search segment results as defined by
"offset" and "limit".
:return: (list) List of search segment results
:since: v0.2.00
"""
_return = [ ]
self._ensure_condition_definition()
sort_definition = SortDefinition()
# @TODO: if (len(self.sort_tuples) > 0): MpEntry._db_append_didl_field_sort_definition
sort_definition.append("title", SortDefinition.ASCENDING)
if (not self.pre_condition_failed):
with Connection.get_instance():
entries = MpEntry.load_entries_list_with_condition(self.condition_definition,
self.offset,
self.limit,
sort_definition
)
for entry in entries:
if (self.client_user_agent is not None): entry.set_client_user_agent(self.client_user_agent)
_return.append(entry)
#
#
#
return _return
#
def _get_property_attribute_name(self, _property):
"""
Returns the database attribute name for the given lower-case property.
:param _property: Lower-case property
:return: (str) Database attribute name
:since: v0.2.00
"""
_return = None
if (_property == "@id"): _return = "id"
elif (_property == "@refid"): _return = "resource"
elif (_property in ( "dc:date", "upnp:recordedStartDateTime" )): _return = "time_sortable"
elif (_property == "dc:description"): _return = "description"
elif (_property == "dc:title"): _return = "title"
elif (_property == "res@size"): _return = "size"
elif (_property == "upnp:class"): _return = "identity"
if (_return is None): raise ValueException("UPnP property '{0}' not defined".format(_property))
return _return
#
def _rewrite_criteria_definition_walker(self, criteria_definition):
"""
Adds the specified criteria to the given database query condition
definition.
:param criteria_definition: Criteria definition instance
:return: (object) Database condition definition instance
:since: v0.2.00
"""
condition_concatenation = (ConditionDefinition.AND
if (criteria_definition.get_concatenation() == CriteriaDefinition.AND) else
ConditionDefinition.OR
)
_return = ConditionDefinition(condition_concatenation)
for criteria in criteria_definition.get_criteria():
condition_method = None
criteria_property = criteria.get("property")
criteria_type = criteria['type']
criteria_value = None
if (criteria_property == "@id"
and "value" in criteria
and "://" in criteria['value']
): criteria_value = criteria['value'].split("://", 1)[1]
if (criteria_property == "@refid" and
criteria_type in ( CriteriaDefinition.TYPE_DEFINED_MATCH, CriteriaDefinition.TYPE_NOT_DEFINED_MATCH)
):
value_list = Hook.call("mp.upnp.MpResource.getReferenceDbIdentities")
if (type(value_list) is list
and len(value_list) > 0
):
if (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH): _return.add_in_list_match_condition("identity", value_list)
else: _return.add_not_in_list_match_condition("identity", value_list)
elif (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH):
self.pre_condition_failed = True
break
#
elif (criteria_type == CriteriaDefinition.TYPE_SUB_CRITERIA):
condition_definition = self._rewrite_criteria_definition_walker(criteria['criteria_definition'])
if (self.pre_condition_failed): break
else: _return.add_sub_condition(condition_definition)
elif (criteria_type == CriteriaDefinition.TYPE_CASE_INSENSITIVE_MATCH):
condition_method = _return.add_case_insensitive_match_condition
criteria_value = "*{0}*".format(criteria['value'])
elif (criteria_type == CriteriaDefinition.TYPE_CASE_INSENSITIVE_NO_MATCH):
condition_method = _return.add_case_insensitive_no_match_condition
criteria_value = "*{0}*".format(criteria['value'])
elif (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH):
attribute = self._get_property_attribute_name(criteria['property'])
_return.add_exact_no_match_condition(attribute, None)
elif (criteria_type == CriteriaDefinition.TYPE_DERIVED_CRITERIA):
if (criteria_property != "upnp:class"): raise ValueException("UPnP 'derivedFrom' criteria is only supported for the 'upnp:class' property")
criteria_value = criteria['value'].lower().strip()
condition_definition = (_return
if (_return.get_concatenation() == ConditionDefinition.OR) else
ConditionDefinition()
)
old_conditions_count = condition_definition.get_conditions_count()
Hook.call("mp.upnp.MpResource.applyValueDerivedDbCondition",
condition_definition = condition_definition,
value = criteria_value
)
if (old_conditions_count == condition_definition.get_conditions_count()):
self.pre_condition_failed = True
break
#
if (_return.get_concatenation() == ConditionDefinition.AND):
_return.add_sub_condition(condition_definition)
#
elif (criteria_type == CriteriaDefinition.TYPE_EXACT_MATCH):
condition_method = _return.add_exact_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_EXACT_NO_MATCH):
condition_method = _return.add_exact_no_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_GREATER_THAN_MATCH):
condition_method = _return.add_greater_than_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_GREATER_THAN_OR_EQUAL_MATCH):
condition_method = _return.add_greater_than_or_equal_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_LESS_THAN_MATCH):
condition_method = _return.add_less_than_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_LESS_THAN_OR_EQUAL_MATCH):
condition_method = _return.add_less_than_or_equal_match_condition
elif (criteria_type == CriteriaDefinition.TYPE_NOT_DEFINED_MATCH):
attribute = self._get_property_attribute_name(criteria_property)
_return.add_exact_match_condition(attribute, None)
#
if (condition_method is not None):
if (criteria_value is None): criteria_value = criteria['value']
attribute = self._get_property_attribute_name(criteria_property)
value = (Hook.call("mp.upnp.MpResource.getDbIdentity", value = criteria_value)
if (criteria_property == "upnp:class") else
self._rewrite_value(criteria_value)
)
condition_method(attribute, value)
#
#
if (self.pre_condition_failed): _return = None
return _return
#
def _rewrite_value(self, value):
"""
Rewrites the value to be used in a database query.
:param value: Value to be rewritten
:return: (str) Rewritten value
:since: v0.2.00
"""
_return = value.strip()
_return = Connection.get_instance().escape_like_condition(_return)
_return = _return.replace("*", "%")
return _return
#
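# Example (assumed): for a case-insensitive "contains" criteria the caller
# wraps the search term in asterisks, so
#     self._rewrite_value("*rock*")   # -> "%rock%" after LIKE-escaping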
def set_criteria_definition(self, criteria_definition):
"""
Sets the UPnP search criteria definition used.
:param criteria_definition: Criteria definition instance
:since: v0.2.00
"""
AbstractSegment.set_criteria_definition(self, criteria_definition)
self.condition_definition = None
#
@staticmethod
def apply_value_derived_db_condition(params, last_return = None):
"""
Called for "mp.upnp.MpResource.applyValueDerivedDbCondition"
:param params: Parameter specified
:param last_return: The return value from the last hook called.
:return: (mixed) Return value
:since: v0.2.00
"""
if ("condition_definition" not in params
or "value" not in params
): raise ValueException("Missing required arguments")
condition_definition = params['condition_definition']
value = "{0}.".format(params['value'])
is_generic_container = "object.container.".startswith(value)
is_audio_container = "object.container.genre.musicGenre.".startswith(value)
is_image_container = "object.container.album.photoAlbum.".startswith(value)
is_video_container = "object.container.genre.movieGenre.".startswith(value)
if (is_generic_container or is_audio_container or is_image_container or is_video_container):
and_condition_definition = ConditionDefinition(ConditionDefinition.AND)
and_condition_definition.add_exact_match_condition("cds_type", MpEntry.DB_CDS_TYPE_CONTAINER)
and_condition_definition.add_exact_match_condition("identity", "MpUpnpResource")
if (is_audio_container):
and_condition_definition.add_exact_match_condition("mimetype", "text/x-directory-upnp-audio")
elif (is_image_container):
and_condition_definition.add_exact_match_condition("mimetype", "text/x-directory-upnp-image")
elif (is_video_container):
and_condition_definition.add_exact_match_condition("mimetype", "text/x-directory-upnp-video")
#
condition_definition.add_sub_condition(and_condition_definition)
#
if ("object.item.audioitem.".startswith(value)): condition_definition.add_exact_match_condition("identity", "MpUpnpAudioResource")
if ("object.item.imageitem.".startswith(value)): condition_definition.add_exact_match_condition("identity", "MpUpnpImageResource")
if ("object.item.videoitem.".startswith(value)): condition_definition.add_exact_match_condition("identity", "MpUpnpVideoResource")
return last_return
#
@classmethod
def is_search_criteria_definition_supported(cls, criteria_definition):
"""
Checks if only supported MpEntry instance attributes are queried.
:param cls: Python class
:param criteria_definition: Criteria definition instance
:return: (bool) True if only supported MpEntry instance attributes are
queried
:since: v0.2.00
"""
return cls._is_search_criteria_definition_supported_walker(criteria_definition)
#
@classmethod
def _is_search_criteria_definition_supported_walker(cls, criteria_definition):
"""
Checks recursively if only supported MpEntry instance attributes are
queried.
:param cls: Python class
:param criteria_definition: Criteria definition instance
:return: (bool) True if only supported MpEntry instance attributes are
queried
:since: v0.2.00
"""
_return = True
for criteria in criteria_definition.get_criteria():
_return = (cls._is_search_criteria_definition_supported_walker(criteria['criteria_definition'])
if (criteria['type'] == CriteriaDefinition.TYPE_SUB_CRITERIA) else
(criteria['property'] in ( "@id",
"@refid",
"dc:date",
"dc:title",
"res@size",
"upnp:class",
"upnp:recordedStartDateTime"
)
)
)
if (not _return): break
#
return _return
#
#
|
dNG-git/mp_core
|
src/dNG/data/upnp/search/common_mp_entry_segment.py
|
Python
|
gpl-2.0
| 16,034 | 0.005987 |
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import os
import random
import re
import stat
import tempfile
import time
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail
from ansible.executor.module_common import modify_module
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_type, string_types, text_type, iteritems, with_metaclass
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.clean import remove_internal_keys
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionBase(with_metaclass(ABCMeta, object)):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
# A set of valid arguments
_VALID_ARGS = frozenset([])
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._cleanup_remote_tmp = False
self._supports_check_mode = True
self._supports_async = False
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
self._used_interpreter = None
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Deprecated parameter. This is no longer used. An action plugin that calls
another one and wants to use the same remote tmp for both should set
self._connection._shell.tmpdir rather than this parameter.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementors of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
result = {}
if tmp is not None:
result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir']
del tmp
if self._task.async_val and not self._supports_async:
raise AnsibleActionFail('async is not supported for this task.')
elif self._play_context.check_mode and not self._supports_check_mode:
raise AnsibleActionSkip('check mode is not supported for this task.')
elif self._task.async_val and self._play_context.check_mode:
raise AnsibleActionFail('check mode and async cannot be used on same task.')
# Error if invalid argument is passed
if self._VALID_ARGS:
task_opts = frozenset(self._task.args.keys())
bad_opts = task_opts.difference(self._VALID_ARGS)
if bad_opts:
raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))
if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
self._make_tmp_path()
return result
def _remote_file_exists(self, path):
cmd = self._connection._shell.exists(path)
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
if result['rc'] == 0:
return True
return False
def _configure_module(self, module_name, module_args, task_vars=None):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if task_vars is None:
task_vars = dict()
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
                # win_stat, win_file, and win_copy are not exact equivalents
                # of their python counterparts, but they are compatible enough
                # for our internal usage
if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
module_name = 'win_%s' % module_name
# Remove extra quotes surrounding path parameters before sending to module.
if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
# Use Windows version of ping module to check module paths when
# using a connection that supports .ps1 suffixes. We check specifically
# for win_ping here, otherwise the code would look for ping.ps1
if '.ps1' in self._connection.module_implementation_preferences:
ping_module = 'win_ping'
else:
ping_module = 'ping'
module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:
raise AnsibleError("The module %s was not found in configured module paths. "
"Additionally, core modules are missing. If this is a checkout, "
"run 'git pull --rebase' to correct this problem." % (module_name))
# insert shared code and arguments into the module
final_environment = dict()
self._compute_environment_string(final_environment)
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
task_vars=task_vars,
module_compression=self._play_context.module_compression,
async_timeout=self._task.async_val,
become=self._play_context.become,
become_method=self._play_context.become_method,
become_user=self._play_context.become_user,
become_password=self._play_context.become_pass,
become_flags=self._play_context.become_flags,
environment=final_environment)
return (module_style, module_shebang, module_data, module_path)
def _compute_environment_string(self, raw_environment_out=None):
'''
Builds the environment string to be used when executing the remote task.
'''
final_environment = dict()
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
environments = [environments]
            # The order of environments matters: we merge in the parent's
            # values first so that values set in the block, and then the
            # task, 'win' in precedence
for environment in environments:
if environment is None or len(environment) == 0:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
# very deliberately using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
if len(final_environment) > 0:
final_environment = self._templar.template(final_environment)
if isinstance(raw_environment_out, dict):
raw_environment_out.clear()
raw_environment_out.update(final_environment)
return self._connection._shell.env_prefix(**final_environment)
def _early_needs_tmp_path(self):
'''
Determines if a tmp path should be created before the action is executed.
'''
return getattr(self, 'TRANSFERS_FILES', False)
def _is_pipelining_enabled(self, module_style, wrap_async=False):
'''
Determines if we are required and can do pipelining
'''
        # all of the following must be true; any single False disables pipelining
for condition in [
self._connection.has_pipelining,
self._play_context.pipelining or self._connection.always_pipeline_modules, # pipelining enabled for play or connection requires it (eg winrm)
module_style == "new", # old style modules do not support pipelining
not C.DEFAULT_KEEP_REMOTE_FILES, # user wants remote files
not wrap_async or self._connection.always_pipeline_modules, # async does not normally support pipelining unless it does (eg winrm)
self._play_context.become_method != 'su', # su does not work with pipelining,
# FIXME: we might need to make become_method exclusion a configurable list
]:
if not condition:
return False
return True
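    # e.g. (hypothetical values): has_pipelining=True, play pipelining=True,
    # module_style='new', keep_remote_files=False, wrap_async=False and
    # become_method='sudo' -> every condition holds, so pipelining is used;
    # a single False short-circuits the loop above and disables it.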
def _get_admin_users(self):
'''
Returns a list of admin users that are configured for the current shell
plugin
'''
try:
admin_users = self._connection._shell.get_option('admin_users')
except AnsibleError:
# fallback for old custom plugins w/o get_option
admin_users = ['root']
return admin_users
def _is_become_unprivileged(self):
'''
The user is not the same as the connection user and is not part of the
shell configured admin users
'''
# if we don't use become then we know we aren't switching to a
# different unprivileged user
if not self._play_context.become:
return False
# if we use become and the user is not an admin (or same user) then
# we need to return become_unprivileged as True
admin_users = self._get_admin_users()
try:
remote_user = self._connection.get_option('remote_user')
except AnsibleError:
remote_user = self._play_context.remote_user
return bool(self._play_context.become_user not in admin_users + [remote_user])
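    # Illustrative outcomes with the default admin_users (['root']):
    #   become disabled                           -> False
    #   become_user == remote_user                -> False
    #   become_user='root'                        -> False
    #   become_user='postgres', remote_user='me'  -> True (unprivileged switch)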
def _make_tmp_path(self, remote_user=None):
'''
Create and return a temporary path on a remote box.
'''
become_unprivileged = self._is_become_unprivileged()
try:
remote_tmp = self._connection._shell.get_option('remote_tmp')
except AnsibleError:
remote_tmp = '~/.ansible/tmp'
# deal with tmpdir creation
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
if getattr(self._connection, '_remote_is_local', False):
tmpdir = C.DEFAULT_LOCAL_TMP
else:
tmpdir = self._remote_expand_user(remote_tmp, sudoable=False)
cmd = self._connection._shell.mkdtemp(basefile=basefile, system=become_unprivileged, tmpdir=tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self._connection.transport in ('ssh',):
if self._play_context.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
output = (u'SSH encountered an unknown error during the connection. '
'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
output = ('Authentication or permission failure. '
'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
'Consider changing the remote tmp path in ansible.cfg to a path rooted in "/tmp". '
'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u", stdout output: %s" % result['stdout']
if self._play_context.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
output += u", stderr output: %s" % result['stderr']
raise AnsibleConnectionFailure(output)
else:
self._cleanup_remote_tmp = True
try:
stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
except IndexError:
# stdout was empty or just space, set to / to trigger error in next if
rc = '/'
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
self._connection._shell.tmpdir = rc
return rc
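    # Sketch of the stdout parsing above, assuming the shell's mkdtemp command
    # echoes '<basefile>=<created path>' on success:
    #
    #   stdout = u'ansible-tmp-1-2=/home/user/.ansible/tmp/ansible-tmp-1-2\n'
    #   stdout.strip().split('ansible-tmp-1-2=', 1)[-1]
    #   # -> u'/home/user/.ansible/tmp/ansible-tmp-1-2'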
def _should_remove_tmp_path(self, tmp_path):
'''Determine if temporary path should be deleted or kept by user request/config'''
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
def _remove_tmp_path(self, tmp_path):
'''Remove a temporary path we created. '''
if tmp_path is None and self._connection._shell.tmpdir:
tmp_path = self._connection._shell.tmpdir
if self._should_remove_tmp_path(tmp_path):
cmd = self._connection._shell.remove(tmp_path, recurse=True)
# If we have gotten here we have a working ssh configuration.
# If ssh breaks we could leave tmp directories out on the remote system.
tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)
if tmp_rm_res.get('rc', 0) != 0:
                display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
% (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
else:
self._connection._shell.tmpdir = None
def _transfer_file(self, local_path, remote_path):
self._connection.put_file(local_path, remote_path)
return remote_path
def _transfer_data(self, remote_path, data):
'''
Copies the module data out to the temporary module path.
'''
if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
afo = os.fdopen(afd, 'wb')
try:
data = to_bytes(data, errors='surrogate_or_strict')
afo.write(data)
except Exception as e:
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
afo.flush()
afo.close()
try:
self._transfer_file(afile, remote_path)
finally:
os.unlink(afile)
return remote_path
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
"""
We need the files we upload to be readable (and sometimes executable)
by the user being sudo'd to but we want to limit other people's access
        (because the files could contain passwords or other private
        information). We achieve this in one of these ways:
* If no sudo is performed or the remote_user is sudo'ing to
themselves, we don't have to change permissions.
* If the remote_user sudo's to a privileged user (for instance, root),
we don't have to change permissions
* If the remote_user sudo's to an unprivileged user then we attempt to
grant the unprivileged user access via file system acls.
* If granting file system acls fails we try to change the owner of the
file with chown which only works in case the remote_user is
privileged or the remote systems allows chown calls by unprivileged
users (e.g. HP-UX)
* If the chown fails we can set the file to be world readable so that
the second unprivileged user can read the file.
Since this could allow other users to get access to private
information we only do this if ansible is configured with
"allow_world_readable_tmpfiles" in the ansible.cfg
"""
if remote_user is None:
remote_user = self._play_context.remote_user
if self._connection._shell.SHELL_FAMILY == 'powershell':
# This won't work on Powershell as-is, so we'll just completely skip until
# we have a need for it, at which point we'll have to do something different.
return remote_paths
if self._is_become_unprivileged():
# Unprivileged user that's different than the ssh user. Let's get
# to work!
# Try to use file system acls to make the files readable for sudo'd
# user
if execute:
chmod_mode = 'rx'
setfacl_mode = 'r-x'
else:
chmod_mode = 'rX'
# NOTE: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
res = self._remote_set_user_facl(remote_paths, self._play_context.become_user, setfacl_mode)
if res['rc'] != 0:
# File system acls failed; let's try to use chown next
# Set executable bit first as on some systems an
# unprivileged user can use chown
if execute:
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
res = self._remote_chown(remote_paths, self._play_context.become_user)
if res['rc'] != 0 and remote_user in self._get_admin_users():
# chown failed even if remote_user is administrator/root
raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as a privileged user. '
'Unprivileged become user would be unable to read the file.')
elif res['rc'] != 0:
if C.ALLOW_WORLD_READABLE_TMPFILES:
# chown and fs acls failed -- do things this insecure
# way only if the user opted in in the config file
display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
'This may be insecure. For information on securing this, see '
'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
else:
raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user '
                                           '(rc: %s, err: %s). For information on working around this, see '
'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
% (res['rc'], to_native(res['stderr'])))
elif execute:
# Can't depend on the file being transferred with execute permissions.
# Only need user perms because no become was used here
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set execute bit on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
return remote_paths
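    # The fallback chain above, expressed as the shell commands it ends up
    # issuing (shapes are illustrative; exact quoting comes from the shell plugin):
    #
    #   setfacl -m u:<become_user>:r-x <paths>   # preferred: filesystem ACLs
    #   chown <become_user> <paths>              # fallback: change ownership
    #   chmod a+rx <paths>                       # last resort, only with
    #                                            # allow_world_readable_tmpfiles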
def _remote_chmod(self, paths, mode, sudoable=False):
'''
Issue a remote chmod command
'''
cmd = self._connection._shell.chmod(paths, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chown(self, paths, user, sudoable=False):
'''
Issue a remote chown command
'''
cmd = self._connection._shell.chown(paths, user)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
'''
Issue a remote call to setfacl
'''
cmd = self._connection._shell.set_user_facl(paths, user, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _execute_remote_stat(self, path, all_vars, follow, tmp=None, checksum=True):
'''
Get information from remote file.
'''
if tmp is not None:
display.warning('_execute_remote_stat no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir')
del tmp # No longer used
module_args = dict(
path=path,
follow=follow,
get_checksum=checksum,
checksum_algo='sha1',
)
mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars,
wrap_async=False)
if mystat.get('failed'):
msg = mystat.get('module_stderr')
if not msg:
msg = mystat.get('module_stdout')
if not msg:
msg = mystat.get('msg')
raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))
if not mystat['stat']['exists']:
# empty might be matched, 1 should never match, also backwards compatible
mystat['stat']['checksum'] = '1'
# happens sometimes when it is a dir and not on bsd
if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
elif not isinstance(mystat['stat']['checksum'], string_types):
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
return mystat['stat']
def _remote_checksum(self, path, all_vars, follow=False):
'''
        Produces a remote checksum given a path.
        Returns a single-character code 0-5 for specific errors instead of a
        checksum; the codes are chosen so they can never collide with a real
        checksum:
        0 = unknown error
        1 = file does not exist, this might not be an error
        2 = permissions issue
        3 = it's a directory, not a file
        4 = stat module failed, likely due to not finding python
        5 = appropriate json module not found
'''
x = "0" # unknown error has occurred
try:
remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
if remote_stat['exists'] and remote_stat['isdir']:
x = "3" # its a directory not a file
else:
x = remote_stat['checksum'] # if 1, file is missing
except AnsibleError as e:
errormsg = to_text(e)
if errormsg.endswith(u'Permission denied'):
x = "2" # cannot read file
elif errormsg.endswith(u'MODULE FAILURE'):
x = "4" # python not found or module uncaught exception
elif 'json' in errormsg:
x = "5" # json module needed
finally:
return x # pylint: disable=lost-exception
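    # Callers typically treat the single-character codes as sentinels, e.g.
    # (hypothetical call site):
    #
    #   remote = self._remote_checksum(dest, all_vars=task_vars)
    #   if remote == '1':
    #       pass  # destination absent; often not an error
    #   elif remote in ('0', '2', '3', '4', '5'):
    #       raise AnsibleError('cannot checksum %s (code %s)' % (dest, remote))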
def _remote_expand_user(self, path, sudoable=True, pathsep=None):
''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
# We only expand ~/path and ~username/path
if not path.startswith('~'):
return path
# Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
# dir there.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
if getattr(self._connection, '_remote_is_local', False):
pass
elif sudoable and self._play_context.become and self._play_context.become_user:
expand_path = '~%s' % self._play_context.become_user
else:
# use remote user instead, if none set default to current user
expand_path = '~%s' % (self._play_context.remote_user or self._connection.default_user or '')
# use shell to construct appropriate command and execute
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
try:
initial_fragment = data['stdout'].strip().splitlines()[-1]
except IndexError:
initial_fragment = None
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Try using pwd, if not, return
# the original string
cmd = self._connection._shell.pwd()
pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
if pwd:
expanded = pwd
else:
expanded = path
elif len(split_path) > 1:
expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
else:
expanded = initial_fragment
return expanded
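    # Illustrative expansions, assuming become_user='app' and remote_user='deploy':
    #   '~/x'    -> the shell expands '~app' (become) or '~deploy' (no become)
    #               and the remainder is re-joined, e.g. '/home/app/x'
    #   '~bob/x' -> the shell expands '~bob', e.g. '/home/bob/x'
    #   '/abs/x' -> returned unchanged (does not start with '~')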
def _strip_success_message(self, data):
'''
Removes the BECOME-SUCCESS message from the data.
'''
if data.strip().startswith('BECOME-SUCCESS-'):
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
return data
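    # Doctest-style illustration of the stripping above:
    #   _strip_success_message(u'BECOME-SUCCESS-abc123\nok\n')  ->  u'ok\n'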
def _update_module_args(self, module_name, module_args, task_vars):
# set check mode in the module arguments, if required
if self._play_context.check_mode:
if not self._supports_check_mode:
raise AnsibleError("check mode is not supported for this operation")
module_args['_ansible_check_mode'] = True
else:
module_args['_ansible_check_mode'] = False
# set no log in the module arguments, if required
module_args['_ansible_no_log'] = self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG
# set debug in the module arguments, if required
module_args['_ansible_debug'] = C.DEFAULT_DEBUG
# let module know we are in diff mode
module_args['_ansible_diff'] = self._play_context.diff
# let module know our verbosity
module_args['_ansible_verbosity'] = display.verbosity
# give the module information about the ansible version
module_args['_ansible_version'] = __version__
# give the module information about its name
module_args['_ansible_module_name'] = module_name
# set the syslog facility to be used in the module
module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)
# let module know about filesystems that selinux treats specially
module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
# give the module the socket for persistent connections
module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
if not module_args['_ansible_socket']:
module_args['_ansible_socket'] = task_vars.get('ansible_socket')
# make sure all commands use the designated shell executable
module_args['_ansible_shell_executable'] = self._play_context.executable
# make sure modules are aware if they need to keep the remote files
module_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
# make sure all commands use the designated temporary directory if created
if self._is_become_unprivileged(): # force fallback on remote_tmp as user cannot normally write to dir
module_args['_ansible_tmpdir'] = None
else:
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
# make sure the remote_tmp value is sent through in case modules needs to create their own
try:
module_args['_ansible_remote_tmp'] = self._connection._shell.get_option('remote_tmp')
except KeyError:
# here for 3rd party shell plugin compatibility in case they do not define the remote_tmp option
module_args['_ansible_remote_tmp'] = '~/.ansible/tmp'
def _update_connection_options(self, options, variables=None):
''' ensures connections have the appropriate information '''
update = {}
        if getattr(self._connection, 'glob_option_vars', False):
            # if the connection allows for it, pass along any variables whose
            # names carry the connection plugin's 'ansible_<plugin>_' prefix
            # (a prefix match is assumed here; plain strings have no .match())
            if variables is not None:
                for varname in variables:
                    if varname.startswith('ansible_%s_' % self._connection._load_name):
                        update[varname] = variables[varname]
        # always override existing with options
        update.update(options)
        self._connection.set_options(update)
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False):
'''
Transfer and run a module along with its arguments.
'''
if tmp is not None:
display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
' should set self._connection._shell.tmpdir to share the tmpdir')
del tmp # No longer used
if delete_remote_tmp is not None:
display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
' Action plugins should check self._connection._shell.tmpdir to'
' see if a tmpdir existed before they were called to determine'
' if they are responsible for removing it.')
del delete_remote_tmp # No longer used
tmpdir = self._connection._shell.tmpdir
# We set the module_style to new here so the remote_tmp is created
# before the module args are built if remote_tmp is needed (async).
# If the module_style turns out to not be new and we didn't create the
# remote tmp here, it will still be created. This must be done before
# calling self._update_module_args() so the module wrapper has the
# correct remote_tmp value set
if not self._is_pipelining_enabled("new", wrap_async) and tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
if task_vars is None:
task_vars = dict()
# if a module name was not specified for this execution, use the action from the task
if module_name is None:
module_name = self._task.action
if module_args is None:
module_args = self._task.args
self._update_module_args(module_name, module_args, task_vars)
# FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
(module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
display.vvv("Using module file %s" % module_path)
if not shebang and module_style != 'binary':
raise AnsibleError("module (%s) is missing interpreter line" % module_name)
self._used_interpreter = shebang
remote_module_path = None
if not self._is_pipelining_enabled(module_style, wrap_async):
# we might need remote tmp dir
if tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
remote_module_path = self._connection._shell.join_path(tmpdir, 'AnsiballZ_%s' % remote_module_filename)
args_file_path = None
if module_style in ('old', 'non_native_want_json', 'binary'):
# we'll also need a tmp file to hold our module arguments
args_file_path = self._connection._shell.join_path(tmpdir, 'args')
if remote_module_path or module_style != 'new':
display.debug("transferring module to remote %s" % remote_module_path)
if module_style == 'binary':
self._transfer_file(module_path, remote_module_path)
else:
self._transfer_data(remote_module_path, module_data)
if module_style == 'old':
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
for k, v in iteritems(module_args):
args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style in ('non_native_want_json', 'binary'):
self._transfer_data(args_file_path, json.dumps(module_args))
display.debug("done transferring module to remote")
environment_string = self._compute_environment_string()
remote_files = []
if tmpdir and remote_module_path:
remote_files = [tmpdir, remote_module_path]
if args_file_path:
remote_files.append(args_file_path)
sudoable = True
in_data = None
cmd = ""
if wrap_async and not self._connection.always_pipeline_modules:
# configure, upload, and chmod the async_wrapper module
(async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
task_vars=task_vars)
async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
self._transfer_data(remote_async_module_path, async_module_data)
remote_files.append(remote_async_module_path)
async_limit = self._task.async_val
async_jid = str(random.randint(0, 999999999999))
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
# TODO: re-implement async_wrapper as a regular module to avoid this special case
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if environment_string:
async_cmd.insert(0, environment_string)
if args_file_path:
async_cmd.append(args_file_path)
else:
# maintain a fixed number of positional parameters for async_wrapper
async_cmd.append('_')
if not self._should_remove_tmp_path(tmpdir):
async_cmd.append("-preserve_tmp")
cmd = " ".join(to_text(x) for x in async_cmd)
else:
if self._is_pipelining_enabled(module_style):
in_data = module_data
else:
cmd = remote_module_path
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()
# Fix permissions of the tmpdir path and tmpdir files. This should be called after all
# files have been transferred.
if remote_files:
# remove none/empty
remote_files = [x for x in remote_files if x]
self._fixup_perms2(remote_files, self._play_context.remote_user)
# actually execute
res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)
# parse the main result
data = self._parse_returned_data(res)
# NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
# get internal info before cleaning
if data.pop("_ansible_suppress_tmpdir_delete", False):
self._cleanup_remote_tmp = False
# remove internal keys
remove_internal_keys(data)
if wrap_async:
# async_wrapper will clean up its tmpdir on its own so we want the controller side to
# forget about it now
self._connection._shell.tmpdir = None
# FIXME: for backwards compat, figure out if still makes sense
data['changed'] = True
# pre-split stdout/stderr into lines if needed
if 'stdout' in data and 'stdout_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stdout', None) or u''
data['stdout_lines'] = txt.splitlines()
if 'stderr' in data and 'stderr_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stderr', None) or u''
data['stderr_lines'] = txt.splitlines()
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
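    # Typical call from inside an action plugin's run(); the module name and
    # args here are placeholders:
    #
    #   data = self._execute_module(module_name='stat',
    #                               module_args=dict(path='/etc/hosts'),
    #                               task_vars=task_vars)
    #   if data.get('failed'):
    #       ...  # react to the module failure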
def _parse_returned_data(self, res):
try:
filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''))
for w in warnings:
display.warning(w)
data = json.loads(filtered_output)
if 'ansible_facts' in data and isinstance(data['ansible_facts'], dict):
data['ansible_facts'] = wrap_var(data['ansible_facts'])
data['_ansible_parsed'] = True
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, _ansible_parsed=False)
data['module_stdout'] = res.get('stdout', u'')
if 'stderr' in res:
data['module_stderr'] = res['stderr']
if res['stderr'].startswith(u'Traceback'):
data['exception'] = res['stderr']
# try to figure out if we are missing interpreter
if self._used_interpreter is not None and '%s: No such file or directory' % self._used_interpreter.lstrip('!#') in data['module_stderr']:
data['msg'] = "The module failed to execute correctly, you probably need to set the interpreter."
else:
data['msg'] = "MODULE FAILURE"
data['msg'] += '\nSee stdout/stderr for the exact error'
if 'rc' in res:
data['rc'] = res['rc']
return data
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
run the module code or python directly when pipelining.
:kwarg encoding_errors: If the value returned by the command isn't
utf-8 then we have to figure out how to transform it to unicode.
            If the value is just going to be displayed to the user (or
            discarded) then the default of 'surrogate_then_replace' is fine. If the data is
used as a key or is going to be written back out to a file
verbatim, then this won't work. May have to use some sort of
replacement strategy (python3 could use surrogateescape)
:kwarg chdir: cd into this directory before executing the command.
'''
display.debug("_low_level_execute_command(): starting")
# if not cmd:
# # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
# display.debug("_low_level_execute_command(): no command, exiting")
# return dict(stdout='', stderr='', rc=254)
if chdir:
display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)
allow_same_user = C.BECOME_ALLOW_SAME_USER
same_user = self._play_context.become_user == self._play_context.remote_user
if sudoable and self._play_context.become and (allow_same_user or not same_user):
display.debug("_low_level_execute_command(): using become for this command")
if self._connection.transport != 'network_cli' and self._play_context.become_method != 'enable':
cmd = self._play_context.make_become_cmd(cmd, executable=executable)
if self._connection.allow_executable:
if executable is None:
executable = self._play_context.executable
# mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
# only applied for the default executable to avoid interfering with the raw action
cmd = self._connection._shell.append_command(cmd, 'sleep 0')
if executable:
cmd = executable + ' -c ' + shlex_quote(cmd)
display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
# Change directory to basedir of task for command execution when connection is local
if self._connection.transport == 'local':
cwd = os.getcwd()
os.chdir(self._loader.get_basedir())
try:
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
finally:
if self._connection.transport == 'local':
os.chdir(cwd)
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
out = to_text(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
err = to_text(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
if rc is None:
rc = 0
# be sure to remove the BECOME-SUCCESS message now
out = self._strip_success_message(out)
display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err, stderr_lines=err.splitlines())
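    # Shape of the dict this helper returns (values are illustrative):
    #   {'rc': 0, 'stdout': u'ok\n', 'stdout_lines': [u'ok'],
    #    'stderr': u'', 'stderr_lines': []}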
def _get_diff_data(self, destination, source, task_vars, source_file=True):
diff = {}
display.debug("Going to peek to see if file has changed permissions")
peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, _diff_peek=True), task_vars=task_vars, persist_files=True)
if not peek_result.get('failed', False) or peek_result.get('rc', 0) == 0:
if peek_result.get('state') == 'absent':
diff['before'] = ''
elif peek_result.get('appears_binary'):
diff['dst_binary'] = 1
elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Slurping the file %s" % source)
dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == 'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise AnsibleError("unknown encoding in content option, failed: %s" % dest_result)
diff['before_header'] = destination
diff['before'] = dest_contents
if source_file:
st = os.stat(source)
if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Reading local copy of the file %s" % source)
try:
with open(source, 'rb') as src:
src_contents = src.read()
except Exception as e:
raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, str(e)))
if b"\x00" in src_contents:
diff['src_binary'] = 1
else:
diff['after_header'] = source
diff['after'] = src_contents
else:
display.debug("source of file passed in")
diff['after_header'] = 'dynamically generated'
diff['after'] = source
if self._play_context.no_log:
if 'before' in diff:
diff["before"] = ""
if 'after' in diff:
diff["after"] = " [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"
return diff
def _find_needle(self, dirname, needle):
'''
find a needle in haystack of paths, optionally using 'dirname' as a subdir.
This will build the ordered list of paths to search and pass them to dwim
to get back the first existing file found.
'''
# dwim already deals with playbook basedirs
path_stack = self._task.get_search_path()
# if missing it will return a file not found exception
return self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
| caphrim007/ansible | lib/ansible/plugins/action/__init__.py | Python | gpl-3.0 | 50,188 | 0.003447 |
import os
import io
import time
import picamera
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
os.putenv('SDL_AUDIODRIVER' , 'alsa')
# Create an in-memory stream
#my_stream = io.BytesIO()
#with picamera.PiCamera() as camera:
path="/home/pi/videos"
files = os.listdir(path)
files = sorted(files)
last_file = files[-1]
# index = -1
# scan the last filename for the digits embedded in it, e.g. 'video0012.h264'
# yields start_index=5 and end_index=9
i = 0
while i < len(last_file):
    if last_file[i].isdigit():
        break
    i = i + 1
start_index = i
while i < len(last_file):
    if not last_file[i].isdigit():
        break
    i = i + 1
end_index = i
print files
print last_file
print start_index
print end_index
#end_index = end_index-1
print int(last_file[start_index:end_index])
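# The extracted number could then drive the next recording's filename; a
# sketch, assuming the filenames share a common prefix/suffix around the digits:
#
#   next_index = int(last_file[start_index:end_index]) + 1
#   next_name = '%s%0*d%s' % (last_file[:start_index],
#                             end_index - start_index,
#                             next_index,
#                             last_file[end_index:])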
| lumened/touch-flux | src/demo.py | Python | gpl-2.0 | 805 | 0.012422 |
# Function to scan for pseudolandmarks along the y-axis
import cv2
import os
import numpy as np
from plantcv.plantcv._debug import _debug
from plantcv.plantcv import params
from plantcv.plantcv import outputs
from plantcv.plantcv import fatal_error
def y_axis_pseudolandmarks(img, obj, mask, label="default"):
"""
Divide up object contour into 19 equidistant segments and generate landmarks for each
Inputs:
img = This is a copy of the original plant image generated using np.copy if debug is true it will be drawn on
obj = a contour of the plant object (this should be output from the object_composition.py fxn)
mask = this is a binary image. The object should be white and the background should be black
label = optional label parameter, modifies the variable name of observations recorded
Returns:
left = List of landmarks within the left side
right = List of landmarks within the right side
center_h = List of landmarks within the center
:param img: numpy.ndarray
:param obj: list
:param mask: numpy.ndarray
:param label: str
:return left: list
:return right: list
:return center_h: list
"""
# Lets get some landmarks scanning along the y-axis
if not np.any(obj):
return ('NA', 'NA'), ('NA', 'NA'), ('NA', 'NA')
x, y, width, height = cv2.boundingRect(obj)
extent = height
# Outputs
left = []
right = []
center_h = []
left_list = []
right_list = []
center_h_list = []
# If height is greater than 21 pixels make 20 increments (5% intervals)
if extent >= 21:
inc = int(extent / 21)
# Define variable for max points and min points
pts_max = []
pts_min = []
# Get max and min points for each of the intervals
for i in range(1, 21):
if i == 1:
pt_max = y
pt_min = y + (inc * i)
else:
pt_max = y + (inc * (i - 1))
pt_min = y + (inc * i)
# Put these in an array
pts_max.append(pt_max)
pts_min.append(pt_min)
# Combine max and min into a set of tuples
point_range = list(zip(pts_max, pts_min))
# define some list variables to fill
row_median = []
row_ave = []
max_width = []
left_points = []
right_points = []
y_vals = []
x_centroids = []
y_centroids = []
# For each of the 20 intervals
for pt in point_range:
# Get the lower and upper bounds
# (lower and higher in terms of value; low point is actually towards top of photo, higher is lower of photo)
low_point, high_point = pt
# Get all rows within these two points
rows = []
lps = []
rps = []
# Get a continuous list of the values between the top and the bottom of the interval save as vals
vals = list(range(low_point, high_point))
# For each row... get all coordinates from object contour that match row
for v in vals:
# Value is all entries that match the row
value = obj[v == obj[:, 0, 1]]
if len(value) > 0:
# Could potentially be more than two points in all contour in each pixel row
# Grab largest x coordinate (column)
largest = value[:, 0, 0].max()
# Grab smallest x coordinate (column)
smallest = value[:, 0, 0].min()
# Take the difference between the two (this is how far across the object is on this plane)
row_width = largest - smallest
# Append this value to a list
rows.append(row_width)
lps.append(smallest)
rps.append(largest)
if len(value) == 0:
row_width = 1
rows.append(row_width)
lps.append(1)
rps.append(1)
# For each of the points find the median and average width
row_median.append(np.median(np.array(rows)))
row_ave.append(np.mean(np.array(rows)))
max_width.append(np.max(np.array(rows)))
left_points.append(np.mean(smallest))
right_points.append(np.mean(largest))
yval = int((high_point + low_point) / 2)
y_vals.append(yval)
# Make a copy of the mask; we want to get landmark points from this
window = np.copy(mask)
window[:low_point] = 0
window[high_point:] = 0
s = cv2.moments(window)
# Centroid (center of mass x, center of mass y)
if largest - smallest > 3:
if s['m00'] > 0.001:
smx, smy = (s['m10'] / s['m00'], s['m01'] / s['m00'])
x_centroids.append(int(smx))
y_centroids.append(int(smy))
if s['m00'] < 0.001:
smx, smy = (s['m10'] / 0.001, s['m01'] / 0.001)
x_centroids.append(int(smx))
y_centroids.append(int(smy))
else:
smx = (largest + smallest) / 2
smy = yval
x_centroids.append(int(smx))
y_centroids.append(int(smy))
left = list(zip(left_points, y_vals))
left = np.array(left)
left.shape = (20, 1, 2)
right = list(zip(right_points, y_vals))
right = np.array(right)
right.shape = (20, 1, 2)
center_h = list(zip(x_centroids, y_centroids))
center_h = np.array(center_h)
center_h.shape = (20, 1, 2)
img2 = np.copy(img)
for i in left:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 0), -1)
for i in right:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 255), -1)
for i in center_h:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (0, 79, 255), -1)
_debug(visual=img2,
filename=os.path.join(params.debug_outdir, (str(params.device) + '_y_axis_pseudolandmarks.png')))
elif extent < 21:
# If the length of the object is less than 20 pixels just make the object a 20 pixel rectangle
x, y, width, height = cv2.boundingRect(obj)
y_coords = list(range(y, y + 20))
l_points = [x] * 20
left = list(zip(l_points, y_coords))
left = np.array(left)
left.shape = (20, 1, 2)
r_points = [x + width] * 20
right = list(zip(r_points, y_coords))
right = np.array(right)
right.shape = (20, 1, 2)
m = cv2.moments(mask, binaryImage=True)
# Centroid (center of mass x, center of mass y)
if m['m00'] == 0:
fatal_error('Check input parameters, first moment=0')
else:
cmx, cmy = (m['m10'] / m['m00'], m['m01'] / m['m00'])
c_points = [cmx] * 20
center_h = list(zip(c_points, y_coords))
center_h = np.array(center_h)
center_h.shape = (20, 1, 2)
img2 = np.copy(img)
for i in left:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 0), -1)
for i in right:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 255), -1)
for i in center_h:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (0, 79, 255), -1)
_debug(visual=img2,
filename=os.path.join(params.debug_outdir, (str(params.device) + '_y_axis_pseudolandmarks.png')))
# Store into global measurements
for pt in left:
left_list.append(pt[0].tolist())
for pt in right:
right_list.append(pt[0].tolist())
for pt in center_h:
center_h_list.append(pt[0].tolist())
outputs.add_observation(sample=label, variable='left_lmk', trait='left landmark coordinates',
method='plantcv.plantcv.x_axis_pseudolandmarks', scale='none', datatype=tuple,
value=tuple(left_list), label='none')
outputs.add_observation(sample=label, variable='right_lmk', trait='right landmark coordinates',
method='plantcv.plantcv.x_axis_pseudolandmarks', scale='none', datatype=tuple,
value=tuple(right_list), label='none')
outputs.add_observation(sample=label, variable='center_h_lmk', trait='center horizontal landmark coordinates',
method='plantcv.plantcv.x_axis_pseudolandmarks', scale='none', datatype=tuple,
value=tuple(center_h_list), label='none')
return left, right, center_h
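# A minimal usage sketch following the classic plantcv workflow; the helper
# names and input path are illustrative and may differ between plantcv versions:
#
#   from plantcv import plantcv as pcv
#   img, path, filename = pcv.readimage("plant.png")
#   bin_mask = ...  # binary mask produced by earlier thresholding steps
#   objects, hierarchy = pcv.find_objects(img, bin_mask)
#   obj, obj_mask = pcv.object_composition(img, objects, hierarchy)
#   left, right, center_h = y_axis_pseudolandmarks(img, obj, obj_mask)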
| danforthcenter/plantcv | plantcv/plantcv/y_axis_pseudolandmarks.py | Python | mit | 9,161 | 0.002729 |
# stdlib
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
# third party
from nacl.signing import VerifyKey
from pydantic import BaseModel
from pydantic.error_wrappers import ValidationError as PydanticValidationError
# relative
from .....common.message import ImmediateSyftMessage
from .....common.message import SignedMessage
from .....common.uid import UID
from .....io.address import Address
from ....abstract.node_service_interface import NodeServiceInterface
from ....common.exceptions import AuthorizationError
from ....common.exceptions import BadPayloadException
from ....common.exceptions import PermissionsNotDefined
# Inner Payload message using Pydantic.
class Payload(BaseModel):
class Config:
orm_mode = True
class RequestPayload(Payload):
pass
class ReplyPayload(Payload):
pass
class NewSyftMessage(ImmediateSyftMessage):
"""A base class from which all message classes should inherit.
Note:
This will eventually replace the old `SyftMessage` class.
"""
__attr_allowlist__ = ["id", "address", "reply_to", "reply", "msg_id", "kwargs"]
signed_type = SignedMessage
request_payload_type = RequestPayload
reply_payload_type = ReplyPayload
def __init__(
self,
address: Address,
kwargs: Optional[Dict[str, Any]] = None,
msg_id: Optional[UID] = None,
reply_to: Optional[Address] = None,
reply: bool = False,
) -> None:
super().__init__(address=address, msg_id=msg_id)
self.reply_to = reply_to
self.reply = reply
self.kwargs = kwargs if kwargs else {}
@property
def payload(self) -> Payload:
kwargs_dict = {}
if hasattr(self.kwargs, "upcast"):
kwargs_dict = self.kwargs.upcast() # type: ignore
else:
kwargs_dict = self.kwargs # type: ignore
try:
# If it's not a reply message then load kwargs as a proper request payload.
if not self.reply:
return self.request_payload_type(**kwargs_dict)
# If it's a reply message, then load kwargs as a proper reply payload.
else:
return self.reply_payload_type(**kwargs_dict)
except PydanticValidationError:
raise BadPayloadException
def run(
self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
) -> ReplyPayload:
raise NotImplementedError
def get_permissions(self) -> List:
"""Returns the list of permission classes applicable to the given message."""
raise NotImplementedError
def check_permissions(
self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
) -> None:
"""Check if the user has relevant permissions to run this message.
Args:
node (NodeServiceInterface): node interface used to invoke this message.
verify_key (Optional[VerifyKey], optional): user signed verification key. Defaults to None.
Raises:
AuthorizationError: Error when one of the permission is denied.
"""
if not len(self.get_permissions()):
raise PermissionsNotDefined
for permission_class in self.get_permissions():
if not permission_class().has_permission(
msg=self, node=node, verify_key=verify_key
):
raise AuthorizationError(
f"You don't have access to perform {self} action."
)
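# A sketch of a concrete message built on NewSyftMessage; the payload fields
# and the permission class named below are illustrative placeholders:
#
#   class EchoMessage(NewSyftMessage):
#       class Request(RequestPayload):
#           text: str
#
#       class Reply(ReplyPayload):
#           text: str
#
#       request_payload_type = Request
#       reply_payload_type = Reply
#
#       def run(self, node, verify_key=None):
#           return self.Reply(text=self.payload.text)
#
#       def get_permissions(self):
#           return [SomePermission]  # hypothetical permission class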
| OpenMined/PySyft | packages/syft/src/syft/core/node/common/node_service/generic_payload/syft_message.py | Python | apache-2.0 | 3,573 | 0.002239 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_flaskpypi
----------------------------------
Tests for `flaskpypi` module.
"""
import pytest
from flaskpypi import flaskpypi
# Code from https://wiki.python.org/moin/PyPISimple
from xml.etree import ElementTree
from urllib.request import urlopen
def get_distributions(simple_index='https://pypi.python.org/simple/'):
with urlopen(simple_index) as f:
tree = ElementTree.parse(f)
return [a.text for a in tree.iter('a')]
def scrape_links(dist, simple_index='https://pypi.python.org/simple/'):
with urlopen(simple_index + dist + '/') as f:
tree = ElementTree.parse(f)
return [a.attrib['href'] for a in tree.iter('a')]
def test_this_is_a_test():
assert True
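# Exercising the helpers above requires network access, so it is shown here as
# a sketch rather than as a test (the index layout may have changed since):
#
#   dists = get_distributions()
#   links = scrape_links(dists[0])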
| waynew/flaskpypi | tests/test_flaskpypi.py | Python | bsd-3-clause | 752 | 0.00266 |
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_gw_devices
Revision ID: 19180cf98af6
Revises: 117643811bca
Create Date: 2014-02-26 02:46:26.151741
"""
# revision identifiers, used by Alembic.
revision = '19180cf98af6'
down_revision = '117643811bca'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
'neutron.plugins.vmware.plugin.NsxPlugin',
'neutron.plugins.vmware.plugin.NsxServicePlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'networkgatewaydevicereferences',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', 'network_gateway_id', 'interface_name'))
    # Copy data from networkgatewaydevices into networkgatewaydevicereferences
op.execute("INSERT INTO networkgatewaydevicereferences SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevices")
# drop networkgatewaydevices
op.drop_table('networkgatewaydevices')
op.create_table(
'networkgatewaydevices',
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('nsx_id', sa.String(length=36), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('connector_type', sa.String(length=10), nullable=True),
sa.Column('connector_ip', sa.String(length=64), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.PrimaryKeyConstraint('id'))
# Create a networkgatewaydevice for each existing reference.
# For existing references nsx_id == neutron_id
    # Do not fill connector info as it would be unknown
op.execute("INSERT INTO networkgatewaydevices (id, nsx_id, tenant_id) "
"SELECT gw_dev_ref.id, gw_dev_ref.id as nsx_id, tenant_id "
"FROM networkgatewaydevicereferences AS gw_dev_ref "
"INNER JOIN networkgateways AS net_gw ON "
"gw_dev_ref.network_gateway_id=net_gw.id")
def downgrade(active_plugins=None, options=None):
pass
| shakamunyi/neutron-vrrp | neutron/db/migration/alembic_migrations/versions/19180cf98af6_nsx_gw_devices.py | Python | apache-2.0 | 3,313 | 0.000906 |
#!/usr/bin/python
# Google Spreadsheet DHT Sensor Data-logging Example
# Depends on the 'gspread' and 'oauth2client' package being installed. If you
# have pip installed execute:
# sudo pip install gspread oauth2client
# Also it's _very important_ on the Raspberry Pi to install the python-openssl
# package because the version of Python is a bit old and can fail with Google's
# new OAuth2 based authentication. Run the following command to install the
# the package:
# sudo apt-get update
# sudo apt-get install python-openssl
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import sys
import time
import datetime
import Adafruit_DHT
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
# Type of sensor, can be Adafruit_DHT.DHT11, Adafruit_DHT.DHT22, or Adafruit_DHT.AM2302.
DHT_TYPE = Adafruit_DHT.DHT11
# Sensors connected to Raspberry Pi GPIO pins 5 and 6
DHT_PIN = 5
DHT_SECOND_PIN = 6
# Example of sensor connected to Beaglebone Black pin P8_11
#DHT_PIN = 'P8_11'
# Google Docs OAuth credential JSON file. Note that the process for authenticating
# with Google docs has changed as of ~April 2015. You _must_ use OAuth2 to log
# in and authenticate with the gspread library. Unfortunately this process is much
# more complicated than the old process. You _must_ carefully follow the steps on
# this page to create a new OAuth service in your Google developer console:
# http://gspread.readthedocs.org/en/latest/oauth2.html
#
# Once you've followed the steps above you should have downloaded a .json file with
# your OAuth2 credentials. This file has a name like SpreadsheetData-<gibberish>.json.
# Place that file in the same directory as this python script.
#
# Now one last _very important_ step before updating the spreadsheet will work.
# Go to your spreadsheet in Google Spreadsheet and share it to the email address
# inside the 'client_email' setting in the SpreadsheetData-*.json file. For example
# if the client_email setting inside the .json file has an email address like:
# 149345334675-md0qff5f0kib41meu20f7d1habos3qcu@developer.gserviceaccount.com
# Then use the File -> Share... command in the spreadsheet to share it with read
# and write acess to the email address above. If you don't do this step then the
# updates to the sheet will fail!
GDOCS_OAUTH_JSON = 'Humidity-Logger-96344a3d42df.json'
# Google Docs spreadsheet name.
GDOCS_SPREADSHEET_NAME = 'Luftfeuchtigkeitslog'
# How long to wait (in seconds) between measurements.
FREQUENCY_SECONDS = 900
def login_open_sheet(oauth_key_file, spreadsheet):
"""Connect to Google Docs spreadsheet and return the first worksheet."""
try:
json_key = json.load(open(oauth_key_file))
credentials = SignedJwtAssertionCredentials(json_key['client_email'],
json_key['private_key'],
['https://spreadsheets.google.com/feeds'])
gc = gspread.authorize(credentials)
worksheet = gc.open(spreadsheet).sheet1
return worksheet
except Exception as ex:
print 'Unable to login and get spreadsheet. Check OAuth credentials, spreadsheet name, and make sure spreadsheet is shared to the client_email address in the OAuth .json file!'
print 'Google sheet login failed with error:', ex
sys.exit(1)
def getReadingFromSensor(inputpin):
attempts = 0
while attempts < 3:
# Attempt to get sensor reading.
humidity, temp = Adafruit_DHT.read(DHT_TYPE, inputpin)
# Reading the sensor depends on timing so to make sure we are not busy anymore insert a little sleep
# Skip to the next reading if a valid measurement couldn't be taken.
# This might happen if the CPU is under a lot of load and the sensor
# can't be reliably read (timing is critical to read the sensor).
if humidity is None or temp is None:
time.sleep(2)
attempts += 1
continue
print 'Temperature{0}: {1:0.1f} C'.format(inputpin,temp)
print 'Humidity{0}: {1:0.1f} %'.format(inputpin,humidity)
        return temp, humidity
    # all attempts failed; return an explicit pair so the caller can react
    return None, None
print 'Logging sensor measurements to {0} every {1} seconds.'.format(GDOCS_SPREADSHEET_NAME, FREQUENCY_SECONDS)
print 'Press Ctrl-C to quit.'
worksheet = None
while True:
# Login if necessary.
if worksheet is None:
worksheet = login_open_sheet(GDOCS_OAUTH_JSON, GDOCS_SPREADSHEET_NAME)
temp, humidity = getReadingFromSensor(DHT_PIN)
time.sleep(1)
temp2, humidity2 = getReadingFromSensor(DHT_SECOND_PIN)
# Append the data to a logfile
text_file = open('TempLog.csv', 'a')
text_file.write('{},{},{},{},{}\n'.format(datetime.datetime.now(),temp,humidity,temp2,humidity2))
text_file.close()
# Append the data in the spreadsheet, including a timestamp
try:
worksheet.append_row((datetime.datetime.now(), temp, humidity, temp2, humidity2))
except:
# Error appending data, most likely because credentials are stale.
# Null out the worksheet so a login is performed at the top of the loop.
print 'Append error, logging in again'
worksheet = None
time.sleep(FREQUENCY_SECONDS)
continue
# Wait 30 seconds before continuing
print 'Wrote a row to {0}'.format(GDOCS_SPREADSHEET_NAME)
time.sleep(FREQUENCY_SECONDS)
|
mh03r932/raspi2dht11
|
examples/google_spreadsheet_twosens.py
|
Python
|
mit
| 6,208 | 0.016591 |
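The logger above writes one CSV row per cycle: a timestamp, then temperature and humidity for each sensor. A minimal readback sketch, assuming every row holds numeric readings and that the timestamps carry microseconds (str(datetime.now()) omits them when they are exactly zero):

import csv
from datetime import datetime

# Read back the rows written by the logger above (TempLog.csv).
with open('TempLog.csv') as f:
    for row in csv.reader(f):
        # Column layout matches the logger: timestamp, temp1, hum1, temp2, hum2.
        stamp = datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S.%f')
        temp1, hum1, temp2, hum2 = [float(v) for v in row[1:]]
        print('%s sensor1=%.1fC %.1f%% sensor2=%.1fC %.1f%%'
              % (stamp, temp1, hum1, temp2, hum2))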
# Copyright (c) 2007 Enough Project.
# See LICENSE for details.
"""The things a keymap does:
1. Pass given keys to 'next' keymap (considered more 'specific') which
is stronger/overrides the keymap itself.
2. If the next keymap does not know the key, then it tries to handle
it itself according to a map it holds that maps specific (modifier,
key) to funcs, and then, also according to a map of broader groups
to funcs."""
import pygame
import functools
import itertools
from lib.observer import Observable
def discard_eventarg(func):
@functools.wraps(func)
def handler(event):
return func()
return handler
def filter_eventtypes(event_types):
"""Returns a decorator that takes pygame events and passes them on
to the decorated function only if they are within the given event
types"""
def decorate(func):
@functools.wraps(func)
def new_func(event):
if event.type in event_types:
return func(event)
return new_func
return decorate
def handler(include_keyup=False,
include_event=False):
def decorate(func):
if not include_event:
func = discard_eventarg(func)
if not include_keyup:
func = filter_eventtypes([pygame.KEYDOWN])(func)
return func
return decorate
keydown_noarg = handler()
def mod_name(x):
mods = []
if x & pygame.KMOD_CTRL:
mods.append('Control')
if x & pygame.KMOD_SHIFT:
mods.append('Shift')
if x & pygame.KMOD_META:
mods.append('Winkey')
if x & pygame.KMOD_ALT:
mods.append('Alt')
return ' + '.join(mods)
class Key(object):
def __init__(self, modifier, key):
self.modifier = modifier
self.key = key
def _essence(self):
return (self.modifier, self.key)
def __cmp__(self, other):
if isinstance(other, Key):
return cmp(self._essence(), other._essence())
else:
return cmp(type(self), type(other))
def __hash__(self):
return hash(self._essence())
def name(self):
key_name = pygame.key.name(self.key)
if self.modifier:
return '%s+%s' % (mod_name(self.modifier), key_name)
else:
return key_name
__repr__ = name
@classmethod
def from_pygame_event(cls, event):
mod = 0
if event.mod & pygame.KMOD_CTRL:
mod |= pygame.KMOD_CTRL
elif event.mod & pygame.KMOD_ALT:
mod |= pygame.KMOD_ALT
elif event.mod & pygame.KMOD_SHIFT:
mod |= pygame.KMOD_SHIFT
return cls(mod, event.key)
class Group(object):
def __init__(self, name, allowed_modifiers, keys):
self.allowed_modifiers = set(allowed_modifiers)
self.keys = set(keys)
self._name = name
def name(self):
return self._name
def overlaps(self, key):
if isinstance(key, Group):
return bool(self.keys & key.keys) and bool(self.allowed_modifiers &
key.allowed_modifiers)
elif isinstance(key, Key):
return key in self
else:
return NotImplemented
def __contains__(self, key):
return key.key in self.keys and key.modifier in self.allowed_modifiers
# TODO: It's bad to assume anything about K_* here...
import string
alphanumeric = Group('Alphanumeric', [pygame.KMOD_SHIFT, 0],
[ord(x) for x in string.letters+string.digits] +
[pygame.K_UNDERSCORE, pygame.K_MINUS])
all_printable = Group('Printable symbols', [pygame.KMOD_SHIFT, 0],
[ord(x) for x in string.printable])
digits = Group('Digit', [0], [ord(x) for x in string.digits])
extended_digits = Group('Extended digits', [0], [ord(x) for x in string.digits+'abcdef'])
class Keymap(object):
def __init__(self):
self.obs_activation = Observable()
self.obs_dict = Observable()
        # Cache these because they are rather expensive to generate and
        # used a lot.
self.notify_remove_item = self.obs_dict.notify.remove_item
self.notify_add_item = self.obs_dict.notify.add_item
self.notify_replace_item = self.obs_dict.notify.replace_item
self.next_keymap = None
self.key_registrations = {}
self.group_registrations = {}
self.disabled_group_registrations = {}
self.is_active = False
def __contains__(self, key):
if self.next_keymap is not None and key in self.next_keymap:
return True
if key in self.key_registrations:
return True
if key in self.group_registrations:
return True
return False
def iterkeys(self):
for key, value in self.iteritems():
yield key
def iteritems(self):
overridden = set()
if self.next_keymap is not None:
for key, value in self.next_keymap.iteritems():
overridden.add(key)
yield key, value
for group, value in self.group_registrations.iteritems():
yield group, value
for key, value in self.key_registrations.iteritems():
if key not in overridden:
yield key, value
__iter__ = iterkeys
def __getitem__(self, key):
if self.next_keymap is not None and key in self.next_keymap:
return self.next_keymap[key]
if key in self.key_registrations:
return self.key_registrations[key]
if key in self.group_registrations:
return self.group_registrations[key]
raise KeyError("Unknown key", key)
def set_next_keymap(self, keymap):
if self.next_keymap is keymap:
return
if self.next_keymap is not None:
if self.is_active:
self.next_keymap.deactivate()
# TODO: How to export to function?
for key, value in self.next_keymap.iteritems():
if keymap is not None and isinstance(key, Key) and key in keymap:
                    # The key will remain overridden
continue
self._next_keymap_remove_item(key, value)
self.next_keymap.obs_dict.remove_observer(self)
prev_keymap = self.next_keymap
self.next_keymap = keymap
if self.next_keymap is not None:
self.next_keymap.obs_dict.add_observer(self, '_next_keymap_')
for key, value in self.next_keymap.iteritems():
if prev_keymap is not None and isinstance(key, Key) and key in prev_keymap:
# The key was overridden and remains so, but with a new value
self._next_keymap_replace_item(key, prev_keymap[key], value)
else:
self._next_keymap_add_item(key, value)
if self.is_active:
self.next_keymap.activate()
def _shadow_groups(self, key):
for group in self.group_registrations.keys():
if not group.overlaps(key):
continue
assert group not in self.disabled_group_registrations
gvalue = self.group_registrations.pop(group)
self.disabled_group_registrations[group] = gvalue
self.notify_remove_item(group, gvalue)
def _unshadow_groups(self, key):
for group in self.disabled_group_registrations.keys():
if not group.overlaps(key):
continue
assert group not in self.group_registrations
gvalue = self.disabled_group_registrations.pop(group)
self.group_registrations[group] = gvalue
self.notify_add_item(group, gvalue)
def _next_keymap_add_item(self, key, func):
self._shadow_groups(key)
if key in self.key_registrations:
self.notify_replace_item(key, self.key_registrations[key], func)
else:
self.notify_add_item(key, func)
def _next_keymap_remove_item(self, key, func):
if key in self.key_registrations:
self.notify_replace_item(key, func, self.key_registrations[key])
else:
self.notify_remove_item(key, func)
self._unshadow_groups(key)
def _next_keymap_replace_item(self, key, old_func, new_func):
self.notify_replace_item(key, old_func, new_func)
def activate(self):
self.is_active = True
self.obs_activation.notify.activated()
if self.next_keymap is not None:
self.next_keymap.activate()
def deactivate(self):
self.is_active = False
if self.next_keymap is not None:
self.next_keymap.deactivate()
self.obs_activation.notify.deactivated()
def register_key(self, key, func):
assert isinstance(key, Key)
assert func.__doc__, "Must use documented functions (%r)" % (func,)
for group, group_func in itertools.chain(self.group_registrations.iteritems(),
self.disabled_group_registrations.iteritems()):
assert key not in group
self.unregister_key(key)
r = self.key_registrations
r[key] = func
if self.next_keymap is not None and key in self.next_keymap:
return
self.notify_add_item(key, func)
def unregister_key(self, key):
assert isinstance(key, Key)
r = self.key_registrations
old_func = r.pop(key, None)
if old_func is not None:
if self.next_keymap is not None and key in self.next_keymap:
return
self.notify_remove_item(key, old_func)
    def register_group(self, group, func):
        assert func.__doc__, "Must use documented functions (%r)" % (func,)
        if self.next_keymap is not None:
            for key, key_func in self.next_keymap.iteritems():
                if group.overlaps(key):
                    # An overlapping key in the next keymap shadows the whole
                    # group, so park the given handler as disabled.
                    self.disabled_group_registrations[group] = func
                    return
        self.group_registrations[group] = func
def unregister_group(self, group):
self.group_registrations.pop(group, None)
self.disabled_group_registrations.pop(group, None)
def key_event(self, event):
assert event.type in [pygame.KEYUP, pygame.KEYDOWN]
key = Key.from_pygame_event(event)
if self.next_keymap is not None and self.next_keymap.key_event(event):
return True
if key in self.key_registrations:
func = self.key_registrations[key]
func(event)
return True
for group, func in self.group_registrations.iteritems():
if key in group:
func(event)
return True
return False
|
krfkeith/enough
|
gui/Keymap.py
|
Python
|
gpl-3.0
| 10,827 | 0.002586 |
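A minimal usage sketch for the keymap above, assuming the repo's gui package is importable and that lib.observer's Observable tolerates notifications with no observers registered; the handler and the Ctrl+Q binding are illustrative:

import pygame
from gui.Keymap import Keymap, Key, keydown_noarg

keymap = Keymap()

@keydown_noarg
def quit_app():
    """Quit the application."""  # register_key asserts the func is documented
    print('quit requested')

keymap.register_key(Key(pygame.KMOD_CTRL, pygame.K_q), quit_app)

# Dispatch a synthetic KEYDOWN event; key_event returns True when handled.
event = pygame.event.Event(pygame.KEYDOWN, key=pygame.K_q, mod=pygame.KMOD_CTRL)
assert keymap.key_event(event)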
import jarray
g = gs.open(gs.args[0])
istates = gs.associated(g, "initialState", True).getInitialStates()
ssrv = gs.service("stable")
def copy_path(values, coreNodes):
n = len(coreNodes)
path = jarray.zeros(n, 'b')
i = 0
for idx in coreNodes:
path[i] = values[idx]
i += 1
return path
def unfold_rec(values, jokers, stack, coreNodes):
if len(jokers) < 1:
path = copy_path(values, coreNodes)
        # Optional duplicate check, disabled by default for speed.
        if False:
for p in stack:
idx = 0
ident = True
for v in p:
if v != path[idx]:
ident = False
break
idx += 1
if ident:
return
stack.append( path )
return
idx, mx = jokers[0]
njk = jokers[1:]
for v in xrange(mx):
values[idx] = v
unfold_rec(values, njk, stack, coreNodes)
values[idx] = -1
def unfold(values, maxvalues, stack, coreNodes):
n = len(values)
jokers = [ (idx, maxvalues[idx]+1) for idx in xrange(n) if values[idx] == -1 ]
unfold_rec(values, jokers, stack, coreNodes)
return stack
def find_stable_states(model, nodeOrder):
maxvalues = []
coreNodes = []
inputNodes = []
coreOrder = []
idx = 0
for n in nodeOrder:
if n.isInput():
inputNodes.append(idx)
else:
coreNodes.append(idx)
coreOrder.append(n)
maxvalues.append( n.getMaxValue() )
idx += 1
unfoldNodes = xrange(len(coreNodes))
searcher = ssrv.getStableStateSearcher(model)
searcher.call()
paths = searcher.getPaths()
values = paths.getPath()
stack = []
for l in paths:
path = copy_path(values, coreNodes)
#stack.append(l)
unfold(path, maxvalues, stack, unfoldNodes)
for path in stack:
name = istates.nameState(path, coreOrder)
if name is None:
name = ""
state = ""
for v in path:
if v < 0: state += "*"
else: state += "%d" % v
print name + "\t" + state
# Get stable states for all perturbations
model = g.getModel()
find_stable_states(model, g.getNodeOrder())
|
ComputationalSystemsBiology/GINsimScripts
|
stable_core/stable_core.py
|
Python
|
gpl-3.0
| 2,265 | 0.007947 |
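The joker expansion above leans on Jython's jarray; a pure-Python sketch of the same idea, using plain lists (names illustrative), expands every -1 wildcard over its component's value range:

def unfold(path, maxvalues):
    """Expand every -1 'joker' in path over 0..maxvalues[idx]."""
    jokers = [idx for idx, v in enumerate(path) if v == -1]
    stack = []
    def rec(remaining):
        if not remaining:
            stack.append(list(path))  # snapshot the fully assigned path
            return
        idx = remaining[0]
        for v in range(maxvalues[idx] + 1):
            path[idx] = v
            rec(remaining[1:])
        path[idx] = -1  # restore the joker
    rec(jokers)
    return stack

print(unfold([0, -1], [1, 1]))  # [[0, 0], [0, 1]]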
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "TootList.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
gkoehler/TootList
|
TootList/manage.py
|
Python
|
mit
| 257 | 0 |
import random
from datetime import date, timedelta, datetime
from django.core.urlresolvers import reverse
from ._utils import generate_random_event, TestCase, hours_ago, hours_ahead
from ..models import Event
class EventManagerTestCase(TestCase):
def test_upcoming_future(self):
event_future = generate_random_event(hours_ahead(1), hours_ahead(2))
self.assertTrue(event_future in Event.objects.upcoming())
def test_upcoming_in_progress(self):
event_inprogress = generate_random_event(hours_ago(1), hours_ahead(1))
self.assertTrue(event_inprogress in Event.objects.upcoming())
self.assertTrue(event_inprogress in Event.objects.upcoming(days=1))
def test_upcoming_happened_today(self):
""" don't run this at 12am! go to bed """
event_happened_today = generate_random_event(hours_ago(2),
hours_ago(1))
self.assertTrue(event_happened_today in Event.objects.upcoming())
self.assertTrue(event_happened_today in Event.objects.upcoming(days=0))
self.assertTrue(event_happened_today in Event.objects.upcoming(days=1))
def test_upcoming_happened_yesterday(self):
yesterday_event = generate_random_event(hours_ago(25),
hours_ago(24))
self.assertFalse(yesterday_event in Event.objects.upcoming())
self.assertFalse(yesterday_event in Event.objects.upcoming(days=0))
self.assertFalse(yesterday_event in Event.objects.upcoming(days=1))
def test_upcoming_tmrw(self):
event_tmrw = generate_random_event(hours_ahead(24),
hours_ahead(25))
self.assertFalse(event_tmrw in Event.objects.upcoming(days=0))
self.assertTrue(event_tmrw in Event.objects.upcoming(days=1))
def test_upcoming_3_days(self):
event_3_days = generate_random_event(hours_ahead(24 * 3),
hours_ahead(24 * 3 + 1))
self.assertTrue(event_3_days in Event.objects.upcoming(days=3))
self.assertFalse(event_3_days in Event.objects.upcoming(days=2))
def test_upcoming_asc_order(self):
events = [generate_random_event(hours_ago(i), hours_ago(i + 1))
for i in random.sample(xrange(-48, 48), 10)]
upcoming = list(Event.objects.upcoming())
self.assertTrue(upcoming == sorted(upcoming,
key=lambda e: e.start_date))
def test_upcoming_no_site(self):
event = generate_random_event(hours_ahead(1), hours_ahead(2))
self.assertTrue(event in Event.on_site.upcoming())
event.sites.clear()
self.assertFalse(event in Event.on_site.upcoming())
|
armstrong/armstrong.apps.events
|
armstrong/apps/events/tests/managers.py
|
Python
|
apache-2.0
| 2,622 | 0.002288 |
"""
Iterator based sre token scanner
"""
import sre_parse, sre_compile, sre_constants
from sre_constants import BRANCH, SUBPATTERN
from re import VERBOSE, MULTILINE, DOTALL
import re
__all__ = ['Scanner', 'pattern']
FLAGS = (VERBOSE | MULTILINE | DOTALL)
class Scanner(object):
def __init__(self, lexicon, flags=FLAGS):
self.actions = [None]
# combine phrases into a compound pattern
s = sre_parse.Pattern()
s.flags = flags
p = []
for idx, token in enumerate(lexicon):
phrase = token.pattern
try:
subpattern = sre_parse.SubPattern(s,
[(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
except sre_constants.error:
raise
p.append(subpattern)
self.actions.append(token)
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def iterscan(self, string, idx=0, context=None):
"""
Yield match, end_idx for each match
"""
match = self.scanner.scanner(string, idx).match
actions = self.actions
lastend = idx
end = len(string)
while True:
m = match()
if m is None:
break
matchbegin, matchend = m.span()
if lastend == matchend:
break
action = actions[m.lastindex]
if action is not None:
rval, next_pos = action(m, context)
if next_pos is not None and next_pos != matchend:
# "fast forward" the scanner
matchend = next_pos
match = self.scanner.scanner(string, matchend).match
yield rval, matchend
lastend = matchend
def pattern(pattern, flags=FLAGS):
def decorator(fn):
fn.pattern = pattern
fn.regex = re.compile(pattern, flags)
return fn
return decorator
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-0.96/django/utils/simplejson/scanner.py
|
Python
|
bsd-3-clause
| 2,009 | 0.002987 |
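A usage sketch for the Scanner above, assuming the Python 2-era sre internals this module targets; the token functions are illustrative. Each action receives (match, context) and returns (value, next_position); returning None for next_position continues from the match end:

@pattern(r'\d+')
def number(match, context):
    # Return the parsed value; None means resume at the match end.
    return int(match.group()), None

@pattern(r'\s+')
def whitespace(match, context):
    return None, None

scanner = Scanner([number, whitespace])
print(list(scanner.iterscan('12 34')))  # [(12, 2), (None, 3), (34, 5)]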
from django.conf.urls import patterns, url
import views as views
urlpatterns = patterns('',
#url(r'^$', views.index, name='index'),
#url(r'index.html', views.index, name='index')
)
|
jxp360/golfapp
|
golfapp/apps/piffycup/urls.py
|
Python
|
gpl-2.0
| 191 | 0.026178 |
import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import BasecampProvider
class BasecampOAuth2Adapter(OAuth2Adapter):
provider_id = BasecampProvider.id
access_token_url = 'https://launchpad.37signals.com/authorization/token?type=web_server' # noqa
authorize_url = 'https://launchpad.37signals.com/authorization/new'
profile_url = 'https://launchpad.37signals.com/authorization.json'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(BasecampOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(BasecampOAuth2Adapter)
|
Alexander-M-Waldman/local_currency_site
|
lib/python2.7/site-packages/allauth/socialaccount/providers/basecamp/views.py
|
Python
|
gpl-3.0
| 1,123 | 0 |
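A hedged settings sketch for enabling this provider, following the usual allauth convention of listing the provider package in INSTALLED_APPS; the exact app list varies by project:

# settings.py (sketch)
INSTALLED_APPS = [
    'django.contrib.sites',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.basecamp',  # enables BasecampProvider above
]
SITE_ID = 1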
from traceback import format_exc
from django.core.management.base import BaseCommand
from ...toolbox import send_scheduled_messages
class Command(BaseCommand):
help = 'Sends scheduled messages (both in pending and error statuses).'
def add_arguments(self, parser):
parser.add_argument(
'--priority', action='store', dest='priority', default=None,
            help='Allows filtering scheduled messages by a priority number. Defaults to None.')
def handle(self, *args, **options):
priority = options.get('priority', None)
priority_str = ''
if priority is not None:
priority_str = f'with priority {priority} '
        self.stdout.write(f'Sending scheduled messages {priority_str}...\n')
try:
send_scheduled_messages(priority=priority)
except Exception as e:
self.stderr.write(self.style.ERROR(f'Error on send: {e}\n{format_exc()}'))
else:
self.stdout.write('Sending done.\n')
|
idlesign/django-sitemessage
|
sitemessage/management/commands/sitemessage_send_scheduled.py
|
Python
|
bsd-3-clause
| 1,014 | 0.001972 |
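The command can also be driven programmatically; a sketch using Django's call_command, with the priority value illustrative:

from django.core.management import call_command

# Equivalent to: python manage.py sitemessage_send_scheduled --priority 1
call_command('sitemessage_send_scheduled', priority='1')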
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import requests
import os
from optparse import make_option
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.conf import settings
from wger.exercises.models import Exercise, ExerciseImage
class Command(BaseCommand):
'''
Download exercise images from wger.de and updates the local database
The script assumes that the local IDs correspond to the remote ones, which
is the case if the user installed the exercises from the JSON fixtures.
Otherwise, the exercise is simply skipped
'''
option_list = BaseCommand.option_list + (
make_option('--remote-url',
action='store',
dest='remote_url',
default='https://wger.de',
help='Remote URL to fetch the exercises from (default: https://wger.de)'),
)
help = ('Download exercise images from wger.de and update the local database\n'
'\n'
'ATTENTION: The script will download the images from the server and add them\n'
' to your local exercises. The exercises are identified by\n'
' their UUID field, if you manually edited or changed it\n'
' the script will not be able to match them.')
def handle(self, *args, **options):
if not settings.MEDIA_ROOT:
raise ImproperlyConfigured('Please set MEDIA_ROOT in your settings file')
remote_url = options['remote_url']
try:
val = URLValidator()
val(remote_url)
except ValidationError:
raise CommandError('Please enter a valid URL')
exercise_api = "{0}/api/v2/exercise/?limit=999"
image_api = "{0}/api/v2/exerciseimage/?exercise={1}"
thumbnail_api = "{0}/api/v2/exerciseimage/{1}/thumbnails/"
# Get all exercises
result = requests.get(exercise_api.format(remote_url)).json()
for exercise_json in result['results']:
exercise_name = exercise_json['name'].encode('utf-8')
exercise_uuid = exercise_json['uuid']
exercise_id = exercise_json['id']
self.stdout.write('')
self.stdout.write(u"*** Processing {0} (ID: {1}, UUID: {2})".format(exercise_name,
exercise_id,
exercise_uuid))
try:
exercise = Exercise.objects.get(uuid=exercise_uuid)
except Exercise.DoesNotExist:
self.stdout.write(' Remote exercise not found in local DB, skipping...')
continue
# Get all images
images = requests.get(image_api.format(remote_url, exercise_id)).json()
if images['count']:
for image_json in images['results']:
image_id = image_json['id']
result = requests.get(thumbnail_api.format(remote_url, image_id)).json()
image_name = os.path.basename(result['original'])
self.stdout.write(' Fetching image {0} - {1}'.format(image_id, image_name))
try:
image = ExerciseImage.objects.get(pk=image_id)
self.stdout.write(' --> Image already present locally, skipping...')
continue
except ExerciseImage.DoesNotExist:
self.stdout.write(' --> Image not found in local DB, creating now...')
image = ExerciseImage()
image.pk = image_id
# Save the downloaded image, see link for details
# http://stackoverflow.com/questions/1308386/programmatically-saving-image-to-
retrieved_image = requests.get(result['original'])
img_temp = NamedTemporaryFile(delete=True)
img_temp.write(retrieved_image.content)
img_temp.flush()
image.exercise = exercise
image.is_main = image_json['is_main']
image.status = image_json['status']
image.image.save(
os.path.basename(image_name),
File(img_temp),
)
image.save()
else:
self.stdout.write(' No images for this exercise, nothing to do')
|
DeveloperMal/wger
|
wger/exercises/management/commands/download-exercise-images.py
|
Python
|
agpl-3.0
| 5,449 | 0.002936 |
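A sketch of invoking the importer programmatically; the remote URL shown is the option's documented default:

from django.core.management import call_command

# Same as: python manage.py download-exercise-images --remote-url https://wger.de
call_command('download-exercise-images', remote_url='https://wger.de')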
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Topological sort """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
__all__ = ["DagError", "assert_dag", "topological_sort"]
class DagError(Exception):
""" Dag Exception """
def __init__(self, node, parent, result):
""" DagError Init """
Exception.__init__(self)
self.node = node
self.parent = parent
self.result = result
def __str__(self):
""" String Representation """
return "Circular dependency error: Starting from '%s', node '%s' depends on '%s', complete path %s" \
% (self.node, self.parent, self.node, self.result)
def assert_dag(data):
"""
Check if data is a dag
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )})
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )})
Traceback (most recent call last):
...
DagError: Circular dependency error: Starting from 'e', node 'e' depends on 'e', complete path []
"""
for node, _ in data.items():
_topological_sort(data, node, node, True)
def topological_sort(data, heads):
"""
Topological sort
    data should be a dictionary like this (it's a dag):
{
'a' : ( 'b', 'c', 'd' ),
'b' : ( 'e', 'c' )
}
    heads are the tops of the dag; the result will include all specified heads and their deps.
    This function returns a list. The head will be the last element.
    Warning: this sort always finds a solution even if data is not a dag!
    If a depends on b and b depends on a, the solution is [ a, b ].
    This is OK in our case but could be a problem in other situations
    (use the result and you will see whether it works!).
>>> topological_sort({
... 'head' : ['telepathe', 'opennao-tools', 'naoqi'],
... 'toolchain' : [],
... 'python-pc' : ['toolchain'],
... 'telepathe' : ['naoqi'],
... 'qt-pc' : ['toolchain'],
... 'opennao-tools': ['toolchain'],
... 'naoqi' : ['qt-pc', 'python-pc', 'streamer', 'toolchain']}, 'head' )
['toolchain', 'qt-pc', 'python-pc', 'streamer', 'naoqi', 'telepathe', 'opennao-tools', 'head']
>>> topological_sort({
... 'a' : ( 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' )}, 'a')
['e', 'c', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'b' ),
... 'b' : ( 'a' ),
... }, 'a')
['b', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, [ 'a', 'q' ])
['g', 'c', 'e', 'b', 'd', 'a', 'u', 'y', 'o', 'i', 'q']
"""
if isinstance(heads, list):
data['internalfakehead'] = heads
head = 'internalfakehead'
result = _topological_sort(data, head, head)
return [x for x in result if x != 'internalfakehead']
head = heads
return _topological_sort(data, head, head)
def _topological_sort(data, head, top_node, raise_exception=False, result=None, visited=None):
""" Internal function """
if not result:
result = []
if not visited:
visited = []
deps = data.get(head, list())
if head in visited:
if head == top_node and raise_exception:
raise DagError(head, head, result)
return result
visited.append(head)
for i in deps:
try:
result.index(i)
except ValueError:
# the item does not exist
result = _topological_sort(data, i, top_node, raise_exception, result, visited)
result.append(head)
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
|
aldebaran/qibuild
|
python/qisys/sort.py
|
Python
|
bsd-3-clause
| 4,651 | 0.00172 |