# Author: Ovidiu Predescu
# Date: July 2011
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unittest for the twisted-style reactor.
"""
from __future__ import absolute_import, division, print_function, with_statement
import logging
import os
import shutil
import signal
import sys
import tempfile
import threading
import warnings
try:
import fcntl
from twisted.internet.defer import Deferred, inlineCallbacks, returnValue
from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor
from twisted.internet.protocol import Protocol
from twisted.python import log
from tornado.platform.twisted import TornadoReactor, TwistedIOLoop
from zope.interface import implementer
have_twisted = True
except ImportError:
have_twisted = False
# The core of Twisted 12.3.0 is available on python 3, but twisted.web is not,
# so test for it separately.
try:
from twisted.web.client import Agent, readBody
from twisted.web.resource import Resource
from twisted.web.server import Site
# As of Twisted 15.0.0, twisted.web is present but fails our
# tests due to internal str/bytes errors.
have_twisted_web = sys.version_info < (3,)
except ImportError:
have_twisted_web = False
try:
import thread # py2
except ImportError:
import _thread as thread # py3
from tornado.escape import utf8
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.platform.select import SelectIOLoop
from tornado.testing import bind_unused_port
from tornado.test.util import unittest
from tornado.util import import_object
from tornado.web import RequestHandler, Application
skipIfNoTwisted = unittest.skipUnless(have_twisted,
"twisted module not present")
skipIfPy26 = unittest.skipIf(sys.version_info < (2, 7),
"twisted incompatible with singledispatch in py26")
def save_signal_handlers():
saved = {}
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
saved[sig] = signal.getsignal(sig)
if "twisted" in repr(saved):
if not issubclass(IOLoop.configured_class(), TwistedIOLoop):
# when the global ioloop is twisted, we expect the signal
# handlers to be installed. Otherwise, it means we're not
# cleaning up after twisted properly.
raise Exception("twisted signal handlers already installed")
return saved
def restore_signal_handlers(saved):
for sig, handler in saved.items():
signal.signal(sig, handler)
class ReactorTestCase(unittest.TestCase):
def setUp(self):
self._saved_signals = save_signal_handlers()
self._io_loop = IOLoop()
self._reactor = TornadoReactor(self._io_loop)
def tearDown(self):
self._io_loop.close(all_fds=True)
restore_signal_handlers(self._saved_signals)
@skipIfNoTwisted
class ReactorWhenRunningTest(ReactorTestCase):
def test_whenRunning(self):
self._whenRunningCalled = False
self._anotherWhenRunningCalled = False
self._reactor.callWhenRunning(self.whenRunningCallback)
self._reactor.run()
self.assertTrue(self._whenRunningCalled)
self.assertTrue(self._anotherWhenRunningCalled)
def whenRunningCallback(self):
self._whenRunningCalled = True
self._reactor.callWhenRunning(self.anotherWhenRunningCallback)
self._reactor.stop()
def anotherWhenRunningCallback(self):
self._anotherWhenRunningCalled = True
@skipIfNoTwisted
class ReactorCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._laterCalled = False
self._now = self._reactor.seconds()
self._timeout = 0.001
dc = self._reactor.callLater(self._timeout, self.callLaterCallback)
self.assertEqual(self._reactor.getDelayedCalls(), [dc])
self._reactor.run()
self.assertTrue(self._laterCalled)
self.assertTrue(self._called - self._now > self._timeout)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback(self):
self._laterCalled = True
self._called = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorTwoCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._later1Called = False
self._later2Called = False
self._now = self._reactor.seconds()
self._timeout1 = 0.0005
dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1)
self._timeout2 = 0.001
dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2)
self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or
self._reactor.getDelayedCalls() == [dc2, dc1])
self._reactor.run()
self.assertTrue(self._later1Called)
self.assertTrue(self._later2Called)
self.assertTrue(self._called1 - self._now > self._timeout1)
self.assertTrue(self._called2 - self._now > self._timeout2)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback1(self):
self._later1Called = True
self._called1 = self._reactor.seconds()
def callLaterCallback2(self):
self._later2Called = True
self._called2 = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorCallFromThreadTest(ReactorTestCase):
def setUp(self):
super(ReactorCallFromThreadTest, self).setUp()
self._mainThread = thread.get_ident()
def tearDown(self):
self._thread.join()
super(ReactorCallFromThreadTest, self).tearDown()
def _newThreadRun(self):
self.assertNotEqual(self._mainThread, thread.get_ident())
if hasattr(self._thread, 'ident'): # new in python 2.6
self.assertEqual(self._thread.ident, thread.get_ident())
self._reactor.callFromThread(self._fnCalledFromThread)
def _fnCalledFromThread(self):
self.assertEqual(self._mainThread, thread.get_ident())
self._reactor.stop()
def _whenRunningCallback(self):
self._thread = threading.Thread(target=self._newThreadRun)
self._thread.start()
def testCallFromThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
@skipIfNoTwisted
class ReactorCallInThread(ReactorTestCase):
def setUp(self):
super(ReactorCallInThread, self).setUp()
self._mainThread = thread.get_ident()
def _fnCalledInThread(self, *args, **kwargs):
self.assertNotEqual(thread.get_ident(), self._mainThread)
self._reactor.callFromThread(lambda: self._reactor.stop())
def _whenRunningCallback(self):
self._reactor.callInThread(self._fnCalledInThread)
def testCallInThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
class Reader(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Reader"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def readConnectionLost(self, reason):
self.close()
def connectionLost(self, reason):
self.close()
def doRead(self):
self._callback(self._fd)
if have_twisted:
Reader = implementer(IReadDescriptor)(Reader)
class Writer(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Writer"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def connectionLost(self, reason):
self.close()
def doWrite(self):
self._callback(self._fd)
if have_twisted:
Writer = implementer(IWriteDescriptor)(Writer)
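# Note (added for clarity): implementer() is applied after the class
# bodies rather than as a @implementer class decorator so that Reader and
# Writer can still be defined when Twisted -- and with it zope.interface --
# is not installed (see the have_twisted guard above).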
@skipIfNoTwisted
class ReactorReaderWriterTest(ReactorTestCase):
def _set_nonblocking(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def setUp(self):
super(ReactorReaderWriterTest, self).setUp()
r, w = os.pipe()
self._set_nonblocking(r)
self._set_nonblocking(w)
set_close_exec(r)
set_close_exec(w)
self._p1 = os.fdopen(r, "rb", 0)
self._p2 = os.fdopen(w, "wb", 0)
def tearDown(self):
super(ReactorReaderWriterTest, self).tearDown()
self._p1.close()
self._p2.close()
def _testReadWrite(self):
"""
In this test the writer writes an 'x' to its fd. The reader
reads it, checks the value, and ends the test.
"""
self.shouldWrite = True
def checkReadInput(fd):
self.assertEquals(fd.read(1), b'x')
self._reactor.stop()
def writeOnce(fd):
if self.shouldWrite:
self.shouldWrite = False
fd.write(b'x')
self._reader = Reader(self._p1, checkReadInput)
self._writer = Writer(self._p2, writeOnce)
self._reactor.addWriter(self._writer)
# Test that adding the reader twice adds it only once to
# IOLoop.
self._reactor.addReader(self._reader)
self._reactor.addReader(self._reader)
def testReadWrite(self):
self._reactor.callWhenRunning(self._testReadWrite)
self._reactor.run()
def _testNoWriter(self):
"""
In this test we have no writer. Make sure the reader doesn't
read anything.
"""
def checkReadInput(fd):
self.fail("Must not be called.")
def stopTest():
# Close the writer here since the IOLoop doesn't know
# about it.
self._writer.close()
self._reactor.stop()
self._reader = Reader(self._p1, checkReadInput)
# We create a writer, but it should never be invoked.
self._writer = Writer(self._p2, lambda fd: fd.write('x'))
# Test that adding and removing the writer leaves us with no writer.
self._reactor.addWriter(self._writer)
self._reactor.removeWriter(self._writer)
# Test that adding and removing the reader doesn't cause
# unintended effects.
self._reactor.addReader(self._reader)
# Wake up after a moment and stop the test
self._reactor.callLater(0.001, stopTest)
def testNoWriter(self):
self._reactor.callWhenRunning(self._testNoWriter)
self._reactor.run()
# Test various combinations of twisted and tornado http servers,
# http clients, and event loop interfaces.
@skipIfNoTwisted
@unittest.skipIf(not have_twisted_web, 'twisted web not present')
class CompatibilityTests(unittest.TestCase):
def setUp(self):
self.saved_signals = save_signal_handlers()
self.io_loop = IOLoop()
self.io_loop.make_current()
self.reactor = TornadoReactor(self.io_loop)
def tearDown(self):
self.reactor.disconnectAll()
self.io_loop.clear_current()
self.io_loop.close(all_fds=True)
restore_signal_handlers(self.saved_signals)
def start_twisted_server(self):
class HelloResource(Resource):
isLeaf = True
def render_GET(self, request):
return "Hello from twisted!"
site = Site(HelloResource())
port = self.reactor.listenTCP(0, site, interface='127.0.0.1')
self.twisted_port = port.getHost().port
def start_tornado_server(self):
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello from tornado!")
app = Application([('/', HelloHandler)],
log_function=lambda x: None)
server = HTTPServer(app, io_loop=self.io_loop)
sock, self.tornado_port = bind_unused_port()
server.add_sockets([sock])
def run_ioloop(self):
self.stop_loop = self.io_loop.stop
self.io_loop.start()
self.reactor.fireSystemEvent('shutdown')
def run_reactor(self):
self.stop_loop = self.reactor.stop
self.stop = self.reactor.stop
self.reactor.run()
def tornado_fetch(self, url, runner):
responses = []
client = AsyncHTTPClient(self.io_loop)
def callback(response):
responses.append(response)
self.stop_loop()
client.fetch(url, callback=callback)
runner()
self.assertEqual(len(responses), 1)
responses[0].rethrow()
return responses[0]
def twisted_fetch(self, url, runner):
# http://twistedmatrix.com/documents/current/web/howto/client.html
chunks = []
client = Agent(self.reactor)
d = client.request(b'GET', utf8(url))
class Accumulator(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, data):
chunks.append(data)
def connectionLost(self, reason):
self.finished.callback(None)
def callback(response):
finished = Deferred()
response.deliverBody(Accumulator(finished))
return finished
d.addCallback(callback)
def shutdown(failure):
if hasattr(self, 'stop_loop'):
self.stop_loop()
elif failure is not None:
# loop hasn't been initialized yet; try our best to
# get an error message out. (the runner() interaction
# should probably be refactored).
try:
failure.raiseException()
except:
logging.error('exception before starting loop', exc_info=True)
d.addBoth(shutdown)
runner()
self.assertTrue(chunks)
return ''.join(chunks)
def twisted_coroutine_fetch(self, url, runner):
body = [None]
@gen.coroutine
def f():
# This is simpler than the non-coroutine version, but it cheats
# by reading the body in one blob instead of streaming it with
# a Protocol.
client = Agent(self.reactor)
response = yield client.request(b'GET', utf8(url))
with warnings.catch_warnings():
# readBody has a buggy DeprecationWarning in Twisted 15.0:
# https://twistedmatrix.com/trac/changeset/43379
warnings.simplefilter('ignore', category=DeprecationWarning)
body[0] = yield readBody(response)
self.stop_loop()
self.io_loop.add_callback(f)
runner()
return body[0]
def testTwistedServerTornadoClientIOLoop(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_ioloop)
self.assertEqual(response.body, 'Hello from twisted!')
def testTwistedServerTornadoClientReactor(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_reactor)
self.assertEqual(response.body, 'Hello from twisted!')
def testTornadoServerTwistedClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
def testTornadoServerTwistedClientReactor(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor)
self.assertEqual(response, 'Hello from tornado!')
@skipIfPy26
def testTornadoServerTwistedCoroutineClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_coroutine_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
@skipIfNoTwisted
@skipIfPy26
class ConvertDeferredTest(unittest.TestCase):
def test_success(self):
@inlineCallbacks
def fn():
if False:
# inlineCallbacks doesn't work with regular functions;
# must have a yield even if it's unreachable.
yield
returnValue(42)
f = gen.convert_yielded(fn())
self.assertEqual(f.result(), 42)
def test_failure(self):
@inlineCallbacks
def fn():
if False:
yield
1 / 0
f = gen.convert_yielded(fn())
with self.assertRaises(ZeroDivisionError):
f.result()
if have_twisted:
# Import and run as much of twisted's test suite as possible.
# This is unfortunately rather dependent on implementation details,
# but there doesn't appear to be a clean all-in-one conformance test
# suite for reactors.
#
# This is a list of all test suites using the ReactorBuilder
# available in Twisted 11.0.0 and 11.1.0 (and a blacklist of
# specific test methods to be disabled).
twisted_tests = {
'twisted.internet.test.test_core.ObjectModelIntegrationTest': [],
'twisted.internet.test.test_core.SystemEventTestsBuilder': [
'test_iterate', # deliberately not supported
# Fails on TwistedIOLoop and AsyncIOLoop.
'test_runAfterCrash',
],
'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [
"test_lostFileDescriptor", # incompatible with epoll and kqueue
],
'twisted.internet.test.test_process.ProcessTestsBuilder': [
# Only work as root. Twisted's "skip" functionality works
# with py27+, but not unittest2 on py26.
'test_changeGID',
'test_changeUID',
# This test sometimes fails with EPIPE on a call to
# kqueue.control. Happens consistently for me with
# trollius but not asyncio or other IOLoops.
'test_childConnectionLost',
],
# Process tests appear to work on OSX 10.7, but not 10.6
# 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
# 'test_systemCallUninterruptedByChildExit',
# ],
'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
'test_badContext', # ssl-related; see also SSLClientTestsMixin
],
'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
# These use link-local addresses and cause firewall prompts on mac
'test_buildProtocolIPv6AddressScopeID',
'test_portGetHostOnIPv6ScopeID',
'test_serverGetHostOnIPv6ScopeID',
'test_serverGetPeerOnIPv6ScopeID',
],
'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
'twisted.internet.test.test_tcp.WriteSequenceTests': [],
'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
'twisted.internet.test.test_threads.ThreadTestsBuilder': [],
'twisted.internet.test.test_time.TimeTestsBuilder': [],
# Extra third-party dependencies (pyOpenSSL)
# 'twisted.internet.test.test_tls.SSLClientTestsMixin': [],
'twisted.internet.test.test_udp.UDPServerTestsBuilder': [],
'twisted.internet.test.test_unix.UNIXTestsBuilder': [
# Platform-specific. These tests would be skipped automatically
# if we were running twisted's own test runner.
'test_connectToLinuxAbstractNamespace',
'test_listenOnLinuxAbstractNamespace',
# These tests use twisted's sendmsg.c extension and sometimes
# fail with what looks like uninitialized memory errors
# (more common on pypy than cpython, but I've seen it on both)
'test_sendFileDescriptor',
'test_sendFileDescriptorTriggersPauseProducing',
'test_descriptorDeliveredBeforeBytes',
'test_avoidLeakingFileDescriptors',
],
'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [
'test_listenOnLinuxAbstractNamespace',
],
'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [],
}
if sys.version_info >= (3,):
# In Twisted 15.2.0 on Python 3.4, the process tests will try to run
# but fail, due in part to interactions between Tornado's strict
# warnings-as-errors policy and Twisted's own warning handling
# (it was not obvious how to configure the warnings module to
# reconcile the two), and partly due to what looks like a packaging
# error (process_cli.py missing). For now, just skip it.
del twisted_tests['twisted.internet.test.test_process.ProcessTestsBuilder']
for test_name, blacklist in twisted_tests.items():
try:
test_class = import_object(test_name)
except (ImportError, AttributeError):
continue
for test_func in blacklist:
if hasattr(test_class, test_func):
# The test_func may be defined in a mixin, so clobber
# it instead of delattr()
setattr(test_class, test_func, lambda self: None)
def make_test_subclass(test_class):
class TornadoTest(test_class):
_reactors = ["tornado.platform.twisted._TestReactor"]
def setUp(self):
# Twisted's tests expect to be run from a temporary
# directory; they create files in their working directory
# and don't always clean up after themselves.
self.__curdir = os.getcwd()
self.__tempdir = tempfile.mkdtemp()
os.chdir(self.__tempdir)
super(TornadoTest, self).setUp()
def tearDown(self):
super(TornadoTest, self).tearDown()
os.chdir(self.__curdir)
shutil.rmtree(self.__tempdir)
def flushWarnings(self, *args, **kwargs):
# This is a hack because Twisted and Tornado have
# differing approaches to warnings in tests.
# Tornado sets up a global set of warnings filters
# in runtests.py, while Twisted patches the filter
# list in each test. The net effect is that
# Twisted's tests run with Tornado's increased
# strictness (BytesWarning and ResourceWarning are
# enabled) but without our filter rules to ignore those
# warnings from Twisted code.
filtered = []
for w in super(TornadoTest, self).flushWarnings(
*args, **kwargs):
if w['category'] in (BytesWarning, ResourceWarning):
continue
filtered.append(w)
return filtered
def buildReactor(self):
self.__saved_signals = save_signal_handlers()
return test_class.buildReactor(self)
def unbuildReactor(self, reactor):
test_class.unbuildReactor(self, reactor)
# Clean up file descriptors (especially epoll/kqueue
# objects) eagerly instead of leaving them for the
# GC. Unfortunately we can't do this in reactor.stop
# since twisted expects to be able to unregister
# connections in a post-shutdown hook.
reactor._io_loop.close(all_fds=True)
restore_signal_handlers(self.__saved_signals)
TornadoTest.__name__ = test_class.__name__
return TornadoTest
test_subclass = make_test_subclass(test_class)
globals().update(test_subclass.makeTestCaseClasses())
# Since we're not using twisted's test runner, it's tricky to get
# logging set up well. Most of the time it's easiest to just
# leave it turned off, but while working on these tests you may want
# to uncomment one of the other lines instead.
log.defaultObserver.stop()
# import sys; log.startLogging(sys.stderr, setStdout=0)
# log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
# import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
# Twisted recently introduced a new logger; disable that one too.
try:
from twisted.logger import globalLogBeginner
except ImportError:
pass
else:
globalLogBeginner.beginLoggingTo([])
if have_twisted:
class LayeredTwistedIOLoop(TwistedIOLoop):
"""Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
This is of course silly, but is useful for testing purposes to make
sure we're implementing both sides of the various interfaces
correctly. In some tests another TornadoReactor is layered on top
of the whole stack.
"""
def initialize(self, **kwargs):
# When configured to use LayeredTwistedIOLoop we can't easily
# get the next-best IOLoop implementation, so use the lowest common
# denominator.
self.real_io_loop = SelectIOLoop(make_current=False)
reactor = TornadoReactor(io_loop=self.real_io_loop)
super(LayeredTwistedIOLoop, self).initialize(reactor=reactor, **kwargs)
self.add_callback(self.make_current)
def close(self, all_fds=False):
super(LayeredTwistedIOLoop, self).close(all_fds=all_fds)
# HACK: This is the same thing that test_class.unbuildReactor does.
for reader in self.reactor._internalReaders:
self.reactor.removeReader(reader)
reader.connectionLost(None)
self.real_io_loop.close(all_fds=all_fds)
def stop(self):
# One of twisted's tests fails if I don't delay crash()
# until the reactor has started, but if I move this to
# TwistedIOLoop then the tests fail when I'm *not* running
# tornado-on-twisted-on-tornado. I'm clearly missing something
# about the startup/crash semantics, but since stop and crash
# are really only used in tests it doesn't really matter.
def f():
self.reactor.crash()
# Become current again on restart. This is needed to
# override real_io_loop's claim to being the current loop.
self.add_callback(self.make_current)
self.reactor.callWhenRunning(f)
if __name__ == "__main__":
unittest.main()
# ==== file: tornado/test/twisted_test.py (repo: tao12345666333/tornado-zh, license: mit) ====
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import os
import marshal
import cPickle
import array
class HuffmanNode(object):
recurPrint = False
def __init__(self, ch=None, fq=None, lnode=None, rnode=None, parent=None):
self.L = lnode
self.R = rnode
self.p = parent
self.c = ch
self.fq = fq
def __repr__(self):
if HuffmanNode.recurPrint:
lnode = self.L if self.L else '#'
rnode = self.R if self.R else '#'
return ''.join( ('(%s:%d)'%(self.c, self.fq), str(lnode), str(rnode) ) )
else:
return '(%s:%d)'%(self.c, self.fq)
def __cmp__(self, other):
if not isinstance(other, HuffmanNode):
return super(HuffmanNode, self).__cmp__(other)
return cmp(self.fq, other.fq)
def _pop_first_two_nodes(nodes):
if len(nodes)>1:
first=nodes.pop(0)
second=nodes.pop(0)
return first, second
else:
#print "[_pop_first_two_nodes] nodes' length <= 1"
return nodes[0], None
def _build_tree(nodes):
nodes.sort()
while(True):
first, second = _pop_first_two_nodes(nodes)
if not second:
return first
parent = HuffmanNode(lnode=first, rnode=second, fq=first.fq+second.fq)
first.p = parent
second.p = parent
nodes.insert(0, parent)
nodes.sort()
def _gen_huffman_code(node, dict_codes, buffer_stack=[]):
if not node.L and not node.R:
dict_codes[node.c] = ''.join(buffer_stack)
return
buffer_stack.append('0')
_gen_huffman_code(node.L, dict_codes, buffer_stack)
buffer_stack.pop()
buffer_stack.append('1')
_gen_huffman_code(node.R, dict_codes, buffer_stack)
buffer_stack.pop()
def _cal_freq(long_str):
from collections import defaultdict
d = defaultdict(int)
for c in long_str:
d[c] += 1
return d
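# Illustration only (added; not part of the original module): building a
# code map for a tiny string with the helpers above. The input text is
# arbitrary; run by hand to inspect the resulting codes.
def _demo_code_map(text='abracadabra'):
    freqs = _cal_freq(text)
    root = _build_tree(
        [HuffmanNode(ch=ch, fq=fq) for ch, fq in freqs.iteritems()])
    codes = {}
    _gen_huffman_code(root, codes)
    # The most frequent symbol ('a' here) ends up with the shortest code.
    return codes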
MAX_BITS = 8
class Encoder(object):
def __init__(self, filename_or_long_str=None):
if filename_or_long_str:
if os.path.exists(filename_or_long_str):
self.encode(filename_or_long_str)
else:
#print '[Encoder] take \'%s\' as a string to be encoded.'\
# % filename_or_long_str
self.long_str = filename_or_long_str
def __get_long_str(self):
return self._long_str
def __set_long_str(self, s):
self._long_str = s
if s:
self.root = self._get_tree_root()
self.code_map = self._get_code_map()
self.array_codes, self.code_length = self._encode()
long_str = property(__get_long_str, __set_long_str)
def _get_tree_root(self):
d = _cal_freq(self.long_str)
return _build_tree(
[HuffmanNode(ch=ch, fq=int(fq)) for ch, fq in d.iteritems()]
)
def _get_code_map(self):
a_dict={}
_gen_huffman_code(self.root, a_dict)
return a_dict
def _encode(self):
array_codes = array.array('B')
code_length = 0
buff, length = 0, 0
for ch in self.long_str:
code = self.code_map[ch]
for bit in list(code):
if bit=='1':
buff = (buff << 1) | 0x01
else: # bit == '0'
buff = (buff << 1)
length += 1
if length == MAX_BITS:
array_codes.extend([buff])
buff, length = 0, 0
code_length += len(code)
if length != 0:
array_codes.extend([buff << (MAX_BITS-length)])
return array_codes, code_length
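# Worked illustration of the packing above (comments only, added): with
# MAX_BITS == 8, the concatenated codes '101' + '1100' + '1' fill one
# 8-bit buffer, 0b10111001, exactly; a final partial buffer is shifted
# left by (MAX_BITS - length) so the zero padding sits in the low bits.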
def encode(self, filename):
fp = open(filename, 'rb')
self.long_str = fp.read()
fp.close()
def write(self, filename):
if getattr(self, '_long_str', None):
fcompressed = open(filename, 'wb')
# marshal cannot serialize array.array objects, so store the raw
# byte string instead; array.array('B', ...) rebuilds it on read.
marshal.dump(
(cPickle.dumps(self.root), self.code_length, self.array_codes.tostring()),
fcompressed)
fcompressed.close()
else:
print "You haven't set 'long_str' attribute."
class Decoder(object):
def __init__(self, filename_or_raw_str=None):
if filename_or_raw_str:
if os.path.exists(filename_or_raw_str):
filename = filename_or_raw_str
self.read(filename)
else:
print '[Decoder] take \'%s\' as raw string' % filename_or_raw_str
raw_string = filename_or_raw_str
unpickled_root, length, array_codes = marshal.loads(raw_string)
self.root = cPickle.loads(unpickled_root)
self.code_length = length
self.array_codes = array.array('B', array_codes)
def _decode(self):
string_buf = []
total_length = 0
node = self.root
for code in self.array_codes:
buf_length = 0
while (buf_length < MAX_BITS and total_length != self.code_length):
buf_length += 1
total_length += 1
if code >> (MAX_BITS - buf_length) & 1:
node = node.R
if node.c:
string_buf.append(node.c)
node = self.root
else:
node = node.L
if node.c:
string_buf.append(node.c)
node = self.root
return ''.join(string_buf)
def read(self, filename):
fp = open(filename, 'rb')
unpickled_root, length, array_codes = marshal.load(fp)
self.root = cPickle.loads(unpickled_root)
self.code_length = length
self.array_codes = array.array('B', array_codes)
fp.close()
def decode_as(self, filename):
decoded = self._decode()
fout = open(filename, 'wb')
fout.write(decoded)
fout.close()
if __name__=='__main__':
original_file = 'filename.txt'
compressed_file = 'compressed.scw'
decompressed_file = 'filename2.txt'
# first way to use Encoder/Decoder
enc = Encoder(original_file)
enc.write(compressed_file)
dec = Decoder(compressed_file)
dec.decode_as(decompressed_file)
# second way
#enc = Encoder()
#enc.encode(original_file)
#enc.write(compressed_file)
#dec = Decoder()
#dec.read(compressed_file)
#dec.decode_as(decompressed_file)
# ==== file: python/huffman2.py (repo: fepe55/RAMB0, license: mit) ====
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security
# This assumes you don't have SSL set up.
# Note: Code like this poses a security risk (MITM attack) and
# that's the reason why you should never use it for anything else
# besides testing. You have been warned.
libcloud.security.VERIFY_SSL_CERT = False
OpenStack = get_driver(Provider.OPENSTACK)
driver = OpenStack('your_auth_username', 'your_auth_password',
ex_force_auth_url='http://192.168.1.101:5000/v2.0',
ex_force_auth_version='2.0_password')
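# Illustration only (added; not part of the original example): with a
# reachable Keystone endpoint and valid credentials, the driver can now
# be exercised, e.g. by listing the existing nodes.
print(driver.list_nodes())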
# ==== file: docs/examples/compute/openstack_simple.py (repo: Jc2k/libcloud, license: apache-2.0) ====
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
from telemetry.core import os_version as os_version_module
# TODO(rnephew): Since TestConditions are being used for more than
# just story expectations now, this should be decoupled and refactored
# to be clearer.
class _TestCondition(object):
def ShouldDisable(self, platform, finder_options):
raise NotImplementedError
def __str__(self):
raise NotImplementedError
def GetSupportedPlatformNames(self):
"""Returns a set of supported platforms' names."""
raise NotImplementedError
class _TestConditionByPlatformList(_TestCondition):
def __init__(self, platforms, name):
self._platforms = platforms
self._name = name
def ShouldDisable(self, platform, finder_options):
del finder_options # Unused.
return platform.GetOSName() in self._platforms
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
return set(self._platforms)
class _AllTestCondition(_TestCondition):
def ShouldDisable(self, platform, finder_options):
del platform, finder_options # Unused.
return True
def __str__(self):
return 'All'
def GetSupportedPlatformNames(self):
return {'all'}
class _TestConditionAndroidSvelte(_TestCondition):
"""Matches android devices with a svelte (low-memory) build."""
def ShouldDisable(self, platform, finder_options):
del finder_options # Unused.
return platform.GetOSName() == 'android' and platform.IsSvelte()
def __str__(self):
return 'Android Svelte'
def GetSupportedPlatformNames(self):
return {'android'}
class _TestConditionByAndroidModel(_TestCondition):
def __init__(self, model, name=None):
self._model = model
self._name = name if name else model
def ShouldDisable(self, platform, finder_options):
return (platform.GetOSName() == 'android' and
self._model == platform.GetDeviceTypeName())
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
return {'android'}
class _TestConditionAndroidWebview(_TestCondition):
def ShouldDisable(self, platform, finder_options):
return (platform.GetOSName() == 'android' and
finder_options.browser_type.startswith('android-webview'))
def __str__(self):
return 'Android Webview'
def GetSupportedPlatformNames(self):
return {'android'}
class _TestConditionAndroidNotWebview(_TestCondition):
def ShouldDisable(self, platform, finder_options):
return (platform.GetOSName() == 'android' and not
finder_options.browser_type.startswith('android-webview'))
def __str__(self):
return 'Android but not webview'
def GetSupportedPlatformNames(self):
return {'android'}
class _TestConditionByMacVersion(_TestCondition):
def __init__(self, version, name=None):
self._version = version
self._name = name
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
return {'mac'}
def ShouldDisable(self, platform, finder_options):
if platform.GetOSName() != 'mac':
return False
return platform.GetOSVersionDetailString().startswith(self._version)
class _TestConditionByWinVersion(_TestCondition):
def __init__(self, version, name):
self._version = version
self._name = name
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
return {'win'}
def ShouldDisable(self, platform, finder_options):
if platform.GetOSName() != 'win':
return False
return platform.GetOSVersionName() == self._version
class _TestConditionFuchsiaWebEngineShell(_TestCondition):
def ShouldDisable(self, platform, finder_options):
return (platform.GetOSName() == 'fuchsia' and
finder_options.browser_type.startswith('web-engine-shell'))
def __str__(self):
return 'Fuchsia with web-engine-shell'
def GetSupportedPlatformNames(self):
return {'fuchsia', 'fuchsia-board-astro', 'fuchsia-board-sherlock'}
class _TestConditionFuchsiaByBoard(_TestCondition):
def __init__(self, board):
self._board = 'fuchsia-board-' + board
def ShouldDisable(self, platform, finder_options):
return (platform.GetOSName() == 'fuchsia' and
platform.GetDeviceTypeName() == self._board)
def __str__(self):
return 'Fuchsia on ' + self._board
def GetSupportedPlatformNames(self):
return {'fuchsia', self._board}  # self._board already carries the 'fuchsia-board-' prefix
class _TestConditionLogicalAndConditions(_TestCondition):
def __init__(self, conditions, name):
self._conditions = conditions
self._name = name
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
platforms = set()
for cond in self._conditions:
platforms.update(cond.GetSupportedPlatformNames())
return platforms
def ShouldDisable(self, platform, finder_options):
return all(
c.ShouldDisable(platform, finder_options) for c in self._conditions)
class _TestConditionLogicalOrConditions(_TestCondition):
def __init__(self, conditions, name):
self._conditions = conditions
self._name = name
def __str__(self):
return self._name
def GetSupportedPlatformNames(self):
platforms = set()
for cond in self._conditions:
platforms.update(cond.GetSupportedPlatformNames())
return platforms
def ShouldDisable(self, platform, finder_options):
return any(
c.ShouldDisable(platform, finder_options) for c in self._conditions)
ALL = _AllTestCondition()
ALL_MAC = _TestConditionByPlatformList(['mac'], 'Mac')
ALL_WIN = _TestConditionByPlatformList(['win'], 'Win')
WIN_7 = _TestConditionByWinVersion(os_version_module.WIN7, 'Win 7')
WIN_10 = _TestConditionByWinVersion(os_version_module.WIN10, 'Win 10')
ALL_LINUX = _TestConditionByPlatformList(['linux'], 'Linux')
ALL_CHROMEOS = _TestConditionByPlatformList(['chromeos'], 'ChromeOS')
ALL_ANDROID = _TestConditionByPlatformList(['android'], 'Android')
# Fuchsia setup, while similar to mobile, renders Desktop pages.
ALL_DESKTOP = _TestConditionByPlatformList(
['mac', 'linux', 'win', 'chromeos', 'fuchsia'], 'Desktop')
ALL_MOBILE = _TestConditionByPlatformList(['android'], 'Mobile')
ANDROID_NEXUS5 = _TestConditionByAndroidModel('Nexus 5')
_ANDROID_NEXUS5X = _TestConditionByAndroidModel('Nexus 5X')
_ANDROID_NEXUS5XAOSP = _TestConditionByAndroidModel('AOSP on BullHead')
ANDROID_NEXUS5X = _TestConditionLogicalOrConditions(
[_ANDROID_NEXUS5X, _ANDROID_NEXUS5XAOSP], 'Nexus 5X')
_ANDROID_NEXUS6 = _TestConditionByAndroidModel('Nexus 6')
_ANDROID_NEXUS6AOSP = _TestConditionByAndroidModel('AOSP on Shamu')
ANDROID_NEXUS6 = _TestConditionLogicalOrConditions(
[_ANDROID_NEXUS6, _ANDROID_NEXUS6AOSP], 'Nexus 6')
ANDROID_NEXUS6P = _TestConditionByAndroidModel('Nexus 6P')
ANDROID_NEXUS7 = _TestConditionByAndroidModel('Nexus 7')
ANDROID_GO = _TestConditionByAndroidModel('gobo', 'Android Go')
ANDROID_ONE = _TestConditionByAndroidModel('W6210', 'Android One')
ANDROID_SVELTE = _TestConditionAndroidSvelte()
ANDROID_LOW_END = _TestConditionLogicalOrConditions(
[ANDROID_GO, ANDROID_SVELTE, ANDROID_ONE], 'Android Low End')
ANDROID_PIXEL2 = _TestConditionByAndroidModel('Pixel 2')
ANDROID_WEBVIEW = _TestConditionAndroidWebview()
ANDROID_NOT_WEBVIEW = _TestConditionAndroidNotWebview()
# MAC_10_11 Includes:
# Mac 10.11 Perf, Mac Retina Perf, Mac Pro 10.11 Perf, Mac Air 10.11 Perf
MAC_10_11 = _TestConditionByMacVersion('10.11', 'Mac 10.11')
# Mac 10_12 Includes:
# Mac 10.12 Perf, Mac Mini 8GB 10.12 Perf
MAC_10_12 = _TestConditionByMacVersion('10.12', 'Mac 10.12')
ANDROID_NEXUS6_WEBVIEW = _TestConditionLogicalAndConditions(
[ANDROID_NEXUS6, ANDROID_WEBVIEW], 'Nexus6 Webview')
ANDROID_NEXUS5X_WEBVIEW = _TestConditionLogicalAndConditions(
[ANDROID_NEXUS5X, ANDROID_WEBVIEW], 'Nexus5X Webview')
ANDROID_GO_WEBVIEW = _TestConditionLogicalAndConditions(
[ANDROID_GO, ANDROID_WEBVIEW], 'Android Go Webview')
ANDROID_PIXEL2_WEBVIEW = _TestConditionLogicalAndConditions(
[ANDROID_PIXEL2, ANDROID_WEBVIEW], 'Pixel2 Webview')
FUCHSIA_WEB_ENGINE_SHELL = _TestConditionFuchsiaWebEngineShell()
FUCHSIA_ASTRO = _TestConditionFuchsiaByBoard('astro')
FUCHSIA_SHERLOCK = _TestConditionFuchsiaByBoard('sherlock')
EXPECTATION_NAME_MAP = {
'All': ALL,
'Android_Go': ANDROID_GO,
'Android_One': ANDROID_ONE,
'Android_Svelte': ANDROID_SVELTE,
'Android_Low_End': ANDROID_LOW_END,
'Android_Webview': ANDROID_WEBVIEW,
'Android_but_not_webview': ANDROID_NOT_WEBVIEW,
'Mac': ALL_MAC,
'Win': ALL_WIN,
'Win_7': WIN_7,
'Win_10': WIN_10,
'Linux': ALL_LINUX,
'ChromeOS': ALL_CHROMEOS,
'Android': ALL_ANDROID,
'Desktop': ALL_DESKTOP,
'Mobile': ALL_MOBILE,
'Nexus_5': ANDROID_NEXUS5,
'Nexus_5X': ANDROID_NEXUS5X,
'Nexus_6': ANDROID_NEXUS6,
'Nexus_6P': ANDROID_NEXUS6P,
'Nexus_7': ANDROID_NEXUS7,
'Pixel_2': ANDROID_PIXEL2,
'Mac_10.11': MAC_10_11,
'Mac_10.12': MAC_10_12,
'Nexus6_Webview': ANDROID_NEXUS6_WEBVIEW,
'Nexus5X_Webview': ANDROID_NEXUS5X_WEBVIEW,
'Android_Go_Webview': ANDROID_GO_WEBVIEW,
'Pixel2_Webview': ANDROID_PIXEL2_WEBVIEW,
'Fuchsia_WebEngineShell': FUCHSIA_WEB_ENGINE_SHELL,
'Fuchsia_Astro': FUCHSIA_ASTRO,
'Fuchsia_Sherlock': FUCHSIA_SHERLOCK,
}
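# Illustration only -- not part of the Chromium source. A minimal sketch of
# how a condition from EXPECTATION_NAME_MAP might be evaluated; the stub
# platform below is hypothetical and implements just enough of the
# interface used by _TestConditionByPlatformList.
class _StubAndroidPlatform(object):
  def GetOSName(self):
    return 'android'

def _example_should_disable():
  # Platform-list conditions ignore finder_options, so None suffices here.
  return EXPECTATION_NAME_MAP['Android'].ShouldDisable(
      _StubAndroidPlatform(), None)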
# ==== file: telemetry/telemetry/story/expectations.py (repo: catapult-project/catapult, license: bsd-3-clause) ====
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit test template for ONTAP Ansible module '''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_lun_copy \
import NetAppOntapLUNCopy as my_module # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
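# Why these patches (added note): AnsibleModule normally terminates the
# process via exit_json/fail_json, so raising exceptions instead lets
# pytest.raises() capture the module's result payload in-process.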
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None, parm1=None):
''' save arguments '''
self.type = kind
self.parm1 = parm1
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.type == 'destination_vserver':
xml = self.build_lun_info(self.parm1)
self.xml_out = xml
return xml
@staticmethod
def build_lun_info(data):
''' build xml data for lun-info '''
xml = netapp_utils.zapi.NaElement('xml')
attributes = {
'num-records': 1,
}
xml.translate_struct(attributes)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.mock_lun_copy = {
'source_vserver': 'ansible',
'destination_path': '/vol/test/test_copy_dest_dest_new_reviewd_new',
'source_path': '/vol/test/test_copy_1',
'destination_vserver': 'ansible',
'state': 'present'
}
def mock_args(self):
return {
'source_vserver': self.mock_lun_copy['source_vserver'],
'destination_path': self.mock_lun_copy['destination_path'],
'source_path': self.mock_lun_copy['source_path'],
'destination_vserver': self.mock_lun_copy['destination_vserver'],
'state': self.mock_lun_copy['state'],
'hostname': 'hostname',
'username': 'username',
'password': 'password',
}
# self.server = MockONTAPConnection()
def get_lun_copy_mock_object(self, kind=None):
"""
Helper method to return an na_ontap_lun_copy object
:param kind: passes this param to MockONTAPConnection()
:return: na_ontap_lun_copy object
"""
lun_copy_obj = my_module()
lun_copy_obj.autosupport_log = Mock(return_value=None)
if kind is None:
lun_copy_obj.server = MockONTAPConnection()
else:
lun_copy_obj.server = MockONTAPConnection(kind=kind)
return lun_copy_obj
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
my_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_create_error_missing_param(self):
''' Test if create throws an error if required param 'destination_vserver' is not specified'''
data = self.mock_args()
del data['destination_vserver']
set_module_args(data)
with pytest.raises(AnsibleFailJson) as exc:
self.get_lun_copy_mock_object('lun_copy').copy_lun()
msg = 'Error: Missing one or more required parameters for copying lun: ' \
'destination_path, source_path, destination_path'
expected = sorted(msg.split(','))
received = sorted(exc.value.args[0]['msg'].split(','))
assert expected == received
def test_successful_copy(self):
''' Test successful create '''
# data = self.mock_args()
set_module_args(self.mock_args())
with pytest.raises(AnsibleExitJson) as exc:
self.get_lun_copy_mock_object().apply()
assert exc.value.args[0]['changed']
def test_copy_idempotency(self):
''' Test create idempotency '''
set_module_args(self.mock_args())
with pytest.raises(AnsibleExitJson) as exc:
self.get_lun_copy_mock_object('destination_vserver').apply()
assert not exc.value.args[0]['changed']
# ==== file: test/units/modules/storage/netapp/test_na_ontap_lun_copy.py (repo: andmos/ansible, license: gpl-3.0) ====
"""Base UptimeRobot entity."""
from __future__ import annotations
from pyuptimerobot import UptimeRobotMonitor
from homeassistant.helpers.entity import DeviceInfo, EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import ATTR_TARGET, ATTRIBUTION, DOMAIN
class UptimeRobotEntity(CoordinatorEntity):
"""Base UptimeRobot entity."""
_attr_attribution = ATTRIBUTION
def __init__(
self,
coordinator: DataUpdateCoordinator,
description: EntityDescription,
monitor: UptimeRobotMonitor,
) -> None:
"""Initialize UptimeRobot entities."""
super().__init__(coordinator)
self.entity_description = description
self._monitor = monitor
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(self.monitor.id))},
name=self.monitor.friendly_name,
manufacturer="UptimeRobot Team",
entry_type="service",
model=self.monitor.type.name,
configuration_url=f"https://uptimerobot.com/dashboard#{self.monitor.id}",
)
self._attr_extra_state_attributes = {
ATTR_TARGET: self.monitor.url,
}
self._attr_unique_id = str(self.monitor.id)
@property
def _monitors(self) -> list[UptimeRobotMonitor]:
"""Return all monitors."""
return self.coordinator.data or []
@property
def monitor(self) -> UptimeRobotMonitor:
"""Return the monitor for this entity."""
return next(
(
monitor
for monitor in self._monitors
if str(monitor.id) == self.entity_description.key
),
self._monitor,
)
@property
def monitor_available(self) -> bool:
"""Returtn if the monitor is available."""
return bool(self.monitor.status == 2)
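# Illustration only (added; not from the integration): a platform entity
# would subclass UptimeRobotEntity and map monitor state onto its own
# interface -- e.g. a hypothetical binary sensor might look like:
#
#     class UptimeRobotBinarySensor(UptimeRobotEntity, BinarySensorEntity):
#         @property
#         def is_on(self) -> bool:
#             return self.monitor_available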
# ==== file: homeassistant/components/uptimerobot/entity.py (repo: aronsky/home-assistant, license: apache-2.0) ====
# coding: utf-8
""" General utilities. """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import collections
import sys
import logging
import multiprocessing
# Third-party
import numpy as np
__all__ = ['get_pool']
# Create logger
logger = logging.getLogger(__name__)
class SerialPool(object):
def close(self):
return
def map(self, *args, **kwargs):
return map(*args, **kwargs)
def get_pool(mpi=False, threads=None):
""" Get a pool object to pass to emcee for parallel processing.
If mpi is False and threads is not set, a serial pool is used.
Parameters
----------
mpi : bool
Use MPI or not. If specified, ignores the threads kwarg.
threads : int (optional)
If mpi is False and threads is specified, use a Python
multiprocessing pool with the specified number of threads.
"""
if mpi:
from emcee.utils import MPIPool
# Initialize the MPI pool
pool = MPIPool()
# Make sure the thread we're running on is the master
if not pool.is_master():
pool.wait()
sys.exit(0)
logger.debug("Running with MPI...")
elif threads is not None and threads > 1:
logger.debug("Running with multiprocessing on {} cores..."
.format(threads))
pool = multiprocessing.Pool(threads)
else:
logger.debug("Running serial...")
pool = SerialPool()
return pool
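# A minimal usage sketch (added; illustration only). With mpi=False and
# threads unset, get_pool() falls back to SerialPool, so the lambda below
# is safe; a multiprocessing pool would need a picklable, module-level
# worker function instead.
def _pool_example():
    pool = get_pool(mpi=False, threads=None)
    squares = list(pool.map(lambda x: x * x, range(8)))
    pool.close()
    return squares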
def gram_schmidt(y):
""" Modified Gram-Schmidt orthonormalization of the matrix y(n,n) """
n = y.shape[0]
if y.shape[1] != n:
raise ValueError("Invalid shape: {}".format(y.shape))
mo = np.zeros(n)
# Main loop
for i in range(n):
# Remove the projections of row i onto the already-processed rows
for j in range(i):
esc = np.sum(y[j]*y[i])
y[i] -= y[j]*esc
# Normalization
mo[i] = np.linalg.norm(y[i])
y[i] /= mo[i]
return mo
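# A quick self-check sketch (added; illustration only): after
# gram_schmidt, the rows of a generic square matrix should be orthonormal.
# Note that gram_schmidt modifies its argument in place and returns the
# pre-normalization norms.
def _gram_schmidt_example():
    y = np.random.random((3, 3))
    norms = gram_schmidt(y)
    assert np.allclose(y.dot(y.T), np.eye(3))
    return norms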
class use_backend(object):
def __init__(self, backend):
import matplotlib.pyplot as plt
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.pylabtools import backend2gui
self.shell = InteractiveShell.instance()
self.old_backend = backend2gui[str(plt.get_backend())]
self.new_backend = backend
def __enter__(self):
gui, backend = self.shell.enable_matplotlib(self.new_backend)
def __exit__(self, type, value, tb):
gui, backend = self.shell.enable_matplotlib(self.old_backend)
def inherit_docs(cls):
for name, func in vars(cls).items():
if not func.__doc__:
for parent in cls.__bases__:
try:
parfunc = getattr(parent, name)
except AttributeError: # parent doesn't have function
break
if parfunc and getattr(parfunc, '__doc__', None):
func.__doc__ = parfunc.__doc__
break
return cls
class ImmutableDict(collections.Mapping):
def __init__(self, somedict):
self._dict = dict(somedict) # make a copy
self._hash = None
def __getitem__(self, key):
return self._dict[key]
def __len__(self):
return len(self._dict)
def __iter__(self):
return iter(self._dict)
def __hash__(self):
if self._hash is None:
self._hash = hash(frozenset(self._dict.items()))
return self._hash
def __eq__(self, other):
return self._dict == other._dict
# ==== file: gary/util.py (repo: abonaca/gary, license: mit) ====
from statistics import median
import numpy as np
from PyQt5.QtCore import QRegExp, pyqtSlot, pyqtSignal
from PyQt5.QtGui import QRegExpValidator, QIcon
from PyQt5.QtWidgets import QWidget, QSpinBox, QLabel, QComboBox, QSlider
from urh import settings
from urh.dev import config
from urh.dev.BackendHandler import BackendHandler, Backends
from urh.dev.VirtualDevice import VirtualDevice
from urh.plugins.NetworkSDRInterface.NetworkSDRInterfacePlugin import NetworkSDRInterfacePlugin
from urh.plugins.PluginManager import PluginManager
from urh.ui.ui_send_recv_device_settings import Ui_FormDeviceSettings
from urh.util.ProjectManager import ProjectManager
class DeviceSettingsWidget(QWidget):
selected_device_changed = pyqtSignal()
gain_edited = pyqtSignal()
device_parameters_changed = pyqtSignal(dict)
def __init__(self, project_manager: ProjectManager, is_tx: bool, backend_handler: BackendHandler = None,
continuous_send_mode=False, parent=None):
super().__init__(parent)
self.ui = Ui_FormDeviceSettings()
self.ui.setupUi(self)
self.__device = None # type: VirtualDevice
self.is_tx = is_tx
self.is_rx = not is_tx
if backend_handler is None:
self.backend_handler = BackendHandler()
else:
self.backend_handler = backend_handler
if self.is_rx:
self.ui.spinBoxNRepeat.hide()
self.ui.labelNRepeat.hide()
else:
self.ui.labelDCCorrection.hide()
self.ui.checkBoxDCCorrection.hide()
self.bw_sr_are_locked = settings.read("lock_bandwidth_sample_rate", True, bool)
self.ui.cbDevice.clear()
items = self.get_devices_for_combobox(continuous_send_mode)
self.ui.cbDevice.addItems(items)
self.bootstrap(project_manager.device_conf, enforce_default=True)
self.ui.btnLockBWSR.setChecked(self.bw_sr_are_locked)
self.on_btn_lock_bw_sr_clicked()
ip_range = "(?:[0-1]?[0-9]?[0-9]|2[0-4][0-9]|25[0-5])"
ip_regex = QRegExp("^" + ip_range
+ "\\." + ip_range
+ "\\." + ip_range
+ "\\." + ip_range + "$")
self.ui.lineEditIP.setValidator(QRegExpValidator(ip_regex))
self.create_connects()
self.sync_gain_sliders()
def bootstrap(self, conf_dict: dict, enforce_default=False):
def set_val(ui_widget, key: str, default):
try:
value = conf_dict[key]
except KeyError:
value = default if enforce_default else None
if value is not None:
ui_widget.setValue(value)
self.set_bandwidth_status()
self.ui.cbDevice.setCurrentText(conf_dict.get("name", ""))
dev_name = self.ui.cbDevice.currentText()
self.set_device_ui_items_visibility(dev_name, overwrite_settings=True)
set_val(self.ui.spinBoxFreq, "frequency", config.DEFAULT_FREQUENCY)
set_val(self.ui.spinBoxSampleRate, "sample_rate", config.DEFAULT_SAMPLE_RATE)
set_val(self.ui.spinBoxBandwidth, "bandwidth", config.DEFAULT_BANDWIDTH)
set_val(self.ui.spinBoxGain, self.rx_tx_prefix + "gain", config.DEFAULT_GAIN)
set_val(self.ui.spinBoxIFGain, self.rx_tx_prefix + "if_gain", config.DEFAULT_IF_GAIN)
set_val(self.ui.spinBoxBasebandGain, self.rx_tx_prefix + "baseband_gain", config.DEFAULT_BB_GAIN)
set_val(self.ui.spinBoxFreqCorrection, "freq_correction", config.DEFAULT_FREQ_CORRECTION)
set_val(self.ui.spinBoxNRepeat, "num_sending_repeats", settings.read('num_sending_repeats', 1, type=int))
self.ui.lineEditSubdevice.setText(conf_dict.get("subdevice", ""))
if self.rx_tx_prefix + "antenna_index" in conf_dict:
self.ui.comboBoxAntenna.setCurrentIndex(conf_dict[self.rx_tx_prefix + "antenna_index"])
if self.rx_tx_prefix + "gain" not in conf_dict:
self.set_default_rf_gain()
if self.rx_tx_prefix + "if_gain" not in conf_dict:
self.set_default_if_gain()
if self.rx_tx_prefix + "baseband_gain" not in conf_dict:
self.set_default_bb_gain()
if self.is_rx:
checked = conf_dict.get("apply_dc_correction", True)
if isinstance(checked, str):
checked = True if checked == "True" else False
self.ui.checkBoxDCCorrection.setChecked(checked)
checked = conf_dict.get("bias_tee_enabled", False)
if isinstance(checked, str):
checked = True if checked == "True" else False
self.ui.checkBoxBiasTee.setChecked(checked)
self.emit_editing_finished_signals()
@property
def device(self) -> VirtualDevice:
return self.__device
@device.setter
def device(self, value: VirtualDevice):
self.__device = value
@property
def rx_tx_prefix(self) -> str:
return "rx_" if self.is_rx else "tx_"
@property
def selected_device_conf(self) -> dict:
device_name = self.ui.cbDevice.currentText()
key = device_name if device_name in config.DEVICE_CONFIG.keys() else "Fallback"
return config.DEVICE_CONFIG[key]
def create_connects(self):
self.ui.spinBoxFreq.editingFinished.connect(self.on_spinbox_frequency_editing_finished)
self.ui.spinBoxSampleRate.editingFinished.connect(self.on_spinbox_sample_rate_editing_finished)
self.ui.spinBoxGain.editingFinished.connect(self.on_spinbox_gain_editing_finished)
self.ui.spinBoxGain.valueChanged.connect(self.on_spinbox_gain_value_changed)
self.ui.sliderGain.valueChanged.connect(self.on_slider_gain_value_changed)
self.ui.spinBoxIFGain.editingFinished.connect(self.on_spinbox_if_gain_editing_finished)
self.ui.spinBoxIFGain.valueChanged.connect(self.on_spinbox_if_gain_value_changed)
self.ui.sliderIFGain.valueChanged.connect(self.on_slider_if_gain_value_changed)
self.ui.spinBoxBasebandGain.editingFinished.connect(self.on_spinbox_baseband_gain_editing_finished)
self.ui.spinBoxBasebandGain.valueChanged.connect(self.on_spinbox_baseband_gain_value_changed)
self.ui.sliderBasebandGain.valueChanged.connect(self.on_slider_baseband_gain_value_changed)
self.ui.spinBoxBandwidth.editingFinished.connect(self.on_spinbox_bandwidth_editing_finished)
self.ui.spinBoxPort.editingFinished.connect(self.on_spinbox_port_editing_finished)
self.ui.lineEditIP.editingFinished.connect(self.on_line_edit_ip_editing_finished)
self.ui.lineEditSubdevice.editingFinished.connect(self.on_line_edit_subdevice_editing_finished)
self.ui.comboBoxAntenna.currentIndexChanged.connect(self.on_combobox_antenna_current_index_changed)
self.ui.comboBoxChannel.currentIndexChanged.connect(self.on_combobox_channel_current_index_changed)
self.ui.spinBoxFreqCorrection.editingFinished.connect(self.on_spinbox_freq_correction_editing_finished)
self.ui.comboBoxDirectSampling.currentIndexChanged.connect(self.on_combobox_direct_sampling_index_changed)
self.ui.cbDevice.currentIndexChanged.connect(self.on_cb_device_current_index_changed)
self.ui.spinBoxNRepeat.editingFinished.connect(self.on_num_repeats_changed)
self.ui.btnLockBWSR.clicked.connect(self.on_btn_lock_bw_sr_clicked)
self.ui.btnRefreshDeviceIdentifier.clicked.connect(self.on_btn_refresh_device_identifier_clicked)
self.ui.comboBoxDeviceIdentifier.currentIndexChanged.connect(
self.on_combo_box_device_identifier_current_index_changed)
self.ui.comboBoxDeviceIdentifier.editTextChanged.connect(self.on_combo_box_device_identifier_edit_text_changed)
self.ui.checkBoxBiasTee.clicked.connect(self.on_check_box_bias_tee_clicked)
self.ui.checkBoxDCCorrection.clicked.connect(self.on_check_box_dc_correction_clicked)
def set_gain_defaults(self):
self.set_default_rf_gain()
self.set_default_if_gain()
self.set_default_bb_gain()
def set_default_rf_gain(self):
conf = self.selected_device_conf
prefix = self.rx_tx_prefix
if prefix + "rf_gain" in conf:
key = prefix + "rf_gain"
gain = conf[key][int(np.percentile(range(len(conf[key])), 25))]
self.ui.spinBoxGain.setValue(gain)
def set_default_if_gain(self):
conf = self.selected_device_conf
prefix = self.rx_tx_prefix
if prefix + "if_gain" in conf:
key = prefix + "if_gain"
if_gain = conf[key][int(median(range(len(conf[key]))))]
self.ui.spinBoxIFGain.setValue(if_gain)
def set_default_bb_gain(self):
conf = self.selected_device_conf
prefix = self.rx_tx_prefix
if prefix + "baseband_gain" in conf:
key = prefix + "baseband_gain"
baseband_gain = conf[key][int(np.percentile(list(range(len(conf[key]))), 25))]
self.ui.spinBoxBasebandGain.setValue(baseband_gain)
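# Note on the defaults above (added for clarity): for a gain list of
# length L, int(np.percentile(range(L), 25)) picks the index roughly a
# quarter of the way in and int(median(range(L))) the middle index, i.e.
# conservative RF/baseband defaults and a mid-range IF gain.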
def sync_gain_sliders(self):
self.ui.spinBoxGain.valueChanged.emit(self.ui.spinBoxGain.value())
self.ui.spinBoxIFGain.valueChanged.emit(self.ui.spinBoxIFGain.value())
self.ui.spinBoxBasebandGain.valueChanged.emit(self.ui.spinBoxBasebandGain.value())
def set_device_ui_items_visibility(self, device_name: str, overwrite_settings=True):
key = device_name if device_name in config.DEVICE_CONFIG.keys() else "Fallback"
conf = config.DEVICE_CONFIG[key]
key_ui_dev_param_map = {"center_freq": "Freq", "sample_rate": "SampleRate", "bandwidth": "Bandwidth"}
for key, ui_item in key_ui_dev_param_map.items():
spinbox = getattr(self.ui, "spinBox" + ui_item) # type: QSpinBox
label = getattr(self.ui, "label" + ui_item) # type: QLabel
if key in conf:
spinbox.setVisible(True)
label.setVisible(True)
if isinstance(conf[key], list):
spinbox.setMinimum(min(conf[key]))
spinbox.setMaximum(max(conf[key]))
spinbox.setSingleStep(conf[key][1] - conf[key][0])
spinbox.auto_update_step_size = False
if "default_" + key in conf:
spinbox.setValue(conf["default_" + key])
else:
spinbox.setMinimum(conf[key].start)
spinbox.setMaximum(conf[key].stop)
spinbox.auto_update_step_size = True
spinbox.adjust_step()
else:
spinbox.setVisible(False)
label.setVisible(False)
self.ui.btnLockBWSR.setVisible("sample_rate" in conf and "bandwidth" in conf)
if self.device is not None:
self.ui.labelSubdevice.setVisible(self.device.subdevice is not None)
self.ui.lineEditSubdevice.setVisible(self.device.subdevice is not None)
if "freq_correction" in conf:
self.ui.labelFreqCorrection.setVisible(True)
self.ui.spinBoxFreqCorrection.setVisible(True)
self.ui.spinBoxFreqCorrection.setMinimum(conf["freq_correction"].start)
self.ui.spinBoxFreqCorrection.setMaximum(conf["freq_correction"].stop)
self.ui.spinBoxFreqCorrection.setSingleStep(conf["freq_correction"].step)
else:
self.ui.labelFreqCorrection.setVisible(False)
self.ui.spinBoxFreqCorrection.setVisible(False)
if "direct_sampling" in conf:
self.ui.labelDirectSampling.setVisible(True)
self.ui.comboBoxDirectSampling.setVisible(True)
items = [self.ui.comboBoxDirectSampling.itemText(i) for i in range(self.ui.comboBoxDirectSampling.count())]
if items != conf["direct_sampling"]:
self.ui.comboBoxDirectSampling.clear()
self.ui.comboBoxDirectSampling.addItems(conf["direct_sampling"])
else:
self.ui.labelDirectSampling.setVisible(False)
self.ui.comboBoxDirectSampling.setVisible(False)
prefix = self.rx_tx_prefix
key_ui_gain_map = {prefix + "rf_gain": "Gain", prefix + "if_gain": "IFGain",
prefix + "baseband_gain": "BasebandGain"}
for conf_key, ui_element in key_ui_gain_map.items():
getattr(self.ui, "label" + ui_element).setVisible(conf_key in conf)
spinbox = getattr(self.ui, "spinBox" + ui_element) # type: QSpinBox
slider = getattr(self.ui, "slider" + ui_element) # type: QSlider
if conf_key in conf:
gain_values = conf[conf_key]
assert len(gain_values) >= 2
spinbox.setMinimum(gain_values[0])
spinbox.setMaximum(gain_values[-1])
if overwrite_settings:
spinbox.setValue(gain_values[len(gain_values) // 2])
spinbox.setSingleStep(gain_values[1] - gain_values[0])
spinbox.setVisible(True)
slider.setMaximum(len(gain_values) - 1)
else:
spinbox.setVisible(False)
slider.setVisible(False)
getattr(self.ui, "slider" + ui_element).setVisible(conf_key in conf)
if overwrite_settings:
key_ui_channel_ant_map = {prefix + "antenna": "Antenna", prefix + "channel": "Channel"}
for conf_key, ui_element in key_ui_channel_ant_map.items():
getattr(self.ui, "label" + ui_element).setVisible(conf_key in conf)
combobox = getattr(self.ui, "comboBox" + ui_element) # type: QComboBox
if conf_key in conf:
combobox.clear()
combobox.addItems(conf[conf_key])
if conf_key + "_default_index" in conf:
combobox.setCurrentIndex(conf[conf_key + "_default_index"])
combobox.setVisible(True)
else:
combobox.setVisible(False)
multi_dev_support = hasattr(self.device, "has_multi_device_support") and self.device.has_multi_device_support
self.ui.labelDeviceIdentifier.setVisible(multi_dev_support)
self.ui.btnRefreshDeviceIdentifier.setVisible(multi_dev_support)
self.ui.comboBoxDeviceIdentifier.setVisible(multi_dev_support)
self.ui.lineEditIP.setVisible("ip" in conf)
self.ui.labelIP.setVisible("ip" in conf)
self.ui.spinBoxPort.setVisible("port" in conf)
self.ui.labelPort.setVisible("port" in conf)
show_dc_correction = self.is_rx and self.device is not None and self.device.apply_dc_correction is not None
self.ui.checkBoxDCCorrection.setVisible(show_dc_correction)
self.ui.labelDCCorrection.setVisible(show_dc_correction)
show_bias_tee = "bias_tee_enabled" in conf and self.device is not None and self.device.bias_tee_enabled is not None
self.ui.labelBiasTee.setVisible(show_bias_tee)
self.ui.checkBoxBiasTee.setVisible(show_bias_tee)
def get_devices_for_combobox(self, continuous_send_mode):
items = []
for device_name in self.backend_handler.DEVICE_NAMES:
dev = self.backend_handler.device_backends[device_name.lower()]
if self.is_tx and dev.is_enabled and dev.supports_tx:
if not continuous_send_mode:
items.append(device_name)
elif dev.selected_backend != Backends.grc:
items.append(device_name)
elif self.is_rx and dev.is_enabled and dev.supports_rx:
items.append(device_name)
if PluginManager().is_plugin_enabled("NetworkSDRInterface"):
items.append(NetworkSDRInterfacePlugin.NETWORK_SDR_NAME)
return items
def set_bandwidth_status(self):
if hasattr(self, "device") and self.device is not None and self.device.backend != Backends.none:
self.ui.spinBoxBandwidth.setEnabled(self.device.bandwidth_is_adjustable)
self.ui.btnLockBWSR.setEnabled(self.device.bandwidth_is_adjustable)
if not self.device.bandwidth_is_adjustable:
self.bw_sr_are_locked = False
self.ui.spinBoxBandwidth.setToolTip(self.tr("Your driver of RTL-SDR does not support "
"setting the bandwidth. "
"If you need this feature, install a recent version."))
else:
self.ui.spinBoxBandwidth.setToolTip("")
self.bw_sr_are_locked = self.ui.btnLockBWSR.isChecked()
def emit_editing_finished_signals(self):
self.ui.spinBoxFreq.editingFinished.emit()
self.ui.spinBoxBandwidth.editingFinished.emit()
self.ui.spinBoxGain.editingFinished.emit()
self.ui.spinBoxIFGain.editingFinished.emit()
self.ui.spinBoxBasebandGain.editingFinished.emit()
self.ui.spinBoxNRepeat.editingFinished.emit()
self.ui.spinBoxSampleRate.editingFinished.emit()
self.ui.spinBoxFreqCorrection.editingFinished.emit()
self.ui.lineEditIP.editingFinished.emit()
self.ui.lineEditSubdevice.editingFinished.emit()
self.ui.spinBoxPort.editingFinished.emit()
self.ui.comboBoxAntenna.currentIndexChanged.emit(self.ui.comboBoxAntenna.currentIndex())
self.ui.comboBoxChannel.currentIndexChanged.emit(self.ui.comboBoxChannel.currentIndex())
self.ui.checkBoxDCCorrection.clicked.emit(self.ui.checkBoxDCCorrection.isChecked())
self.ui.checkBoxBiasTee.clicked.emit(self.ui.checkBoxBiasTee.isChecked())
def emit_device_parameters_changed(self):
settings = {"name": str(self.device.name)}
for attrib in ("frequency", "sample_rate", "bandwidth", "gain", "if_gain", "baseband_gain", "freq_correction",
"antenna_index", "num_sending_repeats", "apply_dc_correction", "subdevice", "bias_tee_enabled"):
try:
value = getattr(self.device, attrib, None)
if value is not None:
if "gain" in attrib or attrib == "antenna_index":
attrib = self.rx_tx_prefix + attrib
settings[attrib] = value
except (ValueError, AttributeError):
continue
self.device_parameters_changed.emit(settings)
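    # Gain-type keys and antenna_index are stored with the rx/tx prefix above,
    # so receive and transmit settings of a project do not overwrite each other.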
@pyqtSlot()
def on_btn_lock_bw_sr_clicked(self):
self.bw_sr_are_locked = self.ui.btnLockBWSR.isChecked()
settings.write("lock_bandwidth_sample_rate", self.bw_sr_are_locked)
if self.bw_sr_are_locked:
self.ui.btnLockBWSR.setIcon(QIcon(":/icons/icons/lock.svg"))
self.ui.spinBoxBandwidth.setValue(self.ui.spinBoxSampleRate.value())
self.ui.spinBoxBandwidth.editingFinished.emit()
else:
self.ui.btnLockBWSR.setIcon(QIcon(":/icons/icons/unlock.svg"))
@pyqtSlot()
def on_spinbox_sample_rate_editing_finished(self):
self.device.sample_rate = self.ui.spinBoxSampleRate.value()
if self.bw_sr_are_locked:
self.ui.spinBoxBandwidth.setValue(self.ui.spinBoxSampleRate.value())
self.device.bandwidth = self.ui.spinBoxBandwidth.value()
@pyqtSlot()
def on_spinbox_frequency_editing_finished(self):
self.device.frequency = self.ui.spinBoxFreq.value()
@pyqtSlot()
def on_spinbox_bandwidth_editing_finished(self):
self.device.bandwidth = self.ui.spinBoxBandwidth.value()
if self.bw_sr_are_locked:
self.ui.spinBoxSampleRate.setValue(self.ui.spinBoxBandwidth.value())
self.device.sample_rate = self.ui.spinBoxSampleRate.value()
@pyqtSlot()
def on_line_edit_ip_editing_finished(self):
self.device.ip = self.ui.lineEditIP.text()
@pyqtSlot()
def on_line_edit_subdevice_editing_finished(self):
self.device.subdevice = self.ui.lineEditSubdevice.text()
@pyqtSlot()
def on_spinbox_port_editing_finished(self):
self.device.port = self.ui.spinBoxPort.value()
@pyqtSlot(int)
def on_combobox_antenna_current_index_changed(self, index: int):
self.device.antenna_index = index
@pyqtSlot(int)
def on_combobox_channel_current_index_changed(self, index: int):
self.device.channel_index = index
@pyqtSlot()
def on_spinbox_freq_correction_editing_finished(self):
self.device.freq_correction = self.ui.spinBoxFreqCorrection.value()
@pyqtSlot(int)
def on_combobox_direct_sampling_index_changed(self, index: int):
self.device.direct_sampling_mode = index
@pyqtSlot()
def on_spinbox_gain_editing_finished(self):
self.device.gain = self.ui.spinBoxGain.value()
@pyqtSlot(int)
def on_spinbox_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
try:
self.ui.sliderGain.setValue(dev_conf[self.rx_tx_prefix + "rf_gain"].index(value))
except (ValueError, KeyError):
pass
@pyqtSlot(int)
def on_slider_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
self.ui.spinBoxGain.setValue(dev_conf[self.rx_tx_prefix + "rf_gain"][value])
@pyqtSlot()
def on_spinbox_if_gain_editing_finished(self):
self.device.if_gain = self.ui.spinBoxIFGain.value()
@pyqtSlot(int)
def on_slider_if_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
self.ui.spinBoxIFGain.setValue(dev_conf[self.rx_tx_prefix + "if_gain"][value])
@pyqtSlot(int)
def on_spinbox_if_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
try:
self.ui.sliderIFGain.setValue(dev_conf[self.rx_tx_prefix + "if_gain"].index(value))
except (ValueError, KeyError):
pass
@pyqtSlot()
def on_num_repeats_changed(self):
self.device.num_sending_repeats = self.ui.spinBoxNRepeat.value()
@pyqtSlot()
def on_spinbox_baseband_gain_editing_finished(self):
self.device.baseband_gain = self.ui.spinBoxBasebandGain.value()
@pyqtSlot(int)
def on_slider_baseband_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
self.ui.spinBoxBasebandGain.setValue(dev_conf[self.rx_tx_prefix + "baseband_gain"][value])
@pyqtSlot(int)
def on_spinbox_baseband_gain_value_changed(self, value: int):
dev_conf = self.selected_device_conf
try:
self.ui.sliderBasebandGain.setValue(dev_conf[self.rx_tx_prefix + "baseband_gain"].index(value))
except (ValueError, KeyError):
pass
def update_for_new_device(self, overwrite_settings=True):
if self.device is not None:
self.device.free_data()
        # emitting selected_device_changed triggers init_device in the dialogs that own this widget
self.selected_device_changed.emit()
dev_name = self.ui.cbDevice.currentText()
self.set_device_ui_items_visibility(dev_name, overwrite_settings=overwrite_settings)
if overwrite_settings:
self.set_gain_defaults()
self.sync_gain_sliders()
self.set_bandwidth_status()
self.ui.comboBoxDeviceIdentifier.clear()
@pyqtSlot()
def on_cb_device_current_index_changed(self):
self.update_for_new_device(overwrite_settings=True)
@pyqtSlot()
def on_btn_refresh_device_identifier_clicked(self):
if self.device is None:
return
self.ui.comboBoxDeviceIdentifier.clear()
self.ui.comboBoxDeviceIdentifier.addItems(self.device.get_device_list())
@pyqtSlot(bool)
def on_check_box_bias_tee_clicked(self, checked: bool):
if self.device is not None:
self.device.bias_tee_enabled = bool(checked)
@pyqtSlot(bool)
    def on_check_box_dc_correction_clicked(self, checked: bool):
        if self.device is not None:
            self.device.apply_dc_correction = bool(checked)
@pyqtSlot()
def on_combo_box_device_identifier_current_index_changed(self):
if self.device is not None:
self.device.device_serial = self.ui.comboBoxDeviceIdentifier.currentText()
self.device.device_number = self.ui.comboBoxDeviceIdentifier.currentIndex()
@pyqtSlot(str)
def on_combo_box_device_identifier_edit_text_changed(self, new_text: str):
self.device.device_serial = new_text
if __name__ == '__main__':
from PyQt5.QtWidgets import QApplication
from urh.controller.MainController import MainController
app = QApplication([])
mc = MainController()
widget = DeviceSettingsWidget(mc.project_manager, is_tx=False)
widget.show()
app.exec_()
| jopohl/urh | src/urh/controller/widgets/DeviceSettingsWidget.py | Python | gpl-3.0 | 24,781 | 0.00339 |
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Sandeep Sharma <sanshar@gmail.com>
# James Smith <james.smith9113@gmail.com>
#
"""
All output is deleted after the run to keep the directory neat. Comment out the
cleanup section to view output.
"""
import time
import numpy
import math
import os
from pyscf import gto, scf, ao2mo, mcscf, tools, fci
from pyscf.shciscf import shci, settings
t0 = time.time()
alpha = 0.007297351  # fine-structure constant (not used below)
mol = gto.M(
atom="C 0 0 0; C 0 0 1.3119", basis="cc-pvqz", verbose=5, symmetry=1, spin=2
)
myhf = scf.RHF(mol)
myhf.kernel()
# Use SHCISCF
solver1 = shci.SHCI(mol)
solver1.irrep_nelec = {"A1g": (2, 1), "A1u": (1, 1), "E1ux": (1, 1), "E1uy": (1, 0)}
solver1.prefix = "solver1"
solver1.epsilon2 = 1.0e-7
solver1.stochastic = False
solver2 = shci.SHCI(mol)
solver2.irrep_nelec = {"A1g": (2, 1), "A1u": (1, 1), "E1ux": (1, 0), "E1uy": (1, 1)}
solver2.prefix = "solver2"
solver2.epsilon2 = 1.0e-7
solver2.stochastic = False
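# The two solvers occupy different E1u components (E1ux vs. E1uy) and are
# averaged with equal weights (numpy.ones(2) / 2 == [0.5, 0.5]) below, so the
# CASSCF orbitals are optimized for both degenerate symmetry sectors at once.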
mycas = shci.SHCISCF(myhf, 8, 8)
mcscf.state_average_mix_(mycas, [solver1, solver2], numpy.ones(2) / 2)
mycas.kernel()
print("Total Time: ", time.time() - t0)
# File cleanup: each solver writes scratch files under its own prefix
solver1.cleanup_dice_files()
solver2.cleanup_dice_files()
| gkc1000/pyscf | pyscf/shciscf/examples/03_c2_diffsymm.py | Python | apache-2.0 | 1,773 | 0.002256 |
# #
# Copyright 2013-2014 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Toy build unit test
@author: Kenneth Hoste (Ghent University)
"""
import glob
import grp
import os
import re
import shutil
import stat
import sys
import tempfile
from test.framework.utilities import EnhancedTestCase
from unittest import TestLoader
from unittest import main as unittestmain
from vsc.utils.fancylogger import setLogLevelDebug, logToScreen
import easybuild.tools.module_naming_scheme # required to dynamically load test module naming scheme(s)
from easybuild.framework.easyconfig.easyconfig import EasyConfig
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir, read_file, write_file
from easybuild.tools.modules import modules_tool
class ToyBuildTest(EnhancedTestCase):
"""Toy build unit test."""
def setUp(self):
"""Test setup."""
super(ToyBuildTest, self).setUp()
fd, self.dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
# adjust PYTHONPATH such that test easyblocks are found
import easybuild
eb_blocks_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'sandbox'))
if not eb_blocks_path in sys.path:
sys.path.append(eb_blocks_path)
easybuild = reload(easybuild)
import easybuild.easyblocks
reload(easybuild.easyblocks)
reload(easybuild.tools.module_naming_scheme)
# clear log
write_file(self.logfile, '')
def tearDown(self):
"""Cleanup."""
super(ToyBuildTest, self).tearDown()
# remove logs
if os.path.exists(self.dummylogfn):
os.remove(self.dummylogfn)
def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versionsuffix=''):
"""Check whether toy build succeeded."""
full_version = ''.join([versionprefix, version, versionsuffix])
# check for success
success = re.compile("COMPLETED: Installation ended successfully")
        self.assertTrue(success.search(outtxt), "COMPLETED message found in '%s'" % outtxt)
# if the module exists, it should be fine
toy_module = os.path.join(installpath, 'modules', 'all', 'toy', full_version)
msg = "module for toy build toy/%s found (path %s)" % (full_version, toy_module)
self.assertTrue(os.path.exists(toy_module), msg)
# module file is symlinked according to moduleclass
toy_module_symlink = os.path.join(installpath, 'modules', 'tools', 'toy', full_version)
self.assertTrue(os.path.islink(toy_module_symlink))
self.assertTrue(os.path.exists(toy_module_symlink))
# make sure installation log file and easyconfig file are copied to install dir
software_path = os.path.join(installpath, 'software', 'toy', full_version)
install_log_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*.log' % version)
self.assertTrue(len(glob.glob(install_log_path_pattern)) == 1, "Found 1 file at %s" % install_log_path_pattern)
# make sure test report is available
test_report_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*test_report.md' % version)
self.assertTrue(len(glob.glob(test_report_path_pattern)) == 1, "Found 1 file at %s" % test_report_path_pattern)
ec_file_path = os.path.join(software_path, 'easybuild', 'toy-%s.eb' % full_version)
self.assertTrue(os.path.exists(ec_file_path))
devel_module_path = os.path.join(software_path, 'easybuild', 'toy-%s-easybuild-devel' % full_version)
self.assertTrue(os.path.exists(devel_module_path))
def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True, fails=False, verbose=True,
raise_error=False, test_report=None, versionsuffix=''):
"""Perform a toy build."""
if extra_args is None:
extra_args = []
test_readme = False
if ec_file is None:
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
test_readme = True
full_ver = '0.0%s' % versionsuffix
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
]
if tmpdir is not None:
args.append('--tmpdir=%s' % tmpdir)
if test_report is not None:
args.append('--dump-test-report=%s' % test_report)
args.extend(extra_args)
myerr = None
try:
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=verbose,
raise_error=raise_error)
except Exception, err:
myerr = err
if raise_error:
raise myerr
if verify:
self.check_toy(self.test_installpath, outtxt, versionsuffix=versionsuffix)
if test_readme:
# make sure postinstallcmds were used
toy_install_path = os.path.join(self.test_installpath, 'software', 'toy', full_ver)
self.assertEqual(read_file(os.path.join(toy_install_path, 'README')), "TOY\n")
# make sure full test report was dumped, and contains sensible information
if test_report is not None:
self.assertTrue(os.path.exists(test_report))
if fails:
test_result = 'FAIL'
else:
test_result = 'SUCCESS'
regex_patterns = [
r"Test result[\S\s]*Build succeeded for %d out of 1" % (not fails),
r"Overview of tested easyconfig[\S\s]*%s[\S\s]*%s" % (test_result, os.path.basename(ec_file)),
r"Time info[\S\s]*start:[\S\s]*end:",
r"EasyBuild info[\S\s]*framework version:[\S\s]*easyblocks ver[\S\s]*command line[\S\s]*configuration",
r"System info[\S\s]*cpu model[\S\s]*os name[\S\s]*os version[\S\s]*python version",
r"List of loaded modules",
r"Environment",
]
test_report_txt = read_file(test_report)
for regex_pattern in regex_patterns:
regex = re.compile(regex_pattern, re.M)
msg = "Pattern %s found in full test report: %s" % (regex.pattern, test_report_txt)
self.assertTrue(regex.search(test_report_txt), msg)
return outtxt
def test_toy_broken(self):
"""Test deliberately broken toy build."""
tmpdir = tempfile.mkdtemp()
broken_toy_ec = os.path.join(tmpdir, "toy-broken.eb")
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
broken_toy_ec_txt = read_file(toy_ec_file)
broken_toy_ec_txt += "checksums = ['clearywrongchecksum']"
write_file(broken_toy_ec, broken_toy_ec_txt)
error_regex = "Checksum verification .* failed"
self.assertErrorRegex(EasyBuildError, error_regex, self.test_toy_build, ec_file=broken_toy_ec, tmpdir=tmpdir,
verify=False, fails=True, verbose=False, raise_error=True)
# make sure log file is retained, also for failed build
log_path_pattern = os.path.join(tmpdir, 'easybuild-*', 'easybuild-toy-0.0*.log')
self.assertTrue(len(glob.glob(log_path_pattern)) == 1, "Log file found at %s" % log_path_pattern)
# make sure individual test report is retained, also for failed build
test_report_fp_pattern = os.path.join(tmpdir, 'easybuild-*', 'easybuild-toy-0.0*test_report.md')
self.assertTrue(len(glob.glob(test_report_fp_pattern)) == 1, "Test report %s found" % test_report_fp_pattern)
# test dumping full test report (doesn't raise an exception)
test_report_fp = os.path.join(self.test_buildpath, 'full_test_report.md')
self.test_toy_build(ec_file=broken_toy_ec, tmpdir=tmpdir, verify=False, fails=True, verbose=False,
raise_error=True, test_report=test_report_fp)
# cleanup
shutil.rmtree(tmpdir)
def test_toy_tweaked(self):
"""Test toy build with tweaked easyconfig, for testing extra easyconfig parameters."""
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
ec_file = os.path.join(self.test_buildpath, 'toy-0.0-tweaked.eb')
shutil.copy2(os.path.join(test_ecs_dir, 'toy-0.0.eb'), ec_file)
# tweak easyconfig by appending to it
ec_extra = '\n'.join([
"versionsuffix = '-tweaked'",
"modextrapaths = {'SOMEPATH': ['foo/bar', 'baz']}",
"modextravars = {'FOO': 'bar'}",
"modloadmsg = 'THANKS FOR LOADING ME, I AM %(name)s v%(version)s'",
"modtclfooter = 'puts stderr \"oh hai!\"'",
])
write_file(ec_file, ec_extra, append=True)
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--force',
]
outtxt = self.eb_main(args, do_build=True, verbose=True, raise_error=True)
self.check_toy(self.test_installpath, outtxt, versionsuffix='-tweaked')
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-tweaked')
toy_module_txt = read_file(toy_module)
self.assertTrue(re.search('setenv\s*FOO\s*"bar"', toy_module_txt))
self.assertTrue(re.search('prepend-path\s*SOMEPATH\s*\$root/foo/bar', toy_module_txt))
self.assertTrue(re.search('prepend-path\s*SOMEPATH\s*\$root/baz', toy_module_txt))
self.assertTrue(re.search('module-info mode load.*\n\s*puts stderr\s*.*I AM toy v0.0', toy_module_txt))
self.assertTrue(re.search('puts stderr "oh hai!"', toy_module_txt))
def test_toy_buggy_easyblock(self):
"""Test build using a buggy/broken easyblock, make sure a traceback is reported."""
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
kwargs = {
'ec_file': ec_file,
'extra_args': ['--easyblock=EB_toy_buggy'],
'raise_error': True,
'verify': False,
'verbose': False,
}
err_regex = r"crashed with an error.*Traceback[\S\s]*toy_buggy.py.*build_step[\S\s]*global name 'run_cmd'"
self.assertErrorRegex(EasyBuildError, err_regex, self.test_toy_build, **kwargs)
def test_toy_build_formatv2(self):
"""Perform a toy build (format v2)."""
# set $MODULEPATH such that modules for specified dependencies are found
modulepath = os.environ.get('MODULEPATH')
os.environ['MODULEPATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), 'modules'))
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=0.0',
'--toolchain=dummy,dummy',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(self.test_installpath, outtxt)
# restore
if modulepath is not None:
os.environ['MODULEPATH'] = modulepath
else:
del os.environ['MODULEPATH']
def test_toy_build_with_blocks(self):
"""Test a toy build with multiple blocks."""
orig_sys_path = sys.path[:]
        # add directory in which easyconfig file can be found to Python search path, since we're not specifying its full path below
tmpdir = tempfile.mkdtemp()
# note get_paths_for expects easybuild/easyconfigs subdir
ecs_path = os.path.join(tmpdir, "easybuild", "easyconfigs")
os.makedirs(ecs_path)
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0-multiple.eb'), ecs_path)
sys.path.append(tmpdir)
args = [
'toy-0.0-multiple.eb',
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
for toy_prefix, toy_version, toy_suffix in [
('', '0.0', '-somesuffix'),
('someprefix-', '0.0', '-somesuffix')
]:
self.check_toy(self.test_installpath, outtxt, version=toy_version,
versionprefix=toy_prefix, versionsuffix=toy_suffix)
# cleanup
shutil.rmtree(tmpdir)
sys.path = orig_sys_path
def test_toy_build_formatv2_sections(self):
"""Perform a toy build (format v2, using sections)."""
versions = {
'0.0': {'versionprefix': '', 'versionsuffix': ''},
'1.0': {'versionprefix': '', 'versionsuffix': ''},
'1.1': {'versionprefix': 'stable-', 'versionsuffix': ''},
'1.5': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'1.6': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'2.0': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'3.0': {'versionprefix': 'stable-', 'versionsuffix': '-mature'},
}
for version, specs in versions.items():
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy-with-sections.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=%s' % version,
'--toolchain=dummy,dummy',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
specs['version'] = version
self.check_toy(self.test_installpath, outtxt, **specs)
def test_toy_download_sources(self):
"""Test toy build with sources that still need to be 'downloaded'."""
tmpdir = tempfile.mkdtemp()
# copy toy easyconfig file, and append source_urls to it
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'), tmpdir)
source_url = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'sandbox', 'sources', 'toy')
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, '\nsource_urls = ["file://%s"]\n' % source_url, append=True)
# unset $EASYBUILD_XPATH env vars, to make sure --prefix is picked up
for cfg_opt in ['build', 'install', 'source']:
del os.environ['EASYBUILD_%sPATH' % cfg_opt.upper()]
sourcepath = os.path.join(tmpdir, 'mysources')
args = [
ec_file,
'--prefix=%s' % tmpdir,
'--sourcepath=%s' % ':'.join([sourcepath, '/bar']), # include senseless path which should be ignored
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(tmpdir, outtxt)
self.assertTrue(os.path.exists(os.path.join(sourcepath, 't', 'toy', 'toy-0.0.tar.gz')))
shutil.rmtree(tmpdir)
def test_toy_permissions(self):
"""Test toy build with custom umask settings."""
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
args = [
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
        # set umask explicitly, so the expected permissions below do not depend on the caller's umask
orig_umask = os.umask(0022)
# test specifying a non-existing group
allargs = [toy_ec_file] + args + ['--group=thisgroupdoesnotexist']
outtxt, err = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, return_error=True)
err_regex = re.compile("Failed to get group ID .* group does not exist")
self.assertTrue(err_regex.search(outtxt), "Pattern '%s' found in '%s'" % (err_regex.pattern, outtxt))
# determine current group name (at least we can use that)
gid = os.getgid()
curr_grp = grp.getgrgid(gid).gr_name
for umask, cfg_group, ec_group, dir_perms, fil_perms, bin_perms in [
(None, None, None, 0755, 0644, 0755), # default: inherit session umask
(None, None, curr_grp, 0750, 0640, 0750), # default umask, but with specified group in ec
(None, curr_grp, None, 0750, 0640, 0750), # default umask, but with specified group in cfg
(None, 'notagrp', curr_grp, 0750, 0640, 0750), # default umask, but with specified group in both cfg and ec
('000', None, None, 0777, 0666, 0777), # stupid empty umask
('032', None, None, 0745, 0644, 0745), # no write/execute for group, no write for other
('030', None, curr_grp, 0740, 0640, 0740), # no write for group, with specified group
('077', None, None, 0700, 0600, 0700), # no access for other/group
]:
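            # For rows without a group, the expected modes follow plain umask
            # arithmetic on fresh files/dirs, e.g. umask '032': 0777 & ~032 ->
            # 0745 for dirs, 0666 & ~032 -> 0644 for files; rows with a group
            # additionally drop all 'other' access (0750/0640 style).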
if cfg_group is None and ec_group is None:
allargs = [toy_ec_file]
elif ec_group is not None:
shutil.copy2(toy_ec_file, self.test_buildpath)
tmp_ec_file = os.path.join(self.test_buildpath, os.path.basename(toy_ec_file))
write_file(tmp_ec_file, "\ngroup = '%s'" % ec_group, append=True)
allargs = [tmp_ec_file]
allargs.extend(args)
if umask is not None:
allargs.append("--umask=%s" % umask)
if cfg_group is not None:
allargs.append("--group=%s" % cfg_group)
outtxt = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, verbose=True)
# verify that installation was correct
self.check_toy(self.test_installpath, outtxt)
# group specified in easyconfig overrules configured group
group = cfg_group
if ec_group is not None:
group = ec_group
# verify permissions
paths_perms = [
# no write permissions for group/other, regardless of umask
(('software', 'toy', '0.0'), dir_perms & ~ 0022),
(('software', 'toy', '0.0', 'bin'), dir_perms & ~ 0022),
(('software', 'toy', '0.0', 'bin', 'toy'), bin_perms & ~ 0022),
]
# only software subdirs are chmod'ed for 'protected' installs, so don't check those if a group is specified
if group is None:
paths_perms.extend([
(('software', ), dir_perms),
(('software', 'toy'), dir_perms),
(('software', 'toy', '0.0', 'easybuild', '*.log'), fil_perms),
(('modules', ), dir_perms),
(('modules', 'all'), dir_perms),
(('modules', 'all', 'toy'), dir_perms),
(('modules', 'all', 'toy', '0.0'), fil_perms),
])
for path, correct_perms in paths_perms:
fullpath = glob.glob(os.path.join(self.test_installpath, *path))[0]
perms = os.stat(fullpath).st_mode & 0777
msg = "Path %s has %s permissions: %s" % (fullpath, oct(correct_perms), oct(perms))
self.assertEqual(perms, correct_perms, msg)
if group is not None:
path_gid = os.stat(fullpath).st_gid
self.assertEqual(path_gid, grp.getgrnam(group).gr_gid)
# cleanup for next iteration
shutil.rmtree(self.test_installpath)
# restore original umask
os.umask(orig_umask)
def test_toy_gid_sticky_bits(self):
"""Test setting gid and sticky bits."""
subdirs = [
(('',), False),
(('software',), False),
(('software', 'toy'), False),
(('software', 'toy', '0.0'), True),
(('modules', 'all'), False),
(('modules', 'all', 'toy'), False),
]
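        # On directories, S_ISGID makes new entries inherit the directory's
        # group and S_ISVTX (sticky) limits deletion to the file owner; per the
        # table above, only the per-install software subdir is expected to get
        # the bits when rebuilding over an existing tree.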
# no gid/sticky bits by default
self.test_toy_build()
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
        # gid/sticky bits are set, but only on (re)created directories
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, bits_set in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
if bits_set:
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
else:
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
# start with a clean slate, now gid/sticky bits should be set on everything
shutil.rmtree(self.test_installpath)
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
def test_allow_system_deps(self):
"""Test allow_system_deps easyconfig parameter."""
tmpdir = tempfile.mkdtemp()
# copy toy easyconfig file, and append source_urls to it
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'), tmpdir)
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, "\nallow_system_deps = [('Python', SYS_PYTHON_VERSION)]\n", append=True)
self.test_toy_build(ec_file=ec_file)
shutil.rmtree(tmpdir)
def test_toy_hierarchical(self):
"""Test toy build under example hierarchical module naming scheme."""
self.setup_hierarchical_modules()
mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs'),
'--module-naming-scheme=HierarchicalMNS',
]
# test module paths/contents with gompi build
extra_args = [
'--try-toolchain=goolf,1.4.10',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'OpenMPI', '1.6.4', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# check that toolchain load is expanded to loads for toolchain dependencies,
# except for the ones that extend $MODULEPATH to make the toy module available
modtxt = read_file(toy_module_path)
for dep in ['goolf', 'GCC', 'OpenMPI']:
load_regex = re.compile("load %s" % dep)
self.assertFalse(load_regex.search(modtxt), "Pattern '%s' not found in %s" % (load_regex.pattern, modtxt))
for dep in ['OpenBLAS', 'FFTW', 'ScaLAPACK']:
load_regex = re.compile("load %s" % dep)
self.assertTrue(load_regex.search(modtxt), "Pattern '%s' found in %s" % (load_regex.pattern, modtxt))
os.remove(toy_module_path)
# test module path with GCC/4.7.2 build
extra_args = [
'--try-toolchain=GCC,4.7.2',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with GCC/4.7.2 build, pretend to be an MPI lib by setting moduleclass
extra_args = [
'--try-toolchain=GCC,4.7.2',
'--try-amend=moduleclass=mpi',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# 'module use' statements to extend $MODULEPATH are present
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# ... unless they shouldn't be
extra_args.append('--try-amend=include_modpath_extensions=') # pass empty string as equivalent to False
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'toy', '0.0')
self.assertFalse(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# test module path with dummy/dummy build
extra_args = [
'--try-toolchain=dummy,dummy',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with dummy/dummy build, pretend to be a compiler by setting moduleclass
extra_args = [
'--try-toolchain=dummy,dummy',
'--try-amend=moduleclass=compiler',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'Compiler', 'toy', '0.0')
self.assertTrue(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# building a toolchain module should also work
args = ['gompi-1.4.10.eb'] + args[1:]
modules_tool().purge()
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
def test_toy_advanced(self):
"""Test toy build with extensions and non-dummy toolchain."""
test_dir = os.path.abspath(os.path.dirname(__file__))
os.environ['MODULEPATH'] = os.path.join(test_dir, 'modules')
test_ec = os.path.join(test_dir, 'easyconfigs', 'toy-0.0-gompi-1.3.12.eb')
self.test_toy_build(ec_file=test_ec, versionsuffix='-gompi-1.3.12')
def test_toy_hidden(self):
"""Test installing a hidden module."""
ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'toy-0.0.eb')
self.test_toy_build(ec_file=ec_file, extra_args=['--hidden'], verify=False)
# module file is hidden
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '.0.0')
self.assertTrue(os.path.exists(toy_module), 'Found hidden module %s' % toy_module)
# installed software is not hidden
toybin = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')
self.assertTrue(os.path.exists(toybin))
def test_module_filepath_tweaking(self):
"""Test using --suffix-modules-path."""
# install test module naming scheme dynamically
test_mns_parent_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
sys.path.append(test_mns_parent_dir)
reload(easybuild)
reload(easybuild.tools)
reload(easybuild.tools.module_naming_scheme)
mns_path = "easybuild.tools.module_naming_scheme.test_module_naming_scheme"
__import__(mns_path, globals(), locals(), [''])
eb_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
args = [
eb_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--force',
'--debug',
'--suffix-modules-path=foobarbaz',
'--module-naming-scheme=TestModuleNamingScheme',
]
self.eb_main(args, do_build=True, verbose=True)
mod_file_prefix = os.path.join(self.test_installpath, 'modules')
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'foobarbaz', 'toy', '0.0')))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0')))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0')))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 't', 'toy', '0.0')))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 't', 'toy', '0.0')))
def test_toy_archived_easyconfig(self):
"""Test archived easyconfig for a succesful build."""
repositorypath = os.path.join(self.test_installpath, 'easyconfigs_archive')
extra_args = [
'--repository=FileRepository',
'--repositorypath=%s' % repositorypath,
]
self.test_toy_build(raise_error=True, extra_args=extra_args)
archived_ec = os.path.join(repositorypath, 'toy', 'toy-0.0.eb')
self.assertTrue(os.path.exists(archived_ec))
ec = EasyConfig(archived_ec)
self.assertEqual(ec.name, 'toy')
self.assertEqual(ec.version, '0.0')
def suite():
""" return all the tests in this file """
return TestLoader().loadTestsFromTestCase(ToyBuildTest)
if __name__ == '__main__':
#logToScreen(enable=True)
#setLogLevelDebug()
unittestmain()
| geimer/easybuild-framework | test/framework/toy_build.py | Python | gpl-2.0 | 33,739 | 0.004742 |
# -*- coding: utf-8 -*-
'''
flixnet Add-on
Copyright (C) 2016 flixnet
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from resources.lib.modules import trakt
from resources.lib.modules import cleantitle
from resources.lib.modules import cleangenre
from resources.lib.modules import control
from resources.lib.modules import client
from resources.lib.modules import cache
from resources.lib.modules import playcount
from resources.lib.modules import workers
from resources.lib.modules import views
from resources.lib.modules import utils
import os,sys,re,json,zipfile,StringIO,urllib,urllib2,urlparse,datetime
params = dict(urlparse.parse_qsl(sys.argv[2].replace('?',''))) if len(sys.argv) > 1 else dict()
action = params.get('action')
class seasons:
def __init__(self):
self.list = []
self.lang = control.apiLanguage()['tvdb']
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.today_date = (self.datetime).strftime('%Y-%m-%d')
self.tvdb_key = 'MUQ2MkYyRjkwMDMwQzQ0NA=='
self.tvdb_info_link = 'http://thetvdb.com/api/%s/series/%s/all/%s.zip' % (self.tvdb_key.decode('base64'), '%s', '%s')
self.tvdb_by_imdb = 'http://thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s'
self.tvdb_by_query = 'http://thetvdb.com/api/GetSeries.php?seriesname=%s'
self.tvdb_image = 'http://thetvdb.com/banners/'
self.tvdb_poster = 'http://thetvdb.com/banners/_cache/'
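    # ID resolution in tvdb_list below: a missing IMDb id is first resolved via
    # a trakt title/year search, the IMDb id is then mapped to a TVDb id via
    # GetSeriesByRemoteID.php, and as a last resort a GetSeries.php name query
    # is filtered by cleaned title and a +/- 1 year window.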
def get(self, tvshowtitle, year, imdb, tvdb, idx=True, create_directory=True):
if control.window.getProperty('PseudoTVRunning') == 'True':
return episodes().get(tvshowtitle, year, imdb, tvdb)
if idx == True:
self.list = cache.get(self.tvdb_list, 24, tvshowtitle, year, imdb, tvdb, self.lang)
if create_directory == True: self.seasonDirectory(self.list)
return self.list
else:
self.list = self.tvdb_list(tvshowtitle, year, imdb, tvdb, 'en')
return self.list
def tvdb_list(self, tvshowtitle, year, imdb, tvdb, lang, limit=''):
try:
if imdb == '0':
try:
imdb = trakt.SearchTVShow(tvshowtitle, year, full=False)[0]
imdb = imdb.get('show', '0')
imdb = imdb.get('ids', {}).get('imdb', '0')
imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
if not imdb: imdb = '0'
except:
imdb = '0'
if tvdb == '0' and not imdb == '0':
url = self.tvdb_by_imdb % imdb
result = client.request(url, timeout='10')
try: tvdb = client.parseDOM(result, 'seriesid')[0]
except: tvdb = '0'
try: name = client.parseDOM(result, 'SeriesName')[0]
except: name = '0'
dupe = re.compile('[***]Duplicate (\d*)[***]').findall(name)
if len(dupe) > 0: tvdb = str(dupe[0])
if tvdb == '': tvdb = '0'
if tvdb == '0':
url = self.tvdb_by_query % (urllib.quote_plus(tvshowtitle))
years = [str(year), str(int(year)+1), str(int(year)-1)]
tvdb = client.request(url, timeout='10')
tvdb = re.sub(r'[^\x00-\x7F]+', '', tvdb)
tvdb = client.replaceHTMLCodes(tvdb)
tvdb = client.parseDOM(tvdb, 'Series')
tvdb = [(x, client.parseDOM(x, 'SeriesName'), client.parseDOM(x, 'FirstAired')) for x in tvdb]
tvdb = [(x, x[1][0], x[2][0]) for x in tvdb if len(x[1]) > 0 and len(x[2]) > 0]
tvdb = [x for x in tvdb if cleantitle.get(tvshowtitle) == cleantitle.get(x[1])]
tvdb = [x[0][0] for x in tvdb if any(y in x[2] for y in years)][0]
tvdb = client.parseDOM(tvdb, 'seriesid')[0]
if tvdb == '': tvdb = '0'
except:
return
try:
if tvdb == '0': return
url = self.tvdb_info_link % (tvdb, 'en')
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % 'en')
artwork = zip.read('banners.xml')
zip.close()
dupe = client.parseDOM(result, 'SeriesName')[0]
dupe = re.compile('[***]Duplicate (\d*)[***]').findall(dupe)
if len(dupe) > 0:
tvdb = str(dupe[0]).encode('utf-8')
url = self.tvdb_info_link % (tvdb, 'en')
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % 'en')
artwork = zip.read('banners.xml')
zip.close()
if not lang == 'en':
url = self.tvdb_info_link % (tvdb, lang)
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result2 = zip.read('%s.xml' % lang)
zip.close()
else:
result2 = result
artwork = artwork.split('<Banner>')
artwork = [i for i in artwork if '<Language>en</Language>' in i and '<BannerType>season</BannerType>' in i]
artwork = [i for i in artwork if not 'seasonswide' in re.findall('<BannerPath>(.+?)</BannerPath>', i)[0]]
result = result.split('<Episode>')
result2 = result2.split('<Episode>')
item = result[0] ; item2 = result2[0]
episodes = [i for i in result if '<EpisodeNumber>' in i]
episodes = [i for i in episodes if not '<SeasonNumber>0</SeasonNumber>' in i]
episodes = [i for i in episodes if not '<EpisodeNumber>0</EpisodeNumber>' in i]
seasons = [i for i in episodes if '<EpisodeNumber>1</EpisodeNumber>' in i]
locals = [i for i in result2 if '<EpisodeNumber>' in i]
result = '' ; result2 = ''
if limit == '':
episodes = []
elif limit == '-1':
seasons = []
else:
episodes = [i for i in episodes if '<SeasonNumber>%01d</SeasonNumber>' % int(limit) in i]
seasons = []
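            # limit semantics: '' builds season entries only, '-1' keeps every
            # episode across all seasons, and any other value keeps only the
            # episodes of that season.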
try: poster = client.parseDOM(item, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
try: status = client.parseDOM(item, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
try: studio = client.parseDOM(item, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: cast = client.parseDOM(item, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: label = client.parseDOM(item2, 'SeriesName')[0]
except: label = '0'
label = client.replaceHTMLCodes(label)
label = label.encode('utf-8')
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
except:
pass
for item in seasons:
try:
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))): raise Exception()
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
thumb = [i for i in artwork if client.parseDOM(i, 'Season')[0] == season]
try: thumb = client.parseDOM(thumb[0], 'BannerPath')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if thumb == '0': thumb = poster
self.list.append({'season': season, 'tvshowtitle': tvshowtitle, 'label': label, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb})
except:
pass
for item in episodes:
try:
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))): raise Exception()
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try:
local = client.parseDOM(item, 'id')[0]
local = [x for x in locals if '<id>%s</id>' % str(local) in x][0]
except:
local = item
label = client.parseDOM(local, 'EpisodeName')[0]
if label == '': label = '0'
label = client.replaceHTMLCodes(label)
label = label.encode('utf-8')
try: episodeplot = client.parseDOM(local, 'Overview')[0]
except: episodeplot = ''
if episodeplot == '': episodeplot = '0'
if episodeplot == '0': episodeplot = plot
episodeplot = client.replaceHTMLCodes(episodeplot)
try: episodeplot = episodeplot.encode('utf-8')
except: pass
self.list.append({'title': title, 'label': label, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': episodeplot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb})
except:
pass
return self.list
def seasonDirectory(self, items):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
traktCredentials = trakt.getTraktCredentialsInfo()
try: isOld = False ; control.item().getArt('type')
except: isOld = True
try: indicators = playcount.getSeasonIndicators(items[0]['imdb'])
except: pass
watchedMenu = control.lang(32068).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32066).encode('utf-8')
unwatchedMenu = control.lang(32069).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32067).encode('utf-8')
queueMenu = control.lang(32065).encode('utf-8')
traktManagerMenu = control.lang(32070).encode('utf-8')
labelMenu = control.lang(32055).encode('utf-8')
playRandom = control.lang(32535).encode('utf-8')
addToLibrary = control.lang(32551).encode('utf-8')
for i in items:
try:
label = '%s %s' % (labelMenu, i['season'])
systitle = sysname = urllib.quote_plus(i['tvshowtitle'])
imdb, tvdb, year, season = i['imdb'], i['tvdb'], i['year'], i['season']
meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
meta.update({'code': imdb, 'imdbnumber': imdb, 'imdb_id': imdb})
meta.update({'tvdb_id': tvdb})
meta.update({'mediatype': 'tvshow'})
meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, sysname)})
if not 'duration' in i: meta.update({'duration': '60'})
elif i['duration'] == '0': meta.update({'duration': '60'})
try: meta.update({'duration': str(int(meta['duration']) * 60)})
except: pass
try: meta.update({'genre': cleangenre.lang(meta['genre'], self.lang)})
except: pass
try: meta.update({'tvshowtitle': i['label']})
except: pass
try:
if season in indicators: meta.update({'playcount': 1, 'overlay': 7})
else: meta.update({'playcount': 0, 'overlay': 6})
except:
pass
url = '%s?action=episodes&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s' % (sysaddon, systitle, year, imdb, tvdb, season)
cm = []
cm.append((playRandom, 'RunPlugin(%s?action=random&rtype=episode&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s)' % (sysaddon, urllib.quote_plus(systitle), urllib.quote_plus(year), urllib.quote_plus(imdb), urllib.quote_plus(tvdb), urllib.quote_plus(season))))
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
cm.append((watchedMenu, 'RunPlugin(%s?action=tvPlaycount&name=%s&imdb=%s&tvdb=%s&season=%s&query=7)' % (sysaddon, systitle, imdb, tvdb, season)))
cm.append((unwatchedMenu, 'RunPlugin(%s?action=tvPlaycount&name=%s&imdb=%s&tvdb=%s&season=%s&query=6)' % (sysaddon, systitle, imdb, tvdb, season)))
if traktCredentials == True:
cm.append((traktManagerMenu, 'RunPlugin(%s?action=traktManager&name=%s&tvdb=%s&content=tvshow)' % (sysaddon, sysname, tvdb)))
if isOld == True:
cm.append((control.lang2(19033).encode('utf-8'), 'Action(Info)'))
cm.append((addToLibrary, 'RunPlugin(%s?action=tvshowToLibrary&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s)' % (sysaddon, systitle, year, imdb, tvdb)))
item = control.item(label=label)
art = {}
if 'thumb' in i and not i['thumb'] == '0':
art.update({'icon': i['thumb'], 'thumb': i['thumb'], 'poster': i['thumb']})
elif 'poster' in i and not i['poster'] == '0':
art.update({'icon': i['poster'], 'thumb': i['poster'], 'poster': i['poster']})
else:
art.update({'icon': addonPoster, 'thumb': addonPoster, 'poster': addonPoster})
if 'banner' in i and not i['banner'] == '0':
art.update({'banner': i['banner']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'banner': i['fanart']})
else:
art.update({'banner': addonBanner})
if settingFanart == 'true' and 'fanart' in i and not i['fanart'] == '0':
item.setProperty('Fanart_Image', i['fanart'])
elif not addonFanart == None:
item.setProperty('Fanart_Image', addonFanart)
item.setArt(art)
item.addContextMenuItems(cm)
item.setInfo(type='Video', infoLabels = meta)
video_streaminfo = {'codec': 'h264'}
item.addStreamInfo('video', video_streaminfo)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
except:
pass
try: control.property(syshandle, 'showplot', items[0]['plot'])
except: pass
control.content(syshandle, 'seasons')
control.directory(syshandle, cacheToDisc=True)
views.setView('seasons', {'skin.estuary': 55, 'skin.confluence': 500})
class episodes:
def __init__(self):
self.list = []
self.trakt_link = 'http://api.trakt.tv'
self.tvmaze_link = 'http://api.tvmaze.com'
self.tvdb_key = 'MUQ2MkYyRjkwMDMwQzQ0NA=='
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.systime = (self.datetime).strftime('%Y%m%d%H%M%S%f')
self.today_date = (self.datetime).strftime('%Y-%m-%d')
self.trakt_user = control.setting('trakt.user').strip()
self.lang = control.apiLanguage()['tvdb']
self.tvdb_info_link = 'http://thetvdb.com/api/%s/series/%s/all/%s.zip' % (self.tvdb_key.decode('base64'), '%s', '%s')
self.tvdb_image = 'http://thetvdb.com/banners/'
self.tvdb_poster = 'http://thetvdb.com/banners/_cache/'
self.added_link = 'http://api.tvmaze.com/schedule'
self.mycalendar_link = 'http://api.trakt.tv/calendars/my/shows/date[29]/60/'
self.trakthistory_link = 'http://api.trakt.tv/users/me/history/shows?limit=300'
self.progress_link = 'http://api.trakt.tv/users/me/watched/shows'
self.hiddenprogress_link = 'http://api.trakt.tv/users/hidden/progress_watched?limit=1000&type=show'
self.calendar_link = 'http://api.tvmaze.com/schedule?date=%s'
self.traktlists_link = 'http://api.trakt.tv/users/me/lists'
self.traktlikedlists_link = 'http://api.trakt.tv/users/likes/lists?limit=1000000'
self.traktlist_link = 'http://api.trakt.tv/users/%s/lists/%s/items'
def get(self, tvshowtitle, year, imdb, tvdb, season=None, episode=None, idx=True, create_directory=True):
try:
if idx == True:
if season == None and episode == None:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, '-1')
elif episode == None:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, season)
else:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, '-1')
num = [x for x,y in enumerate(self.list) if y['season'] == str(season) and y['episode'] == str(episode)][-1]
self.list = [y for x,y in enumerate(self.list) if x >= num]
if create_directory == True: self.episodeDirectory(self.list)
return self.list
else:
self.list = seasons().tvdb_list(tvshowtitle, year, imdb, tvdb, 'en', '-1')
return self.list
except:
pass
def calendar(self, url):
try:
try: url = getattr(self, url + '_link')
except: pass
if self.trakt_link in url and url == self.progress_link:
self.blist = cache.get(self.trakt_progress_list, 720, url, self.trakt_user, self.lang)
self.list = []
self.list = cache.get(self.trakt_progress_list, 0, url, self.trakt_user, self.lang)
elif self.trakt_link in url and url == self.mycalendar_link:
self.blist = cache.get(self.trakt_episodes_list, 720, url, self.trakt_user, self.lang)
self.list = []
self.list = cache.get(self.trakt_episodes_list, 0, url, self.trakt_user, self.lang)
elif self.trakt_link in url and '/users/' in url:
self.list = cache.get(self.trakt_list, 0, url, self.trakt_user)
self.list = self.list[::-1]
elif self.trakt_link in url:
self.list = cache.get(self.trakt_list, 1, url, self.trakt_user)
elif self.tvmaze_link in url and url == self.added_link:
urls = [i['url'] for i in self.calendars(idx=False)][:5]
self.list = []
for url in urls:
self.list += cache.get(self.tvmaze_list, 720, url, True)
elif self.tvmaze_link in url:
self.list = cache.get(self.tvmaze_list, 1, url, False)
self.episodeDirectory(self.list)
return self.list
except:
pass
def widget(self):
if trakt.getTraktIndicatorsInfo() == True:
setting = control.setting('tv.widget.alt')
else:
setting = control.setting('tv.widget')
if setting == '2':
self.calendar(self.progress_link)
elif setting == '3':
self.calendar(self.mycalendar_link)
else:
self.calendar(self.added_link)
def calendars(self, idx=True):
m = control.lang(32060).encode('utf-8').split('|')
try: months = [(m[0], 'January'), (m[1], 'February'), (m[2], 'March'), (m[3], 'April'), (m[4], 'May'), (m[5], 'June'), (m[6], 'July'), (m[7], 'August'), (m[8], 'September'), (m[9], 'October'), (m[10], 'November'), (m[11], 'December')]
except: months = []
d = control.lang(32061).encode('utf-8').split('|')
try: days = [(d[0], 'Monday'), (d[1], 'Tuesday'), (d[2], 'Wednesday'), (d[3], 'Thursday'), (d[4], 'Friday'), (d[5], 'Saturday'), (d[6], 'Sunday')]
except: days = []
for i in range(0, 30):
try:
name = (self.datetime - datetime.timedelta(days = i))
name = (control.lang(32062) % (name.strftime('%A'), name.strftime('%d %B'))).encode('utf-8')
for m in months: name = name.replace(m[1], m[0])
for d in days: name = name.replace(d[1], d[0])
try: name = name.encode('utf-8')
except: pass
url = self.calendar_link % (self.datetime - datetime.timedelta(days = i)).strftime('%Y-%m-%d')
self.list.append({'name': name, 'url': url, 'image': 'calendar.png', 'action': 'calendar'})
except:
pass
if idx == True: self.addDirectory(self.list)
return self.list
def userlists(self):
try:
userlists = []
if trakt.getTraktCredentialsInfo() == False: raise Exception()
activity = trakt.getActivity()
except:
pass
try:
if trakt.getTraktCredentialsInfo() == False: raise Exception()
try:
if activity > cache.timeout(self.trakt_user_list, self.traktlists_link, self.trakt_user): raise Exception()
userlists += cache.get(self.trakt_user_list, 720, self.traktlists_link, self.trakt_user)
except:
userlists += cache.get(self.trakt_user_list, 0, self.traktlists_link, self.trakt_user)
except:
pass
try:
self.list = []
if trakt.getTraktCredentialsInfo() == False: raise Exception()
try:
if activity > cache.timeout(self.trakt_user_list, self.traktlikedlists_link, self.trakt_user): raise Exception()
userlists += cache.get(self.trakt_user_list, 720, self.traktlikedlists_link, self.trakt_user)
except:
userlists += cache.get(self.trakt_user_list, 0, self.traktlikedlists_link, self.trakt_user)
except:
pass
self.list = userlists
for i in range(0, len(self.list)): self.list[i].update({'image': 'userlists.png', 'action': 'calendar'})
self.addDirectory(self.list, queue=True)
return self.list
def trakt_list(self, url, user):
try:
for i in re.findall('date\[(\d+)\]', url):
url = url.replace('date[%s]' % i, (self.datetime - datetime.timedelta(days = int(i))).strftime('%Y-%m-%d'))
q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
q.update({'extended': 'full'})
q = (urllib.urlencode(q)).replace('%2C', ',')
u = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
itemlist = []
items = trakt.getTraktAsJson(u)
except:
return
for item in items:
try:
title = item['episode']['title']
if title == None or title == '': raise Exception()
title = client.replaceHTMLCodes(title)
season = item['episode']['season']
season = re.sub('[^0-9]', '', '%01d' % int(season))
if season == '0': raise Exception()
episode = item['episode']['number']
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
if episode == '0': raise Exception()
tvshowtitle = item['show']['title']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
year = item['show']['year']
year = re.sub('[^0-9]', '', str(year))
imdb = item['show']['ids']['imdb']
if imdb == None or imdb == '': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
tvdb = item['show']['ids']['tvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
premiered = item['episode']['first_aired']
try: premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
except: premiered = '0'
studio = item['show']['network']
if studio == None: studio = '0'
genre = item['show']['genres']
genre = [i.title() for i in genre]
if genre == []: genre = '0'
genre = ' / '.join(genre)
try: duration = str(item['show']['runtime'])
except: duration = '0'
if duration == None: duration = '0'
try: rating = str(item['episode']['rating'])
except: rating = '0'
if rating == None or rating == '0.0': rating = '0'
try: votes = str(item['show']['votes'])
except: votes = '0'
try: votes = str(format(int(votes),',d'))
except: pass
if votes == None: votes = '0'
mpaa = item['show']['certification']
if mpaa == None: mpaa = '0'
plot = item['episode']['overview']
if plot == None or plot == '': plot = item['show']['overview']
if plot == None or plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
try:
if self.lang == 'en': raise Exception()
item = trakt.getTVShowTranslation(imdb, lang=self.lang, season=season, episode=episode, full=True)
title = item.get('title') or title
plot = item.get('overview') or plot
tvshowtitle = trakt.getTVShowTranslation(imdb, lang=self.lang) or tvshowtitle
except:
pass
itemlist.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': 'Continuing', 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': '0', 'thumb': '0'})
except:
pass
itemlist = itemlist[::-1]
return itemlist
def trakt_progress_list(self, url, user, lang):
try:
url += '?extended=full'
result = trakt.getTraktAsJson(url)
items = []
except:
return
for item in result:
try:
num_1 = 0
for i in range(0, len(item['seasons'])):
if item['seasons'][i]['number'] > 0: num_1 += len(item['seasons'][i]['episodes'])
num_2 = int(item['show']['aired_episodes'])
if num_1 >= num_2: raise Exception()
season = str(item['seasons'][-1]['number'])
episode = [x for x in item['seasons'][-1]['episodes'] if 'number' in x]
episode = sorted(episode, key=lambda x: x['number'])
episode = str(episode[-1]['number'])
tvshowtitle = item['show']['title']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
year = item['show']['year']
year = re.sub('[^0-9]', '', str(year))
if int(year) > int(self.datetime.strftime('%Y')): raise Exception()
imdb = item['show']['ids']['imdb']
if imdb == None or imdb == '': imdb = '0'
tvdb = item['show']['ids']['tvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
items.append({'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'snum': season, 'enum': episode})
except:
pass
try:
result = trakt.getTraktAsJson(self.hiddenprogress_link)
result = [str(i['show']['ids']['tvdb']) for i in result]
items = [i for i in items if not i['tvdb'] in result]
except:
pass
def items_list(i):
try:
item = [x for x in self.blist if x['tvdb'] == i['tvdb'] and x['snum'] == i['snum'] and x['enum'] == i['enum']][0]
item['action'] = 'episodes'
self.list.append(item)
return
except:
pass
try:
url = self.tvdb_info_link % (i['tvdb'], lang)
data = urllib2.urlopen(url, timeout=10).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % lang)
artwork = zip.read('banners.xml')
zip.close()
result = result.split('<Episode>')
item = [x for x in result if '<EpisodeNumber>' in x]
item2 = result[0]
num = [x for x,y in enumerate(item) if re.compile('<SeasonNumber>(.+?)</SeasonNumber>').findall(y)[0] == str(i['snum']) and re.compile('<EpisodeNumber>(.+?)</EpisodeNumber>').findall(y)[0] == str(i['enum'])][-1]
item = [y for x,y in enumerate(item) if x > num][0]
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
try: status = client.parseDOM(item2, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))): raise Exception()
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
tvshowtitle = i['tvshowtitle']
imdb, tvdb = i['imdb'], i['tvdb']
year = i['year']
try: year = year.encode('utf-8')
except: pass
try: poster = client.parseDOM(item2, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item2, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item2, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: studio = client.parseDOM(item2, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item2, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item2, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item2, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item2, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try: cast = client.parseDOM(item2, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: plot = client.parseDOM(item, 'Overview')[0]
except: plot = ''
if plot == '':
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb, 'snum': i['snum'], 'enum': i['enum'], 'action': 'episodes'})
except:
pass
items = items[:100]
threads = []
for i in items: threads.append(workers.Thread(items_list, i))
[i.start() for i in threads]
[i.join() for i in threads]
try: self.list = sorted(self.list, key=lambda k: k['premiered'], reverse=True)
except: pass
return self.list
def trakt_episodes_list(self, url, user, lang):
items = self.trakt_list(url, user)
def items_list(i):
try:
item = [x for x in self.blist if x['tvdb'] == i['tvdb'] and x['season'] == i['season'] and x['episode'] == i['episode']][0]
if item['poster'] == '0': raise Exception()
self.list.append(item)
return
except:
pass
try:
url = self.tvdb_info_link % (i['tvdb'], lang)
data = urllib2.urlopen(url, timeout=10).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % lang)
artwork = zip.read('banners.xml')
zip.close()
result = result.split('<Episode>')
item = [(re.findall('<SeasonNumber>%01d</SeasonNumber>' % int(i['season']), x), re.findall('<EpisodeNumber>%01d</EpisodeNumber>' % int(i['episode']), x), x) for x in result]
item = [x[2] for x in item if len(x[0]) > 0 and len(x[1]) > 0][0]
item2 = result[0]
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
try: status = client.parseDOM(item2, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
tvshowtitle = i['tvshowtitle']
imdb, tvdb = i['imdb'], i['tvdb']
year = i['year']
try: year = year.encode('utf-8')
except: pass
try: poster = client.parseDOM(item2, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item2, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item2, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: studio = client.parseDOM(item2, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item2, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item2, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item2, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item2, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try: cast = client.parseDOM(item2, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: plot = client.parseDOM(item, 'Overview')[0]
except: plot = ''
if plot == '':
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb})
except:
pass
items = items[:100]
threads = []
for i in items: threads.append(workers.Thread(items_list, i))
[i.start() for i in threads]
[i.join() for i in threads]
return self.list
def trakt_user_list(self, url, user):
try:
items = trakt.getTraktAsJson(url)
except:
pass
for item in items:
try:
try: name = item['list']['name']
except: name = item['name']
name = client.replaceHTMLCodes(name)
try: url = (trakt.slug(item['list']['user']['username']), item['list']['ids']['slug'])
except: url = ('me', item['ids']['slug'])
url = self.traktlist_link % url
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
self.list = sorted(self.list, key=lambda k: utils.title_key(k['name']))
return self.list
def tvmaze_list(self, url, limit):
try:
result = client.request(url)
itemlist = []
items = json.loads(result)
except:
return
for item in items:
try:
if not 'english' in item['show']['language'].lower(): raise Exception()
if limit == True and not 'scripted' in item['show']['type'].lower(): raise Exception()
title = item['name']
if title == None or title == '': raise Exception()
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = item['season']
season = re.sub('[^0-9]', '', '%01d' % int(season))
if season == '0': raise Exception()
season = season.encode('utf-8')
episode = item['number']
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
if episode == '0': raise Exception()
episode = episode.encode('utf-8')
tvshowtitle = item['show']['name']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
tvshowtitle = tvshowtitle.encode('utf-8')
year = item['show']['premiered']
year = re.findall('(\d{4})', year)[0]
year = year.encode('utf-8')
imdb = item['show']['externals']['imdb']
if imdb == None or imdb == '': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
imdb = imdb.encode('utf-8')
tvdb = item['show']['externals']['thetvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
tvdb = tvdb.encode('utf-8')
try: poster = item['show']['image']['original']
except: poster = '0'
if poster == None or poster == '': poster = '0'
poster = poster.encode('utf-8')
try: thumb1 = item['show']['image']['original']
except: thumb1 = '0'
try: thumb2 = item['image']['original']
except: thumb2 = '0'
if thumb2 == None or thumb2 == '0': thumb = thumb1
else: thumb = thumb2
if thumb == None or thumb == '': thumb = '0'
thumb = thumb.encode('utf-8')
premiered = item['airdate']
try: premiered = re.findall('(\d{4}-\d{2}-\d{2})', premiered)[0]
except: premiered = '0'
premiered = premiered.encode('utf-8')
try: studio = item['show']['network']['name']
except: studio = '0'
if studio == None: studio = '0'
studio = studio.encode('utf-8')
try: genre = item['show']['genres']
except: genre = '0'
genre = [i.title() for i in genre]
if genre == []: genre = '0'
genre = ' / '.join(genre)
genre = genre.encode('utf-8')
try: duration = item['show']['runtime']
except: duration = '0'
if duration == None: duration = '0'
duration = str(duration)
duration = duration.encode('utf-8')
try: rating = item['show']['rating']['average']
except: rating = '0'
if rating == None or rating == '0.0': rating = '0'
rating = str(rating)
rating = rating.encode('utf-8')
try: plot = item['show']['summary']
except: plot = '0'
if plot == None: plot = '0'
plot = re.sub('<.+?>|</.+?>|\n', '', plot)
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
itemlist.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': 'Continuing', 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'thumb': thumb})
except:
pass
itemlist = itemlist[::-1]
return itemlist
def episodeDirectory(self, items):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
traktCredentials = trakt.getTraktCredentialsInfo()
try: isOld = False ; control.item().getArt('type')
except: isOld = True
isPlayable = 'true' if not 'plugin' in control.infoLabel('Container.PluginName') else 'false'
indicators = playcount.getTVShowIndicators(refresh=True)
try: multi = [i['tvshowtitle'] for i in items]
except: multi = []
multi = len([x for y,x in enumerate(multi) if x not in multi[:y]])
multi = True if multi > 1 else False
try: sysaction = items[0]['action']
except: sysaction = ''
isFolder = False if not sysaction == 'episodes' else True
playbackMenu = control.lang(32063).encode('utf-8') if control.setting('hosts.mode') == '2' else control.lang(32064).encode('utf-8')
watchedMenu = control.lang(32068).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32066).encode('utf-8')
unwatchedMenu = control.lang(32069).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32067).encode('utf-8')
queueMenu = control.lang(32065).encode('utf-8')
traktManagerMenu = control.lang(32070).encode('utf-8')
tvshowBrowserMenu = control.lang(32071).encode('utf-8')
addToLibrary = control.lang(32551).encode('utf-8')
for i in items:
try:
if not 'label' in i: i['label'] = i['title']
if i['label'] == '0':
label = '%sx%02d . %s %s' % (i['season'], int(i['episode']), 'Episode', i['episode'])
else:
label = '%sx%02d . %s' % (i['season'], int(i['episode']), i['label'])
if multi == True:
label = '%s - %s' % (i['tvshowtitle'], label)
imdb, tvdb, year, season, episode = i['imdb'], i['tvdb'], i['year'], i['season'], i['episode']
systitle = urllib.quote_plus(i['title'])
systvshowtitle = urllib.quote_plus(i['tvshowtitle'])
syspremiered = urllib.quote_plus(i['premiered'])
meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
meta.update({'mediatype': 'episode'})
meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, systvshowtitle)})
if not 'duration' in i: meta.update({'duration': '60'})
elif i['duration'] == '0': meta.update({'duration': '60'})
try: meta.update({'duration': str(int(meta['duration']) * 60)})
except: pass
try: meta.update({'genre': cleangenre.lang(meta['genre'], self.lang)})
except: pass
try: meta.update({'year': re.findall('(\d{4})', i['premiered'])[0]})
except: pass
try: meta.update({'title': i['label']})
except: pass
sysmeta = urllib.quote_plus(json.dumps(meta))
url = '%s?action=play&title=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s&tvshowtitle=%s&premiered=%s&meta=%s&t=%s' % (sysaddon, systitle, year, imdb, tvdb, season, episode, systvshowtitle, syspremiered, sysmeta, self.systime)
sysurl = urllib.quote_plus(url)
path = '%s?action=play&title=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s&tvshowtitle=%s&premiered=%s' % (sysaddon, systitle, year, imdb, tvdb, season, episode, systvshowtitle, syspremiered)
if isFolder == True:
url = '%s?action=episodes&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s' % (sysaddon, systvshowtitle, year, imdb, tvdb, season, episode)
cm = []
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
if multi == True:
cm.append((tvshowBrowserMenu, 'Container.Update(%s?action=seasons&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s,return)' % (sysaddon, systvshowtitle, year, imdb, tvdb)))
try:
overlay = int(playcount.getEpisodeOverlay(indicators, imdb, tvdb, season, episode))
if overlay == 7:
cm.append((unwatchedMenu, 'RunPlugin(%s?action=episodePlaycount&imdb=%s&tvdb=%s&season=%s&episode=%s&query=6)' % (sysaddon, imdb, tvdb, season, episode)))
meta.update({'playcount': 1, 'overlay': 7})
else:
cm.append((watchedMenu, 'RunPlugin(%s?action=episodePlaycount&imdb=%s&tvdb=%s&season=%s&episode=%s&query=7)' % (sysaddon, imdb, tvdb, season, episode)))
meta.update({'playcount': 0, 'overlay': 6})
except:
pass
if traktCredentials == True:
cm.append((traktManagerMenu, 'RunPlugin(%s?action=traktManager&name=%s&tvdb=%s&content=tvshow)' % (sysaddon, systvshowtitle, tvdb)))
if isFolder == False:
cm.append((playbackMenu, 'RunPlugin(%s?action=alterSources&url=%s&meta=%s)' % (sysaddon, sysurl, sysmeta)))
if isOld == True:
cm.append((control.lang2(19033).encode('utf-8'), 'Action(Info)'))
cm.append((addToLibrary, 'RunPlugin(%s?action=tvshowToLibrary&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s)' % (sysaddon, systvshowtitle, year, imdb, tvdb)))
item = control.item(label=label)
art = {}
if 'poster' in i and not i['poster'] == '0':
art.update({'poster': i['poster'], 'tvshow.poster': i['poster'], 'season.poster': i['poster']})
else:
art.update({'poster': addonPoster})
if 'thumb' in i and not i['thumb'] == '0':
art.update({'icon': i['thumb'], 'thumb': i['thumb']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'icon': i['fanart'], 'thumb': i['fanart']})
elif 'poster' in i and not i['poster'] == '0':
art.update({'icon': i['poster'], 'thumb': i['poster']})
else:
art.update({'icon': addonFanart, 'thumb': addonFanart})
if 'banner' in i and not i['banner'] == '0':
art.update({'banner': i['banner']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'banner': i['fanart']})
else:
art.update({'banner': addonBanner})
if settingFanart == 'true' and 'fanart' in i and not i['fanart'] == '0':
item.setProperty('Fanart_Image', i['fanart'])
elif not addonFanart == None:
item.setProperty('Fanart_Image', addonFanart)
item.setArt(art)
item.addContextMenuItems(cm)
item.setProperty('IsPlayable', isPlayable)
item.setInfo(type='Video', infoLabels = meta)
video_streaminfo = {'codec': 'h264'}
item.addStreamInfo('video', video_streaminfo)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
except:
pass
control.content(syshandle, 'episodes')
control.directory(syshandle, cacheToDisc=True)
views.setView('episodes', {'skin.estuary': 55, 'skin.confluence': 504})
def addDirectory(self, items, queue=False):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonFanart, addonThumb, artPath = control.addonFanart(), control.addonThumb(), control.artPath()
queueMenu = control.lang(32065).encode('utf-8')
for i in items:
try:
name = i['name']
if i['image'].startswith('http'): thumb = i['image']
elif not artPath == None: thumb = os.path.join(artPath, i['image'])
else: thumb = addonThumb
url = '%s?action=%s' % (sysaddon, i['action'])
try: url += '&url=%s' % urllib.quote_plus(i['url'])
except: pass
cm = []
if queue == True:
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
item = control.item(label=name)
item.setArt({'icon': thumb, 'thumb': thumb})
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
item.addContextMenuItems(cm)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
except:
pass
control.content(syshandle, 'addons')
control.directory(syshandle, cacheToDisc=True)
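# --- Hedged usage sketch (editor's addition, not part of the original addon) --
# A minimal illustration of how a plugin router might drive the indexers in
# this module. `episodes`, `seasons` and the `date[N]` placeholder expansion
# in trakt_list() are real parts of this file; `router_example` and its
# `params` dict are assumptions made for illustration only.
#
#   def router_example(params):
#       action = params.get('action')
#       if action == 'calendar':
#           # 'mycalendar' resolves to episodes().mycalendar_link, whose
#           # 'date[29]' token trakt_list() rewrites into a concrete
#           # YYYY-MM-DD date 29 days in the past.
#           episodes().calendar('mycalendar')
#       elif action == 'episodes':
#           episodes().get(params['tvshowtitle'], params['year'],
#                          params['imdb'], params['tvdb'],
#                          season=params.get('season'))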
| repotvsupertuga/repo | script.module.stream.tvsupertuga.addon/resources/lib/indexers/episodes.py | Python | gpl-2.0 | 65,914 | 0.011909 |
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth import authenticate
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden)
from django.core.exceptions import ValidationError
from django.views.decorators.http import require_http_methods
from urlparse import urlunsplit, urlsplit, parse_qsl
from astakos.im.util import restrict_next
from astakos.im.user_utils import login as auth_login, logout
from astakos.im.views.decorators import cookie_fix
import astakos.im.messages as astakos_messages
from astakos.im.settings import REDIRECT_ALLOWED_SCHEMES
import logging
logger = logging.getLogger(__name__)
@require_http_methods(["GET"])
@cookie_fix
def login(request):
"""
If there is no ``next`` request parameter redirects to astakos index page
displaying an error message.
If the request user is authenticated and has signed the approval terms,
redirects to `next` request parameter. If not, redirects to approval terms
in order to return back here after agreeing with the terms.
Otherwise, redirects to login in order to return back here after successful
login.
"""
next = request.GET.get('next')
if not next:
return HttpResponseBadRequest('Missing next parameter')
if not restrict_next(next, allowed_schemes=REDIRECT_ALLOWED_SCHEMES):
return HttpResponseForbidden(_(
astakos_messages.NOT_ALLOWED_NEXT_PARAM))
force = request.GET.get('force', None)
response = HttpResponse()
if force == '' and request.user.is_authenticated():
logout(request)
if request.user.is_authenticated():
# if user has not signed the approval terms
# redirect to approval terms with next the request path
if not request.user.signed_terms:
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('latest_terms')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
renew = request.GET.get('renew', None)
if renew == '':
request.user.renew_token(
flush_sessions=True,
current_key=request.session.session_key
)
try:
request.user.save()
except ValidationError, e:
return HttpResponseBadRequest(e)
# authenticate before login
user = authenticate(
username=request.user.username,
auth_token=request.user.auth_token
)
auth_login(request, user)
logger.info('Token reset for %s' % user.username)
parts = list(urlsplit(next))
parts[3] = urlencode({
'uuid': request.user.uuid,
'token': request.user.auth_token
})
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
else:
# redirect to login with next the request path
# first build next parameter
parts = list(urlsplit(request.build_absolute_uri()))
params = dict(parse_qsl(parts[3], keep_blank_values=True))
# delete force parameter
if 'force' in params:
del params['force']
parts[3] = urlencode(params)
next = urlunsplit(parts)
# build url location
parts[2] = reverse('login')
params = {'next': next}
parts[3] = urlencode(params)
url = urlunsplit(parts)
response['Location'] = url
response.status_code = 302
return response
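# --- Hedged usage sketch (editor's addition, not part of the original module) -
# The view above is normally consumed by a service that sends the user here
# with a `next` parameter (plus the optional `renew`/`force` flags). The host
# names below are assumed examples, not a real deployment:
#
#   GET /login?next=https://cyclades.example.org/ui/&renew=
#
# After a successful token renewal the user is redirected to:
#
#   https://cyclades.example.org/ui/?uuid=<user-uuid>&token=<auth-token>
#
# i.e. the query string of `next` is rebuilt via
# urlencode({'uuid': ..., 'token': ...}), exactly as in the renew branch above.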
| Erethon/synnefo | snf-astakos-app/astakos/im/views/target/redirect.py | Python | gpl-3.0 | 4,711 | 0 |
"""Entity and System Managers."""
import six
from ecs.exceptions import (
NonexistentComponentTypeForEntity, DuplicateSystemTypeError,
SystemAlreadyAddedToManagerError)
from ecs.models import Entity
class EntityManager(object):
"""Provide database-like access to components based on an entity key."""
def __init__(self):
self._database = {}
self._next_guid = 0
@property
def database(self):
"""Get this manager's database. Direct modification is not
permitted.
:return: the database
:rtype: :class:`dict`
"""
return self._database
def create_entity(self):
"""Return a new entity instance with the current lowest GUID value.
Does not store a reference to it, and does not make any entries in the
database referencing it.
:return: the new entity
:rtype: :class:`ecs.models.Entity`
"""
entity = Entity(self._next_guid)
self._next_guid += 1
return entity
def add_component(self, entity, component_instance):
"""Add a component to the database and associate it with the given
entity.
:param entity: entity to associate
:type entity: :class:`ecs.models.Entity`
:param component_instance: component to add to the entity
:type component_instance: :class:`ecs.models.Component`
"""
component_type = type(component_instance)
if component_type not in self._database:
self._database[component_type] = {}
self._database[component_type][entity] = component_instance
def remove_component(self, entity, component_type):
"""Remove the component of ``component_type`` associated with
entity from the database. Doesn't do any kind of data-teardown. It is
up to the system calling this code to do that. In the future, a
callback system may be used to implement type-specific destructors.
:param entity: entity to associate
:type entity: :class:`ecs.models.Entity`
:param component_type: component type to remove from the entity
:type component_type: :class:`type` which is :class:`Component`
subclass
"""
try:
del self._database[component_type][entity]
if self._database[component_type] == {}:
del self._database[component_type]
except KeyError:
pass
def pairs_for_type(self, component_type):
"""Return an iterator over ``(entity, component_instance)`` tuples for
all entities in the database possessing a component of
``component_type``. Return an empty iterator if there are no components
of this type in the database. It should be used in a loop like this,
where ``Renderable`` is a component type:
.. code-block:: python
for entity, renderable_component in \
entity_manager.pairs_for_type(Renderable):
pass # do something
:param component_type: a type of created component
:type component_type: :class:`type` which is :class:`Component`
subclass
:return: iterator on ``(entity, component_instance)`` tuples
:rtype: :class:`iter` on
(:class:`ecs.models.Entity`, :class:`ecs.models.Component`)
"""
try:
return six.iteritems(self._database[component_type])
except KeyError:
return six.iteritems({})
def component_for_entity(self, entity, component_type):
"""Return the instance of ``component_type`` for the entity from the
database.
:param entity: associated entity
:type entity: :class:`ecs.models.Entity`
:param component_type: a type of created component
:type component_type: :class:`type` which is :class:`Component`
subclass
:return: component instance
:rtype: :class:`ecs.models.Component`
:raises: :exc:`NonexistentComponentTypeForEntity` when
``component_type`` does not exist on the given entity
"""
try:
return self._database[component_type][entity]
except KeyError:
raise NonexistentComponentTypeForEntity(
entity, component_type)
def remove_entity(self, entity):
"""Remove all components from the database that are associated with
the entity, with the side-effect that the entity is also no longer
in the database.
:param entity: entity to remove
:type entity: :class:`ecs.models.Entity`
"""
# For Python 2, don't use iterkeys(), otherwise we will get a
# RuntimeError about mutating the length of the dictionary at runtime.
# For Python 3, we can't even use keys(), because that is a view object
# that acts like iterkeys(). We therefore make a copy using list() to
# avoid modifying the iterator.
for comp_type in list(self._database.keys()):
try:
del self._database[comp_type][entity]
if self._database[comp_type] == {}:
del self._database[comp_type]
except KeyError:
pass
class SystemManager(object):
"""A container and manager for :class:`ecs.models.System` objects."""
def __init__(self, entity_manager):
""":param entity_manager: this manager's entity manager
        :type entity_manager: :class:`EntityManager`
"""
self._systems = []
self._system_types = {}
self._entity_manager = entity_manager
# Allow getting the list of systems but not directly setting it.
@property
def systems(self):
"""Get this manager's list of systems.
:return: system list
:rtype: :class:`list` of :class:`ecs.models.System`
"""
return self._systems
def add_system(self, system_instance, priority=0):
"""Add a :class:`ecs.models.System` instance to the manager.
:param system_instance: instance of a system
:param priority: non-negative integer (default: 0)
:type system_instance: :class:`ecs.models.System`
:type priority: :class:`int`
:raises: :class:`ecs.exceptions.DuplicateSystemTypeError` when the
system type is already present in this manager
:raises: :class:`ecs.exceptions.SystemAlreadyAddedToManagerError` when
the system already belongs to a system manager
"""
system_type = type(system_instance)
if system_type in self._system_types:
raise DuplicateSystemTypeError(system_type)
if system_instance.system_manager is not None:
raise SystemAlreadyAddedToManagerError(
system_instance, self, system_instance.system_manager)
system_instance.entity_manager = self._entity_manager
system_instance.system_manager = self
self._system_types[system_type] = system_instance
self._systems.append(system_instance)
system_instance.priority = priority
self._systems.sort(key=lambda x: x.priority)
def remove_system(self, system_type):
"""Tell the manager to no longer run the system of this type.
:param system_type: type of system to remove
:type system_type: :class:`type`
"""
system = self._system_types[system_type]
system.entity_manager = None
system.system_manager = None
self._systems.remove(system)
del self._system_types[system_type]
def update(self, dt):
"""Run each system's ``update()`` method for this frame. The systems
are run in the order in which they were added.
:param dt: delta time, or elapsed time for this frame
:type dt: :class:`float`
"""
# Iterating over a list of systems instead of values in a dictionary is
# noticeably faster. We maintain a list in addition to a dictionary
# specifically for this purpose.
#
# Though initially we had the entity manager being passed through to
# each update() method, this turns out to cause quite a large
# performance penalty. So now it is just set on each system.
for system in self._systems:
system.update(dt)
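# --- Hedged usage sketch (editor's addition, not part of the original module) -
# Minimal end-to-end wiring of the two managers. It assumes the Component and
# System base classes exported by ecs.models (Entity is already imported at
# the top of this file); Position and MovementSystem are toy examples.
if __name__ == '__main__':  # pragma: no cover
    from ecs.models import Component, System

    class Position(Component):
        """Toy component holding 2D coordinates."""
        def __init__(self, x, y):
            self.x, self.y = x, y

    class MovementSystem(System):
        """Toy system shifting every Position to the right each frame."""
        def update(self, dt):
            for _entity, pos in self.entity_manager.pairs_for_type(Position):
                pos.x += 1.0 * dt

    entity_manager = EntityManager()
    system_manager = SystemManager(entity_manager)
    player = entity_manager.create_entity()
    entity_manager.add_component(player, Position(0.0, 0.0))
    system_manager.add_system(MovementSystem(), priority=0)
    system_manager.update(1.0)  # runs each system once, in priority order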
| seanfisk/ecs | ecs/managers.py | Python | mit | 8,344 | 0 |
# -*- coding: utf-8 -*-
"""
eve.methods.post
~~~~~~~~~~~~~~~~
    This module implements the POST method, supported by the resource
    endpoints.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app as app, abort
from eve.utils import config, parse_request, debug_error_message
from eve.auth import requires_auth
from eve.defaults import resolve_default_values
from eve.validation import ValidationError
from eve.methods.common import parse, payload, ratelimit, \
pre_event, store_media_files, resolve_user_restricted_access, \
resolve_embedded_fields, build_response_document, marshal_write_response, \
resolve_sub_resource_path, resolve_document_etag, oplog_push
from eve.versioning import resolve_document_version, \
insert_versioning_documents
@ratelimit()
@requires_auth('resource')
@pre_event
def post(resource, payl=None):
"""
Default function for handling POST requests, it has decorators for
rate limiting, authentication and for raising pre-request events. After the
decorators are applied forwards to call to :func:`post_internal`
.. versionchanged:: 0.5
Split original post() into post/post_internal combo.
"""
return post_internal(resource, payl, skip_validation=False)
def post_internal(resource, payl=None, skip_validation=False):
"""
Intended for internal post calls, this method is not rate limited,
authentication is not checked and pre-request events are not raised.
Adds one or more documents to a resource. Each document is validated
against the domain schema. If validation passes the document is inserted
and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
document are returned. If validation fails, a list of validation issues
is returned.
:param resource: name of the resource involved.
:param payl: alternative payload. When calling post() from your own code
you can provide an alternative payload. This can be useful,
for example, when you have a callback function hooked to a
certain endpoint, and want to perform additional post() calls
from there.
Please be advised that in order to successfully use this
option, a request context must be available.
See https://github.com/nicolaiarocci/eve/issues/74 for a
discussion, and a typical use case.
:param skip_validation: skip payload validation before write (bool)
.. versionchanged:: 0.6
Fix: since v0.6, skip_validation = True causes a 422 response (#726).
.. versionchanged:: 0.6
Initialize DELETED field when soft_delete is enabled.
.. versionchanged:: 0.5
    Back to resolving default values after validation, as the validator can
    now properly validate dependencies even when some have default values. See
#353.
Push updates to the OpLog.
Original post() has been split into post() and post_internal().
ETAGS are now stored with documents (#369).
.. versionchanged:: 0.4
Resolve default values before validation is performed. See #353.
Support for document versioning.
.. versionchanged:: 0.3
Return 201 if at least one document has been successfully inserted.
Fix #231 auth field not set if resource level authentication is set.
Support for media fields.
When IF_MATCH is disabled, no etag is included in the payload.
Support for new validation format introduced with Cerberus v0.5.
.. versionchanged:: 0.2
Use the new STATUS setting.
Use the new ISSUES setting.
Raise 'on_pre_<method>' event.
    Explicitly resolve default values instead of letting them be resolved
by common.parse. This avoids a validation error when a read-only field
also has a default value.
Added ``on_inserted*`` events after the database insert
.. versionchanged:: 0.1.1
auth.request_auth_value is now used to store the auth_field value.
.. versionchanged:: 0.1.0
More robust handling of auth_field.
Support for optional HATEOAS.
.. versionchanged: 0.0.9
    Event hooks renamed to be more robust and consistent: 'on_posting'
renamed to 'on_insert'.
    You can now pass a pre-defined custom payload to the function.
.. versionchanged:: 0.0.9
Storing self.app.auth.userid in auth_field when 'user-restricted
resource access' is enabled.
.. versionchanged: 0.0.7
Support for Rate-Limiting.
Support for 'extra_response_fields'.
'on_posting' and 'on_posting_<resource>' events are raised before the
documents are inserted into the database. This allows callback functions
to arbitrarily edit/update the documents being stored.
.. versionchanged:: 0.0.6
Support for bulk inserts.
Please note: validation constraints are checked against the database,
and not between the payload documents themselves. This causes an
interesting corner case: in the event of a multiple documents payload
where two or more documents carry the same value for a field where the
'unique' constraint is set, the payload will validate successfully, as
there are no duplicates in the database (yet). If this is an issue, the
client can always send the documents once at a time for insertion, or
validate locally before submitting the payload to the API.
.. versionchanged:: 0.0.5
Support for 'application/json' Content-Type .
Support for 'user-restricted resource access'.
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
.. versionchanged:: 0.0.3
    JSON links. Superfluous ``response`` container removed.
"""
date_utc = datetime.utcnow().replace(microsecond=0)
resource_def = app.config['DOMAIN'][resource]
schema = resource_def['schema']
validator = None if skip_validation else app.validator(schema, resource)
documents = []
results = []
failures = 0
if config.BANDWIDTH_SAVER is True:
embedded_fields = []
else:
req = parse_request(resource)
embedded_fields = resolve_embedded_fields(resource, req)
# validation, and additional fields
if payl is None:
payl = payload()
'''
Added by : LHearen
E-mail : LHearen@126.com
Description: Used to construct our own RESTful interfaces - but the extra
    items should not be stored in the DB.
'''
if "_id" in payl:
payl["_id"] = '27167fe7-fc9d-47d5-9cd0-717106ef67be'
if "Module" in payl:
del payl["Module"]
if "Method" in payl:
del payl["Method"]
if isinstance(payl, dict):
payl = [payl]
if not payl:
        # empty bulk insert
abort(400, description=debug_error_message(
'Empty bulk insert'
))
if len(payl) > 1 and not config.DOMAIN[resource]['bulk_enabled']:
abort(400, description=debug_error_message(
'Bulk insert not allowed'
))
for value in payl:
document = []
doc_issues = {}
try:
document = parse(value, resource)
resolve_sub_resource_path(document, resource)
if skip_validation:
validation = True
else:
validation = validator.validate(document)
if validation: # validation is successful
                # validator might not be available if skip_validation. #726.
if validator:
# Apply coerced values
document = validator.document
# Populate meta and default fields
document[config.LAST_UPDATED] = \
document[config.DATE_CREATED] = date_utc
if config.DOMAIN[resource]['soft_delete'] is True:
document[config.DELETED] = False
resolve_user_restricted_access(document, resource)
resolve_default_values(document, resource_def['defaults'])
store_media_files(document, resource)
resolve_document_version(document, resource, 'POST')
else:
# validation errors added to list of document issues
doc_issues = validator.errors
except ValidationError as e:
doc_issues['validation exception'] = str(e)
except Exception as e:
# most likely a problem with the incoming payload, report back to
# the client as if it was a validation issue
app.logger.exception(e)
doc_issues['exception'] = str(e)
if len(doc_issues):
document = {
config.STATUS: config.STATUS_ERR,
config.ISSUES: doc_issues,
}
failures += 1
documents.append(document)
if failures:
        # If at least one document has issues, the whole request fails and a
        # ``422 Unprocessable Entity`` status is returned.
for document in documents:
if config.STATUS in document \
and document[config.STATUS] == config.STATUS_ERR:
results.append(document)
else:
results.append({config.STATUS: config.STATUS_OK})
return_code = config.VALIDATION_ERROR_STATUS
else:
# notify callbacks
getattr(app, "on_insert")(resource, documents)
getattr(app, "on_insert_%s" % resource)(documents)
# compute etags here as documents might have been updated by callbacks.
resolve_document_etag(documents, resource)
# bulk insert
ids = app.data.insert(resource, documents)
# update oplog if needed
oplog_push(resource, documents, 'POST')
# assign document ids
for document in documents:
# either return the custom ID_FIELD or the id returned by
# data.insert().
document[resource_def['id_field']] = \
document.get(resource_def['id_field'], ids.pop(0))
# build the full response document
result = document
build_response_document(
result, resource, embedded_fields, document)
# add extra write meta data
result[config.STATUS] = config.STATUS_OK
# limit what actually gets sent to minimize bandwidth usage
result = marshal_write_response(result, resource)
results.append(result)
# insert versioning docs
insert_versioning_documents(resource, documents)
# notify callbacks
getattr(app, "on_inserted")(resource, documents)
getattr(app, "on_inserted_%s" % resource)(documents)
# request was received and accepted; at least one document passed
# validation and was accepted for insertion.
return_code = 201
if len(results) == 1:
response = results.pop(0)
else:
response = {
config.STATUS: config.STATUS_ERR if failures else config.STATUS_OK,
config.ITEMS: results,
}
if failures:
response[config.ERROR] = {
"code": return_code,
"message": "Insertion failure: %d document(s) contain(s) error(s)"
% failures,
}
    # strip everything except '_id' from the response (LHearen customization)
for key in response.keys():
if key != "_id":
del response[key]
return response, None, None, return_code
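# --- Hedged usage sketch (editor's addition, not part of the original module) -
# post_internal() bypasses auth and rate limiting but still requires a Flask
# request context for payload parsing and link building. The resource name
# 'people' and the document fields below are assumptions for illustration:
#
#   from eve import Eve
#   from eve.methods.post import post_internal
#
#   app = Eve()  # expects a settings.py that defines the 'people' domain
#   with app.test_request_context():
#       response, _, _, status = post_internal(
#           'people', {'firstname': 'John', 'lastname': 'Doe'})
#       # 201 on success; config.VALIDATION_ERROR_STATUS (422) on bad payload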
| Hearen/OnceServer | Server/Eve/post.py | Python | mit | 12,002 | 0.000167 |
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils.translation import ugettext
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from models import InvitationError, Invitation, InvitationStats
from forms import InvitationForm, RegistrationFormInvitation
from registration.signals import user_registered
def apply_extra_context(context, extra_context=None):
if extra_context is None:
extra_context = {}
for key, value in extra_context.items():
context[key] = callable(value) and value() or value
return context
@login_required
def invite(request, success_url=None,
form_class=InvitationForm,
template_name='invitation/invitation_form.html',
extra_context=None):
"""
Create an invitation and send invitation email.
Send invitation email and then redirect to success URL if the
    invitation form is valid. Redirect to the named URL ``invitation_unavailable``
on InvitationError. Render invitation form template otherwise.
**Required arguments:**
None.
**Optional arguments:**
:success_url:
The URL to redirect to on successful registration. Default value is
``None``, ``invitation_complete`` will be resolved in this case.
:form_class:
A form class to use for invitation. Takes ``request.user`` as first
argument to its constructor. Must have an ``email`` field. Custom
validation can be implemented here.
:template_name:
A custom template to use. Default value is
``invitation/invitation_form.html``.
:extra_context:
A dictionary of variables to add to the template context. Any
callable object in this dictionary will be called to produce
the end result which appears in the context.
**Template:**
``invitation/invitation_form.html`` or ``template_name`` keyword
argument.
**Context:**
A ``RequestContext`` instance is used rendering the template. Context,
in addition to ``extra_context``, contains:
:form:
The invitation form.
"""
if request.method == 'POST':
form = form_class(request.POST, request.FILES)
if form.is_valid():
try:
invitation = Invitation.objects.invite(
request.user, form.cleaned_data["email"], form.cleaned_data["message"])
            except InvitationError:
return HttpResponseRedirect(reverse('invitation_unavailable'))
invitation.send_email(request=request)
if 'next' in request.REQUEST:
return HttpResponseRedirect(request.REQUEST['next'])
return HttpResponseRedirect(success_url or reverse('invitation_complete'))
else:
form = form_class()
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(template_name,
{'form': form},
context_instance=context)
def register(request,
invitation_key,
wrong_key_template='invitation/wrong_invitation_key.html',
redirect_to_if_authenticated='/',
success_url=None,
form_class=RegistrationFormInvitation,
template_name='registration/registration_form.html',
extra_context=None):
"""
Allow a new user to register via invitation.
    Create the user and redirect to the success URL if the registration
    form is valid. Render the wrong-key template on an invalid invitation
    key, and the registration form template otherwise. Sends
    registration.signals.user_registered after creating the user.
**Required arguments:**
:invitation_key:
        An invitation key in the form of ``[\da-f]{40}``
**Optional arguments:**
:wrong_key_template:
Template to be used when an invalid invitation key is supplied.
Default value is ``invitation/wrong_invitation_key.html``.
:redirect_to_if_authenticated:
        URL to be redirected to when an authenticated user calls this view.
        Default value is ``/``.
:success_url:
The URL to redirect to on successful registration. Default value is
``None``, ``invitation_registered`` will be resolved in this case.
:form_class:
A form class to use for registration. Takes the invited email as first
argument to its constructor.
:template_name:
A custom template to use. Default value is
``registration/registration_form.html``.
:extra_context:
A dictionary of variables to add to the template context. Any
callable object in this dictionary will be called to produce
the end result which appears in the context.
**Templates:**
``invitation/invitation_form.html`` or ``template_name`` keyword
argument as the *main template*.
``invitation/wrong_invitation_key.html`` or ``wrong_key_template`` keyword
argument as the *wrong key template*.
**Context:**
    ``RequestContext`` instances are used when rendering both templates. Context,
in addition to ``extra_context``, contains:
For wrong key template
:invitation_key: supplied invitation key
For main template
:form:
The registration form.
"""
if request.user.is_authenticated():
return HttpResponseRedirect(redirect_to_if_authenticated)
try:
invitation = Invitation.objects.find(invitation_key)
except Invitation.DoesNotExist:
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(wrong_key_template,
{'invitation_key': invitation_key},
context_instance=context)
if request.method == 'POST':
form = form_class(invitation.email, request.POST, request.FILES)
if form.is_valid():
new_user = form.save()
invitation.mark_accepted(new_user)
user_registered.send(sender="invitation",
user=new_user,
request=request)
# return HttpResponseRedirect(success_url or reverse('invitation_registered'))
# return HttpResponseRedirect(success_url or reverse('profiles-profile-detail', kwargs={'slug':new_user.username}))
return HttpResponseRedirect(success_url or reverse('auth_login'))
else:
form = form_class(invitation.email)
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(template_name,
{'form': form},
context_instance=context)
@staff_member_required
def reward(request):
"""
    Add invitations to users with high invitation performance and redirect
    to the referring page.
"""
rewarded_users, invitations_given = InvitationStats.objects.reward()
if rewarded_users:
message = ugettext(u'%(users)s users are given a total of ' \
u'%(invitations)s invitations.') % {
'users': rewarded_users,
'invitations': invitations_given}
else:
message = ugettext(u'No user has performance above ' \
u'threshold, no invitations awarded.')
request.user.message_set.create(message=message)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
| hzlf/openbroadcast | website/apps/invitation/views.py | Python | gpl-3.0 | 7,841 | 0.000893 |
from haystack import indexes
from photolib.models import Photo
class PhotoIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
alt = indexes.CharField(model_attr='alt', indexed=False)
uuid = indexes.CharField(model_attr='uuid', indexed=False)
thumbnail_url = indexes.CharField(indexed=False, model_attr='image_thumbnail__url')
def get_model(self):
return Photo
def get_updated_field(self):
return 'last_updated'
def index_queryset(self, using=None):
return Photo.objects.visible()
| sunlightlabs/horseradish | photolib/search_indexes.py | Python | bsd-3-clause | 592 | 0.001689 |
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.osv import osv, fields
class users(osv.osv):
_name = 'res.users'
_inherit = 'res.users'
    _columns = {
        'journal_ids': fields.many2many(
            'account.journal', 'journal_security_journal_users',
            'user_id', 'journal_id', 'Restricted Journals',
            help="These journals, and the information related to them, will "
                 "only be visible to the users selected in this same field."),
} | HBEE/odoo-addons | account_journal_security/res_users.py | Python | agpl-3.0 | 741 | 0.008097 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
###############################################################################
#234567890123456789012345678901234567890123456789012345678901234567890123456789
#--------1---------2---------3---------4---------5---------6---------7---------
# ##### BEGIN COPYRIGHT BLOCK #####
#
# initial script copyright (c)2011-2013 Alexander Nussbaumer
#
# ##### END COPYRIGHT BLOCK #####
#import python stuff
from os import (
path
)
# import io_scene_ms3d stuff
from io_scene_ms3d.ms3d_strings import (
ms3d_str,
)
#import blender stuff
from bpy import (
        ops,
        )
# Matrix (used by rotation_matrix() below) lives in mathutils.
from mathutils import (
        Matrix,
        )
###############################################################################
def enable_edit_mode(enable, blender_context):
if blender_context.active_object is None \
or not blender_context.active_object.type in {'MESH', 'ARMATURE', }:
return
if enable:
modeString = 'EDIT'
else:
modeString = 'OBJECT'
if ops.object.mode_set.poll():
ops.object.mode_set(mode=modeString)
###############################################################################
def enable_pose_mode(enable, blender_context):
if blender_context.active_object is None \
or not blender_context.active_object.type in {'ARMATURE', }:
return
if enable:
modeString = 'POSE'
else:
modeString = 'OBJECT'
if ops.object.mode_set.poll():
ops.object.mode_set(mode=modeString)
###############################################################################
def select_all(select):
if select:
actionString = 'SELECT'
else:
actionString = 'DESELECT'
if ops.object.select_all.poll():
ops.object.select_all(action=actionString)
if ops.mesh.select_all.poll():
ops.mesh.select_all(action=actionString)
if ops.pose.select_all.poll():
ops.pose.select_all(action=actionString)
###############################################################################
def pre_setup_environment(porter, blender_context):
# inject undo to porter
# and turn off undo
porter.undo = blender_context.user_preferences.edit.use_global_undo
blender_context.user_preferences.edit.use_global_undo = False
# inject active_object to self
porter.active_object = blender_context.scene.objects.active
# change to a well defined mode
enable_edit_mode(True, blender_context)
# enable face-selection-mode
blender_context.tool_settings.mesh_select_mode = (False, False, True)
# change back to object mode
enable_edit_mode(False, blender_context)
###############################################################################
def post_setup_environment(porter, blender_context):
# restore active object
blender_context.scene.objects.active = porter.active_object
if not blender_context.scene.objects.active \
and blender_context.selected_objects:
blender_context.scene.objects.active \
= blender_context.selected_objects[0]
# restore pre operator undo state
blender_context.user_preferences.edit.use_global_undo = porter.undo
###############################################################################
def get_edge_split_modifier_add_if(blender_mesh_object):
blender_modifier = blender_mesh_object.modifiers.get(
ms3d_str['OBJECT_MODIFIER_SMOOTHING_GROUP'])
if blender_modifier is None:
blender_modifier = blender_mesh_object.modifiers.new(
ms3d_str['OBJECT_MODIFIER_SMOOTHING_GROUP'],
type='EDGE_SPLIT')
blender_modifier.show_expanded = False
blender_modifier.use_edge_angle = False
blender_modifier.use_edge_sharp = True
blender_mesh_object.data.show_edge_seams = True
blender_mesh_object.data.show_edge_sharp = True
return blender_modifier
###########################################################################
def rotation_matrix(v_track, v_up):
## rotation matrix from two vectors
## http://gamedev.stackexchange.com/questions/20097/how-to-calculate-a-3x3-rotation-matrix-from-2-direction-vectors
## http://www.fastgraph.com/makegames/3drotation/
matrix = Matrix().to_3x3()
c1 = v_track
c1.normalize()
c0 = c1.cross(v_up)
c0.normalize()
c2 = c0.cross(c1)
c2.normalize()
matrix.col[0] = c0
matrix.col[1] = c1
matrix.col[2] = c2
return matrix
###############################################################################
def matrix_difference(mat_src, mat_dst):
mat_dst_inv = mat_dst.inverted()
return mat_dst_inv * mat_src
###############################################################################
###############################################################################
#234567890123456789012345678901234567890123456789012345678901234567890123456789
#--------1---------2---------3---------4---------5---------6---------7---------
# ##### END OF FILE #####
| Microvellum/Fluid-Designer | win64-vc/2.78/scripts/addons/io_scene_ms3d/ms3d_utils.py | Python | gpl-3.0 | 5,817 | 0.002235 |
"""
Django-Datamaps provides helper functions compatible with datamaps.js.
"""
__version__ = '0.2.2'
| rochapps/django-datamaps | datamaps/__init__.py | Python | bsd-2-clause | 103 | 0 |
from build import evaluate_callables
class WhenEvaluatingADictWithNoCallables:
def when_i_evaluate_the_dict(self):
self.result = evaluate_callables({"abc": 123, "def": 456, "xyz": 789})
def it_should_return_the_same_dict(self):
assert self.result == {"abc": 123, "def": 456, "xyz": 789}
class WhenEvaluatingADictWithCallables:
def given_input_containing_lambdas(self):
self.input = {"abc": lambda: 123, "def": lambda: 456, "xyz": 789}
self.input_copy = self.input.copy()
def when_i_evaluate_the_dict(self):
self.result = evaluate_callables(self.input)
def it_should_return_the_dict_having_called_the_functions(self):
assert self.result == {"abc": 123, "def": 456, "xyz": 789}
def it_should_not_change_the_original_dict(self):
assert self.input == self.input_copy
class MyDict(dict):
def __eq__(self, other):
if not isinstance(other, MyDict):
return False
return super().__eq__(other)
def copy(self):
return MyDict({k: v for k, v in self.items()})
class WhenEvaluatingACustomDictWithNoCallables:
def when_i_evaluate_the_dict(self):
self.result = evaluate_callables(MyDict({"abc": 123, "def": 456, "xyz": 789}))
def it_should_return_an_instance_of_the_same_class(self):
assert self.result == MyDict({"abc": 123, "def": 456, "xyz": 789})
class WhenEvaluatingACustomDictWithCallables:
def given_input_containing_lambdas(self):
self.input = MyDict({"abc": lambda: 123, "def": lambda: 456, "xyz": 789})
self.input_copy = self.input.copy()
def when_i_evaluate_the_dict(self):
self.result = evaluate_callables(self.input)
def it_should_return_an_instance_of_the_same_class_having_called_the_functions(self):
assert self.result == MyDict({"abc": 123, "def": 456, "xyz": 789})
def it_should_not_change_the_original_dict(self):
assert self.input == self.input_copy
# todo: make it work for other sequences
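# For reference, a minimal sketch of the function under test, inferred from
# the behaviour these specs pin down (the real implementation lives in
# build.py): call each callable value, keep everything else, return a new
# mapping of the same type, and never mutate the input.
def _evaluate_callables_sketch(mapping):
    result = mapping.copy()
    for key, value in result.items():
        if callable(value):
            result[key] = value()
    return result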
| benjamin-hodgson/build | test/evaluate_callables_tests.py | Python | mit | 2,020 | 0.001485 |
"""
Default settings for the ``mezzanine.core`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
register_setting(
name="ADMIN_MENU_ORDER",
description=_("Controls the ordering and grouping of the admin menu."),
editable=False,
default=(
(_("Content"), ("pages.Page", "blog.BlogPost",
"generic.ThreadedComment", (_("Media Library"), "fb_browse"),)),
(_("Site"), ("sites.Site", "redirects.Redirect", "conf.Setting")),
(_("Users"), ("auth.User", "auth.Group",)),
),
)
register_setting(
name="ADMIN_REMOVAL",
description=_("Unregister these models from the admin."),
editable=False,
default=(),
)
register_setting(
name="ADMIN_THUMB_SIZE",
description=_("Size of thumbnail previews for image fields in the "
"admin interface."),
editable=False,
default="24x24",
)
register_setting(
    name="CACHE_SET_DELAY_SECONDS",
    description=_("Mezzanine's caching uses a technique known as mint "
        "caching. This is where the requested expiry for a cache entry "
        "is stored with the cache entry in cache, and the real expiry "
        "used has the ``CACHE_SET_DELAY`` added to it. Then on a cache get, "
        "the stored expiry is checked, and if it has passed, the cache entry "
        "is set again, and no entry is returned. This tries to ensure that "
        "cache misses never occur, and if many clients were to get a cache "
        "miss at once, only one would actually need to re-generate the "
        "cache entry."),
    editable=False,
    default=30,
)
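# Illustrative sketch of the mint-caching scheme described above. The names
# and the dict-based store are hypothetical -- Mezzanine's real implementation
# lives in its cache helpers, not in this settings module.
import time as _time
_mint_store = {}
def _mint_set(key, value, expiry, delay=30):
    # Keep the requested expiry with the value; a real backend would also
    # expire the entry itself at expiry + delay.
    _mint_store[key] = (value, _time.time() + expiry)
def _mint_get(key, delay=30):
    entry = _mint_store.get(key)
    if entry is None:
        return None
    value, stamp = entry
    if stamp < _time.time():
        # Stale: re-set the entry so concurrent readers still get a value,
        # and report a miss to this caller only, which then regenerates it.
        _mint_set(key, value, delay, delay)
        return None
    return value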
register_setting(
name="AKISMET_API_KEY",
label=_("Akismet API Key"),
description=_("Key for http://akismet.com spam filtering service. Used "
"for filtering comments and forms."),
editable=True,
default="",
)
if "mezzanine.blog" in settings.INSTALLED_APPS:
dashboard_tags = (
("blog_tags.quick_blog", "mezzanine_tags.app_list"),
("comment_tags.recent_comments",),
("mezzanine_tags.recent_actions",),
)
else:
dashboard_tags = (
("mezzanine_tags.app_list",),
("mezzanine_tags.recent_actions",),
(),
)
register_setting(
name="DASHBOARD_TAGS",
description=_("A three item sequence, each containing a sequence of "
"template tags used to render the admin dashboard."),
editable=False,
default=dashboard_tags,
)
register_setting(
name="DEVICE_DEFAULT",
description=_("Device specific template sub-directory to use as the "
"default device."),
editable=False,
default="",
)
register_setting(
name="DEVICE_USER_AGENTS",
description=_("Mapping of device specific template sub-directory names to "
"the sequence of strings that may be found in their user agents."),
editable=False,
default=(
("mobile", ("2.0 MMP", "240x320", "400X240", "AvantGo", "BlackBerry",
"Blazer", "Cellphone", "Danger", "DoCoMo", "Elaine/3.0",
"EudoraWeb", "Googlebot-Mobile", "hiptop", "IEMobile",
"KYOCERA/WX310K", "LG/U990", "MIDP-2.", "MMEF20", "MOT-V",
"NetFront", "Newt", "Nintendo Wii", "Nitro", "Nokia",
"Opera Mini", "Palm", "PlayStation Portable", "portalmmm",
"Proxinet", "ProxiNet", "SHARP-TQ-GX10", "SHG-i900",
"Small", "SonyEricsson", "Symbian OS", "SymbianOS",
"TS21i-10", "UP.Browser", "UP.Link", "webOS", "Windows CE",
"WinWAP", "YahooSeeker/M1A1-R2D2", "iPhone", "iPod", "Android",
"BlackBerry9530", "LG-TU915 Obigo", "LGE VX", "webOS",
"Nokia5800",)
),
),
)
register_setting(
name="FORMS_USE_HTML5",
description=_("If ``True``, website forms will use HTML5 features."),
editable=False,
default=False,
)
register_setting(
name="EXTRA_MODEL_FIELDS",
description=_("A sequence of fields that will be injected into "
"Mezzanine's (or any library's) models. Each item in the sequence is "
"a four item sequence. The first two items are the dotted path to the "
"model and its field name to be added, and the dotted path to the "
"field class to use for the field. The third and fourth items are a "
"sequence of positional args and a dictionary of keyword args, to use "
"when creating the field instance. When specifying the field class, "
"the path ``django.models.db.`` can be omitted for regular Django "
"model fields."),
editable=False,
default=(),
)
register_setting(
name="GOOGLE_ANALYTICS_ID",
label=_("Google Analytics ID"),
editable=True,
description=_("Google Analytics ID (http://www.google.com/analytics/)"),
default="",
)
register_setting(
name="HOST_THEMES",
description=_("A sequence mapping host names to themes, allowing "
"different templates to be served per HTTP hosts "
"Each item in the sequence is a two item sequence, "
"containing a host such as ``othersite.example.com``, and "
"the name of an importable Python package for the theme. "
"If the host is matched for a request, the templates "
"directory inside the theme package will be first searched "
"when loading templates."),
editable=False,
default=(),
)
register_setting(
name="JQUERY_FILENAME",
label=_("Name of the jQuery file."),
description=_("Name of the jQuery file found in "
"mezzanine/core/static/mezzanine/js/"),
editable=False,
default="jquery-1.7.1.min.js",
)
register_setting(
name="MAX_PAGING_LINKS",
label=_("Max paging links"),
description=_("Max number of paging links to display when paginating."),
editable=True,
default=10,
)
register_setting(
name="RICHTEXT_WIDGET_CLASS",
description=_("Dotted package path and class name of the widget to use "
"for the ``RichTextField``."),
editable=False,
default="mezzanine.core.forms.TinyMceWidget",
)
register_setting(
name="RICHTEXT_ALLOWED_TAGS",
description=_("List of HTML tags that won't be stripped from "
"``RichTextField`` instances."),
editable=False,
default=("a", "abbr", "acronym", "address", "area", "b", "bdo", "big",
"blockquote", "br", "button", "caption", "center", "cite", "code",
"col", "colgroup", "dd", "del", "dfn", "dir", "div", "dl", "dt",
"em", "fieldset", "font", "form", "h1", "h2", "h3", "h4", "h5",
"h6", "hr", "i", "img", "input", "ins", "kbd", "label", "legend",
"li", "map", "menu", "ol", "optgroup", "option", "p", "pre", "q",
"s", "samp", "select", "small", "span", "strike", "strong", "sub",
"sup", "table", "tbody", "td", "textarea", "tfoot", "th", "thead",
"tr", "tt", "u", "ul", "var", "wbr"),
)
register_setting(
name="RICHTEXT_ALLOWED_ATTRIBUTES",
description=_("List of HTML attributes that won't be stripped from "
"``RichTextField`` instances."),
editable=False,
default=("abbr", "accept", "accept-charset", "accesskey", "action",
"align", "alt", "axis", "border", "cellpadding", "cellspacing",
"char", "charoff", "charset", "checked", "cite", "class", "clear",
"cols", "colspan", "color", "compact", "coords", "datetime", "dir",
"disabled", "enctype", "for", "frame", "headers", "height", "href",
"hreflang", "hspace", "id", "ismap", "label", "lang", "longdesc",
"maxlength", "media", "method", "multiple", "name", "nohref",
"noshade", "nowrap", "prompt", "readonly", "rel", "rev", "rows",
"rowspan", "rules", "scope", "selected", "shape", "size", "span",
"src", "start", "style", "summary", "tabindex", "target", "title",
"type", "usemap", "valign", "value", "vspace", "width", "xml:lang"),
)
register_setting(
name="RICHTEXT_ALLOWED_STYLES",
description=_("List of inline CSS styles that won't be stripped from "
"``RichTextField`` instances."),
editable=False,
default=(),
)
register_setting(
name="RICHTEXT_FILTER",
description=_("Dotted path to the function to call on a ``RichTextField`` "
"value before it is rendered to the template."),
editable=False,
default=None,
)
RICHTEXT_FILTER_LEVEL_HIGH = 1
RICHTEXT_FILTER_LEVEL_LOW = 2
RICHTEXT_FILTER_LEVEL_NONE = 3
RICHTEXT_FILTER_LEVELS = (
(RICHTEXT_FILTER_LEVEL_HIGH, _("High")),
(RICHTEXT_FILTER_LEVEL_LOW, _("Low (allows video, iframe, Flash, etc)")),
(RICHTEXT_FILTER_LEVEL_NONE, _("No filtering")),
)
register_setting(
name="RICHTEXT_FILTER_LEVEL",
label=_("Rich Text filter level"),
description=_("*Do not change this setting unless you know what you're "
"doing.*\n\nWhen content is saved in a Rich Text (WYSIWYG) field, "
"unsafe HTML tags and attributes are stripped from the content to "
"protect against staff members intentionally adding code that could "
"be used to cause problems, such as changing their account to "
"a super-user with full access to the system.\n\n"
"This setting allows you to change the level of filtering that "
"occurs. Setting it to low will allow certain extra tags to be "
"permitted, such as those required for embedding video. While these "
"tags are not the main candidates for users adding malicious code, "
"they are still considered dangerous and could potentially be "
"mis-used by a particularly technical user, and so are filtered out "
"when the filtering level is set to high.\n\n"
"Setting the filtering level to no filtering, will disable all "
"filtering, and allow any code to be entered by staff members, "
"including script tags."),
editable=True,
choices=RICHTEXT_FILTER_LEVELS,
default=RICHTEXT_FILTER_LEVEL_HIGH,
)
register_setting(
name="SEARCH_PER_PAGE",
label=_("Search results per page"),
description=_("Number of results shown in the search results page."),
editable=True,
default=10,
)
register_setting(
name="SITE_PREFIX",
description=_("A URL prefix for mounting all of Mezzanine's urlpatterns "
"under. When using this, you'll also need to manually apply it to "
"your project's root ``urls.py`` module. The root ``urls.py`` module "
"provided by Mezzanine's ``mezzanine-project`` command contains an "
"example of this towards its end."),
editable=False,
default="",
)
register_setting(
name="SITE_TITLE",
label=_("Site Title"),
description=_("Title that will display at the top of the site, and be "
"appended to the content of the HTML title tags on every page."),
editable=True,
default="Mezzanine",
)
register_setting(
name="SITE_TAGLINE",
label=_("Tagline"),
description=_("A tag line that will appear at the top of all pages."),
editable=True,
default=_("An open source content management platform."),
)
register_setting(
name="SLUGIFY",
description=_("Dotted Python path to the callable for converting "
"strings into URL slugs. Defaults to "
"``mezzanine.utils.urls.slugify_unicode`` which allows for non-ascii "
"URLS. Change to ``django.template.defaultfilters.slugify`` to use "
"Django's slugify function, or something of your own if required."),
editable=False,
default="mezzanine.utils.urls.slugify_unicode",
)
register_setting(
name="SSL_ENABLED",
label=_("Enable SSL"),
description=_("If ``True``, users will be automatically redirected to "
"HTTPS for the URLs specified by the ``SSL_FORCE_URL_PREFIXES`` "
"setting."),
editable=True,
default=False,
)
register_setting(
name="SSL_FORCE_HOST",
label=_("Force Host"),
description=_("Host name that the site should always be accessed via that "
"matches the SSL certificate."),
editable=True,
default="",
)
register_setting(
name="SSL_FORCE_URL_PREFIXES",
description="Sequence of URL prefixes that will be forced to run over "
"SSL when ``SSL_ENABLED`` is ``True``. i.e. "
"('/admin', '/example') would force all URLs beginning with "
"/admin or /example to run over SSL.",
editable=False,
default=("/admin", "/account"),
)
register_setting(
name="STOP_WORDS",
description=_("List of words which will be stripped from search queries."),
editable=False,
default=(
"a", "about", "above", "above", "across", "after",
"afterwards", "again", "against", "all", "almost", "alone",
"along", "already", "also", "although", "always", "am",
"among", "amongst", "amoungst", "amount", "an", "and",
"another", "any", "anyhow", "anyone", "anything", "anyway",
"anywhere", "are", "around", "as", "at", "back", "be",
"became", "because", "become", "becomes", "becoming", "been",
"before", "beforehand", "behind", "being", "below", "beside",
"besides", "between", "beyond", "bill", "both", "bottom",
"but", "by", "call", "can", "cannot", "cant", "co", "con",
"could", "couldnt", "cry", "de", "describe", "detail", "do",
"done", "down", "due", "during", "each", "eg", "eight",
"either", "eleven", "else", "elsewhere", "empty", "enough",
"etc", "even", "ever", "every", "everyone", "everything",
"everywhere", "except", "few", "fifteen", "fify", "fill",
"find", "fire", "first", "five", "for", "former", "formerly",
"forty", "found", "four", "from", "front", "full", "further",
"get", "give", "go", "had", "has", "hasnt", "have", "he",
"hence", "her", "here", "hereafter", "hereby", "herein",
"hereupon", "hers", "herself", "him", "himself", "his",
"how", "however", "hundred", "ie", "if", "in", "inc",
"indeed", "interest", "into", "is", "it", "its", "itself",
"keep", "last", "latter", "latterly", "least", "less", "ltd",
"made", "many", "may", "me", "meanwhile", "might", "mill",
"mine", "more", "moreover", "most", "mostly", "move", "much",
"must", "my", "myself", "name", "namely", "neither", "never",
"nevertheless", "next", "nine", "no", "nobody", "none",
"noone", "nor", "not", "nothing", "now", "nowhere", "of",
"off", "often", "on", "once", "one", "only", "onto", "or",
"other", "others", "otherwise", "our", "ours", "ourselves",
"out", "over", "own", "part", "per", "perhaps", "please",
"put", "rather", "re", "same", "see", "seem", "seemed",
"seeming", "seems", "serious", "several", "she", "should",
"show", "side", "since", "sincere", "six", "sixty", "so",
"some", "somehow", "someone", "something", "sometime",
"sometimes", "somewhere", "still", "such", "system", "take",
"ten", "than", "that", "the", "their", "them", "themselves",
"then", "thence", "there", "thereafter", "thereby",
"therefore", "therein", "thereupon", "these", "they",
"thickv", "thin", "third", "this", "those", "though",
"three", "through", "throughout", "thru", "thus", "to",
"together", "too", "top", "toward", "towards", "twelve",
"twenty", "two", "un", "under", "until", "up", "upon", "us",
"very", "via", "was", "we", "well", "were", "what", "whatever",
"when", "whence", "whenever", "where", "whereafter", "whereas",
"whereby", "wherein", "whereupon", "wherever", "whether",
"which", "while", "whither", "who", "whoever", "whole", "whom",
"whose", "why", "will", "with", "within", "without", "would",
"yet", "you", "your", "yours", "yourself", "yourselves", "the",
),
)
register_setting(
name="TAG_CLOUD_SIZES",
label=_("Tag Cloud Sizes"),
description=_("Number of different sizes for tags when shown as a cloud."),
editable=True,
default=4,
)
register_setting(
name="TEMPLATE_ACCESSIBLE_SETTINGS",
description=_("Sequence of setting names available within templates."),
editable=False,
default=(
"ACCOUNTS_VERIFICATION_REQUIRED", "ADMIN_MEDIA_PREFIX",
"BLOG_BITLY_USER", "BLOG_BITLY_KEY",
"COMMENTS_DISQUS_SHORTNAME", "COMMENTS_NUM_LATEST",
"COMMENTS_DISQUS_API_PUBLIC_KEY", "COMMENTS_DISQUS_API_SECRET_KEY",
"DEV_SERVER", "FORMS_USE_HTML5", "GRAPPELLI_INSTALLED",
"GOOGLE_ANALYTICS_ID", "JQUERY_FILENAME", "LOGIN_URL", "LOGOUT_URL",
"PAGES_MENU_SHOW_ALL", "SITE_TITLE", "SITE_TAGLINE", "RATINGS_MAX",
),
)
register_setting(
name="THUMBNAILS_DIR_NAME",
description=_("Directory name to store thumbnails in, that will be "
"created relative to the original image's directory."),
editable=False,
default=".thumbnails",
)
register_setting(
name="TINYMCE_SETUP_JS",
description=_("URL for the JavaScript file (relative to ``STATIC_URL``) "
"that handles configuring TinyMCE when the default "
"``RICHTEXT_WIDGET_CLASS`` is used."),
editable=False,
default="mezzanine/js/tinymce_setup.js",
)
# The following settings are defined here for documentation purposes
# as this file is used to auto-generate the documentation for all
# available settings. They are Mezzanine specific, but their values
# are *always* overridden by the project's settings or local_settings
# modules, so the default values defined here will never be used.
register_setting(
name="USE_SOUTH",
description=_("If ``True``, the south application will be "
"automatically added to the ``INSTALLED_APPS`` setting."),
editable=False,
default=True,
)
| guibernardino/mezzanine | mezzanine/core/defaults.py | Python | bsd-2-clause | 18,350 | 0.004741 |
'''
Camera
======
Core class for acquiring the camera and converting its input into a
:class:`~kivy.graphics.texture.Texture`.
.. versionchanged:: 1.10.0
The pygst and videocapture providers have been removed.
.. versionchanged:: 1.8.0
There is now 2 distinct Gstreamer implementation: one using Gi/Gst
working for both Python 2+3 with Gstreamer 1.0, and one using PyGST
working only for Python 2 + Gstreamer 0.10.
'''
__all__ = ('CameraBase', 'Camera')
from kivy.utils import platform
from kivy.event import EventDispatcher
from kivy.logger import Logger
from kivy.core import core_select_lib
class CameraBase(EventDispatcher):
'''Abstract Camera Widget class.
Concrete camera classes must implement initialization and
frame capturing to a buffer that can be uploaded to the gpu.
:Parameters:
`index`: int
Source index of the camera.
`size`: tuple (int, int)
Size at which the image is drawn. If no size is specified,
it defaults to the resolution of the camera image.
`resolution`: tuple (int, int)
Resolution to try to request from the camera.
Used in the gstreamer pipeline by forcing the appsink caps
            to this resolution. If the camera doesn't support the resolution,
a negotiation error might be thrown.
:Events:
`on_load`
Fired when the camera is loaded and the texture has become
available.
`on_texture`
Fired each time the camera texture is updated.
'''
__events__ = ('on_load', 'on_texture')
def __init__(self, **kwargs):
kwargs.setdefault('stopped', False)
kwargs.setdefault('resolution', (640, 480))
kwargs.setdefault('index', 0)
self.stopped = kwargs.get('stopped')
self._resolution = kwargs.get('resolution')
self._index = kwargs.get('index')
self._buffer = None
self._format = 'rgb'
self._texture = None
self.capture_device = None
kwargs.setdefault('size', self._resolution)
super(CameraBase, self).__init__()
self.init_camera()
if not self.stopped:
self.start()
def _set_resolution(self, res):
self._resolution = res
self.init_camera()
def _get_resolution(self):
return self._resolution
resolution = property(lambda self: self._get_resolution(),
lambda self, x: self._set_resolution(x),
doc='Resolution of camera capture (width, height)')
def _set_index(self, x):
if x == self._index:
return
self._index = x
self.init_camera()
def _get_index(self):
        return self._index
index = property(lambda self: self._get_index(),
lambda self, x: self._set_index(x),
doc='Source index of the camera')
def _get_texture(self):
return self._texture
texture = property(lambda self: self._get_texture(),
doc='Return the camera texture with the latest capture')
def init_camera(self):
'''Initialise the camera (internal)'''
pass
def start(self):
'''Start the camera acquire'''
self.stopped = False
def stop(self):
'''Release the camera'''
self.stopped = True
def _update(self, dt):
'''Update the camera (internal)'''
pass
def _copy_to_gpu(self):
        '''Copy the buffer into the texture'''
if self._texture is None:
Logger.debug('Camera: copy_to_gpu() failed, _texture is None !')
return
self._texture.blit_buffer(self._buffer, colorfmt=self._format)
self._buffer = None
self.dispatch('on_texture')
def on_texture(self):
pass
def on_load(self):
pass
# Load the appropriate providers
providers = ()
if platform == 'macosx':
providers += (('avfoundation', 'camera_avfoundation',
'CameraAVFoundation'), )
elif platform == 'android':
providers += (('android', 'camera_android', 'CameraAndroid'), )
else:
providers += (('picamera', 'camera_picamera', 'CameraPiCamera'), )
providers += (('gi', 'camera_gi', 'CameraGi'), )
providers += (('opencv', 'camera_opencv', 'CameraOpenCV'), )
Camera = core_select_lib('camera', providers)
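def _example_usage():
    # Hypothetical usage sketch (not executed at import time): the
    # provider-selected Camera class above follows the CameraBase API.
    camera = Camera(index=0, resolution=(640, 480))
    # 'on_texture' fires each time a new frame has been copied to the GPU.
    camera.bind(on_texture=lambda cam: cam.texture)
    camera.start()
    return camera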
| LogicalDash/kivy | kivy/core/camera/__init__.py | Python | mit | 4,415 | 0 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
import os
import pickle
from GTG.core.dirs import plugin_configuration_dir
from GTG.tools.logger import Log
class PluginAPI(object):
"""The plugin engine's API.
L{PluginAPI} is a object that provides a nice API for
plugins to interact with GTG.
Multiple L{PluginAPI}s can exist. A instance is created to be used
with the task browser and another instance is created to be used
with the task editor.
"""
def __init__(self,
requester,
view_manager,
taskeditor=None):
"""
Construct a PluginAPI object.
@param requester: The requester.
@param view_manager: The view manager
@param task_id: The Editor, if we are in one
otherwise.
"""
self.__requester = requester
self.__view_manager = view_manager
self.selection_changed_callback_listeners = []
if taskeditor:
self.__ui = taskeditor
self.__builder = self.__ui.get_builder()
self.__toolbar = self.__builder.get_object('task_tb1')
self.__task_id = taskeditor.get_task()
else:
self.__ui = self.__view_manager.get_browser()
self.__builder = self.__ui.get_builder()
self.__toolbar = self.__builder.get_object('task_toolbar')
self.__task_id = None
self.__view_manager.browser.selection.connect(
"changed", self.__selection_changed)
self.taskwidget_id = 0
self.taskwidget_widg = []
def __selection_changed(self, selection):
for func in self.selection_changed_callback_listeners:
func(selection)
# Accessor methods ============================================================
def is_editor(self):
"""
Returns true if this is an Editor API
"""
return bool(self.__task_id)
def is_browser(self):
"""
Returns true if this is a Browser API
"""
return not self.is_editor()
def get_view_manager(self):
"""
returns a GTG.gtk.manager.Manager
"""
return self.__view_manager
def get_requester(self):
"""
returns a GTG.core.requester.Requester
"""
return self.__requester
def get_gtk_builder(self):
"""
Returns the gtk builder for the parent window
"""
return self.__builder
def get_ui(self):
'''
Returns a Browser or an Editor
'''
return self.__ui
def get_browser(self):
'''
Returns a Browser
'''
return self.__view_manager.get_browser()
def get_selected(self):
'''
Returns the selected tasks in the browser or the task ID if the editor
'''
if self.is_editor():
return self.__task_id
else:
return self.__view_manager.browser.get_selected_tasks()
def set_active_selection_changed_callback(self, func):
if func not in self.selection_changed_callback_listeners:
self.selection_changed_callback_listeners.append(func)
def remove_active_selection_changed_callback(self, plugin_class):
new_list = [func for func in self.selection_changed_callback_listeners
if func.__class__ != plugin_class]
self.selection_changed_callback_listeners = new_list
# Changing the UI ===========================================================
def add_menu_item(self, item):
"""Adds a menu entry to the Plugin Menu of the Main Window
(task browser).
@param item: The Gtk.MenuItem that is going to be added.
"""
widget = self.__builder.get_object('plugin_mi')
widget.get_submenu().append(item)
widget.show_all()
def remove_menu_item(self, item):
"""Removes a menu entry from the Plugin Menu of the Main Window
(task browser).
@param item: The Gtk.MenuItem that is going to be removed.
        @return: Returns C{True} if the operation succeeds or C{False} if it
        fails.
"""
menu = self.__builder.get_object('plugin_mi')
submenu = menu.get_submenu()
try:
submenu.remove(item)
except:
pass
if not submenu.get_children():
menu.hide()
def add_toolbar_item(self, widget):
"""Adds a button to the task browser's toolbar or the task editor
toolbar, depending on which plugin api it's being used.
@param widget: The Gtk.ToolButton that is going to be added to the
toolbar.
"""
# -1 means "append to the end"
self.__toolbar.insert(widget, -1)
def remove_toolbar_item(self, widget):
"""
Remove a widget from the toolbar.
"""
try:
self.__toolbar.remove(widget)
except Exception as e:
print("Error removing the toolbar item in the TaskEditor: %s" % e)
def add_widget_to_taskeditor(self, widget):
"""Adds a widget to the bottom of the task editor dialog
@param widget: The Gtk.Widget that is going to be added.
"""
vbox = self.__builder.get_object('vbox4')
if vbox:
vbox.pack_start(widget, True, True, 0)
vbox.reorder_child(widget, -2)
widget.show_all()
self.taskwidget_id += 1
self.taskwidget_widg.append(widget)
return self.taskwidget_id
else:
return None
def remove_widget_from_taskeditor(self, widg_id):
"""Remove a widget from the bottom of the task editor dialog
@param widget: The Gtk.Widget that is going to be removed
"""
if self.is_editor() and widg_id:
try:
wi = self.__builder.get_object('vbox4')
if wi and widg_id in self.taskwidget_widg:
wi.remove(self.taskwidget_widg.pop(widg_id))
except Exception as e:
Log.debug("Error removing the toolbar item in the TaskEditor:"
"%s" % e)
def set_bgcolor_func(self, func=None):
""" Set a function which defines a background color for each task
        NOTE: This function strongly depends on the browser and could easily
        be broken by changes in the browser code
"""
browser = self.get_browser()
# set default bgcolor?
if func is None:
func = browser.tv_factory.task_bg_color
for pane in browser.vtree_panes.values():
pane.set_bg_color(func, 'bg_color')
pane.basetree.get_basetree().refresh_all()
# file saving/loading =======================================================
def load_configuration_object(self, plugin_name, filename,
default_values=None):
if default_values is not None:
config = dict(default_values)
else:
config = dict()
dirname = plugin_configuration_dir(plugin_name)
path = os.path.join(dirname, filename)
try:
with open(path, 'rb') as file:
item = pickle.load(file)
config.update(item)
except:
pass
return config
def save_configuration_object(self, plugin_name, filename, item):
dirname = plugin_configuration_dir(plugin_name)
if not os.path.isdir(dirname):
os.makedirs(dirname)
path = os.path.join(dirname, filename)
with open(path, 'wb') as file:
pickle.dump(item, file)
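# Illustrative sketch of a plugin persisting its settings through the two
# helpers above (plugin name, filename and keys are hypothetical):
def _example_plugin_config_roundtrip(plugin_api):
    config = plugin_api.load_configuration_object(
        "example_plugin", "preferences", default_values={"enabled": True})
    config["enabled"] = False
    plugin_api.save_configuration_object("example_plugin", "preferences", config)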
| shtrom/gtg | GTG/core/plugins/api.py | Python | gpl-3.0 | 8,572 | 0.00035 |
# -*- coding: utf-8 -*-
class RomanianHelper(object):
@staticmethod
def englishize_romanian(string):
symbols = (u"țţȚŢșşȘŞăǎĂîÎâÂ",
u"ttTTssSSaaAiIaA")
tr = {ord(a):ord(b) for a, b in zip(*symbols)}
return string.translate(tr)
@staticmethod
def beautify_romanian(string):
symbols = (u"ǎţşŢŞ",
u"ățșȚȘ")
tr = {ord(a):ord(b) for a, b in zip(*symbols)}
return string.translate(tr)
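if __name__ == "__main__":
    # Illustrative round trips (input strings are hypothetical):
    assert RomanianHelper.englishize_romanian(u"București") == u"Bucuresti"
    assert RomanianHelper.beautify_romanian(u"Bucureşti") == u"București"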
| costibleotu/czl-scrape | sanatate/scrapy_proj/helpers/romanian.py | Python | mpl-2.0 | 509 | 0.004132 |
#
# The Python Imaging Library.
# $Id$
#
# PCX file handling
#
# This format was originally used by ZSoft's popular PaintBrush
# program for the IBM PC. It is also supported by many MS-DOS and
# Windows applications, including the Windows PaintBrush program in
# Windows 3.
#
# history:
# 1995-09-01 fl Created
# 1996-05-20 fl Fixed RGB support
# 1997-01-03 fl Fixed 2-bit and 4-bit support
# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1)
# 1999-02-07 fl Added write support
# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust
# 2002-07-30 fl Seek from to current position, not beginning of file
# 2003-06-03 fl Extract DPI settings (info["dpi"])
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1995-2003 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.6"
import Image, ImageFile, ImagePalette
def i16(c,o):
    # Read an unsigned 16-bit little-endian integer from byte offset o.
    return ord(c[o]) + (ord(c[o+1])<<8)
def _accept(prefix):
return ord(prefix[0]) == 10 and ord(prefix[1]) in [0, 2, 3, 5]
##
# Image plugin for Paintbrush images.
class PcxImageFile(ImageFile.ImageFile):
format = "PCX"
format_description = "Paintbrush"
def _open(self):
# header
s = self.fp.read(128)
if not _accept(s):
raise SyntaxError, "not a PCX file"
# image
bbox = i16(s,4), i16(s,6), i16(s,8)+1, i16(s,10)+1
if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]:
raise SyntaxError, "bad PCX image size"
# format
version = ord(s[1])
bits = ord(s[3])
planes = ord(s[65])
stride = i16(s,66)
self.info["dpi"] = i16(s,12), i16(s,14)
if bits == 1 and planes == 1:
mode = rawmode = "1"
elif bits == 1 and planes in (2, 4):
mode = "P"
rawmode = "P;%dL" % planes
self.palette = ImagePalette.raw("RGB", s[16:64])
elif version == 5 and bits == 8 and planes == 1:
mode = rawmode = "L"
# FIXME: hey, this doesn't work with the incremental loader !!!
self.fp.seek(-769, 2)
s = self.fp.read(769)
if len(s) == 769 and ord(s[0]) == 12:
# check if the palette is linear greyscale
for i in range(256):
if s[i*3+1:i*3+4] != chr(i)*3:
mode = rawmode = "P"
break
if mode == "P":
self.palette = ImagePalette.raw("RGB", s[1:])
self.fp.seek(128)
elif version == 5 and bits == 8 and planes == 3:
mode = "RGB"
rawmode = "RGB;L"
else:
raise IOError, "unknown PCX mode"
self.mode = mode
self.size = bbox[2]-bbox[0], bbox[3]-bbox[1]
bbox = (0, 0) + self.size
self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))]
# --------------------------------------------------------------------
# save PCX files
SAVE = {
# mode: (version, bits, planes, raw mode)
"1": (2, 1, 1, "1"),
"L": (5, 8, 1, "L"),
"P": (5, 8, 1, "P"),
"RGB": (5, 8, 3, "RGB;L"),
}
def o16(i):
    # Pack an unsigned 16-bit integer into two little-endian bytes.
    return chr(i&255) + chr(i>>8&255)
def _save(im, fp, filename, check=0):
try:
version, bits, planes, rawmode = SAVE[im.mode]
except KeyError:
raise ValueError, "Cannot save %s images as PCX" % im.mode
if check:
return check
# bytes per plane
stride = (im.size[0] * bits + 7) / 8
# under windows, we could determine the current screen size with
# "Image.core.display_mode()[1]", but I think that's overkill...
screen = im.size
dpi = 100, 100
# PCX header
fp.write(
chr(10) + chr(version) + chr(1) + chr(bits) + o16(0) +
o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) +
o16(dpi[1]) + chr(0)*24 + chr(255)*24 + chr(0) + chr(planes) +
o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) +
chr(0)*54
)
assert fp.tell() == 128
ImageFile._save(im, fp, [("pcx", (0,0)+im.size, 0,
(rawmode, bits*planes))])
if im.mode == "P":
# colour palette
fp.write(chr(12))
fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes
elif im.mode == "L":
# greyscale palette
fp.write(chr(12))
for i in range(256):
fp.write(chr(i)*3)
# --------------------------------------------------------------------
# registry
Image.register_open("PCX", PcxImageFile, _accept)
Image.register_save("PCX", _save)
Image.register_extension("PCX", ".pcx")
| robiame/AndroidGeodata | pil/PcxImagePlugin.py | Python | mit | 4,834 | 0.005379 |
# http://remotescripts.blogspot.com
"""
Track Control User Modes component originally designed for use with the APC40.
Copyright (C) 2010 Hanz Petrov <hanz.petrov@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import Live
from _Framework.ModeSelectorComponent import ModeSelectorComponent
from _Framework.ButtonElement import ButtonElement
from _Framework.DeviceComponent import DeviceComponent
class EncoderUserModesComponent(ModeSelectorComponent):
' SelectorComponent that assigns encoders to different user functions '
__module__ = __name__
def __init__(self, parent, encoder_modes, param_controls, bank_buttons, mixer, device, encoder_device_modes, encoder_eq_modes): #, mixer, sliders):
assert (len(bank_buttons) == 4)
ModeSelectorComponent.__init__(self)
self._parent = parent
self._encoder_modes = encoder_modes
self._param_controls = param_controls
self._bank_buttons = bank_buttons
self._mixer = mixer
self._device = device
self._encoder_device_modes = encoder_device_modes
self._encoder_eq_modes = encoder_eq_modes
self._mode_index = 0
self._modes_buttons = []
self._user_buttons = []
self._last_mode = 0
def disconnect(self):
ModeSelectorComponent.disconnect(self)
self._parent = None
self._encoder_modes = None
self._param_controls = None
self._bank_buttons = None
self._mixer = None
self._device = None
self._encoder_device_modes = None
self._encoder_eq_modes = None
self._modes_buttons = None
self._user_buttons = None
def on_enabled_changed(self):
pass
def set_mode(self, mode):
assert isinstance(mode, int)
assert (mode in range(self.number_of_modes()))
if (self._mode_index != mode):
self._last_mode = self._mode_index # keep track of previous mode, to allow conditional actions
self._mode_index = mode
self._set_modes()
def set_mode_buttons(self, buttons):
assert isinstance(buttons, (tuple,
type(None)))
for button in self._modes_buttons:
button.remove_value_listener(self._mode_value)
self._modes_buttons = []
if (buttons != None):
for button in buttons:
assert isinstance(button, ButtonElement)
identify_sender = True
button.add_value_listener(self._mode_value, identify_sender)
self._modes_buttons.append(button)
assert (self._mode_index in range(self.number_of_modes()))
def number_of_modes(self):
return 4
def update(self):
pass
def _mode_value(self, value, sender):
assert (len(self._modes_buttons) > 0)
assert isinstance(value, int)
assert isinstance(sender, ButtonElement)
assert (self._modes_buttons.count(sender) == 1)
        if ((value != 0) or (not sender.is_momentary())):
self.set_mode(self._modes_buttons.index(sender))
def _set_modes(self):
if self.is_enabled():
assert (self._mode_index in range(self.number_of_modes()))
for index in range(len(self._modes_buttons)):
if (index <= self._mode_index):
self._modes_buttons[index].turn_on()
else:
self._modes_buttons[index].turn_off()
for button in self._modes_buttons:
button.release_parameter()
button.use_default_message()
for control in self._param_controls:
control.release_parameter()
control.use_default_message()
#control.set_needs_takeover(False)
self._encoder_modes.set_enabled(False)
self._encoder_device_modes.set_lock_button(None)
self._encoder_device_modes._alt_device.set_bank_nav_buttons(None, None)
self._encoder_device_modes._alt_device.set_on_off_button(None)
if self._encoder_device_modes._alt_device._parameter_controls != None:
for control in self._encoder_device_modes._alt_device._parameter_controls:
control.release_parameter()
self._encoder_device_modes.set_enabled(False)
self._encoder_eq_modes.set_enabled(False)
self._encoder_eq_modes.set_lock_button(None)
if self._encoder_eq_modes._track_eq != None:
self._encoder_eq_modes._track_eq.set_cut_buttons(None)
if self._encoder_eq_modes._track_eq._gain_controls != None:
for control in self._encoder_eq_modes._track_eq._gain_controls:
control.release_parameter()
if self._encoder_eq_modes._strip != None:
self._encoder_eq_modes._strip.set_send_controls(None)
self._user_buttons = []
if (self._mode_index == 0):
self._encoder_modes.set_enabled(True)
elif (self._mode_index == 1):
self._encoder_device_modes.set_enabled(True)
self._encoder_device_modes.set_controls_and_buttons(self._param_controls, self._modes_buttons)
elif (self._mode_index == 2):
self._encoder_eq_modes.set_enabled(True)
self._encoder_eq_modes.set_controls_and_buttons(self._param_controls, self._modes_buttons)
elif (self._mode_index == 3):
self._encoder_eq_modes._ignore_buttons = True
if self._encoder_eq_modes._track_eq != None:
self._encoder_eq_modes._track_eq._ignore_cut_buttons = True
self._encoder_device_modes._ignore_buttons = True
for button in self._modes_buttons:
self._user_buttons.append(button)
for control in self._param_controls:
control.set_identifier((control.message_identifier() - 9))
control._ring_mode_button.send_value(0)
else:
pass
#self._rebuild_callback()
# local variables:
# tab-width: 4
| jim-cooley/abletonremotescripts | remote-scripts/samples/APC_64_40_r1b/APC_64_40/EncoderUserModesComponent.py | Python | apache-2.0 | 6,872 | 0.005675 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import uuid
import os.path
import socket
import optparse
import cgi
docvert_root = os.path.dirname(os.path.abspath(__file__))
inbuilt_bottle_path = os.path.join(docvert_root, 'lib/bottle')
try:
import bottle
if not hasattr(bottle, 'static_file'):
message = "Notice: Old version of Bottle at %s, instead using bundled version at %s%sbottle.py" % (bottle.__file__, inbuilt_bottle_path, os.sep)
print(message)
raise ImportError(message)
except ImportError as exception:
try:
sys.path.insert(0, inbuilt_bottle_path)
try:
reload(bottle)
except NameError:
import bottle
except ImportError:
sys.stderr.write("Error: Unable to find Bottle libraries in %s. Exiting...\n" % sys.path)
sys.exit(0)
import lib.bottlesession.bottlesession
bottle.debug(True)
import core.docvert
import core.docvert_storage
import core.docvert_exception
import core.document_type
# START DEFAULT CONFIG
theme='default'
host='localhost'
port=8080
# END CONFIG
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port", help="Port to run on", type="int")
parser.add_option("-H", "--host", dest="host", help="Hostname or IP run on", type="str")
(options, args) = parser.parse_args()
if options.port:
port = options.port
if options.host:
host = options.host
theme_directory='%s/core/web_service_themes' % docvert_root
bottle.TEMPLATE_PATH.append('%s/%s' % (theme_directory, theme))
# URL mappings
@bottle.route('/index', method='GET')
@bottle.route('/', method='GET')
@bottle.view('index')
def index():
return dict(list(core.docvert.get_all_pipelines(False).items()) + list({"libreOfficeStatus": core.docvert_libreoffice.checkLibreOfficeStatus()}.items()) )
@bottle.route('/static/:path#.*#', method='GET')
def static(path=''):
return bottle.static_file(path, root=theme_directory)
@bottle.route('/lib/:path#.*#', method='GET')
def libstatic(path=None):
return bottle.static_file(path, root='%s/lib' % docvert_root)
@bottle.route('/web-service.php', method='POST') #for legacy Docvert support
@bottle.route('/web-service', method='POST')
@bottle.view('web-service')
def webservice():
files = dict()
first_document_id = None
there_was_at_least_one_thing_uploaded = False
for key, item in bottle.request.files.items():
there_was_at_least_one_thing_uploaded = True
items = bottle.request.files.getall(key)
for field_storage in items:
filename = field_storage.filename
unique = 1
if filename in files and files[filename].getvalue() == field_storage.value: #remove same file uploaded multiple times
continue
while filename in files:
filename = field_storage.filename + str(unique)
unique += 1
files[filename] = io.BytesIO(field_storage.value)
pipeline_id = bottle.request.POST.get('pipeline')
if pipeline_id.startswith('autopipeline:'): #Docvert 4.x
pipeline_id = pipeline_id[len('autopipeline:'):]
auto_pipeline_id = None
if bottle.request.POST.get('break_up_pages_ui_version'):
if bottle.request.POST.get('break_up_pages'):
auto_pipeline_id = bottle.request.POST.get('autopipeline')
if auto_pipeline_id is None:
pipelines = list(core.docvert.get_all_pipelines().items())
for pipelinetype_key, pipelinetype_value in pipelines:
if pipelinetype_key == "auto_pipelines":
for pipeline in pipelinetype_value:
if "nothing" in pipeline["id"].lower():
auto_pipeline_id = pipeline["id"]
else:
auto_pipeline_id = bottle.request.POST.get('autopipeline')
docvert_4_default = '.default'
if auto_pipeline_id and auto_pipeline_id.endswith(docvert_4_default):
auto_pipeline_id = auto_pipeline_id[0:-len(docvert_4_default)]
after_conversion = bottle.request.POST.get('afterconversion')
urls = bottle.request.POST.getall('upload_web[]')
if len(urls) == 1 and urls[0] == '':
urls = list()
else:
urls = set(urls)
response = None
if there_was_at_least_one_thing_uploaded is False: #while we could have counted len(files) or len(urls) the logic around those is more complex, and I don't want to show this error unless there was genuinely no files uploaded
bottle.response.content_type = "text/html"
return '<!DOCTYPE html><html><body><h1>Error: No files were uploaded</h1><p>Known issues that can cause this:</p><ul><li>Permissions problem on the server or browser: Try ensuring that your upload file has all read permissions set.</li><li>Chrome/Chromium can sometimes cause file upload problems (some combination of Chrome/Bottle, it\'s not a Docvert-specific bug). Sorry, but Firefox seems to work.</li></ul><hr><a href="/">Try again?</a></body></html>'
try:
response = core.docvert.process_conversion(files, urls, pipeline_id, 'pipelines', auto_pipeline_id, suppress_errors=True)
except core.docvert_exception.debug_exception as exception:
bottle.response.content_type = exception.content_type
return exception.data
conversion_id = "%s" % uuid.uuid4()
if after_conversion == "downloadZip" or after_conversion == "zip":
bottle.response.content_type = 'application/zip'
bottle.response.headers['Content-Disposition'] = 'attachment; filename="%s.zip"' % response.get_zip_name()
return response.to_zip().getvalue()
pipeline_summary = "%s (%s)" % (pipeline_id, auto_pipeline_id)
session_manager = lib.bottlesession.bottlesession.PickleSession()
session = session_manager.get_session()
session[conversion_id] = response
conversions_tabs = dict()
first_document_url = "conversions/%s/%s/" % (conversion_id, response.default_document)
for filename in list(files.keys()):
thumbnail_path = "%s/thumbnail.png" % filename
if thumbnail_path in response:
thumbnail_path = None
conversions_tabs[filename] = dict(friendly_name=response.get_friendly_name_if_available(filename), pipeline=pipeline_id, auto_pipeline=auto_pipeline_id, thumbnail_path=thumbnail_path)
try:
session_manager.save(session)
except OSError as e:
import traceback
traceback.print_exc(file=sys.stdout)
conversions_tabs = {'Session file problem': dict(friendly_name='Session file problem', pipeline=None, auto_pipeline=None, thumbnail_path=None) }
first_document_url = "/bottle_session_file_problem"
return dict(conversions=conversions_tabs, conversion_id=conversion_id, first_document_url=first_document_url)
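# Hypothetical client for the endpoint above. The handler accepts uploads
# under any multipart field name, so 'upload_file[]' is an assumption; the
# pipeline value must match one announced on the index page. Uses the
# third-party 'requests' package, hence kept as a comment:
#
#     import requests
#     with open("document.odt", "rb") as f:
#         reply = requests.post("http://localhost:8080/web-service",
#                               files={"upload_file[]": f},
#                               data={"pipeline": "web standard",
#                                     "afterconversion": "zip"})
#     with open("output.zip", "wb") as out:
#         out.write(reply.content)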
@bottle.route('/favicon.ico', method='GET')
def favicon():
return bottle.static_file('favicon.ico', root='%s/%s' % (theme_directory, theme))
@bottle.route('/bottle_session_file_problem', method='GET')
def bottle_session_file_problem():
print('%s/lib/bottle' % docvert_root)
return bottle.static_file('bottle_session_file_problem.html', root='%s/lib/bottle' % docvert_root)
@bottle.route('/conversions/:conversion_id/:path#.*#')
def conversion_static_file(conversion_id, path):
session_manager = lib.bottlesession.bottlesession.PickleSession()
session = session_manager.get_session()
if conversion_id not in session: # They don't have authorisation
raise bottle.HTTPError(code=404)
filetypes = {".xml":"text/xml", ".html":"text/html", ".xhtml":"text/html", ".htm":"text/html", ".svg":"image/svg+xml", ".txt":"text/plain", ".png":"image/png", ".gif":"image/gif", ".bmp":"image/x-ms-bmp", ".jpg":"image/jpeg", ".jpe":"image/jpeg", ".jpeg":"image/jpeg", ".css":"text/css", ".js":"text/javascript", ".odt":"application/vnd.oasis.opendocument.text", ".odp":"application/vnd.oasis.opendocument.presentation", ".ods":"application/vnd.oasis.opendocument.spreadsheet", ".dbk":"application/docbook+xml"}
if path not in session[conversion_id]: # They have authorisation but that exact path doesn't exist, try fallbacks
fallbacks = ["index.html", "index.htm", "index.xml", "index.php", "default.htm", "default.html", "index.asp", "default.aspx", "index.aspx", "default.aspx", "index.txt", "index.odt", "default.odt", "index.dbk", "default.dbk"]
valid_fallback_path = None
separator = "/"
if path.endswith("/"):
separator = ""
for fallback in fallbacks:
fallback_path = path+separator+fallback
if fallback_path in session[conversion_id]:
valid_fallback_path = fallback_path
break
if valid_fallback_path is None:
raise bottle.HTTPError(code=404)
path = valid_fallback_path
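    # Illustrative note (not part of the original code): a trailing-slash
    # request such as conversions/<id>/mydoc/ resolves to the first existing
    # entry among mydoc/index.html, mydoc/index.htm, ... so directory-style
    # URLs behave like an index lookup over the in-session conversion results.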
extension = os.path.splitext(path)[1]
if extension == ".odt":
bottle.response.content_type = filetypes[".html"]
link_html = 'click here to download %s' % cgi.escape(os.path.basename(path))
thumbnail_path = "%s/thumbnail.png" % path[0:path.rfind("/")]
if thumbnail_path in session[conversion_id]:
link_html = '<img src="thumbnail.png"><br>' + link_html
return '<!DOCTYPE html><html><head><title>%s</title><style type="text/css">body{font-family:sans-serif;font-size:small} a{text-decoration:none} p{text-align:center} img{clear:both;border: solid 1px #cccccc}</style></head><body><p><a href="%s">%s</a></p></body></html>' % (
cgi.escape(path),
cgi.escape(os.path.basename(path)),
link_html
)
if extension in filetypes:
bottle.response.content_type = filetypes[extension]
else:
bottle.response.content_type = "text/plain"
return session[conversion_id][path]
@bottle.route('/conversions-zip/:conversion_id')
def conversion_zip(conversion_id):
session_manager = lib.bottlesession.bottlesession.PickleSession()
session = session_manager.get_session()
if conversion_id not in session: # They don't have authorisation
raise bottle.HTTPError(code=404)
bottle.response.content_type = 'application/zip'
bottle.response.headers['Content-Disposition'] = 'attachment; filename="%s.zip"' % session[conversion_id].get_zip_name()
return session[conversion_id].to_zip().getvalue()
@bottle.route('/libreoffice-status', method='GET')
def libreoffice_status():
return bottle.json_dumps( {"libreoffice-status":core.docvert_libreoffice.checkLibreOfficeStatus()} )
@bottle.route('/tests', method='GET')
@bottle.view('tests')
def tests():
return core.docvert.get_all_pipelines()
@bottle.route('/web-service/tests/:test_id', method='GET')
def web_service_tests(test_id):
suppress_error = bottle.request.GET.get('suppress_error') == "true"
storage = core.docvert_storage.storage_memory_based()
error_message = None
if suppress_error:
try:
core.docvert.process_pipeline(None, test_id, "tests", None, storage)
except Exception as exception:
bottle.response.content_type = "text/plain"
class_name = "%s" % type(exception).__name__
return bottle.json_dumps([{"status":"fail", "message": "Unable to run tests due to exception. <%s> %s" % (class_name, exception)}])
else:
try:
core.docvert.process_pipeline(None, test_id, "tests", None, storage)
except (core.docvert_exception.debug_exception, core.docvert_exception.debug_xml_exception) as exception:
bottle.response.content_type = exception.content_type
return exception.data
return bottle.json_dumps(storage.tests)
@bottle.route('/tests/', method='GET')
def tests_wrongdir():
bottle.redirect('/tests')
@bottle.route('/3rdparty/sscdocapi')
def third_party_sscdocapi():
return bottle.static_file('sscdocapi.html', root='%s/core/3rd-party/' % docvert_root)
try:
bottle.run(host=host, port=port, quiet=False)
except socket.error as e:
if 'address already in use' in str(e).lower():
print('ERROR: %s:%i already in use.\nTry another port? Use command line parameter -H HOST or -p PORT to change it.' % (host, port))
else:
raise
| holloway/docvert-python3 | docvert-web.py | Python | gpl-3.0 | 12,330 | 0.007218 |
# © 2014-2015 Tecnativa S.L. - Jairo Llopis
# © 2016 Tecnativa S.L. - Vicent Cubells
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class ResPartner(models.Model):
_inherit = "res.partner"
department_id = fields.Many2one("res.partner.department", "Department")
class ResPartnerDepartment(models.Model):
_name = "res.partner.department"
_order = "parent_path"
_parent_order = "name"
_parent_store = True
_description = "Department"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(
"res.partner.department", "Parent department", ondelete="restrict"
)
child_ids = fields.One2many(
"res.partner.department", "parent_id", "Child departments"
)
parent_path = fields.Char(index=True)
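# Usage sketch (an assumption based on Odoo's standard parent_store behaviour,
# not on code in this module): with _parent_store enabled, parent_path holds
# the ancestor chain as "<root_id>/<child_id>/", so a whole subtree can be
# fetched with a domain like
#   [('parent_path', '=like', department.parent_path + '%')]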
| OCA/partner-contact | partner_contact_department/models/res_partner.py | Python | agpl-3.0 | 839 | 0 |
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from storageadmin.models import Pool
from system.osi import run_command
from fs.btrfs import mount_root
BTRFS = "/usr/sbin/btrfs"
def main():
for p in Pool.objects.all():
try:
print("Processing pool(%s)" % p.name)
mnt_pt = mount_root(p)
o, e, rc = run_command([BTRFS, "subvol", "list", mnt_pt])
subvol_ids = []
for l in o:
if re.match("ID ", l) is not None:
subvol_ids.append(l.split()[1])
o, e, rc = run_command([BTRFS, "qgroup", "show", mnt_pt], throw=False)
if rc != 0:
print("Quotas not enabled on pool(%s). Skipping it." % p.name)
continue
            qgroup_ids = []
            for l in o:
                if re.match("0/", l) is not None:
                    q = l.split()[0].split("/")[1]
                    if q == "5":
                        # 0/5 is the pool's top-level subvolume; never remove it.
                        continue
                    qgroup_ids.append(q)
for q in qgroup_ids:
if q not in subvol_ids:
print("qgroup %s not in use. deleting" % q)
run_command([BTRFS, "qgroup", "destroy", "0/%s" % q, mnt_pt])
else:
print("qgroup %s is in use. Moving on." % q)
print("Finished processing pool(%s)" % p.name)
except Exception as e:
print(
"Exception while qgroup-cleanup of Pool(%s): %s" % (p.name, e.__str__())
)
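# Illustrative sample of the "btrfs qgroup show" output parsed above (format
# assumed from btrfs-progs; column widths vary):
#
#   qgroupid  rfer      excl
#   --------  ----      ----
#   0/5       16.00KiB  16.00KiB   <- top-level subvolume qgroup, skipped
#   0/258     1.50GiB   4.00MiB    <- destroyed unless subvolume ID 258 exists
#
# Each "0/<id>" line is split on whitespace, then on "/" to extract <id>.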
if __name__ == "__main__":
main()
| phillxnet/rockstor-core | src/rockstor/scripts/qgroup_clean.py | Python | gpl-3.0 | 2,273 | 0.0022 |
#
# Copyright (c) 2013-2016 NORDUnet A/S
# Copyright (c) 2019 SUNET
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of the NORDUnet nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from typing import Optional
from pydantic import Field
from eduid_common.config.base import WorkerConfig
class AmConfig(WorkerConfig):
"""
Configuration for the attribute manager celery worker
"""
new_user_date: str = '2001-01-01'
action_plugins: list = Field(default_factory=lambda: ['tou'])
class MsgConfig(WorkerConfig):
"""
Configuration for the msg celery worker
"""
audit: bool = True
devel_mode: bool = False
mail_certfile: str = ''
mail_host: str = 'localhost'
mail_keyfile: str = ''
mail_password: str = ''
mail_port: int = 25
mail_starttls: bool = False
mail_username: str = ''
message_rate_limit: Optional[int] = None # for celery. tasks per second - None for no rate limit
# TODO: Purge everything about the MM API
mm_api_uri: str = 'MM-API-not-available'
mm_default_subject: str = 'MM-API-not-available'
mongo_dbname: str = 'eduid_msg'
navet_api_pw: str = ''
navet_api_uri: str = ''
navet_api_user: str = ''
navet_api_verify_ssl: bool = False
sms_acc: str = ''
sms_key: str = ''
sms_sender: str = 'eduID'
template_dir: str = ''
class MobConfig(WorkerConfig):
"""
Configuration for the lookup mobile celery worker
"""
devel_mode: bool = False
log_path: str = ''
teleadress_client_password: str = ''
teleadress_client_user: str = ''
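# Usage sketch (an assumption: WorkerConfig adds no further required fields;
# the attribute names come from the classes above):
#
#   config = MsgConfig(mail_host='smtp.example.org', mail_port=587)
#   config.audit        # -> True (default)
#   config.mail_port    # -> 587 (validated/coerced by pydantic)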
| SUNET/eduid-common | src/eduid_common/config/workers.py | Python | bsd-3-clause | 3,033 | 0.00033 |
"""
Models from Godley & Lavoie text.
[G&L 2012] "Monetary Economics: An Integrated Approach to Credit, Money, Income, Production
and Wealth; Second Edition", by Wynne Godley and Marc Lavoie, Palgrave Macmillan, 2012.
ISBN 978-0-230-30184-9
Copyright 2016 Brian Romanchuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sfc_models.models as models
class GL_book_model(object):
"""
Base class for example models from [G&L 2012] for single-country models.
Generates the sectors, either in a new model object, or an object that is passed in.
The user supplies a country code.
"""
def __init__(self, country_code, model=None, use_book_exogenous=True):
"""
Constructor for an example model. Builds a single country, using a code that is passed in.
If the user supplies an existing Model object, uses that. This allows us to embed in a multi-country model.
:param country_code: str
:param model: sfc_models.models.Model
:param use_book_exogenous: bool
"""
if model is None:
model = models.Model()
self.Model = model
self.Country = models.Country(model, country_code, country_code)
self.UseBookExogenous = use_book_exogenous
def build_model(self): # pragma: no cover This is a virtual base class
"""
Does the work of building the sectors within a country. Returns the Model object.
:return: sfc_models.models.Model
"""
return self.Model
def expected_output(self): # pragma: no cover -- Virtual base class.
"""
Returns a list of expected output. Used to validate the framework output.
Uses the default exogenous series.
Format:
A list of tuples, that consist of the variable name, and (limited) time series of output.
For example:
[
('GOOD_SUP_GOOD', [0., 10., 12.]),
('HH_AfterTax', [0., 15., 18., 22.]),
]
In this case, the variable 'GOOD_SUP_GOOD' is expected to be [0., 10., 12.] for the first 3 periods
and
        'HH_AfterTax' is expected to be [0., 15., 18., 22.] over the first 4 periods.
In other words, target outputs do not have to be the same length.
:return: list
"""
return []
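# Minimal subclassing sketch (hypothetical; the sector wiring is illustrative,
# only the country-code constructor comes from the class above):
#
#   class GL_book_SIM(GL_book_model):
#       def build_model(self):
#           # ...create sectors on self.Country and wire them together...
#           return self.Model
#
#   model = GL_book_SIM('CA').build_model()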
| brianr747/SFC_models | sfc_models/gl_book/__init__.py | Python | apache-2.0 | 2,795 | 0.003936 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-12 16:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MaintenanceMode',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('name', models.CharField(default=b'Maintenance Mode', max_length=255)),
('is_active', models.BooleanField(default=False)),
('message', models.TextField(blank=True, help_text=b'Message to Display on Page. When available, displayed instead of a template.')),
('template_name', models.CharField(default=b'content_pages/maintenance_message.html', help_text=b'Template with HTML snippet to display. Note: This field is ignored if a "message" is available.', max_length=255)),
('end_datetime', models.DateTimeField(blank=True, help_text=b'End datetime to display within the template. Important: Maintenance mode NEEDS TO BE TURNED OFF *manually*. This date/time is for display purposes only.', null=True)),
],
options={
'db_table': 'content_pages',
'verbose_name_plural': 'Maintenance Mode',
},
),
]
| IQSS/geoconnect | gc_apps/content_pages/migrations/0001_initial.py | Python | apache-2.0 | 1,770 | 0.003955 |
"""Replacement for ``django.template.loader`` that uses Jinja 2.
The module provides a generic way to load templates from an arbitrary
backend storage (e.g. filesystem, database).
"""
from coffin.template import Template as CoffinTemplate
from jinja2 import TemplateNotFound
def find_template_source(name, dirs=None):
# This is Django's most basic loading function through which
# all template retrievals go. Not sure if Jinja 2 publishes
    # an equivalent, but no matter, it's mostly for internal use
# anyway - developers will want to start with
# ``get_template()`` or ``get_template_from_string`` anyway.
raise NotImplementedError()
def get_template(template_name):
# Jinja will handle this for us, and env also initializes
# the loader backends the first time it is called.
from pyjade_coffin.common import env
return env.get_template(template_name)
def get_template_from_string(source):
"""
    Does not support the ``name`` and ``origin`` parameters from
the Django version.
"""
from pyjade_coffin.common import env
return env.from_string(source)
def render_to_string(template_name, dictionary=None, context_instance=None):
"""Loads the given ``template_name`` and renders it with the given
dictionary as context. The ``template_name`` may be a string to load
a single template using ``get_template``, or it may be a tuple to use
``select_template`` to find one of the templates in the list.
    ``dictionary`` may also be a Django ``Context`` object.
Returns a string.
"""
dictionary = dictionary or {}
if isinstance(template_name, (list, tuple)):
template = select_template(template_name)
else:
template = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = dictionary
return template.render(context_instance)
def select_template(template_name_list):
"Given a list of template names, returns the first that can be loaded."
for template_name in template_name_list:
try:
return get_template(template_name)
except TemplateNotFound:
continue
# If we get here, none of the templates could be loaded
raise TemplateNotFound(', '.join(template_name_list))
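# Usage sketch (template names are illustrative):
#
#   html = render_to_string(('article_detail.html', 'detail.html'),
#                           {'title': 'Hello'})
#
# A list/tuple of names goes through select_template(), which returns the
# first template Jinja can load; a plain string goes through get_template().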
| AbleCoder/pyjade_coffin | pyjade_coffin/template/loader.py | Python | mit | 2,320 | 0 |
#!/home/software/SACLA_tool/bin/python2.7
import numpy as np
import h5py
import matplotlib
import matplotlib.pyplot as plt
import argparse
import time
#import pandas as pd
import sys
from argparse import ArgumentParser
parser = ArgumentParser(description="Plot intense ice shots")
parser.add_argument("-run", "--run-number", type=int, dest="run", required=True,
help="run to process")
parser.add_argument("-exp", "--exp-year", type=int, dest="exp", default=2016,
help="experimental year to compress (default: 2016)")
parser.add_argument("-multi", "--multi-run", action="store_true", dest="multi", required=False, default=False,
help="process multi-file run converted using DataConvert4")
parser.add_argument("-tag", "--output-tag", type=str, dest="tag", default="run",
help="tag for output folder (default: run)")
parser.add_argument("-o", "--output-flag", type=str, dest="outputFlag",
help="where to process runs. 'W' refers to /work/perakis/ and 'UD' refers to '/UserData/fperakis' (default: UD)",
choices=['W','UD'], default='UD')
args = parser.parse_args()
# -- default parameters
file_folder = '/UserData/fperakis/2016_6/%s%d/'%(args.tag,args.run) # h5 files folder
src_folder = '/home/fperakis/2016_06/git/SACLA2016A8015/src/' # src files folder
adu_gain = 75.0 # adu/photon @ 5 keV
# -- files and folders
file_name = '%d.h5'%(args.run)
file_path = file_folder+file_name
sys.path.insert(0, src_folder)
from img_class import *
# -- import data
fh5 = h5py.File(file_path, 'r')
run_key = [ k for k in fh5.keys() if k.startswith('run_') ][0]
tags = fh5['/%s/detector_2d_assembled_1'%run_key].keys()[1:]
# -- image generator
img_gen = ( fh5['%s/detector_2d_assembled_1/%s/detector_data'%(run_key,tag) ].value for tag in tags )
num_im = len(tags)
mean_int = np.zeros(num_im)
# -- average image
im_avg = img_gen.next()
mean_int[0] = np.average(im_avg.flatten())
i=1
for im_next in img_gen:
t1 = time.time()
im_avg += im_next
mean_int[i] = np.average(im_next.flatten())
print 'R.%d | M.%.1f ADU | S.%d/%d | %.1f Hz'%(args.run,mean_int[i],i,num_im,1.0/(time.time() - t1))
i += 1
im_avg /= num_im
# -- save dark
np.save(file_folder+'%d_dark.npy'%args.run, im_avg)
# -- run mean
total_mean = np.average(im_avg.flatten())
# -- mean hist
hist_bins = np.arange(np.floor(mean_int.min()), np.ceil(mean_int.max()) + 2, 2) - 1
hist, hist_bins = np.histogram(mean_int, bins=hist_bins)
hist_bins_center = [(hist_bins[i] + hist_bins[i+1])/2.0 for i in range(len(hist_bins) - 1)]
# -- plot
#plt.figure()
#plt.plot(hist_bins_center, hist)
#plt.title('r.%d - mean intensity histogram'%args.run)
#plt.xlabel('mean intensity [ADU]')
#plt.ylabel('number of shots')
#plt.savefig(file_folder+'%d_hist.png'%args.run)
# -- plot
title = 'r.%d - average %d dark shots'%(args.run,num_im)
i = img_class(im_avg, title)
plt.savefig(file_folder+'%d_dark.png'%args.run)
i.draw_img()
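# Downstream usage sketch (an assumed workflow, not part of this script): the
# saved dark frame is typically subtracted from signal shots and converted to
# photon counts with the gain defined above:
#
#   dark = np.load(file_folder + '%d_dark.npy' % args.run)
#   photons = (im - dark) / adu_gain   # adu_gain = 75 ADU/photon @ 5 keV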
| sellberg/SACLA2016A8015 | scripts/09_make_dark.py | Python | bsd-2-clause | 3,025 | 0.018182 |
# -*- coding: utf-8 -*-
import sys
import os
# Import the common config file
# Note that paths in the common config are interpreted as if they were
# in the location of this file
sys.path.insert(0, os.path.abspath('../../_common'))
from common_conf import *
# Override the common config
html_short_title_toc = manuals_dict["faqs"]
html_short_title = u'CDAP %s' % html_short_title_toc
html_context = {"html_short_title_toc":html_short_title_toc}
# Remove this guide from the mapping as it will fail as it has been deleted by clean
intersphinx_mapping.pop("faqs", None)
html_theme = 'cdap-faqs'
| chtyim/cdap | cdap-docs/faqs/source/conf.py | Python | apache-2.0 | 603 | 0.008292 |
# -*- coding: utf-8 -*-
__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
'''
Transform OEB content into RB compatible markup.
'''
import re
from calibre import prepare_string_for_xml
from calibre.ebooks.rb import unique_name
TAGS = [
'b',
'big',
'blockquote',
'br',
'center',
'code',
'div',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'hr',
'i',
'li',
'ol',
'p',
'pre',
'small',
'sub',
'sup',
'ul',
]
LINK_TAGS = [
'a',
]
IMAGE_TAGS = [
'img',
]
STYLES = [
('font-weight', {'bold' : 'b', 'bolder' : 'b'}),
('font-style', {'italic' : 'i'}),
('text-align', {'center' : 'center'}),
]
class RBMLizer(object):
def __init__(self, log, name_map={}):
self.log = log
self.name_map = name_map
self.link_hrefs = {}
def extract_content(self, oeb_book, opts):
self.log.info('Converting XHTML to RB markup...')
self.oeb_book = oeb_book
self.opts = opts
return self.mlize_spine()
def mlize_spine(self):
self.link_hrefs = {}
output = [u'<HTML><HEAD><TITLE></TITLE></HEAD><BODY>']
output.append(self.get_cover_page())
output.append(u'ghji87yhjko0Caliblre-toc-placeholder-for-insertion-later8ujko0987yjk')
output.append(self.get_text())
output.append(u'</BODY></HTML>')
output = ''.join(output).replace(u'ghji87yhjko0Caliblre-toc-placeholder-for-insertion-later8ujko0987yjk', self.get_toc())
output = self.clean_text(output)
return output
def get_cover_page(self):
from calibre.ebooks.oeb.stylizer import Stylizer
from calibre.ebooks.oeb.base import XHTML
output = u''
if 'cover' in self.oeb_book.guide:
if self.name_map.get(self.oeb_book.guide['cover'].href, None):
output += '<IMG SRC="%s">' % self.name_map[self.oeb_book.guide['cover'].href]
if 'titlepage' in self.oeb_book.guide:
self.log.debug('Generating cover page...')
href = self.oeb_book.guide['titlepage'].href
item = self.oeb_book.manifest.hrefs[href]
if item.spine_position is None:
stylizer = Stylizer(item.data, item.href, self.oeb_book,
self.opts, self.opts.output_profile)
output += ''.join(self.dump_text(item.data.find(XHTML('body')), stylizer, item))
return output
def get_toc(self):
toc = [u'']
if self.opts.inline_toc:
self.log.debug('Generating table of contents...')
toc.append(u'<H1>%s</H1><UL>\n' % _('Table of Contents:'))
for item in self.oeb_book.toc:
if item.href in self.link_hrefs.keys():
toc.append('<LI><A HREF="#%s">%s</A></LI>\n' % (self.link_hrefs[item.href], item.title))
else:
                    self.log.warn('Ignoring toc item: %s not found in document.' % item)
toc.append('</UL>')
return ''.join(toc)
def get_text(self):
from calibre.ebooks.oeb.stylizer import Stylizer
from calibre.ebooks.oeb.base import XHTML
output = [u'']
for item in self.oeb_book.spine:
self.log.debug('Converting %s to RocketBook HTML...' % item.href)
stylizer = Stylizer(item.data, item.href, self.oeb_book, self.opts, self.opts.output_profile)
output.append(self.add_page_anchor(item))
output += self.dump_text(item.data.find(XHTML('body')), stylizer, item)
return ''.join(output)
def add_page_anchor(self, page):
return self.get_anchor(page, '')
def get_anchor(self, page, aid):
aid = '%s#%s' % (page.href, aid)
if aid not in self.link_hrefs.keys():
self.link_hrefs[aid] = 'calibre_link-%s' % len(self.link_hrefs.keys())
aid = self.link_hrefs[aid]
return u'<A NAME="%s"></A>' % aid
def clean_text(self, text):
# Remove anchors that do not have links
anchors = set(re.findall(r'(?<=<A NAME=").+?(?="></A>)', text))
links = set(re.findall(r'(?<=<A HREF="#).+?(?=">)', text))
for unused in anchors.difference(links):
text = text.replace('<A NAME="%s"></A>' % unused, '')
return text
def dump_text(self, elem, stylizer, page, tag_stack=[]):
from calibre.ebooks.oeb.base import XHTML_NS, barename, namespace
if not isinstance(elem.tag, basestring) \
or namespace(elem.tag) != XHTML_NS:
return [u'']
text = [u'']
style = stylizer.style(elem)
if style['display'] in ('none', 'oeb-page-head', 'oeb-page-foot') \
or style['visibility'] == 'hidden':
return [u'']
tag = barename(elem.tag)
tag_count = 0
# Process tags that need special processing and that do not have inner
# text. Usually these require an argument
if tag in IMAGE_TAGS:
if elem.attrib.get('src', None):
if page.abshref(elem.attrib['src']) not in self.name_map.keys():
self.name_map[page.abshref(elem.attrib['src'])] = unique_name('%s' % len(self.name_map.keys()), self.name_map.keys())
text.append('<IMG SRC="%s">' % self.name_map[page.abshref(elem.attrib['src'])])
rb_tag = tag.upper() if tag in TAGS else None
if rb_tag:
tag_count += 1
text.append('<%s>' % rb_tag)
tag_stack.append(rb_tag)
# Anchors links
if tag in LINK_TAGS:
href = elem.get('href')
if href:
href = page.abshref(href)
if '://' not in href:
if '#' not in href:
href += '#'
if href not in self.link_hrefs.keys():
self.link_hrefs[href] = 'calibre_link-%s' % len(self.link_hrefs.keys())
href = self.link_hrefs[href]
text.append('<A HREF="#%s">' % href)
tag_count += 1
tag_stack.append('A')
# Anchor ids
id_name = elem.get('id')
if id_name:
text.append(self.get_anchor(page, id_name))
# Processes style information
for s in STYLES:
style_tag = s[1].get(style[s[0]], None)
if style_tag:
style_tag = style_tag.upper()
tag_count += 1
text.append('<%s>' % style_tag)
tag_stack.append(style_tag)
        # Process tags that contain text.
if hasattr(elem, 'text') and elem.text:
text.append(prepare_string_for_xml(elem.text))
for item in elem:
text += self.dump_text(item, stylizer, page, tag_stack)
close_tag_list = []
for i in range(0, tag_count):
close_tag_list.insert(0, tag_stack.pop())
text += self.close_tags(close_tag_list)
if hasattr(elem, 'tail') and elem.tail:
text.append(prepare_string_for_xml(elem.tail))
return text
def close_tags(self, tags):
text = [u'']
for i in range(0, len(tags)):
tag = tags.pop()
text.append('</%s>' % tag)
return text
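# Usage sketch (assumed plugin context; RBMLizer's API is as defined above):
#
#   rbmlizer = RBMLizer(log, name_map={})
#   rb_markup = rbmlizer.extract_content(oeb_book, opts)
#
# name_map fills up with generated image names as dump_text() encounters
# <img> tags, so callers can write the referenced images out afterwards.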
| Eksmo/calibre | src/calibre/ebooks/rb/rbml.py | Python | gpl-3.0 | 7,383 | 0.002844 |
# coding: utf-8
import os
import sys
import time
import tihldelib.userlib as lib
__author__ = 'Harald Floor Wilhelmsen'
def get_useramount():
formatstring = 'Format: python lan_users.py useramount'
# Checking if there are sufficient arguments, if not exit
if len(sys.argv) != 2:
sys.exit('Invaild number of arguments. ' + formatstring)
user_amount = sys.argv[1].strip()
if not user_amount.isdigit():
sys.exit('Wrong number-format. ' + formatstring)
    return int(user_amount), int(input('Start id of user ids: '))
def create_lan_users():
user_amount, start_id = get_useramount()
response = str(input(str(user_amount) + ' users to add. Continue? [y/N]'))
if response.replace('\n', '').strip() != 'y':
return 'User called exit before adding users'
api = lib.get_ipa_api()
username_format = 'lan-{}'
credentials_file_path = '/root/lan_users{}.txt'.format(time.time())
with open(credentials_file_path, 'a') as credentials_file:
for i in range(start_id, start_id + user_amount):
username = username_format.format(i)
user_info = lib.add_user_ipa(username=username, firstname='Lan', lastname='Lanesen', groupid=1002,
homedir_base='/home/lan/', api=api)
credentials_file.write('Brukernavn: {0}\nPassord: {1}\n\n'.format(username, user_info[1]))
def main():
euid = os.geteuid()
if euid != 0:
print('Needs to be run as root. Re-run with sudo')
return
msg = create_lan_users()
if msg:
print(msg)
return
if __name__ == '__main__':
    main()
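# Example invocation (illustrative):
#
#   sudo python lan_users_ipa.py 50
#
# prompts for a starting id, creates users lan-<start> .. lan-<start+49> via
# FreeIPA, and writes their credentials to /root/lan_users<timestamp>.txt.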
| tihlde/TIHLDEscripts | userscripts/lan_users_ipa.py | Python | apache-2.0 | 1,614 | 0.001239 |
"""
Support for the myStrom buttons.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.mystrom/
"""
import asyncio
import logging
from homeassistant.components.binary_sensor import (BinarySensorDevice, DOMAIN)
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import HTTP_UNPROCESSABLE_ENTITY
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up myStrom Binary Sensor."""
hass.http.register_view(MyStromView(async_add_devices))
return True
class MyStromView(HomeAssistantView):
"""View to handle requests from myStrom buttons."""
url = '/api/mystrom'
name = 'api:mystrom'
def __init__(self, add_devices):
"""Initialize the myStrom URL endpoint."""
self.buttons = {}
self.add_devices = add_devices
@asyncio.coroutine
def get(self, request):
"""The GET request received from a myStrom button."""
res = yield from self._handle(request.app['hass'], request.query)
return res
@asyncio.coroutine
def _handle(self, hass, data):
"""Handle requests to the myStrom endpoint."""
button_action = list(data.keys())[0]
button_id = data[button_action]
entity_id = '{}.{}_{}'.format(DOMAIN, button_id, button_action)
if button_action not in ['single', 'double', 'long', 'touch']:
_LOGGER.error(
"Received unidentified message from myStrom button: %s", data)
return ("Received unidentified message: {}".format(data),
HTTP_UNPROCESSABLE_ENTITY)
if entity_id not in self.buttons:
_LOGGER.info("New myStrom button/action detected: %s/%s",
button_id, button_action)
self.buttons[entity_id] = MyStromBinarySensor(
'{}_{}'.format(button_id, button_action))
hass.async_add_job(self.add_devices, [self.buttons[entity_id]])
else:
            new_state = self.buttons[entity_id].state == 'off'
self.buttons[entity_id].async_on_update(new_state)
class MyStromBinarySensor(BinarySensorDevice):
"""Representation of a myStrom button."""
def __init__(self, button_id):
"""Initialize the myStrom Binary sensor."""
self._button_id = button_id
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._button_id
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
def async_on_update(self, value):
"""Receive an update."""
self._state = value
self.hass.async_add_job(self.async_update_ha_state())
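# Request format handled by MyStromView (inferred from the handler above;
# the button id value is illustrative):
#
#   GET /api/mystrom?single=<button-id>
#
# creates binary_sensor.<button-id>_single the first time it is seen; each
# further request flips the sensor between on and off.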
| ct-23/home-assistant | homeassistant/components/binary_sensor/mystrom.py | Python | apache-2.0 | 3,028 | 0 |
import gettext
import socket
import sys
import logging
_ = lambda x: gettext.ldgettext("rhsm", x)
import gtk
gtk.gdk.threads_init()
import rhsm
sys.path.append("/usr/share/rhsm")
# enable logging for firstboot
from subscription_manager import logutil
logutil.init_logger()
log = logging.getLogger("rhsm-app." + __name__)
# neuter linkify in firstboot
from subscription_manager.gui.utils import running_as_firstboot
running_as_firstboot()
from subscription_manager.injectioninit import init_dep_injection
init_dep_injection()
from subscription_manager.injection import PLUGIN_MANAGER, IDENTITY, require
from subscription_manager.facts import Facts
from subscription_manager.hwprobe import Hardware
from subscription_manager.gui.firstboot_base import RhsmFirstbootModule
from subscription_manager.gui import managergui
from subscription_manager.gui import registergui
from subscription_manager.gui.utils import handle_gui_exception
from subscription_manager.gui.autobind import \
ServiceLevelNotSupportedException, NoProductsException, \
AllProductsCoveredException
from subscription_manager import managerlib
from rhsm.connection import RestlibException
from rhsm.utils import remove_scheme
sys.path.append("/usr/share/rhn")
rhn_config = None
try:
from up2date_client import config as rhn_config
except ImportError:
log.debug("no rhn-client-tools modules could be imported")
MANUALLY_SUBSCRIBE_PAGE = 11
class SelectSLAScreen(registergui.SelectSLAScreen):
"""
override the default SelectSLAScreen to jump to the manual subscribe page.
"""
def _on_get_service_levels_cb(self, result, error=None):
if error is not None:
if isinstance(error[1], ServiceLevelNotSupportedException):
message = _("Unable to auto-attach, server does not support "
"service levels. Please run 'Subscription Manager' "
"to manually attach a subscription.")
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
elif isinstance(error[1], NoProductsException):
message = _("No installed products on system. No need to "
"update subscriptions at this time.")
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
elif isinstance(error[1], AllProductsCoveredException):
message = _("All installed products are fully subscribed.")
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
else:
handle_gui_exception(error, _("Error subscribing"),
self._parent.window)
self._parent.finish_registration(failed=True)
return
(current_sla, unentitled_products, sla_data_map) = result
self._parent.current_sla = current_sla
if len(sla_data_map) == 1:
# If system already had a service level, we can hit this point
# when we cannot fix any unentitled products:
if current_sla is not None and \
not self._can_add_more_subs(current_sla, sla_data_map):
message = _("Unable to attach any additional subscriptions at "
"current service level: %s") % current_sla
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
return
self._dry_run_result = sla_data_map.values()[0]
self._parent.pre_done(registergui.CONFIRM_SUBS_PAGE)
elif len(sla_data_map) > 1:
self._sla_data_map = sla_data_map
self.set_model(unentitled_products, sla_data_map)
self._parent.pre_done(registergui.DONT_CHANGE)
else:
message = _("No service levels will cover all installed products. "
"Please run 'Subscription Manager' to manually "
"attach subscriptions.")
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
class PerformRegisterScreen(registergui.PerformRegisterScreen):
def _on_registration_finished_cb(self, new_account, error=None):
if error is not None:
handle_gui_exception(error, registergui.REGISTER_ERROR,
self._parent.window)
self._parent.finish_registration(failed=True)
return
try:
managerlib.persist_consumer_cert(new_account)
self._parent.backend.cs.force_cert_check() # Ensure there isn't much wait time
if self._parent.activation_keys:
self._parent.pre_done(registergui.REFRESH_SUBSCRIPTIONS_PAGE)
elif self._parent.skip_auto_bind:
message = _("You have opted to skip auto-attach.")
self._parent.manual_message = message
self._parent.pre_done(MANUALLY_SUBSCRIBE_PAGE)
else:
self._parent.pre_done(registergui.SELECT_SLA_PAGE)
# If we get errors related to consumer name on register,
# go back to the credentials screen where we set the
# consumer name. See bz#865954
except RestlibException, e:
handle_gui_exception(e, registergui.REGISTER_ERROR,
self._parent.window)
if e.code == 404 and self._parent.activation_keys:
self._parent.pre_done(registergui.ACTIVATION_KEY_PAGE)
if e.code == 400:
self._parent.pre_done(registergui.CREDENTIALS_PAGE)
except Exception, e:
handle_gui_exception(e, registergui.REGISTER_ERROR,
self._parent.window)
self._parent.finish_registration(failed=True)
def pre(self):
# TODO: this looks like it needs updating now that we run
# firstboot without rhn client tools.
# Because the RHN client tools check if certs exist and bypass our
# firstboot module if so, we know that if we reach this point and
# identity certs exist, someone must have hit the back button.
# TODO: i'd like this call to be inside the async progress stuff,
# since it does take some time
identity = require(IDENTITY)
if identity.is_valid():
try:
managerlib.unregister(self._parent.backend.cp_provider.get_consumer_auth_cp(),
self._parent.identity.uuid)
except socket.error, e:
handle_gui_exception(e, e, self._parent.window)
self._parent._registration_finished = False
return registergui.PerformRegisterScreen.pre(self)
class ManuallySubscribeScreen(registergui.Screen):
widget_names = registergui.Screen.widget_names + ['title']
gui_file = "manually_subscribe"
def __init__(self, parent, backend):
super(ManuallySubscribeScreen, self).__init__(parent, backend)
self.button_label = _("Finish")
def apply(self):
return registergui.FINISH
def pre(self):
if self._parent.manual_message:
self.title.set_label(self._parent.manual_message)
# XXX set message here.
return False
class moduleClass(RhsmFirstbootModule, registergui.RegisterScreen):
def __init__(self):
"""
Create a new firstboot Module for the 'register' screen.
"""
RhsmFirstbootModule.__init__(self, # Firstboot module title
# Note: translated title needs to be unique across all
# firstboot modules, not just the rhsm ones. See bz #828042
_("Subscription Management Registration"),
_("Subscription Registration"),
200.1, 109.10)
backend = managergui.Backend()
self.plugin_manager = require(PLUGIN_MANAGER)
registergui.RegisterScreen.__init__(self, backend, Facts())
#insert our new screens
screen = SelectSLAScreen(self, backend)
screen.index = self._screens[registergui.SELECT_SLA_PAGE].index
self._screens[registergui.SELECT_SLA_PAGE] = screen
self.register_notebook.remove_page(screen.index)
self.register_notebook.insert_page(screen.container,
position=screen.index)
screen = PerformRegisterScreen(self, backend)
self._screens[registergui.PERFORM_REGISTER_PAGE] = screen
screen = ManuallySubscribeScreen(self, backend)
self._screens.append(screen)
screen.index = self.register_notebook.append_page(screen.container)
# Will be False if we are on an older RHEL version where
# rhn-client-tools already does some things so we don't have to.
self.standalone = True
distribution = Hardware().get_distribution()
log.debug("Distribution: %s" % str(distribution))
try:
dist_version = float(distribution[1])
# We run this for Fedora as well, but all we really care about here
# is if this is prior to RHEL 7, so this comparison should be safe.
if dist_version < 7:
self.standalone = False
except Exception, e:
log.error("Unable to parse a distribution version.")
log.exception(e)
log.debug("Running standalone firstboot: %s" % self.standalone)
self.manual_message = None
self._skip_apply_for_page_jump = False
self._cached_credentials = None
self._registration_finished = False
self.interface = None
self.proxies_were_enabled_from_gui = None
self._apply_result = self._RESULT_FAILURE
def _get_initial_screen(self):
"""
Override parent method as in some cases, we use a different
starting screen.
"""
if self.standalone:
return registergui.INFO_PAGE
else:
return registergui.CHOOSE_SERVER_PAGE
def error_screen(self):
return self._get_initial_screen()
def _read_rhn_proxy_settings(self):
if not rhn_config:
return
# Read and store rhn-setup's proxy settings, as they have been set
# on the prior screen (which is owned by rhn-setup)
up2date_cfg = rhn_config.initUp2dateConfig()
cfg = rhsm.config.initConfig()
# Track if we have changed this in the gui proxy dialog, if
# we have changed it to disabled, then we apply "null", otherwise
# if the version off the fs was disabled, we ignore the up2date proxy settings.
#
# Don't do anything if proxies aren't enabled in rhn config.
if not up2date_cfg['enableProxy']:
if self.proxies_were_enabled_from_gui:
cfg.set('server', 'proxy_hostname', '')
cfg.set('server', 'proxy_port', '')
self.backend.cp_provider.set_connection_info()
return
# If we get here, we think we are enabling or updating proxy info
# based on changes from the gui proxy settings dialog, so take that
# to mean that enabledProxy=0 means to unset proxy info, not just to
# not override it.
self.proxies_were_enabled_from_gui = up2date_cfg['enableProxy']
proxy = up2date_cfg['httpProxy']
if proxy:
# Remove any URI scheme provided
proxy = remove_scheme(proxy)
try:
host, port = proxy.split(':')
# the rhn proxy value is unicode, assume we can
# cast to ascii ints
port = str(int(port))
cfg.set('server', 'proxy_hostname', host)
cfg.set('server', 'proxy_port', port)
except ValueError:
cfg.set('server', 'proxy_hostname', proxy)
cfg.set('server', 'proxy_port',
rhsm.config.DEFAULT_PROXY_PORT)
if up2date_cfg['enableProxyAuth']:
cfg.set('server', 'proxy_user', up2date_cfg['proxyUser'])
cfg.set('server', 'proxy_password',
up2date_cfg['proxyPassword'])
self.backend.cp_provider.set_connection_info()
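    # Illustrative example (values hypothetical): an up2date httpProxy of
    # "http://proxy.example.com:3128" is reduced by remove_scheme() to
    # "proxy.example.com:3128" and stored as proxy_hostname/proxy_port in
    # rhsm's config; a proxy value without a port falls back to
    # rhsm.config.DEFAULT_PROXY_PORT.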
def apply(self, interface, testing=False):
"""
'Next' button has been clicked - try to register with the
provided user credentials and return the appropriate result
value.
"""
# on el5 we can't just move to another page, we have to set the next
# page then do an apply. since we've already done our work async, skip
# this time through
if self._skip_apply_for_page_jump:
self._skip_apply_for_page_jump = False
# Reset back to first screen in our module in case the user hits back.
# The firstboot register screen subclass will handle unregistering
# if necessary when it runs again.
self.show()
return self._RESULT_SUCCESS
self.interface = interface
# bad proxy settings can cause socket.error or friends here
# see bz #810363
try:
valid_registration = self.register()
except socket.error, e:
handle_gui_exception(e, e, self.window)
return self._RESULT_FAILURE
# run main_iteration till we have no events, like idle
# loop sources, aka, the thread watchers are finished.
while gtk.events_pending():
gtk.main_iteration()
if valid_registration:
self._cached_credentials = self._get_credentials_hash()
# finish_registration/skip_remaining_screens should set
# __apply_result to RESULT_JUMP
return self._apply_result
def close_window(self):
"""
Overridden from RegisterScreen - we want to bypass the default behavior
of hiding the GTK window.
"""
pass
def emit_consumer_signal(self):
"""
        Overridden from RegisterScreen - we don't care about consumer update
signals.
"""
pass
def createScreen(self):
"""
Create a new instance of gtk.VBox, pulling in child widgets from the
glade file.
"""
self.vbox = gtk.VBox(spacing=10)
self.register_dialog = self.get_widget("dialog-vbox6")
self.register_dialog.reparent(self.vbox)
# Get rid of the 'register' and 'cancel' buttons, as we are going to
        # use the 'forward' and 'back' buttons provided by the firstboot module
# to drive the same functionality
self._destroy_widget('register_button')
self._destroy_widget('cancel_button')
# In firstboot, we leverage the RHN setup proxy settings already
# presented to the user, so hide the choose server screen's proxy
# text and button. But, if we are standalone, show our versions.
if not self.standalone:
screen = self._screens[registergui.CHOOSE_SERVER_PAGE]
screen.proxy_frame.destroy()
def initializeUI(self):
# Need to make sure that each time the UI is initialized we reset back
# to the main register screen.
# Note, even if we are standalone firstboot mode (no rhn modules),
# we may still have RHN installed, and possibly configured.
self._read_rhn_proxy_settings()
# NOTE: On EL5 this does not appear to be called when the user
# presses Back, only when they go through the first time.
self.show()
def focus(self):
"""
Focus the initial UI element on the page, in this case the
login name field.
"""
# FIXME: This is currently broken
# login_text = self.glade.get_widget("account_login")
# login_text.grab_focus()
def _destroy_widget(self, widget_name):
"""
Destroy a widget by name.
See gtk.Widget.destroy()
"""
widget = self.get_object(widget_name)
widget.destroy()
def _set_navigation_sensitive(self, sensitive):
# we are setting the firstboot next/back buttons
# insensitive here, instead of the register/cancel
# buttons this calls if shown in standalone gui.
# But, to get to those, we need a reference to the
# firstboot interface instance.
# In rhel6.4, we don't get a handle on interface, until we
# module.apply(). We call _set_navigation_sensitive from
# module.show() (to set these back if they have changed in
# the standalone gui flow), which is before apply(). So
# do nothing here if we haven't set a ref to self.interface
# yet. See bz#863572
# EL5:
if self._is_compat:
self.compat_parent.backButton.set_sensitive(sensitive)
self.compat_parent.nextButton.set_sensitive(sensitive)
# EL6:
else:
if self.interface is not None:
self.interface.backButton.set_sensitive(sensitive)
self.interface.nextButton.set_sensitive(sensitive)
def _get_credentials_hash(self):
"""
Return an internal hash representation of the text input
widgets. This is used to compare if we have changed anything
when moving back and forth across modules.
"""
return {"username": self.username,
"password": self.password,
"consumername": self.consumername,
}
def _get_text(self, widget_name):
"""
Return the text value of an input widget referenced
by name.
"""
widget = self.get_object(widget_name)
return widget.get_text()
def _set_register_label(self, screen):
"""
Overridden from registergui to disable changing the firstboot button
labels.
"""
pass
def finish_registration(self, failed=False):
log.info("Finishing registration, failed=%s" % failed)
if failed:
self._set_navigation_sensitive(True)
self._set_initial_screen()
else:
self._registration_finished = True
self._skip_remaining_screens(self.interface)
registergui.RegisterScreen.finish_registration(self, failed=failed)
def _skip_remaining_screens(self, interface):
"""
Find the first non-rhsm module after the rhsm modules, and move to it.
Assumes that there is only _one_ rhsm screen
"""
if self._is_compat:
# el5 is easy, we can just pretend the next button was clicked,
# and tell our own logic not to run for the button press.
self._skip_apply_for_page_jump = True
self.compat_parent.nextClicked()
else:
self._apply_result = self._RESULT_SUCCESS
return
# for el5
childWindow = moduleClass
| vritant/subscription-manager | src/subscription_manager/gui/firstboot/rhsm_login.py | Python | gpl-2.0 | 19,013 | 0.001788 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_vms
short_description: Module to manage Virtual Machines in oVirt/RHV
version_added: "2.2"
author:
- Ondra Machacek (@machacekondra)
description:
- This module manages whole lifecycle of the Virtual Machine(VM) in oVirt/RHV.
    - Since a VM can hold many states in oVirt/RHV, please check I(notes) to see how the states of the VM are handled.
options:
name:
description:
- Name of the Virtual Machine to manage.
            - If the VM doesn't exist, C(name) is required. Otherwise C(id) or C(name) can be used.
id:
description:
- ID of the Virtual Machine to manage.
state:
description:
- Should the Virtual Machine be running/stopped/present/absent/suspended/next_run/registered.
When C(state) is I(registered) and the unregistered VM's name
belongs to an already registered in engine VM in the same DC
then we fail to register the unregistered template.
- I(present) state will create/update VM and don't change its state if it already exists.
- I(running) state will create/update VM and start it.
- I(next_run) state updates the VM and if the VM has next run configuration it will be rebooted.
- Please check I(notes) to more detailed description of states.
- I(registered) is supported since 2.4.
choices: [ absent, next_run, present, registered, running, stopped, suspended ]
default: present
cluster:
description:
- Name of the cluster, where Virtual Machine should be created.
- Required if creating VM.
allow_partial_import:
description:
- Boolean indication whether to allow partial registration of Virtual Machine when C(state) is registered.
version_added: "2.4"
vnic_profile_mappings:
description:
- "Mapper which maps an external virtual NIC profile to one that exists in the engine when C(state) is registered.
vnic_profile is described by the following dictionary:"
- "C(source_network_name): The network name of the source network."
- "C(source_profile_name): The prfile name related to the source network."
- "C(target_profile_id): The id of the target profile id to be mapped to in the engine."
version_added: "2.5"
cluster_mappings:
description:
- "Mapper which maps cluster name between VM's OVF and the destination cluster this VM should be registered to,
relevant when C(state) is registered.
Cluster mapping is described by the following dictionary:"
- "C(source_name): The name of the source cluster."
- "C(dest_name): The name of the destination cluster."
version_added: "2.5"
role_mappings:
description:
- "Mapper which maps role name between VM's OVF and the destination role this VM should be registered to,
relevant when C(state) is registered.
Role mapping is described by the following dictionary:"
- "C(source_name): The name of the source role."
- "C(dest_name): The name of the destination role."
version_added: "2.5"
domain_mappings:
description:
- "Mapper which maps aaa domain name between VM's OVF and the destination aaa domain this VM should be registered to,
relevant when C(state) is registered.
The aaa domain mapping is described by the following dictionary:"
- "C(source_name): The name of the source aaa domain."
- "C(dest_name): The name of the destination aaa domain."
version_added: "2.5"
affinity_group_mappings:
description:
- "Mapper which maps affinty name between VM's OVF and the destination affinity this VM should be registered to,
relevant when C(state) is registered."
version_added: "2.5"
affinity_label_mappings:
description:
- "Mappper which maps affinity label name between VM's OVF and the destination label this VM should be registered to,
relevant when C(state) is registered."
version_added: "2.5"
lun_mappings:
description:
- "Mapper which maps lun between VM's OVF and the destination lun this VM should contain, relevant when C(state) is registered.
lun_mappings is described by the following dictionary:
- C(logical_unit_id): The logical unit number to identify a logical unit,
- C(logical_unit_port): The port being used to connect with the LUN disk.
- C(logical_unit_portal): The portal being used to connect with the LUN disk.
- C(logical_unit_address): The address of the block storage host.
- C(logical_unit_target): The iSCSI specification located on an iSCSI server
- C(logical_unit_username): Username to be used to connect to the block storage host.
- C(logical_unit_password): Password to be used to connect to the block storage host.
- C(storage_type): The storage type which the LUN reside on (iscsi or fcp)"
version_added: "2.5"
reassign_bad_macs:
description:
- "Boolean indication whether to reassign bad macs when C(state) is registered."
version_added: "2.5"
template:
description:
- Name of the template, which should be used to create Virtual Machine.
- Required if creating VM.
- If template is not specified and VM doesn't exist, VM will be created from I(Blank) template.
template_version:
description:
- Version number of the template to be used for VM.
- By default the latest available version of the template is used.
version_added: "2.3"
use_latest_template_version:
description:
- Specify if latest template version should be used, when running a stateless VM.
- If this parameter is set to I(yes) stateless VM is created.
type: bool
version_added: "2.3"
storage_domain:
description:
- Name of the storage domain where all template disks should be created.
- This parameter is considered only when C(template) is provided.
            - IMPORTANT - This parameter is not idempotent, if the VM exists and you specify a different storage domain,
disk won't move.
version_added: "2.4"
disk_format:
description:
- Specify format of the disk.
- If C(cow) format is used, disk will by created as sparse, so space will be allocated for the volume as needed, also known as I(thin provision).
- If C(raw) format is used, disk storage will be allocated right away, also known as I(preallocated).
- Note that this option isn't idempotent as it's not currently possible to change format of the disk via API.
- This parameter is considered only when C(template) and C(storage domain) is provided.
choices: [ cow, raw ]
default: cow
version_added: "2.4"
memory:
description:
- Amount of memory of the Virtual Machine. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB).
- Default value is set by engine.
memory_guaranteed:
description:
- Amount of minimal guaranteed memory of the Virtual Machine.
Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB).
- C(memory_guaranteed) parameter can't be lower than C(memory) parameter.
- Default value is set by engine.
memory_max:
description:
- Upper bound of virtual machine memory up to which memory hot-plug can be performed.
Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB).
- Default value is set by engine.
version_added: "2.5"
cpu_shares:
description:
- Set a CPU shares for this Virtual Machine.
- Default value is set by oVirt/RHV engine.
cpu_cores:
description:
            - Number of virtual CPU cores of the Virtual Machine.
- Default value is set by oVirt/RHV engine.
cpu_sockets:
description:
            - Number of virtual CPU sockets of the Virtual Machine.
- Default value is set by oVirt/RHV engine.
cpu_threads:
description:
            - Number of virtual CPU threads of the Virtual Machine.
- Default value is set by oVirt/RHV engine.
version_added: "2.5"
type:
description:
- Type of the Virtual Machine.
- Default value is set by oVirt/RHV engine.
- I(high_performance) is supported since Ansible 2.5 and oVirt/RHV 4.2.
choices: [ desktop, server, high_performance ]
quota_id:
description:
- "Virtual Machine quota ID to be used for disk. By default quota is chosen by oVirt/RHV engine."
version_added: "2.5"
operating_system:
description:
- Operating system of the Virtual Machine.
- Default value is set by oVirt/RHV engine.
choices:
- debian_7
- freebsd
- freebsdx64
- other
- other_linux
- other_linux_ppc64
- other_ppc64
- rhel_3
- rhel_4
- rhel_4x64
- rhel_5
- rhel_5x64
- rhel_6
- rhel_6x64
- rhel_6_ppc64
- rhel_7x64
- rhel_7_ppc64
- sles_11
- sles_11_ppc64
- ubuntu_12_04
- ubuntu_12_10
- ubuntu_13_04
- ubuntu_13_10
- ubuntu_14_04
- ubuntu_14_04_ppc64
- windows_10
- windows_10x64
- windows_2003
- windows_2003x64
- windows_2008
- windows_2008x64
- windows_2008r2x64
- windows_2008R2x64
- windows_2012x64
- windows_2012R2x64
- windows_7
- windows_7x64
- windows_8
- windows_8x64
- windows_xp
boot_devices:
description:
- List of boot devices which should be used to boot. For example C([ cdrom, hd ]).
- Default value is set by oVirt/RHV engine.
choices: [ cdrom, hd, network ]
boot_menu:
description:
- "I(True) enable menu to select boot device, I(False) to disable it. By default is chosen by oVirt/RHV engine."
version_added: "2.5"
usb_support:
description:
- "I(True) enable USB support, I(False) to disable it. By default is chosen by oVirt/RHV engine."
version_added: "2.5"
serial_console:
description:
- "I(True) enable VirtIO serial console, I(False) to disable it. By default is chosen by oVirt/RHV engine."
version_added: "2.5"
sso:
description:
- "I(True) enable Single Sign On by Guest Agent, I(False) to disable it. By default is chosen by oVirt/RHV engine."
version_added: "2.5"
host:
description:
- Specify host where Virtual Machine should be running. By default the host is chosen by engine scheduler.
- This parameter is used only when C(state) is I(running) or I(present).
high_availability:
description:
- If I(yes) Virtual Machine will be set as highly available.
- If I(no) Virtual Machine won't be set as highly available.
- If no value is passed, default value is set by oVirt/RHV engine.
type: bool
high_availability_priority:
description:
- Indicates the priority of the virtual machine inside the run and migration queues.
Virtual machines with higher priorities will be started and migrated before virtual machines with lower
priorities. The value is an integer between 0 and 100. The higher the value, the higher the priority.
- If no value is passed, default value is set by oVirt/RHV engine.
version_added: "2.5"
lease:
description:
- Name of the storage domain this virtual machine lease reside on.
- NOTE - Supported since oVirt 4.1.
version_added: "2.4"
delete_protected:
description:
- If I(yes) Virtual Machine will be set as delete protected.
- If I(no) Virtual Machine won't be set as delete protected.
- If no value is passed, default value is set by oVirt/RHV engine.
stateless:
description:
- If I(yes) Virtual Machine will be set as stateless.
- If I(no) Virtual Machine will be unset as stateless.
- If no value is passed, default value is set by oVirt/RHV engine.
clone:
description:
- If I(yes) then the disks of the created virtual machine will be cloned and independent of the template.
- This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before.
type: bool
default: 'no'
clone_permissions:
description:
- If I(yes) then the permissions of the template (only the direct ones, not the inherited ones)
will be copied to the created virtual machine.
- This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before.
type: bool
default: 'no'
cd_iso:
description:
- ISO file from ISO storage domain which should be attached to Virtual Machine.
- If you pass empty string the CD will be ejected from VM.
- If used with C(state) I(running) or I(present) and VM is running the CD will be attached to VM.
- If used with C(state) I(running) or I(present) and VM is down the CD will be attached to VM persistently.
force:
description:
- Please check to I(Synopsis) to more detailed description of force parameter, it can behave differently
in different situations.
type: bool
default: 'no'
nics:
description:
- List of NICs, which should be attached to Virtual Machine. NIC is described by following dictionary.
- C(name) - Name of the NIC.
- C(profile_name) - Profile name where NIC should be attached.
- C(interface) - Type of the network interface. One of following I(virtio), I(e1000), I(rtl8139), default is I(virtio).
- C(mac_address) - Custom MAC address of the network interface, by default it's obtained from MAC pool.
- NOTE - This parameter is used only when C(state) is I(running) or I(present) and is able to only create NICs.
To manage NICs of the VM in more depth please use M(ovirt_nics) module instead.
disks:
description:
- List of disks, which should be attached to Virtual Machine. Disk is described by following dictionary.
            - C(name) - Name of the disk. Either C(name) or C(id) is required.
            - C(id) - ID of the disk. Either C(name) or C(id) is required.
- C(interface) - Interface of the disk, either I(virtio) or I(IDE), default is I(virtio).
- C(bootable) - I(True) if the disk should be bootable, default is non bootable.
- C(activate) - I(True) if the disk should be activated, default is activated.
            - NOTE - This parameter is used only when C(state) is I(running) or I(present) and can only attach disks.
              To manage disks of the VM in more depth please use the M(ovirt_disks) module instead.
sysprep:
description:
- Dictionary with values for Windows Virtual Machine initialization using sysprep.
- C(host_name) - Hostname to be set to Virtual Machine when deployed.
            - C(active_directory_ou) - Active Directory Organizational Unit to be used for the user login.
- C(org_name) - Organization name to be set to Windows Virtual Machine.
- C(domain) - Domain to be set to Windows Virtual Machine.
- C(timezone) - Timezone to be set to Windows Virtual Machine.
- C(ui_language) - UI language of the Windows Virtual Machine.
- C(system_locale) - System localization of the Windows Virtual Machine.
- C(input_locale) - Input localization of the Windows Virtual Machine.
- C(windows_license_key) - License key to be set to Windows Virtual Machine.
            - C(user_name) - Username whose password should be set on the Windows Virtual Machine.
            - C(root_password) - Password to be set for that username on the Windows Virtual Machine.
cloud_init:
description:
- Dictionary with values for Unix-like Virtual Machine initialization using cloud init.
- C(host_name) - Hostname to be set to Virtual Machine when deployed.
- C(timezone) - Timezone to be set to Virtual Machine when deployed.
            - C(user_name) - Username whose password should be set on the Virtual Machine when deployed.
- C(root_password) - Password to be set for user specified by C(user_name) parameter.
            - C(authorized_ssh_keys) - SSH keys used to log in to the Virtual Machine.
- C(regenerate_ssh_keys) - If I(True) SSH keys will be regenerated on Virtual Machine.
- C(custom_script) - Cloud-init script which will be executed on Virtual Machine when deployed. This is appended to the end of the
cloud-init script generated by any other options.
- C(dns_servers) - DNS servers to be configured on Virtual Machine.
- C(dns_search) - DNS search domains to be configured on Virtual Machine.
- C(nic_boot_protocol) - Set boot protocol of the network interface of Virtual Machine. Can be one of C(none), C(dhcp) or C(static).
- C(nic_ip_address) - If boot protocol is static, set this IP address to network interface of Virtual Machine.
- C(nic_netmask) - If boot protocol is static, set this netmask to network interface of Virtual Machine.
- C(nic_gateway) - If boot protocol is static, set this gateway to network interface of Virtual Machine.
- C(nic_name) - Set name to network interface of Virtual Machine.
- C(nic_on_boot) - If I(True) network interface will be set to start on boot.
cloud_init_nics:
description:
            - List of dictionaries representing network interfaces to be set up by cloud init.
            - This option is used when the user needs to set up more network interfaces via cloud init.
            - If one network interface is enough, the user should use the C(cloud_init) I(nic_*) parameters. The C(cloud_init) I(nic_*) parameters
              are merged with the C(cloud_init_nics) parameters.
- Dictionary can contain following values.
- C(nic_boot_protocol) - Set boot protocol of the network interface of Virtual Machine. Can be one of C(none), C(dhcp) or C(static).
- C(nic_ip_address) - If boot protocol is static, set this IP address to network interface of Virtual Machine.
- C(nic_netmask) - If boot protocol is static, set this netmask to network interface of Virtual Machine.
- C(nic_gateway) - If boot protocol is static, set this gateway to network interface of Virtual Machine.
- C(nic_name) - Set name to network interface of Virtual Machine.
- C(nic_on_boot) - If I(True) network interface will be set to start on boot.
version_added: "2.3"
cloud_init_persist:
description:
- "If I(true) the C(cloud_init) or C(sysprep) parameters will be saved for the virtual machine
and won't be virtual machine won't be started as run-once."
version_added: "2.5"
aliases: [ 'sysprep_persist' ]
kernel_path:
description:
- Path to a kernel image used to boot the virtual machine.
- Kernel image must be stored on either the ISO domain or on the host's storage.
version_added: "2.3"
initrd_path:
description:
- Path to an initial ramdisk to be used with the kernel specified by C(kernel_path) option.
- Ramdisk image must be stored on either the ISO domain or on the host's storage.
version_added: "2.3"
kernel_params:
description:
- Kernel command line parameters (formatted as string) to be used with the kernel specified by C(kernel_path) option.
version_added: "2.3"
instance_type:
description:
- Name of virtual machine's hardware configuration.
- By default no instance type is used.
version_added: "2.3"
description:
description:
- Description of the Virtual Machine.
version_added: "2.3"
comment:
description:
- Comment of the Virtual Machine.
version_added: "2.3"
timezone:
description:
- Sets time zone offset of the guest hardware clock.
- For example C(Etc/GMT)
version_added: "2.3"
serial_policy:
description:
- Specify a serial number policy for the Virtual Machine.
            - The following options are supported.
- C(vm) - Sets the Virtual Machine's UUID as its serial number.
- C(host) - Sets the host's UUID as the Virtual Machine's serial number.
- C(custom) - Allows you to specify a custom serial number in C(serial_policy_value).
version_added: "2.3"
serial_policy_value:
description:
- Allows you to specify a custom serial number.
- This parameter is used only when C(serial_policy) is I(custom).
version_added: "2.3"
vmware:
description:
- Dictionary of values to be used to connect to VMware and import
a virtual machine to oVirt.
            - Dictionary can contain the following values.
            - C(username) - The username to authenticate against VMware.
            - C(password) - The password to authenticate against VMware.
            - C(url) - The URL to be passed to the I(virt-v2v) tool for conversion.
              For example I(vpx://vmware_user@vcenter-host/DataCenter/Cluster/esxi-host?no_verify=1)
- C(drivers_iso) - The name of the ISO containing drivers that can
be used during the I(virt-v2v) conversion process.
- C(sparse) - Specifies the disk allocation policy of the resulting
virtual machine. I(true) for sparse, I(false) for preallocated.
Default value is I(true).
            - C(storage_domain) - Specifies the target storage domain for
              converted disks. This parameter is required.
version_added: "2.3"
xen:
description:
- Dictionary of values to be used to connect to XEN and import
a virtual machine to oVirt.
            - Dictionary can contain the following values.
            - C(url) - The URL to be passed to the I(virt-v2v) tool for conversion.
              For example I(xen+ssh://root@zen.server). This parameter is required.
- C(drivers_iso) - The name of the ISO containing drivers that can
be used during the I(virt-v2v) conversion process.
- C(sparse) - Specifies the disk allocation policy of the resulting
virtual machine. I(true) for sparse, I(false) for preallocated.
Default value is I(true).
            - C(storage_domain) - Specifies the target storage domain for
              converted disks. This parameter is required.
version_added: "2.3"
kvm:
description:
            - Dictionary of values to be used to connect to KVM and import
              a virtual machine to oVirt.
            - Dictionary can contain the following values.
            - C(name) - The name of the KVM virtual machine.
            - C(username) - The username to authenticate against KVM.
            - C(password) - The password to authenticate against KVM.
            - C(url) - The URL to be passed to the I(virt-v2v) tool for conversion.
              For example I(qemu:///system). This parameter is required.
- C(drivers_iso) - The name of the ISO containing drivers that can
be used during the I(virt-v2v) conversion process.
- C(sparse) - Specifies the disk allocation policy of the resulting
virtual machine. I(true) for sparse, I(false) for preallocated.
Default value is I(true).
            - C(storage_domain) - Specifies the target storage domain for
              converted disks. This parameter is required.
version_added: "2.3"
cpu_mode:
description:
- "CPU mode of the virtual machine. It can be some of the following: I(host_passthrough), I(host_model) or I(custom)."
- "For I(host_passthrough) CPU type you need to set C(placement_policy) to I(pinned)."
- "If no value is passed, default value is set by oVirt/RHV engine."
version_added: "2.5"
placement_policy:
description:
- "The configuration of the virtual machine's placement policy."
- "Placement policy can be one of the following values:"
- "C(migratable) - Allow manual and automatic migration."
- "C(pinned) - Do not allow migration."
- "C(user_migratable) - Allow manual migration only."
- "If no value is passed, default value is set by oVirt/RHV engine."
version_added: "2.5"
cpu_pinning:
description:
- "CPU Pinning topology to map virtual machine CPU to host CPU."
- "CPU Pinning topology is a list of dictionary which can have following values:"
- "C(cpu) - Number of the host CPU."
- "C(vcpu) - Number of the virtual machine CPU."
version_added: "2.5"
soundcard_enabled:
description:
- "If I(true), the sound card is added to the virtual machine."
version_added: "2.5"
smartcard_enabled:
description:
- "If I(true), use smart card authentication."
version_added: "2.5"
io_threads:
description:
- "Number of IO threads used by virtual machine. I(0) means IO threading disabled."
version_added: "2.5"
ballooning_enabled:
description:
- "If I(true), use memory ballooning."
- "Memory balloon is a guest device, which may be used to re-distribute / reclaim the host memory
based on VM needs in a dynamic way. In this way it's possible to create memory over commitment states."
version_added: "2.5"
numa_tune_mode:
description:
- "Set how the memory allocation for NUMA nodes of this VM is applied (relevant if NUMA nodes are set for this VM)."
- "It can be one of the following: I(interleave), I(preferred) or I(strict)."
- "If no value is passed, default value is set by oVirt/RHV engine."
version_added: "2.6"
numa_nodes:
description:
- "List of vNUMA Nodes to set for this VM and pin them to assigned host's physical NUMA node."
- "Each vNUMA node is described by following dictionary:"
- "C(index) - The index of this NUMA node (mandatory)."
- "C(memory) - Memory size of the NUMA node in MiB (mandatory)."
- "C(cores) - list of VM CPU cores indexes to be included in this NUMA node (mandatory)."
- "C(numa_node_pins) - list of physical NUMA node indexes to pin this virtual NUMA node to."
version_added: "2.6"
rng_device:
description:
- "Random number generator (RNG). You can choose of one the following devices I(urandom), I(random) or I(hwrng)."
- "In order to select I(hwrng), you must have it enabled on cluster first."
- "/dev/urandom is used for cluster version >= 4.1, and /dev/random for cluster version <= 4.0"
version_added: "2.5"
custom_properties:
description:
- "Properties sent to VDSM to configure various hooks."
- "Custom properties is a list of dictionary which can have following values:"
- "C(name) - Name of the custom property. For example: I(hugepages), I(vhost), I(sap_agent), etc."
- "C(regexp) - Regular expression to set for custom property."
- "C(value) - Value to set for custom property."
version_added: "2.5"
watchdog:
description:
- "Assign watchdog device for the virtual machine."
- "Watchdogs is a dictionary which can have following values:"
- "C(model) - Model of the watchdog device. For example: I(i6300esb), I(diag288) or I(null)."
- "C(action) - Watchdog action to be performed when watchdog is triggered. For example: I(none), I(reset), I(poweroff), I(pause) or I(dump)."
version_added: "2.5"
graphical_console:
description:
- "Assign graphical console to the virtual machine."
- "Graphical console is a dictionary which can have following values:"
- "C(headless_mode) - If I(true) disable the graphics console for this virtual machine."
- "C(protocol) - Graphical protocol, a list of I(spice), I(vnc), or both."
version_added: "2.5"
notes:
    - If VM is in I(UNASSIGNED) or I(UNKNOWN) state before any operation, the module will fail.
      If VM is in I(IMAGE_LOCKED) state before any operation, we try to wait for the VM to be I(DOWN).
      If VM is in I(SAVING_STATE) state before any operation, we try to wait for the VM to be I(SUSPENDED).
      If VM is in I(POWERING_DOWN) state before any operation, we try to wait for the VM to be I(UP) or I(DOWN). The VM can
      get into I(UP) state from I(POWERING_DOWN) state when there is no ACPI or guest agent running inside the VM, or
      if the shutdown operation fails.
      When the user specifies the I(running) C(state), we always wait for the VM to be in I(UP) state in case the VM is
      I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE) or I(WAIT_FOR_LAUNCH). In other states we run the start operation on the VM.
      When the user specifies the I(stopped) C(state) and passes the C(force) parameter set to I(true), we forcibly stop the VM in
      any state. If the user doesn't pass the C(force) parameter, we always wait for the VM to be in I(UP) state in case the VM is
      I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE) or I(WAIT_FOR_LAUNCH). If the VM is in I(PAUSED) or
      I(SUSPENDED) state, we start the VM. Then we gracefully shut down the VM.
      When the user specifies the I(suspended) C(state), we always wait for the VM to be in I(UP) state in case the VM is I(MIGRATING),
      I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE) or I(WAIT_FOR_LAUNCH). If the VM is in I(PAUSED) or I(DOWN) state,
      we start the VM. Then we suspend the VM.
      When the user specifies the I(absent) C(state), we forcibly stop the VM in any state and remove it.
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain the auth parameter for simplicity,
# look at the ovirt_auth module to see how to reuse authentication:
- name: Creates a new Virtual Machine from template named 'rhel7_template'
ovirt_vms:
state: present
name: myvm
template: rhel7_template
cluster: mycluster
- name: Register VM
ovirt_vms:
state: registered
storage_domain: mystorage
cluster: mycluster
name: myvm
- name: Register VM using id
ovirt_vms:
state: registered
storage_domain: mystorage
cluster: mycluster
id: 1111-1111-1111-1111
- name: Register VM, allowing partial import
ovirt_vms:
state: registered
storage_domain: mystorage
allow_partial_import: "True"
cluster: mycluster
id: 1111-1111-1111-1111
- name: Register VM with vnic profile mappings and reassign bad macs
ovirt_vms:
state: registered
storage_domain: mystorage
cluster: mycluster
id: 1111-1111-1111-1111
vnic_profile_mappings:
- source_network_name: mynetwork
source_profile_name: mynetwork
target_profile_id: 3333-3333-3333-3333
- source_network_name: mynetwork2
source_profile_name: mynetwork2
target_profile_id: 4444-4444-4444-4444
reassign_bad_macs: "True"
- name: Register VM with mappings
ovirt_vms:
state: registered
storage_domain: mystorage
cluster: mycluster
id: 1111-1111-1111-1111
role_mappings:
- source_name: Role_A
dest_name: Role_B
domain_mappings:
- source_name: Domain_A
dest_name: Domain_B
lun_mappings:
- source_storage_type: iscsi
source_logical_unit_id: 1IET_000d0001
source_logical_unit_port: 3260
source_logical_unit_portal: 1
source_logical_unit_address: 10.34.63.203
source_logical_unit_target: iqn.2016-08-09.brq.str-01:omachace
dest_storage_type: iscsi
dest_logical_unit_id: 1IET_000d0002
dest_logical_unit_port: 3260
dest_logical_unit_portal: 1
dest_logical_unit_address: 10.34.63.204
dest_logical_unit_target: iqn.2016-08-09.brq.str-02:omachace
affinity_group_mappings:
- source_name: Affinity_A
dest_name: Affinity_B
affinity_label_mappings:
- source_name: Label_A
dest_name: Label_B
cluster_mappings:
- source_name: cluster_A
dest_name: cluster_B
- name: Creates a stateless VM which will always use latest template version
ovirt_vms:
name: myvm
template: rhel7
cluster: mycluster
use_latest_template_version: true
# Creates a new server rhel7 Virtual Machine from Blank template
# on brq01 cluster with 2GiB memory and 2 vcpu cores/sockets
# and attach bootable disk with name rhel7_disk and attach virtio NIC
- ovirt_vms:
state: present
cluster: brq01
name: myvm
memory: 2GiB
cpu_cores: 2
cpu_sockets: 2
cpu_shares: 1024
type: server
operating_system: rhel_7x64
disks:
- name: rhel7_disk
bootable: True
nics:
- name: nic1
- name: Run VM with cloud init
ovirt_vms:
name: rhel7
template: rhel7
cluster: Default
memory: 1GiB
high_availability: true
high_availability_priority: 50 # Available from Ansible 2.5
cloud_init:
nic_boot_protocol: static
nic_ip_address: 10.34.60.86
nic_netmask: 255.255.252.0
nic_gateway: 10.34.63.254
nic_name: eth1
nic_on_boot: true
host_name: example.com
custom_script: |
write_files:
- content: |
Hello, world!
path: /tmp/greeting.txt
permissions: '0644'
user_name: root
root_password: super_password
- name: Run VM with cloud init, with multiple network interfaces
ovirt_vms:
name: rhel7_4
template: rhel7
cluster: mycluster
cloud_init_nics:
- nic_name: eth0
nic_boot_protocol: dhcp
nic_on_boot: true
- nic_name: eth1
nic_boot_protocol: static
nic_ip_address: 10.34.60.86
nic_netmask: 255.255.252.0
nic_gateway: 10.34.63.254
nic_on_boot: true
- name: Run VM with sysprep
ovirt_vms:
name: windows2012R2_AD
template: windows2012R2
cluster: Default
memory: 3GiB
high_availability: true
sysprep:
host_name: windowsad.example.com
user_name: Administrator
root_password: SuperPassword123
- name: Migrate/Run VM to/on host named 'host1'
ovirt_vms:
state: running
name: myvm
host: host1
- name: Change VMs CD
ovirt_vms:
name: myvm
cd_iso: drivers.iso
- name: Eject VMs CD
ovirt_vms:
name: myvm
cd_iso: ''
- name: Boot VM from CD
ovirt_vms:
name: myvm
cd_iso: centos7_x64.iso
boot_devices:
- cdrom
- name: Stop vm
ovirt_vms:
state: stopped
name: myvm
- name: Upgrade memory to already created VM
ovirt_vms:
name: myvm
memory: 4GiB
- name: Hot plug memory to already created and running VM (VM won't be restarted)
ovirt_vms:
name: myvm
memory: 4GiB
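# An illustrative sketch (not from the module's test suite): the VM, host
# and pin values below are placeholders. Per the option docs above,
# host_passthrough CPU mode requires a pinned placement policy.
- name: Run VM with CPU passthrough pinned to a host
  ovirt_vms:
    name: myvm
    host: host1
    cpu_mode: host_passthrough
    placement_policy: pinned
    cpu_pinning:
      - cpu: 1
        vcpu: 0
      - cpu: 2
        vcpu: 1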
# Create/update a VM to run with two vNUMA nodes and pin them to physical NUMA nodes as follows:
# vnuma index 0-> numa index 0, vnuma index 1-> numa index 1
- name: Create a VM to run with two vNUMA nodes
ovirt_vms:
name: myvm
cluster: mycluster
numa_tune_mode: "interleave"
numa_nodes:
- index: 0
cores: [0]
memory: 20
numa_node_pins: [0]
- index: 1
cores: [1]
memory: 30
numa_node_pins: [1]
- name: Update an existing VM to run without previously created vNUMA nodes (i.e. remove all vNUMA nodes+NUMA pinning setting)
ovirt_vms:
name: myvm
cluster: mycluster
state: "present"
numa_tune_mode: "interleave"
numa_nodes:
- index: -1
# When a change to the VM requires a restart, use the next_run state:
# the VM will be updated and rebooted if there are any changes.
# If the present state were used, the VM wouldn't be restarted.
- ovirt_vms:
state: next_run
name: myvm
boot_devices:
- network
- name: Import virtual machine from VMware
ovirt_vms:
state: stopped
cluster: mycluster
name: vmware_win10
timeout: 1800
poll_interval: 30
vmware:
url: vpx://user@1.2.3.4/Folder1/Cluster1/2.3.4.5?no_verify=1
name: windows10
storage_domain: mynfs
username: user
password: password
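# A hypothetical KVM import mirroring the VMware example above; the libvirt
# URL, credentials and storage domain are placeholders.
- name: Import virtual machine from KVM
  ovirt_vms:
    state: stopped
    cluster: mycluster
    name: kvm_centos7
    kvm:
      name: centos7
      url: qemu:///system
      storage_domain: mynfs
      username: root
      password: secret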
- name: Create vm from template and create all disks on specific storage domain
ovirt_vms:
name: vm_test
cluster: mycluster
template: mytemplate
storage_domain: mynfs
nics:
- name: nic1
- name: Remove VM, if VM is running it will be stopped
ovirt_vms:
state: absent
name: myvm
# Defining a specific quota for a VM:
# Since Ansible 2.5
- ovirt_quotas_facts:
data_center: Default
name: myquota
- ovirt_vms:
name: myvm
sso: False
boot_menu: True
usb_support: True
serial_console: True
quota_id: "{{ ovirt_quotas[0]['id'] }}"
- name: Create a VM that has the console configured for both Spice and VNC
ovirt_vms:
name: myvm
template: mytemplate
cluster: mycluster
graphical_console:
protocol:
- spice
- vnc
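# An illustrative combination of the hardware tuning options documented
# above; the custom property name 'hugepages' is only an example.
- name: Create a VM with a watchdog, an RNG device and custom properties
  ovirt_vms:
    name: myvm
    cluster: mycluster
    watchdog:
      model: i6300esb
      action: reset
    rng_device: urandom
    custom_properties:
      - name: hugepages
        value: "2048"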
'''
RETURN = '''
id:
description: ID of the VM which is managed
returned: On success if VM is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
vm:
description: "Dictionary of all the VM attributes. VM attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/vm."
returned: On success if VM is found.
type: dict
'''
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_params,
check_sdk,
convert_to_bytes,
create_connection,
equal,
get_dict_of_struct,
get_entity,
get_link_name,
get_id_by_name,
ovirt_full_argument_spec,
search_by_name,
wait,
)
class VmsModule(BaseModule):
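    # Lifecycle hooks used by BaseModule: pre_create() marks whether the VM is
    # brand new, while post_update()/post_present() wire up sub-resources
    # (disks, NICs, NUMA nodes, watchdog, graphical consoles) once the VM exists.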
def __init__(self, *args, **kwargs):
super(VmsModule, self).__init__(*args, **kwargs)
self._initialization = None
self._is_new = False
def __get_template_with_version(self):
"""
        oVirt/RHV in version 4.1 doesn't support search by template+version_number,
        so we need to list all templates with the specified name and then iterate
        through their versions until we find the one we are looking for.
"""
template = None
templates_service = self._connection.system_service().templates_service()
if self.param('template'):
templates = templates_service.list(search='name=%s' % self.param('template'))
if self.param('template_version'):
templates = [
t for t in templates
if t.version.version_number == self.param('template_version')
]
if not templates:
raise ValueError(
"Template with name '%s' and version '%s' was not found'" % (
self.param('template'),
self.param('template_version')
)
)
template = sorted(templates, key=lambda t: t.version.version_number, reverse=True)[0]
elif self._is_new:
            # If template isn't specified and the VM is about to be created, use the default (Blank) template:
template = templates_service.template_service('00000000-0000-0000-0000-000000000000').get()
return template
def __get_storage_domain_and_all_template_disks(self, template):
if self.param('template') is None:
return None
if self.param('storage_domain') is None:
return None
disks = list()
for att in self._connection.follow_link(template.disk_attachments):
disks.append(
otypes.DiskAttachment(
disk=otypes.Disk(
id=att.disk.id,
format=otypes.DiskFormat(self.param('disk_format')),
storage_domains=[
otypes.StorageDomain(
id=get_id_by_name(
self._connection.system_service().storage_domains_service(),
self.param('storage_domain')
)
)
]
)
)
)
return disks
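    # build_entity() maps module parameters onto an otypes.Vm; each keyword is
    # passed only when the corresponding parameter was supplied, so unspecified
    # values keep the engine defaults.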
def build_entity(self):
template = self.__get_template_with_version()
disk_attachments = self.__get_storage_domain_and_all_template_disks(template)
return otypes.Vm(
id=self.param('id'),
name=self.param('name'),
cluster=otypes.Cluster(
name=self.param('cluster')
) if self.param('cluster') else None,
disk_attachments=disk_attachments,
template=otypes.Template(
id=template.id,
) if template else None,
use_latest_template_version=self.param('use_latest_template_version'),
stateless=self.param('stateless') or self.param('use_latest_template_version'),
delete_protected=self.param('delete_protected'),
bios=(
otypes.Bios(boot_menu=otypes.BootMenu(enabled=self.param('boot_menu')))
) if self.param('boot_menu') is not None else None,
console=(
otypes.Console(enabled=self.param('serial_console'))
) if self.param('serial_console') is not None else None,
usb=(
otypes.Usb(enabled=self.param('usb_support'))
) if self.param('usb_support') is not None else None,
sso=(
otypes.Sso(
methods=[otypes.Method(id=otypes.SsoMethod.GUEST_AGENT)] if self.param('sso') else []
)
),
quota=otypes.Quota(id=self._module.params.get('quota_id')) if self.param('quota_id') is not None else None,
high_availability=otypes.HighAvailability(
enabled=self.param('high_availability'),
priority=self.param('high_availability_priority'),
) if self.param('high_availability') is not None or self.param('high_availability_priority') else None,
lease=otypes.StorageDomainLease(
storage_domain=otypes.StorageDomain(
id=get_id_by_name(
service=self._connection.system_service().storage_domains_service(),
name=self.param('lease')
)
)
) if self.param('lease') is not None else None,
cpu=otypes.Cpu(
topology=otypes.CpuTopology(
cores=self.param('cpu_cores'),
sockets=self.param('cpu_sockets'),
threads=self.param('cpu_threads'),
) if any((
self.param('cpu_cores'),
self.param('cpu_sockets'),
self.param('cpu_threads')
)) else None,
cpu_tune=otypes.CpuTune(
vcpu_pins=[
otypes.VcpuPin(vcpu=int(pin['vcpu']), cpu_set=str(pin['cpu'])) for pin in self.param('cpu_pinning')
],
) if self.param('cpu_pinning') else None,
mode=otypes.CpuMode(self.param('cpu_mode')) if self.param('cpu_mode') else None,
) if any((
self.param('cpu_cores'),
self.param('cpu_sockets'),
self.param('cpu_threads'),
self.param('cpu_mode'),
self.param('cpu_pinning')
)) else None,
cpu_shares=self.param('cpu_shares'),
os=otypes.OperatingSystem(
type=self.param('operating_system'),
boot=otypes.Boot(
devices=[
otypes.BootDevice(dev) for dev in self.param('boot_devices')
],
) if self.param('boot_devices') else None,
) if (
self.param('operating_system') or self.param('boot_devices')
) else None,
type=otypes.VmType(
self.param('type')
) if self.param('type') else None,
memory=convert_to_bytes(
self.param('memory')
) if self.param('memory') else None,
memory_policy=otypes.MemoryPolicy(
guaranteed=convert_to_bytes(self.param('memory_guaranteed')),
ballooning=self.param('ballooning_enabled'),
max=convert_to_bytes(self.param('memory_max')),
) if any((
self.param('memory_guaranteed'),
self.param('ballooning_enabled') is not None,
self.param('memory_max')
)) else None,
instance_type=otypes.InstanceType(
id=get_id_by_name(
self._connection.system_service().instance_types_service(),
self.param('instance_type'),
),
) if self.param('instance_type') else None,
description=self.param('description'),
comment=self.param('comment'),
time_zone=otypes.TimeZone(
name=self.param('timezone'),
) if self.param('timezone') else None,
serial_number=otypes.SerialNumber(
policy=otypes.SerialNumberPolicy(self.param('serial_policy')),
value=self.param('serial_policy_value'),
) if (
self.param('serial_policy') is not None or
self.param('serial_policy_value') is not None
) else None,
placement_policy=otypes.VmPlacementPolicy(
affinity=otypes.VmAffinity(self.param('placement_policy')),
hosts=[
otypes.Host(name=self.param('host')),
] if self.param('host') else None,
) if self.param('placement_policy') else None,
soundcard_enabled=self.param('soundcard_enabled'),
display=otypes.Display(
smartcard_enabled=self.param('smartcard_enabled')
) if self.param('smartcard_enabled') is not None else None,
io=otypes.Io(
threads=self.param('io_threads'),
) if self.param('io_threads') is not None else None,
numa_tune_mode=otypes.NumaTuneMode(
self.param('numa_tune_mode')
) if self.param('numa_tune_mode') else None,
rng_device=otypes.RngDevice(
source=otypes.RngSource(self.param('rng_device')),
) if self.param('rng_device') else None,
custom_properties=[
otypes.CustomProperty(
name=cp.get('name'),
regexp=cp.get('regexp'),
value=str(cp.get('value')),
) for cp in self.param('custom_properties') if cp
] if self.param('custom_properties') is not None else None,
initialization=self.get_initialization() if self.param('cloud_init_persist') else None,
)
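    # update_check() must return True when the existing VM already matches the
    # requested parameters; any single mismatch yields False and triggers an
    # update call in BaseModule.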
def update_check(self, entity):
def check_cpu_pinning():
if self.param('cpu_pinning'):
current = []
if entity.cpu.cpu_tune:
current = [(str(pin.cpu_set), int(pin.vcpu)) for pin in entity.cpu.cpu_tune.vcpu_pins]
passed = [(str(pin['cpu']), int(pin['vcpu'])) for pin in self.param('cpu_pinning')]
return sorted(current) == sorted(passed)
return True
def check_custom_properties():
if self.param('custom_properties'):
current = []
if entity.custom_properties:
current = [(cp.name, cp.regexp, str(cp.value)) for cp in entity.custom_properties]
passed = [(cp.get('name'), cp.get('regexp'), str(cp.get('value'))) for cp in self.param('custom_properties') if cp]
return sorted(current) == sorted(passed)
return True
def check_host():
if self.param('host') is not None:
return self.param('host') in [self._connection.follow_link(host).name for host in getattr(entity.placement_policy, 'hosts', None) or []]
return True
cpu_mode = getattr(entity.cpu, 'mode')
vm_display = entity.display
return (
check_cpu_pinning() and
check_custom_properties() and
check_host() and
not self.param('cloud_init_persist') and
equal(self.param('cluster'), get_link_name(self._connection, entity.cluster)) and equal(convert_to_bytes(self.param('memory')), entity.memory) and
equal(convert_to_bytes(self.param('memory_guaranteed')), entity.memory_policy.guaranteed) and
equal(convert_to_bytes(self.param('memory_max')), entity.memory_policy.max) and
equal(self.param('cpu_cores'), entity.cpu.topology.cores) and
equal(self.param('cpu_sockets'), entity.cpu.topology.sockets) and
equal(self.param('cpu_threads'), entity.cpu.topology.threads) and
equal(self.param('cpu_mode'), str(cpu_mode) if cpu_mode else None) and
equal(self.param('type'), str(entity.type)) and
equal(self.param('operating_system'), str(entity.os.type)) and
equal(self.param('boot_menu'), entity.bios.boot_menu.enabled) and
equal(self.param('soundcard_enabled'), entity.soundcard_enabled) and
equal(self.param('smartcard_enabled'), getattr(vm_display, 'smartcard_enabled', False)) and
equal(self.param('io_threads'), entity.io.threads) and
equal(self.param('ballooning_enabled'), entity.memory_policy.ballooning) and
equal(self.param('serial_console'), entity.console.enabled) and
equal(self.param('usb_support'), entity.usb.enabled) and
equal(self.param('sso'), True if entity.sso.methods else False) and
equal(self.param('quota_id'), getattr(entity.quota, 'id', None)) and
equal(self.param('high_availability'), entity.high_availability.enabled) and
equal(self.param('high_availability_priority'), entity.high_availability.priority) and
equal(self.param('lease'), get_link_name(self._connection, getattr(entity.lease, 'storage_domain', None))) and
equal(self.param('stateless'), entity.stateless) and
equal(self.param('cpu_shares'), entity.cpu_shares) and
equal(self.param('delete_protected'), entity.delete_protected) and
equal(self.param('use_latest_template_version'), entity.use_latest_template_version) and
equal(self.param('boot_devices'), [str(dev) for dev in getattr(entity.os.boot, 'devices', [])]) and
equal(self.param('instance_type'), get_link_name(self._connection, entity.instance_type), ignore_case=True) and
equal(self.param('description'), entity.description) and
equal(self.param('comment'), entity.comment) and
equal(self.param('timezone'), getattr(entity.time_zone, 'name', None)) and
equal(self.param('serial_policy'), str(getattr(entity.serial_number, 'policy', None))) and
equal(self.param('serial_policy_value'), getattr(entity.serial_number, 'value', None)) and
equal(self.param('placement_policy'), str(entity.placement_policy.affinity) if entity.placement_policy else None) and
equal(self.param('numa_tune_mode'), str(entity.numa_tune_mode)) and
equal(self.param('rng_device'), str(entity.rng_device.source) if entity.rng_device else None)
)
def pre_create(self, entity):
# Mark if entity exists before touching it:
if entity is None:
self._is_new = True
def post_update(self, entity):
self.post_present(entity.id)
def post_present(self, entity_id):
        # After creation or update of the VM, attach its sub-resources.
        # Preserve the changed flag: __attach_disks/__attach_nics set
        # self.changed themselves and return None, while the other helpers
        # report a boolean:
        entity = self._service.service(entity_id).get()
        self.__attach_disks(entity)
        self.__attach_nics(entity)
        self.changed = self.__attach_numa_nodes(entity) or self.changed
        self.changed = self.__attach_watchdog(entity) or self.changed
        self.changed = self.__attach_graphical_console(entity) or self.changed
def pre_remove(self, entity):
# Forcibly stop the VM, if it's not in DOWN state:
if entity.status != otypes.VmStatus.DOWN:
if not self._module.check_mode:
self.changed = self.action(
action='stop',
action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN,
wait_condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
)['changed']
def __suspend_shutdown_common(self, vm_service):
if vm_service.get().status in [
otypes.VmStatus.MIGRATING,
otypes.VmStatus.POWERING_UP,
otypes.VmStatus.REBOOT_IN_PROGRESS,
otypes.VmStatus.WAIT_FOR_LAUNCH,
otypes.VmStatus.UP,
otypes.VmStatus.RESTORING_STATE,
]:
self._wait_for_UP(vm_service)
def _pre_shutdown_action(self, entity):
vm_service = self._service.vm_service(entity.id)
self.__suspend_shutdown_common(vm_service)
if entity.status in [otypes.VmStatus.SUSPENDED, otypes.VmStatus.PAUSED]:
vm_service.start()
self._wait_for_UP(vm_service)
return vm_service.get()
def _pre_suspend_action(self, entity):
vm_service = self._service.vm_service(entity.id)
self.__suspend_shutdown_common(vm_service)
if entity.status in [otypes.VmStatus.PAUSED, otypes.VmStatus.DOWN]:
vm_service.start()
self._wait_for_UP(vm_service)
return vm_service.get()
def _post_start_action(self, entity):
vm_service = self._service.service(entity.id)
self._wait_for_UP(vm_service)
self._attach_cd(vm_service.get())
self._migrate_vm(vm_service.get())
def _attach_cd(self, entity):
cd_iso = self.param('cd_iso')
if cd_iso is not None:
vm_service = self._service.service(entity.id)
current = vm_service.get().status == otypes.VmStatus.UP
cdroms_service = vm_service.cdroms_service()
cdrom_device = cdroms_service.list()[0]
cdrom_service = cdroms_service.cdrom_service(cdrom_device.id)
cdrom = cdrom_service.get(current=current)
if getattr(cdrom.file, 'id', '') != cd_iso:
if not self._module.check_mode:
cdrom_service.update(
cdrom=otypes.Cdrom(
file=otypes.File(id=cd_iso)
),
current=current,
)
self.changed = True
return entity
def _migrate_vm(self, entity):
vm_host = self.param('host')
vm_service = self._service.vm_service(entity.id)
if vm_host is not None:
            # Migrate the VM only when it's already UP, then wait for it to be UP on the new host:
if entity.status == otypes.VmStatus.UP:
hosts_service = self._connection.system_service().hosts_service()
current_vm_host = hosts_service.host_service(entity.host.id).get().name
if vm_host != current_vm_host:
if not self._module.check_mode:
vm_service.migrate(host=otypes.Host(name=vm_host))
self._wait_for_UP(vm_service)
self.changed = True
return entity
def _wait_for_UP(self, vm_service):
wait(
service=vm_service,
condition=lambda vm: vm.status == otypes.VmStatus.UP,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
def _wait_for_vm_disks(self, vm_service):
disks_service = self._connection.system_service().disks_service()
for da in vm_service.disk_attachments_service().list():
disk_service = disks_service.disk_service(da.disk.id)
wait(
service=disk_service,
condition=lambda disk: disk.status == otypes.DiskStatus.OK,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
def wait_for_down(self, vm):
"""
This function will first wait for the status DOWN of the VM.
        Then, for stateless VMs, it will find the active snapshot and wait
        until its state is OK and the stateless snapshot is removed.
"""
vm_service = self._service.vm_service(vm.id)
wait(
service=vm_service,
condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
if vm.stateless:
snapshots_service = vm_service.snapshots_service()
snapshots = snapshots_service.list()
snap_active = [
snap for snap in snapshots
if snap.snapshot_type == otypes.SnapshotType.ACTIVE
][0]
snap_stateless = [
snap for snap in snapshots
if snap.snapshot_type == otypes.SnapshotType.STATELESS
]
# Stateless snapshot may be already removed:
if snap_stateless:
"""
                We need to wait for the active snapshot to be removed, as it is
                the current stateless snapshot. Then we need to wait for the
                stateless snapshot to be ready for use, because it will become the active snapshot.
"""
wait(
service=snapshots_service.snapshot_service(snap_active.id),
condition=lambda snap: snap is None,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
wait(
service=snapshots_service.snapshot_service(snap_stateless[0].id),
condition=lambda snap: snap.snapshot_status == otypes.SnapshotStatus.OK,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
return True
def __attach_graphical_console(self, entity):
graphical_console = self.param('graphical_console')
if not graphical_console:
return
vm_service = self._service.service(entity.id)
gcs_service = vm_service.graphics_consoles_service()
graphical_consoles = gcs_service.list()
# Remove all graphical consoles if there are any:
if bool(graphical_console.get('headless_mode')):
if not self._module.check_mode:
for gc in graphical_consoles:
gcs_service.console_service(gc.id).remove()
return len(graphical_consoles) > 0
        # If there are no graphical consoles yet, add the requested ones:
protocol = graphical_console.get('protocol')
if isinstance(protocol, str):
protocol = [protocol]
current_protocols = [str(gc.protocol) for gc in graphical_consoles]
if not current_protocols:
if not self._module.check_mode:
for p in protocol:
gcs_service.add(
otypes.GraphicsConsole(
protocol=otypes.GraphicsType(p),
)
)
return True
# Update consoles:
if sorted(protocol) != sorted(current_protocols):
if not self._module.check_mode:
for gc in graphical_consoles:
gcs_service.console_service(gc.id).remove()
for p in protocol:
gcs_service.add(
otypes.GraphicsConsole(
protocol=otypes.GraphicsType(p),
)
)
return True
def __attach_disks(self, entity):
if not self.param('disks'):
return
vm_service = self._service.service(entity.id)
disks_service = self._connection.system_service().disks_service()
disk_attachments_service = vm_service.disk_attachments_service()
self._wait_for_vm_disks(vm_service)
for disk in self.param('disks'):
# If disk ID is not specified, find disk by name:
disk_id = disk.get('id')
if disk_id is None:
disk_id = getattr(
search_by_name(
service=disks_service,
name=disk.get('name')
),
'id',
None
)
# Attach disk to VM:
disk_attachment = disk_attachments_service.attachment_service(disk_id)
if get_entity(disk_attachment) is None:
if not self._module.check_mode:
disk_attachments_service.add(
otypes.DiskAttachment(
disk=otypes.Disk(
id=disk_id,
),
active=disk.get('activate', True),
interface=otypes.DiskInterface(
disk.get('interface', 'virtio')
),
bootable=disk.get('bootable', False),
)
)
self.changed = True
def __get_vnic_profile_id(self, nic):
"""
        Return the VNIC profile ID looked up by name. There can be multiple
        VNIC profiles with the same name, so the cluster is used as an additional filter.
"""
vnics_service = self._connection.system_service().vnic_profiles_service()
clusters_service = self._connection.system_service().clusters_service()
cluster = search_by_name(clusters_service, self.param('cluster'))
profiles = [
profile for profile in vnics_service.list()
if profile.name == nic.get('profile_name')
]
cluster_networks = [
net.id for net in self._connection.follow_link(cluster.networks)
]
try:
return next(
profile.id for profile in profiles
if profile.network.id in cluster_networks
)
except StopIteration:
raise Exception(
"Profile '%s' was not found in cluster '%s'" % (
nic.get('profile_name'),
self.param('cluster')
)
)
def __attach_numa_nodes(self, entity):
numa_nodes_service = self._service.service(entity.id).numa_nodes_service()
if len(self.param('numa_nodes')) > 0:
# Remove all existing virtual numa nodes before adding new ones
existed_numa_nodes = numa_nodes_service.list()
            # Remove nodes from the highest index down:
            existed_numa_nodes.sort(key=lambda node: node.index, reverse=True)
for current_numa_node in existed_numa_nodes:
numa_nodes_service.node_service(current_numa_node.id).remove()
for numa_node in self.param('numa_nodes'):
if numa_node is None or numa_node.get('index') is None or numa_node.get('cores') is None or numa_node.get('memory') is None:
return False
numa_nodes_service.add(
otypes.VirtualNumaNode(
index=numa_node.get('index'),
memory=numa_node.get('memory'),
cpu=otypes.Cpu(
cores=[
otypes.Core(
index=core
) for core in numa_node.get('cores')
],
),
numa_node_pins=[
otypes.NumaNodePin(
index=pin
) for pin in numa_node.get('numa_node_pins')
] if numa_node.get('numa_node_pins') is not None else None,
)
)
return True
def __attach_watchdog(self, entity):
watchdogs_service = self._service.service(entity.id).watchdogs_service()
watchdog = self.param('watchdog')
if watchdog is not None:
current_watchdog = next(iter(watchdogs_service.list()), None)
if watchdog.get('model') is None and current_watchdog:
watchdogs_service.watchdog_service(current_watchdog.id).remove()
return True
elif watchdog.get('model') is not None and current_watchdog is None:
watchdogs_service.add(
otypes.Watchdog(
model=otypes.WatchdogModel(watchdog.get('model').lower()),
action=otypes.WatchdogAction(watchdog.get('action')),
)
)
return True
elif current_watchdog is not None:
if (
str(current_watchdog.model).lower() != watchdog.get('model').lower() or
str(current_watchdog.action).lower() != watchdog.get('action').lower()
):
watchdogs_service.watchdog_service(current_watchdog.id).update(
otypes.Watchdog(
model=otypes.WatchdogModel(watchdog.get('model')),
action=otypes.WatchdogAction(watchdog.get('action')),
)
)
return True
return False
def __attach_nics(self, entity):
# Attach NICs to VM, if specified:
nics_service = self._service.service(entity.id).nics_service()
for nic in self.param('nics'):
if search_by_name(nics_service, nic.get('name')) is None:
if not self._module.check_mode:
nics_service.add(
otypes.Nic(
name=nic.get('name'),
interface=otypes.NicInterface(
nic.get('interface', 'virtio')
),
vnic_profile=otypes.VnicProfile(
id=self.__get_vnic_profile_id(nic),
) if nic.get('profile_name') else None,
mac=otypes.Mac(
address=nic.get('mac_address')
) if nic.get('mac_address') else None,
)
)
self.changed = True
def get_initialization(self):
if self._initialization is not None:
return self._initialization
sysprep = self.param('sysprep')
cloud_init = self.param('cloud_init')
cloud_init_nics = self.param('cloud_init_nics') or []
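        # The cloud_init dict may itself carry nic_* keys; appending it to
        # cloud_init_nics lets the comprehension below pop those keys out, so
        # the remaining cloud_init keys become plain Initialization kwargs.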
if cloud_init is not None:
cloud_init_nics.append(cloud_init)
if cloud_init or cloud_init_nics:
self._initialization = otypes.Initialization(
nic_configurations=[
otypes.NicConfiguration(
boot_protocol=otypes.BootProtocol(
nic.pop('nic_boot_protocol').lower()
) if nic.get('nic_boot_protocol') else None,
name=nic.pop('nic_name', None),
on_boot=nic.pop('nic_on_boot', None),
ip=otypes.Ip(
address=nic.pop('nic_ip_address', None),
netmask=nic.pop('nic_netmask', None),
gateway=nic.pop('nic_gateway', None),
) if (
nic.get('nic_gateway') is not None or
nic.get('nic_netmask') is not None or
nic.get('nic_ip_address') is not None
) else None,
)
for nic in cloud_init_nics
if (
nic.get('nic_gateway') is not None or
nic.get('nic_netmask') is not None or
nic.get('nic_ip_address') is not None or
nic.get('nic_boot_protocol') is not None or
nic.get('nic_on_boot') is not None
)
] if cloud_init_nics else None,
**cloud_init
)
elif sysprep:
self._initialization = otypes.Initialization(
**sysprep
)
return self._initialization
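# The _get_*_mappings helpers below translate the plain mapping dictionaries
# from the playbook into the otypes.Registration*Mapping objects consumed by
# the register() call in the 'registered' state.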
def _get_role_mappings(module):
roleMappings = list()
for roleMapping in module.params['role_mappings']:
roleMappings.append(
otypes.RegistrationRoleMapping(
from_=otypes.Role(
name=roleMapping['source_name'],
) if roleMapping['source_name'] else None,
to=otypes.Role(
name=roleMapping['dest_name'],
) if roleMapping['dest_name'] else None,
)
)
return roleMappings
def _get_affinity_group_mappings(module):
affinityGroupMappings = list()
for affinityGroupMapping in module.params['affinity_group_mappings']:
affinityGroupMappings.append(
otypes.RegistrationAffinityGroupMapping(
from_=otypes.AffinityGroup(
name=affinityGroupMapping['source_name'],
) if affinityGroupMapping['source_name'] else None,
to=otypes.AffinityGroup(
name=affinityGroupMapping['dest_name'],
) if affinityGroupMapping['dest_name'] else None,
)
)
return affinityGroupMappings
def _get_affinity_label_mappings(module):
affinityLabelMappings = list()
for affinityLabelMapping in module.params['affinity_label_mappings']:
affinityLabelMappings.append(
otypes.RegistrationAffinityLabelMapping(
from_=otypes.AffinityLabel(
name=affinityLabelMapping['source_name'],
) if affinityLabelMapping['source_name'] else None,
to=otypes.AffinityLabel(
name=affinityLabelMapping['dest_name'],
) if affinityLabelMapping['dest_name'] else None,
)
)
return affinityLabelMappings
def _get_domain_mappings(module):
domainMappings = list()
for domainMapping in module.params['domain_mappings']:
domainMappings.append(
otypes.RegistrationDomainMapping(
from_=otypes.Domain(
name=domainMapping['source_name'],
) if domainMapping['source_name'] else None,
to=otypes.Domain(
name=domainMapping['dest_name'],
) if domainMapping['dest_name'] else None,
)
)
return domainMappings
def _get_lun_mappings(module):
lunMappings = list()
for lunMapping in module.params['lun_mappings']:
lunMappings.append(
otypes.RegistrationLunMapping(
from_=otypes.Disk(
lun_storage=otypes.HostStorage(
type=otypes.StorageType(lunMapping['source_storage_type'])
if (lunMapping['source_storage_type'] in
['iscsi', 'fcp']) else None,
logical_units=[
otypes.LogicalUnit(
id=lunMapping['source_logical_unit_id'],
)
],
),
) if lunMapping['source_logical_unit_id'] else None,
to=otypes.Disk(
lun_storage=otypes.HostStorage(
type=otypes.StorageType(lunMapping['dest_storage_type'])
if (lunMapping['dest_storage_type'] in
['iscsi', 'fcp']) else None,
logical_units=[
otypes.LogicalUnit(
id=lunMapping['dest_logical_unit_id'],
port=lunMapping['dest_logical_unit_port'],
portal=lunMapping['dest_logical_unit_portal'],
address=lunMapping['dest_logical_unit_address'],
target=lunMapping['dest_logical_unit_target'],
password=lunMapping['dest_logical_unit_password'],
username=lunMapping['dest_logical_unit_username'],
)
],
),
) if lunMapping['dest_logical_unit_id'] else None,
),
        )
return lunMappings
def _get_cluster_mappings(module):
clusterMappings = list()
for clusterMapping in module.params['cluster_mappings']:
clusterMappings.append(
otypes.RegistrationClusterMapping(
from_=otypes.Cluster(
name=clusterMapping['source_name'],
),
to=otypes.Cluster(
name=clusterMapping['dest_name'],
) if clusterMapping['dest_name'] else None,
)
)
return clusterMappings
def _get_vnic_profile_mappings(module):
vnicProfileMappings = list()
for vnicProfileMapping in module.params['vnic_profile_mappings']:
vnicProfileMappings.append(
otypes.VnicProfileMapping(
source_network_name=vnicProfileMapping['source_network_name'],
source_network_profile_name=vnicProfileMapping['source_profile_name'],
target_vnic_profile=otypes.VnicProfile(
id=vnicProfileMapping['target_profile_id'],
) if vnicProfileMapping['target_profile_id'] else None,
)
)
return vnicProfileMappings
def import_vm(module, connection):
vms_service = connection.system_service().vms_service()
if search_by_name(vms_service, module.params['name']) is not None:
return False
events_service = connection.system_service().events_service()
last_event = events_service.list(max=1)[0]
external_type = [
tmp for tmp in ['kvm', 'xen', 'vmware']
if module.params[tmp] is not None
][0]
external_vm = module.params[external_type]
imports_service = connection.system_service().external_vm_imports_service()
imported_vm = imports_service.add(
otypes.ExternalVmImport(
vm=otypes.Vm(
name=module.params['name']
),
name=external_vm.get('name'),
username=external_vm.get('username', 'test'),
password=external_vm.get('password', 'test'),
provider=otypes.ExternalVmProviderType(external_type),
url=external_vm.get('url'),
cluster=otypes.Cluster(
name=module.params['cluster'],
) if module.params['cluster'] else None,
storage_domain=otypes.StorageDomain(
name=external_vm.get('storage_domain'),
) if external_vm.get('storage_domain') else None,
sparse=external_vm.get('sparse', True),
host=otypes.Host(
name=module.params['host'],
) if module.params['host'] else None,
)
)
    # Wait until an event with code 1152 appears for our VM:
vms_service = connection.system_service().vms_service()
wait(
service=vms_service.vm_service(imported_vm.vm.id),
condition=lambda vm: len([
event
for event in events_service.list(
from_=int(last_event.id),
search='type=1152 and vm.id=%s' % vm.id,
)
]) > 0 if vm is not None else False,
fail_condition=lambda vm: vm is None,
timeout=module.params['timeout'],
poll_interval=module.params['poll_interval'],
)
return True
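# control_state() settles transient VM states before the requested state is
# applied: it waits out IMAGE_LOCKED and SAVING_STATE, fails on
# UNASSIGNED/UNKNOWN, and resolves POWERING_DOWN either by forcing a stop or
# by waiting for the VM to end up DOWN or UP.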
def control_state(vm, vms_service, module):
if vm is None:
return
force = module.params['force']
state = module.params['state']
vm_service = vms_service.vm_service(vm.id)
if vm.status == otypes.VmStatus.IMAGE_LOCKED:
wait(
service=vm_service,
condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
)
elif vm.status == otypes.VmStatus.SAVING_STATE:
# Result state is SUSPENDED, we should wait to be suspended:
wait(
service=vm_service,
condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED,
)
elif (
vm.status == otypes.VmStatus.UNASSIGNED or
vm.status == otypes.VmStatus.UNKNOWN
):
# Invalid states:
module.fail_json(msg="Not possible to control VM, if it's in '{}' status".format(vm.status))
elif vm.status == otypes.VmStatus.POWERING_DOWN:
if (force and state == 'stopped') or state == 'absent':
vm_service.stop()
wait(
service=vm_service,
condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
)
else:
# If VM is powering down, wait to be DOWN or UP.
# VM can end in UP state in case there is no GA
# or ACPI on the VM or shutdown operation crashed:
wait(
service=vm_service,
condition=lambda vm: vm.status in [otypes.VmStatus.DOWN, otypes.VmStatus.UP],
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(type='str', default='present', choices=['absent', 'next_run', 'present', 'registered', 'running', 'stopped', 'suspended']),
name=dict(type='str'),
id=dict(type='str'),
cluster=dict(type='str'),
allow_partial_import=dict(type='bool'),
template=dict(type='str'),
template_version=dict(type='int'),
use_latest_template_version=dict(type='bool'),
storage_domain=dict(type='str'),
disk_format=dict(type='str', default='cow', choices=['cow', 'raw']),
disks=dict(type='list', default=[]),
memory=dict(type='str'),
memory_guaranteed=dict(type='str'),
memory_max=dict(type='str'),
cpu_sockets=dict(type='int'),
cpu_cores=dict(type='int'),
cpu_shares=dict(type='int'),
cpu_threads=dict(type='int'),
type=dict(type='str', choices=['server', 'desktop', 'high_performance']),
operating_system=dict(type='str'),
cd_iso=dict(type='str'),
boot_devices=dict(type='list'),
vnic_profile_mappings=dict(default=[], type='list'),
cluster_mappings=dict(default=[], type='list'),
role_mappings=dict(default=[], type='list'),
affinity_group_mappings=dict(default=[], type='list'),
affinity_label_mappings=dict(default=[], type='list'),
lun_mappings=dict(default=[], type='list'),
domain_mappings=dict(default=[], type='list'),
reassign_bad_macs=dict(default=None, type='bool'),
boot_menu=dict(type='bool'),
serial_console=dict(type='bool'),
usb_support=dict(type='bool'),
sso=dict(type='bool'),
quota_id=dict(type='str'),
high_availability=dict(type='bool'),
high_availability_priority=dict(type='int'),
lease=dict(type='str'),
stateless=dict(type='bool'),
delete_protected=dict(type='bool'),
force=dict(type='bool', default=False),
nics=dict(type='list', default=[]),
cloud_init=dict(type='dict'),
cloud_init_nics=dict(type='list', default=[]),
cloud_init_persist=dict(type='bool', default=False, aliases=['sysprep_persist']),
sysprep=dict(type='dict'),
host=dict(type='str'),
clone=dict(type='bool', default=False),
clone_permissions=dict(type='bool', default=False),
kernel_path=dict(type='str'),
initrd_path=dict(type='str'),
kernel_params=dict(type='str'),
instance_type=dict(type='str'),
description=dict(type='str'),
comment=dict(type='str'),
timezone=dict(type='str'),
serial_policy=dict(type='str', choices=['vm', 'host', 'custom']),
serial_policy_value=dict(type='str'),
vmware=dict(type='dict'),
xen=dict(type='dict'),
kvm=dict(type='dict'),
cpu_mode=dict(type='str'),
placement_policy=dict(type='str'),
cpu_pinning=dict(type='list'),
soundcard_enabled=dict(type='bool', default=None),
smartcard_enabled=dict(type='bool', default=None),
io_threads=dict(type='int', default=None),
ballooning_enabled=dict(type='bool', default=None),
rng_device=dict(type='str'),
numa_tune_mode=dict(type='str', choices=['interleave', 'preferred', 'strict']),
numa_nodes=dict(type='list', default=[]),
custom_properties=dict(type='list'),
watchdog=dict(type='dict'),
graphical_console=dict(type='dict'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_one_of=[['id', 'name']],
)
check_sdk(module)
check_params(module)
try:
state = module.params['state']
auth = module.params.pop('auth')
connection = create_connection(auth)
vms_service = connection.system_service().vms_service()
vms_module = VmsModule(
connection=connection,
module=module,
service=vms_service,
)
vm = vms_module.search_entity(list_params={'all_content': True})
control_state(vm, vms_service, module)
if state in ('present', 'running', 'next_run'):
if module.params['xen'] or module.params['kvm'] or module.params['vmware']:
vms_module.changed = import_vm(module, connection)
            # In case the VM doesn't exist, wait for the DOWN state,
            # otherwise don't wait for any state, just update the VM:
ret = vms_module.create(
entity=vm,
result_state=otypes.VmStatus.DOWN if vm is None else None,
clone=module.params['clone'],
clone_permissions=module.params['clone_permissions'],
)
vms_module.post_present(ret['id'])
            # Start the VM if the running state was requested:
if state == 'running':
initialization = vms_module.get_initialization()
ret = vms_module.action(
action='start',
post_action=vms_module._post_start_action,
action_condition=lambda vm: (
vm.status not in [
otypes.VmStatus.MIGRATING,
otypes.VmStatus.POWERING_UP,
otypes.VmStatus.REBOOT_IN_PROGRESS,
otypes.VmStatus.WAIT_FOR_LAUNCH,
otypes.VmStatus.UP,
otypes.VmStatus.RESTORING_STATE,
]
),
wait_condition=lambda vm: vm.status == otypes.VmStatus.UP,
# Start action kwargs:
use_cloud_init=not module.params.get('cloud_init_persist') and module.params.get('cloud_init') is not None,
use_sysprep=not module.params.get('cloud_init_persist') and module.params.get('sysprep') is not None,
vm=otypes.Vm(
placement_policy=otypes.VmPlacementPolicy(
hosts=[otypes.Host(name=module.params['host'])]
) if module.params['host'] else None,
initialization=initialization,
os=otypes.OperatingSystem(
cmdline=module.params.get('kernel_params'),
initrd=module.params.get('initrd_path'),
kernel=module.params.get('kernel_path'),
) if (
module.params.get('kernel_params') or
module.params.get('initrd_path') or
module.params.get('kernel_path')
) else None,
) if (
module.params.get('kernel_params') or
module.params.get('initrd_path') or
module.params.get('kernel_path') or
module.params.get('host') or
initialization is not None and not module.params.get('cloud_init_persist')
) else None,
)
if state == 'next_run':
# Apply next run configuration, if needed:
vm = vms_service.vm_service(ret['id']).get()
if vm.next_run_configuration_exists:
ret = vms_module.action(
action='reboot',
entity=vm,
action_condition=lambda vm: vm.status == otypes.VmStatus.UP,
wait_condition=lambda vm: vm.status == otypes.VmStatus.UP,
)
ret['changed'] = vms_module.changed
elif state == 'stopped':
if module.params['xen'] or module.params['kvm'] or module.params['vmware']:
vms_module.changed = import_vm(module, connection)
ret = vms_module.create(
entity=vm,
result_state=otypes.VmStatus.DOWN if vm is None else None,
clone=module.params['clone'],
clone_permissions=module.params['clone_permissions'],
)
vms_module.post_present(ret['id'])
if module.params['force']:
ret = vms_module.action(
action='stop',
post_action=vms_module._attach_cd,
action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN,
wait_condition=vms_module.wait_for_down,
)
else:
ret = vms_module.action(
action='shutdown',
pre_action=vms_module._pre_shutdown_action,
post_action=vms_module._attach_cd,
action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN,
wait_condition=vms_module.wait_for_down,
)
elif state == 'suspended':
            ret = vms_module.create(
entity=vm,
result_state=otypes.VmStatus.DOWN if vm is None else None,
clone=module.params['clone'],
clone_permissions=module.params['clone_permissions'],
)
vms_module.post_present(ret['id'])
ret = vms_module.action(
action='suspend',
pre_action=vms_module._pre_suspend_action,
action_condition=lambda vm: vm.status != otypes.VmStatus.SUSPENDED,
wait_condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED,
)
elif state == 'absent':
ret = vms_module.remove()
elif state == 'registered':
storage_domains_service = connection.system_service().storage_domains_service()
# Find the storage domain with unregistered VM:
sd_id = get_id_by_name(storage_domains_service, module.params['storage_domain'])
storage_domain_service = storage_domains_service.storage_domain_service(sd_id)
vms_service = storage_domain_service.vms_service()
            # Find the unregistered VM we want to register:
vms = vms_service.list(unregistered=True)
vm = next(
(vm for vm in vms if (vm.id == module.params['id'] or vm.name == module.params['name'])),
None
)
changed = False
if vm is None:
vm = vms_module.search_entity()
if vm is None:
raise ValueError(
"VM '%s(%s)' wasn't found." % (module.params['name'], module.params['id'])
)
else:
# Register the vm into the system:
changed = True
vm_service = vms_service.vm_service(vm.id)
vm_service.register(
allow_partial_import=module.params['allow_partial_import'],
cluster=otypes.Cluster(
name=module.params['cluster']
) if module.params['cluster'] else None,
vnic_profile_mappings=_get_vnic_profile_mappings(module)
if module.params['vnic_profile_mappings'] else None,
reassign_bad_macs=module.params['reassign_bad_macs']
if module.params['reassign_bad_macs'] is not None else None,
registration_configuration=otypes.RegistrationConfiguration(
cluster_mappings=_get_cluster_mappings(module),
role_mappings=_get_role_mappings(module),
domain_mappings=_get_domain_mappings(module),
lun_mappings=_get_lun_mappings(module),
affinity_group_mappings=_get_affinity_group_mappings(module),
affinity_label_mappings=_get_affinity_label_mappings(module),
) if (module.params['cluster_mappings']
or module.params['role_mappings']
or module.params['domain_mappings']
or module.params['lun_mappings']
or module.params['affinity_group_mappings']
or module.params['affinity_label_mappings']) else None
)
if module.params['wait']:
vm = vms_module.wait_for_import()
else:
# Fetch vm to initialize return.
vm = vm_service.get()
ret = {
'changed': changed,
'id': vm.id,
'vm': get_dict_of_struct(vm)
}
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == "__main__":
main()
| ATIX-AG/ansible | lib/ansible/modules/cloud/ovirt/ovirt_vms.py | Python | gpl-3.0 | 93,016 | 0.002795 |
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2015 Dan Falck <ddfalck@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
''' Used for CNC machine Stops for Path module. Create an Optional or Mandatory Stop.'''
import FreeCAD
import FreeCADGui
import Path
from PySide import QtCore, QtGui
# Qt translation handling
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig)
class Stop:
def __init__(self,obj):
obj.addProperty("App::PropertyEnumeration", "Stop", "Path", QtCore.QT_TRANSLATE_NOOP("App::Property","Add Optional or Mandatory Stop to the program"))
obj.Stop=['Optional', 'Mandatory']
obj.Proxy = self
mode = 2
obj.setEditorMode('Placement', mode)
def __getstate__(self):
return None
def __setstate__(self, state):
return None
def onChanged(self, obj, prop):
pass
# FreeCAD.ActiveDocument.recompute()
def execute(self, obj):
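        # Map the Stop property onto the matching G-code word: M1 is an
        # optional stop (honoured only when the controller's optional-stop
        # switch is on), M0 is an unconditional program stop.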
if obj.Stop == 'Optional':
word = 'M1'
else:
word = 'M0'
output = ""
output = word + '\n'
path = Path.Path(output)
obj.Path = path
class _ViewProviderStop:
def __init__(self, vobj): # mandatory
# obj.addProperty("App::PropertyFloat","SomePropertyName","PropertyGroup","Description of this property")
vobj.Proxy = self
mode = 2
vobj.setEditorMode('LineWidth', mode)
vobj.setEditorMode('MarkerColor', mode)
vobj.setEditorMode('NormalColor', mode)
vobj.setEditorMode('ShowFirstRapid', mode)
vobj.setEditorMode('DisplayMode', mode)
vobj.setEditorMode('BoundingBox', mode)
vobj.setEditorMode('Selectable', mode)
vobj.setEditorMode('ShapeColor', mode)
vobj.setEditorMode('Transparency', mode)
vobj.setEditorMode('Visibility', mode)
def __getstate__(self): # mandatory
return None
def __setstate__(self, state): # mandatory
return None
def getIcon(self): # optional
return ":/icons/Path-Stop.svg"
def onChanged(self, vobj, prop): # optional
mode = 2
vobj.setEditorMode('LineWidth', mode)
vobj.setEditorMode('MarkerColor', mode)
vobj.setEditorMode('NormalColor', mode)
vobj.setEditorMode('ShowFirstRapid', mode)
vobj.setEditorMode('DisplayMode', mode)
vobj.setEditorMode('BoundingBox', mode)
vobj.setEditorMode('Selectable', mode)
vobj.setEditorMode('ShapeColor', mode)
vobj.setEditorMode('Transparency', mode)
vobj.setEditorMode('Visibility', mode)
class CommandPathStop:
def GetResources(self):
return {'Pixmap': 'Path-Stop',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_Stop", "Stop"),
'Accel': "P, C",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_Stop", "Add Optional or Mandatory Stop to the program")}
def IsActive(self):
if FreeCAD.ActiveDocument is not None:
for o in FreeCAD.ActiveDocument.Objects:
if o.Name[:3] == "Job":
return True
return False
def Activated(self):
FreeCAD.ActiveDocument.openTransaction(
translate("Path_Stop", "Add Optional or Mandatory Stop to the program"))
FreeCADGui.addModule("PathScripts.PathStop")
snippet = '''
import Path
import PathScripts
from PathScripts import PathUtils
prjexists = False
obj = FreeCAD.ActiveDocument.addObject("Path::FeaturePython","Stop")
PathScripts.PathStop.Stop(obj)
PathScripts.PathStop._ViewProviderStop(obj.ViewObject)
PathUtils.addToJob(obj)
'''
FreeCADGui.doCommand(snippet)
FreeCAD.ActiveDocument.commitTransaction()
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
# register the FreeCAD command
FreeCADGui.addCommand('Path_Stop', CommandPathStop())
FreeCAD.Console.PrintLog("Loading PathStop... done\n")
| bblacey/FreeCAD-MacOS-CI | src/Mod/Path/PathScripts/PathStop.py | Python | lgpl-2.1 | 5,748 | 0.00174 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .deployed_service_replica_info import DeployedServiceReplicaInfo
class DeployedStatefulServiceReplicaInfo(DeployedServiceReplicaInfo):
"""Information about a stateful service replica deployed on a node.
:param service_name: Full hierarchical name of the service in URI format
starting with `fabric:`.
:type service_name: str
:param service_type_name: Name of the service type as specified in the
service manifest.
:type service_type_name: str
:param service_manifest_name: The name of the service manifest in which
this service type is defined.
:type service_manifest_name: str
:param code_package_name: The name of the code package that hosts this
replica.
:type code_package_name: str
:param partition_id:
:type partition_id: str
:param replica_status: Possible values include: 'Invalid', 'InBuild',
'Standby', 'Ready', 'Down', 'Dropped'
:type replica_status: str
:param address: The last address returned by the replica in Open or
ChangeRole.
:type address: str
:param service_package_activation_id:
:type service_package_activation_id: str
:param ServiceKind: Polymorphic Discriminator
:type ServiceKind: str
:param replica_id: Id of the stateful service replica.
:type replica_id: str
:param replica_role: Possible values include: 'Unknown', 'None',
'Primary', 'IdleSecondary', 'ActiveSecondary'
:type replica_role: str
"""
_validation = {
'ServiceKind': {'required': True},
}
_attribute_map = {
'service_name': {'key': 'ServiceName', 'type': 'str'},
'service_type_name': {'key': 'ServiceTypeName', 'type': 'str'},
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'code_package_name': {'key': 'CodePackageName', 'type': 'str'},
'partition_id': {'key': 'PartitionID', 'type': 'str'},
'replica_status': {'key': 'ReplicaStatus', 'type': 'str'},
'address': {'key': 'Address', 'type': 'str'},
'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'},
'ServiceKind': {'key': 'ServiceKind', 'type': 'str'},
'replica_id': {'key': 'ReplicaId', 'type': 'str'},
'replica_role': {'key': 'ReplicaRole', 'type': 'str'},
}
def __init__(self, service_name=None, service_type_name=None, service_manifest_name=None, code_package_name=None, partition_id=None, replica_status=None, address=None, service_package_activation_id=None, replica_id=None, replica_role=None):
super(DeployedStatefulServiceReplicaInfo, self).__init__(service_name=service_name, service_type_name=service_type_name, service_manifest_name=service_manifest_name, code_package_name=code_package_name, partition_id=partition_id, replica_status=replica_status, address=address, service_package_activation_id=service_package_activation_id)
self.replica_id = replica_id
self.replica_role = replica_role
self.ServiceKind = 'Stateful'
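
# Hedged usage sketch (not part of the generated client): these models are
# normally deserialized from REST responses, but they can be built directly.
# All values below are invented for illustration.
#
#   replica = DeployedStatefulServiceReplicaInfo(
#       service_name='fabric:/MyApp/MyService',
#       replica_id='131000000000000001',
#       replica_role='Primary')
#   assert replica.ServiceKind == 'Stateful'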
| v-iam/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/deployed_stateful_service_replica_info.py | Python | mit | 3,502 | 0.001142 |
# -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2014 Yorik van Havre <yorik@uncreated.net> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import Path
from PySide import QtCore
__doc__ = """Path Hop object and FreeCAD command"""
# Qt translation handling
def translate(context, text, disambig=None):
return QtCore.QCoreApplication.translate(context, text, disambig)
class ObjectHop:
def __init__(self, obj):
obj.addProperty("App::PropertyLink", "NextObject", "Path", QtCore.QT_TRANSLATE_NOOP("App::Property","The object to be reached by this hop"))
obj.addProperty("App::PropertyDistance", "HopHeight", "Path", QtCore.QT_TRANSLATE_NOOP("App::Property","The Z height of the hop"))
obj.Proxy = self
def __getstate__(self):
return None
def __setstate__(self, state):
return None
def execute(self, obj):
nextpoint = FreeCAD.Vector()
if obj.NextObject:
if obj.NextObject.isDerivedFrom("Path::Feature"):
# look for the first position of the next path
for c in obj.NextObject.Path.Commands:
if c.Name in ["G0", "G00", "G1", "G01", "G2", "G02", "G3", "G03"]:
nextpoint = c.Placement.Base
break
# absolute coords, millimeters, cancel offsets
output = "G90\nG21\nG40\n"
# go up to the given height
output += "G0 Z" + str(obj.HopHeight.Value) + "\n"
# go horizontally to the position of nextpoint
output += "G0 X" + str(nextpoint.x) + " Y" + str(nextpoint.y) + "\n"
# print output
path = Path.Path(output)
obj.Path = path
class ViewProviderPathHop:
def __init__(self, vobj):
self.Object = vobj.Object
vobj.Proxy = self
def attach(self, vobj):
self.Object = vobj.Object
def getIcon(self):
return ":/icons/Path_Hop.svg"
def __getstate__(self):
return None
def __setstate__(self, state):
return None
class CommandPathHop:
def GetResources(self):
return {'Pixmap': 'Path_Hop',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_Hop", "Hop"),
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_Hop", "Creates a Path Hop object")}
def IsActive(self):
if FreeCAD.ActiveDocument is not None:
for o in FreeCAD.ActiveDocument.Objects:
if o.Name[:3] == "Job":
return True
return False
def Activated(self):
# check that the selection contains exactly what we want
selection = FreeCADGui.Selection.getSelection()
if len(selection) != 1:
FreeCAD.Console.PrintError(
translate("Path_Hop", "Please select one path object")+"\n")
return
if not selection[0].isDerivedFrom("Path::Feature"):
FreeCAD.Console.PrintError(
translate("Path_Hop", "The selected object is not a path")+"\n")
return
FreeCAD.ActiveDocument.openTransaction(
translate("Path_Hop", "Create Hop"))
FreeCADGui.addModule("PathScripts.PathHop")
FreeCADGui.addModule("PathScripts.PathUtils")
FreeCADGui.doCommand(
'obj = FreeCAD.ActiveDocument.addObject("Path::FeaturePython","Hop")')
FreeCADGui.doCommand('PathScripts.PathHop.ObjectHop(obj)')
FreeCADGui.doCommand(
'PathScripts.PathHop.ViewProviderPathHop(obj.ViewObject)')
FreeCADGui.doCommand(
'obj.NextObject = FreeCAD.ActiveDocument.' + selection[0].Name)
FreeCADGui.doCommand('PathScripts.PathUtils.addToJob(obj)')
FreeCAD.ActiveDocument.commitTransaction()
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
# register the FreeCAD command
FreeCADGui.addCommand('Path_Hop', CommandPathHop())
FreeCAD.Console.PrintLog("Loading PathHop... done\n")
| sanguinariojoe/FreeCAD | src/Mod/Path/PathScripts/PathHop.py | Python | lgpl-2.1 | 5,422 | 0.001844 |
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironicclient.tests.functional import base
class IronicClientHelp(base.FunctionalTestBase):
"""Test for python-ironicclient help messages."""
def test_ironic_help(self):
"""Check Ironic client main help message contents."""
caption = ("Command-line interface to the "
"OpenStack Bare Metal Provisioning API.")
subcommands = {
'bash-completion',
'chassis-create',
'chassis-delete',
'chassis-list',
'chassis-node-list',
'chassis-show',
'chassis-update',
'driver-list',
'driver-properties',
'driver-show',
'driver-vendor-passthru',
'help',
'node-create',
'node-delete',
'node-get-boot-device',
'node-get-console',
'node-get-supported-boot-devices',
'node-list',
'node-port-list',
'node-set-boot-device',
'node-set-console-mode',
'node-set-maintenance',
'node-set-power-state',
'node-set-provision-state',
'node-show',
'node-show-states',
'node-update',
'node-validate',
'node-vendor-passthru',
'port-create',
'port-delete',
'port-list',
'port-show',
'port-update'
}
output = self._ironic('help', flags='', params='')
self.assertIn(caption, output)
for string in subcommands:
self.assertIn(string, output)
| NaohiroTamura/python-ironicclient | ironicclient/tests/functional/test_help_msg.py | Python | apache-2.0 | 2,193 | 0 |
from bitmovin.utils import Serializable
class AutoRestartConfiguration(Serializable):
def __init__(self, segments_written_timeout: float = None, bytes_written_timeout: float = None,
frames_written_timeout: float = None, hls_manifests_update_timeout: float = None,
dash_manifests_update_timeout: float = None, schedule_expression: str = None):
super().__init__()
self.segmentsWrittenTimeout = segments_written_timeout
self.bytesWrittenTimeout = bytes_written_timeout
self.framesWrittenTimeout = frames_written_timeout
self.hlsManifestsUpdateTimeout = hls_manifests_update_timeout
self.dashManifestsUpdateTimeout = dash_manifests_update_timeout
self.scheduleExpression = schedule_expression
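
# Hedged usage sketch: configuring a live encoding to restart when output
# stalls. The timeout unit (seconds) and the cron-style schedule string are
# assumptions based on the parameter names, not confirmed by this file.
#
#   config = AutoRestartConfiguration(
#       segments_written_timeout=60.0,
#       bytes_written_timeout=120.0,
#       schedule_expression='0 4 * * *')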
| bitmovin/bitmovin-python | bitmovin/resources/models/encodings/live/auto_restart_configuration.py | Python | unlicense | 785 | 0.003822 |
import numpy as np
from Other_samples.testCases import *
from Other_samples.Gradient_check.gc_utils import sigmoid, relu, dictionary_to_vector, vector_to_dictionary, \
gradients_to_vector
def forward_propagation(x, theta):
"""
Implement the linear forward propagation (compute J) presented in Figure 1 (J(theta) = theta * x)
Arguments:
x -- a real-valued input
theta -- our parameter, a real number as well
Returns:
J -- the value of function J, computed using the formula J(theta) = theta * x
"""
J = theta * x
return J
x, theta = 2, 4
J = forward_propagation(x, theta)
print("J = " + str(J))
def backward_propagation(x, theta):
"""
Computes the derivative of J with respect to theta (see Figure 1).
Arguments:
x -- a real-valued input
theta -- our parameter, a real number as well
Returns:
dtheta -- the gradient of the cost with respect to theta
"""
dtheta = x
return dtheta
x, theta = 2, 4
dtheta = backward_propagation(x, theta)
print("dtheta = " + str(dtheta))
def gradient_check(x, theta, epsilon=1e-7):
"""
    Implement gradient checking for the 1-D example of Figure 1: compare the
    analytic gradient from backward_propagation against a numerical estimate.
Arguments:
x -- a real-valued input
theta -- our parameter, a real number as well
epsilon -- tiny shift to the input to compute approximated gradient with formula(1)
Returns:
difference -- difference (2) between the approximated gradient and the backward propagation gradient
"""
thetaplus = theta + epsilon # Step 1
thetaminus = theta - epsilon # Step 2
J_plus = forward_propagation(x, thetaplus) # Step 3
J_minus = forward_propagation(x, thetaminus) # Step 4
gradapprox = (J_plus - J_minus) / (2 * epsilon) # Step 5
    grad = backward_propagation(x, theta)
numerator = np.linalg.norm(grad - gradapprox) # Step 1'
denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox) # Step 2'
difference = numerator / denominator # Step 3'
if difference < 1e-7:
print("The gradient is correct!")
else:
print("The gradient is wrong!")
return difference
x, theta = 2, 4
difference = gradient_check(x, theta)
print("difference = " + str(difference))
def forward_propagation_n(X, Y, parameters):
"""
Implements the forward propagation (and computes the cost) presented in Figure 3.
Arguments:
X -- training set for m examples
Y -- labels for m examples
parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3":
W1 -- weight matrix of shape (5, 4)
b1 -- bias vector of shape (5, 1)
W2 -- weight matrix of shape (3, 5)
b2 -- bias vector of shape (3, 1)
W3 -- weight matrix of shape (1, 3)
b3 -- bias vector of shape (1, 1)
Returns:
cost -- the cost function (logistic cost for one example)
"""
# retrieve parameters
m = X.shape[1]
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
W3 = parameters["W3"]
b3 = parameters["b3"]
# LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID
Z1 = np.dot(W1, X) + b1
A1 = relu(Z1)
Z2 = np.dot(W2, A1) + b2
A2 = relu(Z2)
Z3 = np.dot(W3, A2) + b3
A3 = sigmoid(Z3)
# Cost
logprobs = np.multiply(-np.log(A3), Y) + np.multiply(-np.log(1 - A3), 1 - Y)
cost = 1. / m * np.sum(logprobs)
cache = (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3)
return cost, cache
def backward_propagation_n(X, Y, cache):
"""
Implement the backward propagation presented in figure 2.
Arguments:
X -- input datapoint, of shape (input size, 1)
Y -- true "label"
cache -- cache output from forward_propagation_n()
Returns:
gradients -- A dictionary with the gradients of the cost with respect to each parameter, activation and pre-activation variables.
"""
m = X.shape[1]
(Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3) = cache
dZ3 = A3 - Y
dW3 = 1. / m * np.dot(dZ3, A2.T)
db3 = 1. / m * np.sum(dZ3, axis=1, keepdims=True)
dA2 = np.dot(W3.T, dZ3)
dZ2 = np.multiply(dA2, np.int64(A2 > 0))
dW2 = 1. / m * np.dot(dZ2, A1.T)
db2 = 1. / m * np.sum(dZ2, axis=1, keepdims=True)
dA1 = np.dot(W2.T, dZ2)
dZ1 = np.multiply(dA1, np.int64(A1 > 0))
dW1 = 1. / m * np.dot(dZ1, X.T)
db1 = 1. / m * np.sum(dZ1, axis=1, keepdims=True)
gradients = {"dZ3": dZ3, "dW3": dW3, "db3": db3,
"dA2": dA2, "dZ2": dZ2, "dW2": dW2, "db2": db2,
"dA1": dA1, "dZ1": dZ1, "dW1": dW1, "db1": db1}
return gradients
def gradient_check_n(parameters, gradients, X, Y, epsilon=1e-7):
"""
Checks if backward_propagation_n computes correctly the gradient of the cost output by forward_propagation_n
Arguments:
parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3":
grad -- output of backward_propagation_n, contains gradients of the cost with respect to the parameters.
x -- input datapoint, of shape (input size, 1)
y -- true "label"
epsilon -- tiny shift to the input to compute approximated gradient with formula(1)
Returns:
difference -- difference (2) between the approximated gradient and the backward propagation gradient
"""
# Set-up variables
parameters_values, _ = dictionary_to_vector(parameters)
grad = gradients_to_vector(gradients)
num_parameters = parameters_values.shape[0]
J_plus = np.zeros((num_parameters, 1))
J_minus = np.zeros((num_parameters, 1))
gradapprox = np.zeros((num_parameters, 1))
# Compute gradapprox
for i in range(num_parameters):
thetaplus = np.copy(parameters_values) # Step 1
        thetaplus[i][0] = thetaplus[i][0] + epsilon  # Step 2
J_plus[i], _ = forward_propagation_n(X, Y, vector_to_dictionary(thetaplus)) # Step 3
thetaminus = np.copy(parameters_values) # Step 1
        thetaminus[i][0] = thetaminus[i][0] - epsilon  # Step 2
J_minus[i], _ = forward_propagation_n(X, Y, vector_to_dictionary(thetaminus)) # Step 3
gradapprox[i] = (J_plus[i] - J_minus[i]) / (2 * epsilon)
numerator = np.linalg.norm(grad - gradapprox) # Step 1'
denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox) # Step 2'
difference = numerator / denominator # Step 3'
if difference > 1e-7:
print(
"\033[93m" + "There is a mistake in the backward propagation! difference = " + str(difference) + "\033[0m")
else:
print(
"\033[92m" + "Your backward propagation works perfectly fine! difference = " + str(difference) + "\033[0m")
return difference
X, Y, parameters = gradient_check_n_test_case()
cost, cache = forward_propagation_n(X, Y, parameters)
gradients = backward_propagation_n(X, Y, cache)
difference = gradient_check_n(parameters, gradients, X, Y)
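# Rule of thumb for reading the result: with epsilon = 1e-7, a difference of
# about 1e-7 or less suggests backprop is correct, while values around 1e-3
# or larger almost always indicate a bug. These are heuristics, not proofs.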
| adexin/Python-Machine-Learning-Samples | Other_samples/Gradient_check/gradient_check.py | Python | mit | 7,033 | 0.002702 |
import random, time, pygame, sys
from pygame.locals import *
FPS = 25
WINDOWWIDTH = 640
WINDOWHEIGHT = 480
BOXSIZE = 20
BOARDWIDTH = 10
BOARDHEIGHT = 20
BLANK = '.'
MOVESIDEWAYSFREQ = 0.15
MOVEDOWNFREQ = 0.1
XMARGIN = int((WINDOWWIDTH - BOARDWIDTH * BOXSIZE) / 2)
TOPMARGIN = WINDOWHEIGHT - (BOARDHEIGHT * BOXSIZE) - 5
WHITE = (255, 255, 255)
GRAY = (185, 185, 185)
BLACK = ( 0, 0, 0)
RED = (155, 0, 0)
LIGHTRED = (175, 20, 20)
GREEN = ( 0, 155, 0)
LIGHTGREEN = ( 20, 175, 20)
BLUE = ( 0, 0, 155)
LIGHTBLUE = ( 20, 20, 175)
YELLOW = (155, 155, 0)
LIGHTYELLOW = (175, 175, 20)
PURPLE = (160, 32, 240)
LIGHTPURPLE = (188, 100, 104)
BORDERCOLOR = WHITE
BGCOLOR = GRAY
TEXTCOLOR = WHITE
TEXTSHADOWCOLOR = BLACK
COLORS = ( BLUE, GREEN, RED, YELLOW, PURPLE)
LIGHTCOLORS = (LIGHTBLUE, LIGHTGREEN, LIGHTRED, LIGHTYELLOW, LIGHTPURPLE)
assert len(COLORS) == len(LIGHTCOLORS)
TEMPLATEWIDTH = 5
TEMPLATEHEIGHT = 5
tetris_img = pygame.image.load('tetris_00.jpg')
tetris = pygame.transform.scale(tetris_img, (WINDOWWIDTH, WINDOWHEIGHT))
S_SHAPE_TEMPLATE = [['.....',
'.....',
'..OO.',
'.OO..',
'.....'],
['.....',
'..O..',
'..OO.',
'...O.',
'.....']]
Z_SHAPE_TEMPLATE = [['.....',
'.....',
'.OO..',
'..OO.',
'.....'],
['.....',
'..O..',
'.OO..',
'.O...',
'.....']]
I_SHAPE_TEMPLATE = [['..O..',
'..O..',
'..O..',
'..O..',
'.....'],
['.....',
'.....',
'OOOO.',
'.....',
'.....']]
O_SHAPE_TEMPLATE = [['.....',
'.....',
'.OO..',
'.OO..',
'.....']]
J_SHAPE_TEMPLATE = [['.....',
'.O...',
'.OOO.',
'.....',
'.....'],
['.....',
'..OO.',
'..O..',
'..O..',
'.....'],
['.....',
'.....',
'.OOO.',
'...O.',
'.....'],
['.....',
'..O..',
'..O..',
'.OO..',
'.....']]
L_SHAPE_TEMPLATE = [['.....',
'...O.',
'.OOO.',
'.....',
'.....'],
['.....',
'..O..',
'..O..',
'..OO.',
'.....'],
['.....',
'.....',
'.OOO.',
'.O...',
'.....'],
['.....',
'.OO..',
'..O..',
'..O..',
'.....']]
T_SHAPE_TEMPLATE = [['.....',
'..O..',
'.OOO.',
'.....',
'.....'],
['.....',
'..O..',
'..OO.',
'..O..',
'.....'],
['.....',
'.....',
'.OOO.',
'..O..',
'.....'],
['.....',
'..O..',
'.OO..',
'..O..',
'.....']]
PIECES = {'S': S_SHAPE_TEMPLATE,
'Z': Z_SHAPE_TEMPLATE,
'J': J_SHAPE_TEMPLATE,
'L': L_SHAPE_TEMPLATE,
'I': I_SHAPE_TEMPLATE,
'O': O_SHAPE_TEMPLATE,
'T': T_SHAPE_TEMPLATE}
def main():
global FPSCLOCK, DISPLAYSURF, BASICFONT, BIGFONT
pygame.init()
FPSCLOCK = pygame.time.Clock()
DISPLAYSURF = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
BASICFONT = pygame.font.Font('aa.ttf', 18)
BIGFONT = pygame.font.Font('aa.ttf', 100)
pygame.display.set_caption('OK! 테트리스')
DISPLAYSURF.blit(tetris, (0,0))
showTextScreen('OK! 테트리스')
while True:
        pygame.mixer.music.load('summer.mp3')
pygame.mixer.music.play(-1, 0.0)
runGame()
pygame.mixer.music.stop()
showTextScreen('죽었죠!')
def runGame():
board = getBlankBoard()
lastMoveDownTime = time.time()
lastMoveSidewaysTime = time.time()
lastFallTime = time.time()
movingDown = False
movingLeft = False
movingRight = False
score = 0
level, fallFreq = calculateLevelAndFallFreq(score)
fallingPiece = getNewPiece()
nextPiece = getNewPiece()
while True:
if fallingPiece == None:
fallingPiece = nextPiece
nextPiece = getNewPiece()
lastFallTime = time.time()
if not isValidPosition(board, fallingPiece):
return
checkForQuit()
for event in pygame.event.get():
if event.type == KEYUP:
if (event.key == K_p):
DISPLAYSURF.fill(BGCOLOR)
pygame.mixer.music.stop()
showTextScreen('중지')
pygame.mixer.music.play(-1, 0.0)
lastFallTime = time.time()
lastMoveDownTime = time.time()
lastMoveSidewaysTime = time.time()
elif (event.key == K_LEFT or event.key == K_a):
movingLeft = False
elif (event.key == K_RIGHT or event.key == K_d):
movingRight = False
elif (event.key == K_DOWN or event.key == K_s):
movingDown = False
elif event.type == KEYDOWN:
if (event.key == K_LEFT or event.key == K_a) and isValidPosition(board, fallingPiece, adjX=-1):
fallingPiece['x'] -= 1
movingLeft = True
movingRight = False
lastMoveSidewaysTime = time.time()
elif (event.key == K_RIGHT or event.key == K_d) and isValidPosition(board, fallingPiece, adjX=1):
fallingPiece['x'] += 1
movingRight = True
movingLeft = False
lastMoveSidewaysTime = time.time()
elif (event.key == K_UP or event.key == K_w):
fallingPiece['rotation'] = (fallingPiece['rotation'] + 1) % len(PIECES[fallingPiece['shape']])
if not isValidPosition(board, fallingPiece):
fallingPiece['rotation'] = (fallingPiece['rotation'] - 1) % len(PIECES[fallingPiece['shape']])
elif (event.key == K_q):
fallingPiece['rotation'] = (fallingPiece['rotation'] - 1) % len(PIECES[fallingPiece['shape']])
if not isValidPosition(board, fallingPiece):
fallingPiece['rotation'] = (fallingPiece['rotation'] + 1) % len(PIECES[fallingPiece['shape']])
elif (event.key == K_DOWN or event.key == K_s):
movingDown = True
if isValidPosition(board, fallingPiece, adjY=1):
fallingPiece['y'] += 1
lastMoveDownTime = time.time()
elif event.key == K_SPACE:
movingDown = False
movingLeft = False
movingRight = False
for i in range(1, BOARDHEIGHT):
if not isValidPosition(board, fallingPiece, adjY=i):
break
fallingPiece['y'] += i - 1
if (movingLeft or movingRight) and time.time() - lastMoveSidewaysTime > MOVESIDEWAYSFREQ:
if movingLeft and isValidPosition(board, fallingPiece, adjX=-1):
fallingPiece['x'] -= 1
elif movingRight and isValidPosition(board, fallingPiece, adjX=1):
fallingPiece['x'] += 1
lastMoveSidewaysTime = time.time()
if movingDown and time.time() - lastMoveDownTime > MOVEDOWNFREQ and isValidPosition(board, fallingPiece, adjY=1):
fallingPiece['y'] += 1
lastMoveDownTime = time.time()
if time.time() - lastFallTime > fallFreq:
if not isValidPosition(board, fallingPiece, adjY=1):
addToBoard(board, fallingPiece)
score += removeCompleteLines(board)
level, fallFreq = calculateLevelAndFallFreq(score)
fallingPiece = None
else:
fallingPiece['y'] += 1
lastFallTime = time.time()
DISPLAYSURF.fill(BGCOLOR)
drawBoard(board)
drawStatus(score, level)
drawNextPiece(nextPiece)
if fallingPiece != None:
drawPiece(fallingPiece)
pygame.display.update()
FPSCLOCK.tick(FPS)
def makeTextObjs(text, font, color):
surf = font.render(text, True, color)
return surf, surf.get_rect()
def terminate():
pygame.quit()
sys.exit()
def checkForKeyPress():
checkForQuit()
for event in pygame.event.get([KEYDOWN, KEYUP]):
if event.type == KEYDOWN:
continue
return event.key
return None
def showTextScreen(text):
titleSurf, titleRect = makeTextObjs(text, BIGFONT, TEXTSHADOWCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2))
DISPLAYSURF.blit(titleSurf, titleRect)
titleSurf, titleRect = makeTextObjs(text, BIGFONT, TEXTCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2) - 3, int(WINDOWHEIGHT / 2) - 3)
DISPLAYSURF.blit(titleSurf, titleRect)
pressKeySurf, pressKeyRect = makeTextObjs('아무키나 누르시오.', BASICFONT, TEXTCOLOR)
pressKeyRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2) + 100)
DISPLAYSURF.blit(pressKeySurf, pressKeyRect)
pressKeySurf, pressKeyRect = makeTextObjs('P = 중지 ', BASICFONT, TEXTCOLOR)
pressKeyRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2) + 150)
DISPLAYSURF.blit(pressKeySurf, pressKeyRect)
while checkForKeyPress() == None:
pygame.display.update()
FPSCLOCK.tick()
def checkForQuit():
for event in pygame.event.get(QUIT):
terminate()
for event in pygame.event.get(KEYUP):
if event.key == K_ESCAPE:
terminate()
pygame.event.post(event)
def calculateLevelAndFallFreq(score):
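    # One level per 10 points removed; each level shaves 0.02 s off the fall
    # interval, e.g. score 25 -> level 3 -> fallFreq = 0.27 - 0.06 = 0.21 s.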
level = int(score / 10) + 1
fallFreq = 0.27 - (level * 0.02)
return level, fallFreq
def getNewPiece():
shape = random.choice(list(PIECES.keys()))
newPiece = {'shape': shape,
'rotation': random.randint(0, len(PIECES[shape]) - 1),
'x': int(BOARDWIDTH / 2) - int(TEMPLATEWIDTH / 2),
'y': -2,
'color': random.randint(0, len(COLORS)-1)}
return newPiece
def addToBoard(board, piece):
for x in range(TEMPLATEWIDTH):
for y in range(TEMPLATEHEIGHT):
if PIECES[piece['shape']][piece['rotation']][y][x] != BLANK:
board[x + piece['x']][y + piece['y']] = piece['color']
def getBlankBoard():
board = []
for i in range(BOARDWIDTH):
board.append([BLANK] * BOARDHEIGHT)
return board
def isOnBoard(x, y):
return x >= 0 and x < BOARDWIDTH and y < BOARDHEIGHT
def isValidPosition(board, piece, adjX=0, adjY=0):
for x in range(TEMPLATEWIDTH):
for y in range(TEMPLATEHEIGHT):
isAboveBoard = y + piece['y'] + adjY < 0
if isAboveBoard or PIECES[piece['shape']][piece['rotation']][y][x] == BLANK:
continue
if not isOnBoard(x + piece['x'] + adjX, y + piece['y'] + adjY):
return False
if board[x + piece['x'] + adjX][y + piece['y'] + adjY] != BLANK:
return False
return True
def isCompleteLine(board, y):
for x in range(BOARDWIDTH):
if board[x][y] == BLANK:
return False
return True
def removeCompleteLines(board):
numLinesRemoved = 0
y = BOARDHEIGHT - 1
while y >= 0:
if isCompleteLine(board, y):
for pullDownY in range(y, 0, -1):
for x in range(BOARDWIDTH):
board[x][pullDownY] = board[x][pullDownY-1]
for x in range(BOARDWIDTH):
board[x][0] = BLANK
numLinesRemoved += 1
else:
y -= 1
return numLinesRemoved
def convertToPixelCoords(boxx, boxy):
return (XMARGIN + (boxx * BOXSIZE)), (TOPMARGIN + (boxy * BOXSIZE))
def drawBox(boxx, boxy, color, pixelx=None, pixely=None):
if color == BLANK:
return
if pixelx == None and pixely == None:
pixelx, pixely = convertToPixelCoords(boxx, boxy)
pygame.draw.rect(DISPLAYSURF, COLORS[color], (pixelx + 1, pixely + 1, BOXSIZE - 1, BOXSIZE - 1))
pygame.draw.rect(DISPLAYSURF, LIGHTCOLORS[color], (pixelx + 1, pixely + 1, BOXSIZE - 4, BOXSIZE - 4))
def drawBoard(board):
pygame.draw.rect(DISPLAYSURF, BORDERCOLOR, (XMARGIN - 3, TOPMARGIN - 7, (BOARDWIDTH * BOXSIZE) + 8, (BOARDHEIGHT * BOXSIZE) + 8), 5)
pygame.draw.rect(DISPLAYSURF, BGCOLOR, (XMARGIN, TOPMARGIN, BOXSIZE * BOARDWIDTH, BOXSIZE * BOARDHEIGHT))
for x in range(BOARDWIDTH):
for y in range(BOARDHEIGHT):
drawBox(x, y, board[x][y])
def drawStatus(score, level):
scoreSurf = BASICFONT.render('점수: %s' % score, True, TEXTCOLOR)
scoreRect = scoreSurf.get_rect()
scoreRect.topleft = (WINDOWWIDTH - 600, 20)
DISPLAYSURF.blit(scoreSurf, scoreRect)
levelSurf = BASICFONT.render('레벨: %s' % level, True, TEXTCOLOR)
levelRect = levelSurf.get_rect()
levelRect.topleft = (WINDOWWIDTH - 600, 50)
DISPLAYSURF.blit(levelSurf, levelRect)
def drawPiece(piece, pixelx=None, pixely=None):
shapeToDraw = PIECES[piece['shape']][piece['rotation']]
if pixelx == None and pixely == None:
pixelx, pixely = convertToPixelCoords(piece['x'], piece['y'])
for x in range(TEMPLATEWIDTH):
for y in range(TEMPLATEHEIGHT):
if shapeToDraw[y][x] != BLANK:
drawBox(None, None, piece['color'], pixelx + (x * BOXSIZE), pixely + (y * BOXSIZE))
def drawNextPiece(piece):
nextSurf = BASICFONT.render('미리보기:', True, TEXTCOLOR)
nextRect = nextSurf.get_rect()
nextRect.topleft = (WINDOWWIDTH - 600, 100)
DISPLAYSURF.blit(nextSurf, nextRect)
drawPiece(piece, pixelx=WINDOWWIDTH-600, pixely=120)
if __name__ == '__main__':
main() | saintdragon2/python-3-lecture-2015 | civil_mid_final/알았조/tetris a.py | Python | mit | 15,222 | 0.004222 |
from django.conf.urls import url
from django.contrib.auth.decorators import permission_required
from . import views
urlpatterns = [
url (r'^file/select/$', views.FileSelect.as_view(), name='file-select'),
# raise_exception=True => 403 Forbidden instead of redirect to /admin
url (r'^page/(?P<pk>\d+)/update/$',
permission_required ('cms.change_page', raise_exception=True)(
views.PageUpdate.as_view()),
name='page-update'),
url (r'^content/(?P<pk>\d+)/update/$',
permission_required ('cms.change_content', raise_exception=True)(
views.ContentUpdate.as_view()),
name='content-update'),
url (r'^info/$', views.InfoList.as_view(), name='info-list'),
url (r'^info/(?P<pk>\d+)/$', views.InfoDetail.as_view(), name='info-detail'),
]
| normalnorway/normal.no | django/apps/cms/urls.py | Python | gpl-3.0 | 831 | 0.009627 |
import pytest
from click.testing import CliRunner
from parkour import cli
import hashlib

def file_checksums_equal(file1, file2):
    # read in binary mode so the digest is independent of platform newlines
    with open(file1, 'rb') as f:
        checksum1 = hashlib.md5(f.read()).digest()
    with open(file2, 'rb') as f:
        checksum2 = hashlib.md5(f.read()).digest()
    return checksum1 == checksum2
def test_trimmed_output():
runner = CliRunner()
result = runner.invoke(cli.main, ['-a', 'fastq/s3_1.fastq.gz', '-b', 'fastq/s3_2.fastq.gz', '-u', 'trim'])
print(result.output)
assert file_checksums_equal('p.s3_1.trim.fastq', 'correct_output/p.s3_1.trim.fastq')
| buenrostrolab/proatac | tests/test_cli.py | Python | mit | 575 | 0.015652 |
#!/usr/bin/python3
# gather.py
lookup_terms = [
{'program' :'email_outlook.py',
'known_as':['email', 'mail', 'outlook', 'messages',
'sent items', 'inbox', 'spam']
},
{'program' :'sys_PC_usage.py',
'known_as':['PC usage', 'Application logging']
},
{'program' :'sys_process_windows.py',
'known_as':['process']
},
#{'program' :'collect_bookmarks.py',
# 'known_as':['chrome', 'bookmarks', 'browsing history', 'messages',
# 'sent items', 'inbox', 'spam']
#},
]
def TEST():
"""
This is the main function gather which defines
what programs are setup to collect and the data
around them - it doesnt store access details like
passwords and where to save outputs but rather is
a simple structure to let the calling module (such
as AIKIF, vais) know what is available and how to
run it
"""
for l in lookup_terms:
print(l['program'] + ' = ', ','.join([t for t in l['known_as']]))
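
# Hedged sketch (not part of the original module): how a caller such as
# AIKIF might map a user-supplied term to the collector program handling it.
def find_program(term):
    """Return the first program whose known_as list contains term, else None."""
    for entry in lookup_terms:
        if term.lower() in (t.lower() for t in entry['known_as']):
            return entry['program']
    return None
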
TEST() | acutesoftware/rawdata | rawdata/gather.py | Python | mit | 1,024 | 0.027344 |
#!/usr/bin/env python
"""
APM automatic test suite
Andrew Tridgell, October 2011
"""
from __future__ import print_function
import atexit
import fnmatch
import glob
import optparse
import os
import shutil
import signal
import sys
import time
import traceback
import apmrover2
import arducopter
import arduplane
import quadplane
import ardusub
from pysim import util
from pymavlink import mavutil
from pymavlink.generator import mavtemplate
def buildlogs_dirpath():
return os.getenv("BUILDLOGS", util.reltopdir("../buildlogs"))
def buildlogs_path(path):
'''return a string representing path in the buildlogs directory'''
bits = [buildlogs_dirpath()]
if isinstance(path, list):
bits.extend(path)
else:
bits.append(path)
return os.path.join(*bits)
def get_default_params(atype, binary):
"""Get default parameters."""
# use rover simulator so SITL is not starved of input
HOME = mavutil.location(40.071374969556928, -105.22978898137808, 1583.702759, 246)
if "plane" in binary or "rover" in binary:
frame = "rover"
else:
frame = "+"
home = "%f,%f,%u,%u" % (HOME.lat, HOME.lng, HOME.alt, HOME.heading)
sitl = util.start_SITL(binary, wipe=True, model=frame, home=home, speedup=10, unhide_parameters=True)
mavproxy = util.start_MAVProxy_SITL(atype)
print("Dumping defaults")
    idx = mavproxy.expect(['Please Run Setup', r'Saved [0-9]+ parameters to (\S+)'])
if idx == 0:
# we need to restart it after eeprom erase
util.pexpect_close(mavproxy)
util.pexpect_close(sitl)
sitl = util.start_SITL(binary, model=frame, home=home, speedup=10)
mavproxy = util.start_MAVProxy_SITL(atype)
        idx = mavproxy.expect(r'Saved [0-9]+ parameters to (\S+)')
parmfile = mavproxy.match.group(1)
dest = buildlogs_path('%s-defaults.parm' % atype)
shutil.copy(parmfile, dest)
util.pexpect_close(mavproxy)
util.pexpect_close(sitl)
print("Saved defaults for %s to %s" % (atype, dest))
return True
def build_all():
"""Run the build_all.sh script."""
print("Running build_all.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), directory=util.reltopdir('.')) != 0:
print("Failed build_all.sh")
return False
return True
def build_binaries():
"""Run the build_binaries.py script."""
print("Running build_binaries.py")
# copy the script as it changes git branch, which can change the script while running
orig = util.reltopdir('Tools/scripts/build_binaries.py')
copy = util.reltopdir('./build_binaries.py')
shutil.copy2(orig, copy)
# also copy generate_manifest library:
orig_gm = util.reltopdir('Tools/scripts/generate_manifest.py')
copy_gm = util.reltopdir('./generate_manifest.py')
shutil.copy2(orig_gm, copy_gm)
if util.run_cmd(copy, directory=util.reltopdir('.')) != 0:
print("Failed build_binaries.py")
return False
return True
def build_devrelease():
"""Run the build_devrelease.sh script."""
print("Running build_devrelease.sh")
# copy the script as it changes git branch, which can change the script while running
orig = util.reltopdir('Tools/scripts/build_devrelease.sh')
copy = util.reltopdir('./build_devrelease.sh')
shutil.copy2(orig, copy)
if util.run_cmd(copy, directory=util.reltopdir('.')) != 0:
print("Failed build_devrelease.sh")
return False
return True
def build_examples():
"""Build examples."""
for target in 'px4-v2', 'navio':
print("Running build.examples for %s" % target)
try:
util.build_examples(target)
except Exception as e:
print("Failed build_examples on board=%s" % target)
print(str(e))
return False
return True
def build_parameters():
"""Run the param_parse.py script."""
print("Running param_parse.py")
for vehicle in 'ArduPlane', 'ArduCopter', 'ArduSub', 'APMrover2', 'AntennaTracker':
if util.run_cmd([util.reltopdir('Tools/autotest/param_metadata/param_parse.py'), '--vehicle', vehicle], directory=util.reltopdir('.')) != 0:
print("Failed param_parse.py (%s)" % vehicle)
return False
return True
def convert_gpx():
"""Convert any tlog files to GPX and KML."""
mavlog = glob.glob(buildlogs_path("*.tlog"))
for m in mavlog:
util.run_cmd(util.reltopdir("modules/mavlink/pymavlink/tools/mavtogpx.py") + " --nofixcheck " + m)
gpx = m + '.gpx'
kml = m + '.kml'
util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (m, m))
return True
def test_prerequisites():
"""Check we have the right directories and tools to run tests."""
print("Testing prerequisites")
util.mkdir_p(buildlogs_dirpath())
return True
def alarm_handler(signum, frame):
"""Handle test timeout."""
global results, opts
try:
results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
util.pexpect_close_all()
convert_gpx()
write_fullresults()
os.killpg(0, signal.SIGKILL)
except Exception:
pass
sys.exit(1)
def should_run_step(step):
"""See if a step should be skipped."""
for skip in skipsteps:
if fnmatch.fnmatch(step.lower(), skip.lower()):
return False
return True
__bin_names = {
"ArduCopter" : "arducopter",
"ArduPlane" : "arduplane",
"APMrover2" : "ardurover",
"AntennaTracker" : "antennatracker",
"CopterAVC" : "arducopter-heli",
"QuadPlane" : "arduplane",
"ArduSub" : "ardusub"
}
def binary_path(step, debug=False):
try:
vehicle = step.split(".")[1]
except Exception:
return None
if vehicle in __bin_names:
binary_name = __bin_names[vehicle]
else:
# cope with builds that don't have a specific binary
return None
if debug:
binary_basedir = "sitl-debug"
else:
binary_basedir = "sitl"
binary = util.reltopdir(os.path.join('build', binary_basedir, 'bin', binary_name))
if not os.path.exists(binary):
if os.path.exists(binary + ".exe"):
binary += ".exe"
else:
raise ValueError("Binary (%s) does not exist" % (binary,))
return binary
def run_step(step):
"""Run one step."""
# remove old logs
util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT')
if step == "prerequisites":
return test_prerequisites()
build_opts = {
"j": opts.j,
"debug": opts.debug,
"clean": not opts.no_clean,
"configure": not opts.no_configure,
}
if step == 'build.ArduPlane':
return util.build_SITL('bin/arduplane', **build_opts)
if step == 'build.APMrover2':
return util.build_SITL('bin/ardurover', **build_opts)
if step == 'build.ArduCopter':
return util.build_SITL('bin/arducopter', **build_opts)
if step == 'build.AntennaTracker':
return util.build_SITL('bin/antennatracker', **build_opts)
if step == 'build.Helicopter':
return util.build_SITL('bin/arducopter-heli', **build_opts)
if step == 'build.ArduSub':
return util.build_SITL('bin/ardusub', **build_opts)
binary = binary_path(step, debug=opts.debug)
if step.startswith("default"):
vehicle = step[8:]
return get_default_params(vehicle, binary)
fly_opts = {
"viewerip": opts.viewerip,
"use_map": opts.map,
"valgrind": opts.valgrind,
"gdb": opts.gdb,
"gdbserver": opts.gdbserver,
}
if opts.speedup is not None:
fly_opts["speedup"] = opts.speedup
if step == 'fly.ArduCopter':
return arducopter.fly_ArduCopter(binary, frame=opts.frame, **fly_opts)
if step == 'fly.CopterAVC':
return arducopter.fly_CopterAVC(binary, **fly_opts)
if step == 'fly.ArduPlane':
return arduplane.fly_ArduPlane(binary, **fly_opts)
if step == 'fly.QuadPlane':
return quadplane.fly_QuadPlane(binary, **fly_opts)
if step == 'drive.APMrover2':
return apmrover2.drive_APMrover2(binary, frame=opts.frame, **fly_opts)
if step == 'dive.ArduSub':
return ardusub.dive_ArduSub(binary, **fly_opts)
if step == 'build.All':
return build_all()
if step == 'build.Binaries':
return build_binaries()
if step == 'build.DevRelease':
return build_devrelease()
if step == 'build.Examples':
return build_examples()
if step == 'build.Parameters':
return build_parameters()
if step == 'convertgpx':
return convert_gpx()
raise RuntimeError("Unknown step %s" % step)
class TestResult(object):
"""Test result class."""
def __init__(self, name, result, elapsed):
self.name = name
self.result = result
self.elapsed = "%.1f" % elapsed
class TestFile(object):
"""Test result file."""
def __init__(self, name, fname):
self.name = name
self.fname = fname
class TestResults(object):
"""Test results class."""
def __init__(self):
self.date = time.asctime()
self.githash = util.run_cmd('git rev-parse HEAD', output=True, directory=util.reltopdir('.')).strip()
self.tests = []
self.files = []
self.images = []
def add(self, name, result, elapsed):
"""Add a result."""
self.tests.append(TestResult(name, result, elapsed))
def addfile(self, name, fname):
"""Add a result file."""
self.files.append(TestFile(name, fname))
def addimage(self, name, fname):
"""Add a result image."""
self.images.append(TestFile(name, fname))
def addglob(self, name, pattern):
"""Add a set of files."""
for f in glob.glob(buildlogs_path(pattern)):
self.addfile(name, os.path.basename(f))
def addglobimage(self, name, pattern):
"""Add a set of images."""
for f in glob.glob(buildlogs_path(pattern)):
self.addimage(name, os.path.basename(f))
def write_webresults(results_to_write):
"""Write webpage results."""
t = mavtemplate.MAVTemplate()
for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
html = util.loadfile(h)
f = open(buildlogs_path(os.path.basename(h)), mode='w')
t.write(f, html, results_to_write)
f.close()
for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
shutil.copy(f, buildlogs_path(os.path.basename(f)))
def write_fullresults():
"""Write out full results set."""
global results
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*-log.bin')
results.addglob("MAVLink log", '*.tlog')
results.addglob("GPX track", '*.gpx')
# results common to all vehicles:
vehicle_files = [ ('{vehicle} build log', '{vehicle}.txt'),
('{vehicle} code size', '{vehicle}.sizes.txt'),
('{vehicle} stack sizes', '{vehicle}.framesizes.txt'),
('{vehicle} defaults', 'default_params/{vehicle}-defaults.parm'),
('{vehicle} core', '{vehicle}.core'),
('{vehicle} ELF', '{vehicle}.elf'),
]
vehicle_globs = [('{vehicle} log', '{vehicle}-*.BIN'),
]
for vehicle in 'ArduPlane','ArduCopter','APMrover2','AntennaTracker', 'ArduSub':
subs = { 'vehicle': vehicle }
for vehicle_file in vehicle_files:
description = vehicle_file[0].format(**subs)
filename = vehicle_file[1].format(**subs)
results.addfile(description, filename)
for vehicle_glob in vehicle_globs:
description = vehicle_glob[0].format(**subs)
            glob_pattern = vehicle_glob[1].format(**subs)  # avoid shadowing the glob module
            results.addglob(description, glob_pattern)
results.addglob("CopterAVC log", 'CopterAVC-*.BIN')
results.addfile("CopterAVC core", 'CopterAVC.core')
results.addglob('APM:Libraries documentation', 'docs/libraries/index.html')
results.addglob('APM:Plane documentation', 'docs/ArduPlane/index.html')
results.addglob('APM:Copter documentation', 'docs/ArduCopter/index.html')
results.addglob('APM:Rover documentation', 'docs/APMrover2/index.html')
results.addglob('APM:Sub documentation', 'docs/ArduSub/index.html')
results.addglobimage("Flight Track", '*.png')
write_webresults(results)
def check_logs(step):
"""Check for log files from a step."""
print("check step: ", step)
if step.startswith('fly.'):
vehicle = step[4:]
elif step.startswith('drive.'):
vehicle = step[6:]
else:
return
logs = glob.glob("logs/*.BIN")
for log in logs:
bname = os.path.basename(log)
newname = buildlogs_path("%s-%s" % (vehicle, bname))
print("Renaming %s to %s" % (log, newname))
shutil.move(log, newname)
corefile = "core"
if os.path.exists(corefile):
newname = buildlogs_path("%s.core" % vehicle)
print("Renaming %s to %s" % (corefile, newname))
shutil.move(corefile, newname)
try:
util.run_cmd('/bin/cp build/sitl/bin/* %s' % buildlogs_dirpath(),
directory=util.reltopdir('.'))
except Exception:
print("Unable to save binary")
def run_tests(steps):
"""Run a list of steps."""
global results
passed = True
failed = []
for step in steps:
util.pexpect_close_all()
t1 = time.time()
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
try:
if run_step(step):
results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
check_logs(step)
else:
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
passed = False
failed.append(step)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
except Exception as msg:
passed = False
failed.append(step)
print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
traceback.print_exc(file=sys.stdout)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
check_logs(step)
if not passed:
print("FAILED %u tests: %s" % (len(failed), failed))
util.pexpect_close_all()
write_fullresults()
return passed
if __name__ == "__main__":
############## main program #############
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
parser.add_option("--speedup", default=None, type='int', help='speedup to run the simulations at')
parser.add_option("--valgrind", default=False, action='store_true', help='run ArduPilot binaries under valgrind')
parser.add_option("--gdb", default=False, action='store_true', help='run ArduPilot binaries under gdb')
parser.add_option("--debug", default=False, action='store_true', help='make built binaries debug binaries')
parser.add_option("-j", default=None, type='int', help='build CPUs')
parser.add_option("--frame", type='string', default=None, help='specify frame type')
parser.add_option("--gdbserver", default=False, action='store_true', help='run ArduPilot binaries under gdbserver')
parser.add_option("--no-clean", default=False, action='store_true', help='do not clean before building', dest="no_clean")
parser.add_option("--no-configure", default=False, action='store_true', help='do not configure before building', dest="no_configure")
opts, args = parser.parse_args()
steps = [
'prerequisites',
'build.All',
'build.Binaries',
# 'build.DevRelease',
'build.Examples',
'build.Parameters',
'build.ArduPlane',
'defaults.ArduPlane',
'fly.ArduPlane',
'fly.QuadPlane',
'build.APMrover2',
'defaults.APMrover2',
'drive.APMrover2',
'build.ArduCopter',
'defaults.ArduCopter',
'fly.ArduCopter',
'build.Helicopter',
'fly.CopterAVC',
'build.AntennaTracker',
'build.ArduSub',
'defaults.ArduSub',
'dive.ArduSub',
'convertgpx',
]
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
for step in steps:
print(step)
sys.exit(0)
util.mkdir_p(buildlogs_dirpath())
lckfile = buildlogs_path('autotest.lck')
print("lckfile=%s" % repr(lckfile))
lck = util.lock_file(lckfile)
if lck is None:
print("autotest is locked - exiting. lckfile=(%s)" % (lckfile,))
sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
# allow a wildcard list of steps
matched = []
for a in args:
matches = [step for step in steps if fnmatch.fnmatch(step.lower(), a.lower())]
if not len(matches):
print("No steps matched {}".format(a))
sys.exit(1)
matched.extend(matches)
steps = matched
# skip steps according to --skip option:
steps_to_run = [ s for s in steps if should_run_step(s) ]
results = TestResults()
try:
if not run_tests(steps_to_run):
sys.exit(1)
except KeyboardInterrupt:
util.pexpect_close_all()
sys.exit(1)
except Exception:
# make sure we kill off any children
util.pexpect_close_all()
raise
| dgrat/ardupilot | Tools/autotest/autotest.py | Python | gpl-3.0 | 18,627 | 0.004832 |
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
MoleculeType entity classes.
"""
from everest.entities.base import Entity
from everest.entities.utils import slug_from_string
__docformat__ = "reStructuredText en"
__all__ = ['MoleculeType',
'MOLECULE_TYPE_IDS']
class MOLECULE_TYPE_IDS(object):
"""
Known molecule types.
"""
# FIXME: reconcile with `thelma.data.moleculetype` # pylint:disable=W0511
SSDNA = 'SSDNA'
AMPLICON = 'AMPLICON'
SIRNA = 'SIRNA'
COMPOUND = 'COMPOUND'
LONG_DSRNA = 'LONG_DSRNA'
ANTI_MIR = 'ANTI_MIR'
ESI_RNA = 'ESI_RNA'
MIRNA_INHI = 'MIRNA_INHI'
CLND_DSDNA = 'CLND_DSDNA'
MIRNA_MIMI = 'MIRNA_MIMI'
__ALL = [nm for nm in sorted(locals().keys()) if not nm.startswith('_')]
@classmethod
def is_known_type(cls, molecule_type_name):
"""
Checks whether the given molecule type name is a known one.
"""
return molecule_type_name in cls.__ALL
class MoleculeType(Entity):
"""
    Instances of this class describe molecule types, such as 'siRNA'.
"""
#: The name of the molecule type.
name = None
#: A more detailed description.
description = None
    #: A number indicating the time it takes for molecules of this type to
#: thaw.
thaw_time = None
#: A list of modification chemical structures
#: (:class:`thelma.entities.chemicalstructure.ChemicalStructure`)
#: that are associated with this molecule type.
modifications = None
#: The default stock concentration for this molecule type.
default_stock_concentration = None
def __init__(self, name, default_stock_concentration,
description='', thaw_time=0, modifications=None, **kw):
        if 'id' not in kw:
kw['id'] = name.lower()
Entity.__init__(self, **kw)
self.name = name
self.default_stock_concentration = default_stock_concentration
self.description = description
self.thaw_time = thaw_time
        if modifications is None:
            modifications = []
        self.modifications = modifications
@property
def slug(self):
#: For instances of this class, the slug is derived from the
#: :attr:`name`.
return slug_from_string(self.name)
def __str__(self):
return self.id
def __repr__(self):
str_format = '<%s id: %s, name: %s, thaw_time: %s>'
params = (self.__class__.__name__, self.id, self.name, self.thaw_time)
return str_format % params
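
# Hedged usage sketch: constructing a molecule type record. The concentration
# unit is an assumption -- this module does not state one.
#
#   sirna = MoleculeType('siRNA', default_stock_concentration=50e-6,
#                        description='small interfering RNA', thaw_time=5)
#   print(sirna.slug)  # slug is derived from the name: 'sirna'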
| helixyte/TheLMA | thelma/entities/moleculetype.py | Python | mit | 2,617 | 0.001146 |
import os
def main():
import sys
# Separate the nose params and the pydev params.
pydev_params = []
other_test_framework_params = []
found_other_test_framework_param = None
NOSE_PARAMS = '--nose-params'
PY_TEST_PARAMS = '--py-test-params'
for arg in sys.argv[1:]:
if not found_other_test_framework_param and arg != NOSE_PARAMS and arg != PY_TEST_PARAMS:
pydev_params.append(arg)
else:
if not found_other_test_framework_param:
found_other_test_framework_param = arg
else:
other_test_framework_params.append(arg)
# Here we'll run either with nose or with the pydev_runfiles.
import pydev_runfiles
import pydev_runfiles_xml_rpc
import pydevd_constants
from pydevd_file_utils import _NormFile
DEBUG = 0
if DEBUG:
sys.stdout.write('Received parameters: %s\n' % (sys.argv,))
sys.stdout.write('Params for pydev: %s\n' % (pydev_params,))
if found_other_test_framework_param:
sys.stdout.write('Params for test framework: %s, %s\n' % (found_other_test_framework_param, other_test_framework_params))
try:
configuration = pydev_runfiles.parse_cmdline([sys.argv[0]] + pydev_params)
except:
sys.stderr.write('Command line received: %s\n' % (sys.argv,))
raise
pydev_runfiles_xml_rpc.InitializeServer(configuration.port) # Note that if the port is None, a Null server will be initialized.
NOSE_FRAMEWORK = 1
PY_TEST_FRAMEWORK = 2
try:
if found_other_test_framework_param:
test_framework = 0 # Default (pydev)
if found_other_test_framework_param == NOSE_PARAMS:
import nose
test_framework = NOSE_FRAMEWORK
elif found_other_test_framework_param == PY_TEST_PARAMS:
import pytest
test_framework = PY_TEST_FRAMEWORK
else:
raise ImportError()
else:
raise ImportError()
except ImportError:
if found_other_test_framework_param:
sys.stderr.write('Warning: Could not import the test runner: %s. Running with the default pydev unittest runner instead.\n' % (
found_other_test_framework_param,))
test_framework = 0
# Clear any exception that may be there so that clients don't see it.
# See: https://sourceforge.net/tracker/?func=detail&aid=3408057&group_id=85796&atid=577329
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
if test_framework == 0:
return pydev_runfiles.main(configuration) # Note: still doesn't return a proper value.
else:
# We'll convert the parameters to what nose or py.test expects.
# The supported parameters are:
# runfiles.py --config-file|-t|--tests <Test.test1,Test2> dirs|files --nose-params xxx yyy zzz
# (all after --nose-params should be passed directly to nose)
# In java:
# --tests = Constants.ATTR_UNITTEST_TESTS
# --config-file = Constants.ATTR_UNITTEST_CONFIGURATION_FILE
# The only thing actually handled here are the tests that we want to run, which we'll
# handle and pass as what the test framework expects.
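        # Hypothetical invocation illustrating the shape described above:
        #   runfiles.py --tests MyTest.test_one my_tests_dir --nose-params --verbosity=2
        # would run MyTest.test_one from my_tests_dir under nose, with
        # everything after --nose-params forwarded to nose untouched.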
py_test_accept_filter = {}
files_to_tests = configuration.files_to_tests
if files_to_tests:
# Handling through the file contents (file where each line is a test)
files_or_dirs = []
for file, tests in files_to_tests.items():
if test_framework == NOSE_FRAMEWORK:
for test in tests:
files_or_dirs.append(file + ':' + test)
elif test_framework == PY_TEST_FRAMEWORK:
file = _NormFile(file)
py_test_accept_filter[file] = tests
files_or_dirs.append(file)
else:
raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,))
else:
if configuration.tests:
# Tests passed (works together with the files_or_dirs)
files_or_dirs = []
for file in configuration.files_or_dirs:
if test_framework == NOSE_FRAMEWORK:
for t in configuration.tests:
files_or_dirs.append(file + ':' + t)
elif test_framework == PY_TEST_FRAMEWORK:
file = _NormFile(file)
py_test_accept_filter[file] = configuration.tests
files_or_dirs.append(file)
else:
raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,))
else:
# Only files or dirs passed (let it do the test-loading based on those paths)
files_or_dirs = configuration.files_or_dirs
argv = other_test_framework_params + files_or_dirs
if test_framework == NOSE_FRAMEWORK:
# Nose usage: http://somethingaboutorange.com/mrl/projects/nose/0.11.2/usage.html
# show_stdout_option = ['-s']
# processes_option = ['--processes=2']
argv.insert(0, sys.argv[0])
if DEBUG:
sys.stdout.write('Final test framework args: %s\n' % (argv[1:],))
import pydev_runfiles_nose
PYDEV_NOSE_PLUGIN_SINGLETON = pydev_runfiles_nose.StartPydevNosePluginSingleton(configuration)
argv.append('--with-pydevplugin')
# Return 'not' because it will return 'success' (so, exit == 0 if success)
return not nose.run(argv=argv, addplugins=[PYDEV_NOSE_PLUGIN_SINGLETON])
elif test_framework == PY_TEST_FRAMEWORK:
if DEBUG:
sys.stdout.write('Final test framework args: %s\n' % (argv,))
sys.stdout.write('py_test_accept_filter: %s\n' % (py_test_accept_filter,))
try:
xrange
except:
xrange = range
def dotted(p):
# Helper to convert path to have dots instead of slashes
return os.path.normpath(p).replace(os.sep, "/").replace('/', '.')
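            # e.g. dotted('/home/user/project') -> '.home.user.project'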
curr_dir = os.path.realpath('.')
curr_dotted = dotted(curr_dir) + '.'
# Overcome limitation on py.test:
# When searching conftest if we have a structure as:
# /my_package
# /my_package/conftest.py
# /my_package/tests
# /my_package/tests/test_my_package.py
# The test_my_package won't have access to the conftest contents from the
# test_my_package.py file unless the working dir is set to /my_package.
#
# See related issue (for which we work-around below):
# https://bitbucket.org/hpk42/pytest/issue/639/conftest-being-loaded-twice-giving
for path in sys.path:
path_dotted = dotted(path)
if curr_dotted.startswith(path_dotted):
os.chdir(path)
break
for i in xrange(len(argv)):
arg = argv[i]
# Workaround bug in py.test: if we pass the full path it ends up importing conftest
# more than once (so, always work with relative paths).
if os.path.isfile(arg) or os.path.isdir(arg):
from pydev_imports import relpath
try:
# May fail if on different drives
arg = relpath(arg)
except ValueError:
pass
else:
argv[i] = arg
# To find our runfile helpers (i.e.: plugin)...
d = os.path.dirname(__file__)
if d not in sys.path:
sys.path.insert(0, d)
import pickle, zlib, base64
# Update environment PYTHONPATH so that it finds our plugin if using xdist.
os.environ['PYTHONPATH'] = os.pathsep.join(sys.path)
# Set what should be skipped in the plugin through an environment variable
s = base64.b64encode(zlib.compress(pickle.dumps(py_test_accept_filter)))
if pydevd_constants.IS_PY3K:
s = s.decode('ascii') # Must be str in py3.
os.environ['PYDEV_PYTEST_SKIP'] = s
# Identifies the main pid (i.e.: if it's not the main pid it has to connect back to the
# main pid to give xml-rpc notifications).
os.environ['PYDEV_MAIN_PID'] = str(os.getpid())
os.environ['PYDEV_PYTEST_SERVER'] = str(configuration.port)
argv.append('-p')
argv.append('pydev_runfiles_pytest2')
if 'unittest' in sys.modules or 'unittest2' in sys.modules:
sys.stderr.write('pydev test runner error: imported unittest before running pytest.main\n')
return pytest.main(argv)
else:
raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,))
if __name__ == '__main__':
try:
main()
finally:
try:
# The server is not a daemon thread, so, we have to ask for it to be killed!
import pydev_runfiles_xml_rpc
pydev_runfiles_xml_rpc.forceServerKill()
except:
pass # Ignore any errors here
import sys
import threading
if hasattr(sys, '_current_frames') and hasattr(threading, 'enumerate'):
import time
import traceback
class DumpThreads(threading.Thread):
def run(self):
time.sleep(10)
thread_id_to_name = {}
try:
for t in threading.enumerate():
thread_id_to_name[t.ident] = '%s (daemon: %s)' % (t.name, t.daemon)
except:
pass
stack_trace = [
'===============================================================================',
'pydev pyunit runner: Threads still found running after tests finished',
'================================= Thread Dump =================================']
for thread_id, stack in sys._current_frames().items():
stack_trace.append('\n-------------------------------------------------------------------------------')
stack_trace.append(" Thread %s" % thread_id_to_name.get(thread_id, thread_id))
stack_trace.append('')
if 'self' in stack.f_locals:
sys.stderr.write(str(stack.f_locals['self']) + '\n')
for filename, lineno, name, line in traceback.extract_stack(stack):
stack_trace.append(' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
stack_trace.append(" %s" % (line.strip()))
stack_trace.append('\n=============================== END Thread Dump ===============================')
sys.stderr.write('\n'.join(stack_trace))
dump_current_frames_thread = DumpThreads()
dump_current_frames_thread.setDaemon(True) # Daemon so that this thread doesn't halt it!
dump_current_frames_thread.start()
| dannyperry571/theapprentice | script.module.pydevd/lib/runfiles.py | Python | gpl-2.0 | 11,560 | 0.004325 |
"""
Contains utilities used during testing
"""
import os
import stat
import shutil
def remove_tree(tree):
"""
reset the permission of a file and directory tree and remove it
"""
os.chmod(tree, 0o777)
shutil.rmtree(tree)
def remove_file(file_name):
"""
reset the permission of a file and remove it
"""
os.chmod(file_name, 0o777)
os.remove(file_name)
def create_file(file_name):
"""
    create a file
"""
return open(file_name, "w").close()
def create_directory(directory_name):
"""
    create a directory
"""
os.makedirs(directory_name)
class ChangeDirectory:
# pylint: disable=too-few-public-methods
"""
Context manager for changing the current working directory
"""
def __init__(self, new_path):
self.new_path = os.path.expanduser(new_path)
self.saved_path = os.getcwd()
def __enter__(self):
os.chdir(self.new_path)
def __exit__(self, etype, value, traceback):
os.chdir(self.saved_path)
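# Illustrative usage of ChangeDirectory:
#   with ChangeDirectory('/tmp'):
#       ...  # the working directory is /tmp inside the block
#   # the previous working directory is restored on exit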
def create_tree(tree):
"""
create an file and directory tree
"""
for branch in tree:
if isinstance(branch, str):
create_file(branch)
elif isinstance(branch, dict):
for directory, file_objs in branch.items():
create_directory(directory)
with ChangeDirectory(directory):
create_tree(file_objs)
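# Illustrative call: create_tree(['a', {'b': ['c']}]) creates the file "a"
# and the directory "b" containing the file "c", relative to the current
# working directory.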
def remove_read_permission(path):
"""
    remove read permission for user, group and others from a path
"""
mode = os.stat(path)[stat.ST_MODE]
os.chmod(path, mode & ~stat.S_IRUSR & ~stat.S_IRGRP & ~stat.S_IROTH)
def add_read_permission(path):
"""
    add read permission for user, group and others to a path
"""
mode = os.stat(path)[stat.ST_MODE]
os.chmod(path, mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
def remove_write_permission(path):
"""
    remove write permission for user, group and others from a path
"""
mode = os.stat(path)[stat.ST_MODE]
os.chmod(path, mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH)
def remove_execute_permission(path):
"""
    remove execute permission for user, group and others from a path
"""
mode = os.stat(path)[stat.ST_MODE]
os.chmod(path, mode & ~stat.S_IXUSR & ~stat.S_IXGRP & ~stat.S_IXOTH)
| arecarn/dploy | tests/utils.py | Python | mit | 2,296 | 0 |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('spy_headers')
class PluginSpyHeaders(object):
"""
Logs all headers sent in http requests. Useful for resolving issues.
WARNING: At the moment this modifies requests somehow!
"""
schema = {'type': 'boolean'}
@staticmethod
def log_requests_headers(response, **kwargs):
log.info('Request : %s' % response.request.url)
log.info('Response : %s (%s)' % (response.status_code, response.reason))
log.info('-- Headers: --------------------------')
for header, value in response.request.headers.items():
log.info('%s: %s' % (header, value))
log.info('--------------------------------------')
return response
def on_task_start(self, task, config):
if not config:
return
# Add our hook to the requests session
task.requests.hooks['response'].append(self.log_requests_headers)
def on_task_exit(self, task, config):
"""Task exiting, remove additions"""
if not config:
return
task.requests.hooks['response'].remove(self.log_requests_headers)
# remove also on abort
on_task_abort = on_task_exit
@event('plugin.register')
def register_plugin():
plugin.register(PluginSpyHeaders, 'spy_headers', api_ver=2)
| jawilson/Flexget | flexget/plugins/operate/spy_headers.py | Python | mit | 1,536 | 0.000651 |
import os
import re
import logging
from packaging.version import parse
from .scm.git import Git
from .scm.base import DEFAULT_TAG_VERSION
from .exception import VersionerError
from .version_string import make_string_pep440_compatible
SCM_TYPES = [Git]
LOG = logging.getLogger(__name__)
RELEASE_FORMAT = "{main_version}"
FORMAT = "{main_version}.dev{commit_count}+{branch}.{scm_change_id}"
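# Illustrative renderings (hypothetical values): with tag version 1.2.0,
# a release build renders as "1.2.0", while a dev build on branch "master"
# with 5 commits since the tag and change id "abc1234" renders as
# "1.2.0.dev5+master.abc1234".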
def get_version(
path=os.curdir,
is_release=False,
version_format=FORMAT,
release_version_format=RELEASE_FORMAT,
version_file="VERSION",
release_branch_regex=None,
scm_type=None,
):
"""
return the version.
"""
path = os.path.abspath(path)
version_file_path = os.path.join(path, version_file)
scm = _get_scm(scm_type, path)
if not scm:
existing_version = _read_version_file(version_file_path)
if existing_version:
return existing_version
if scm_type is None:
msg = "unable to detect scm type."
else:
msg = "scm type {0} not found, or is not a valid repo.".format(
scm_type
)
raise VersionerError(msg)
version = determine_version(
scm,
version_format=version_format,
release_version_format=release_version_format,
release_branch_regex=release_branch_regex,
is_release=is_release,
)
_write_version_file(version_file_path, version)
return version
def determine_version(
scm,
version_format=FORMAT,
release_version_format=RELEASE_FORMAT,
release_branch_regex=None,
is_release=False,
):
props = scm.get_properties()
release_branch_regex = release_branch_regex or scm.RELEASE_BRANCH_REGEX
if not re.match(release_branch_regex, props["branch"]):
LOG.info(
"branch {0} does not match regex {1}. Using default tag version.".format(
props["branch"], release_branch_regex
)
)
props["main_version"] = DEFAULT_TAG_VERSION
else:
props["main_version"] = props["tag_version"]
props["branch"] = make_string_pep440_compatible(props["branch"])
fmt_to_use = release_version_format if is_release else version_format
try:
return str(parse(fmt_to_use.format(**props)))
except KeyError as ke:
raise VersionerError(
"key {0} was not provided by the scm type {1}".format(
ke, scm.get_name()
)
)
def _read_version_file(version_file):
if not os.path.exists(version_file):
return
with open(version_file) as fh:
return fh.read()
def _get_scm(scm_type, path):
for SCMType in SCM_TYPES:
if scm_type is None or scm_type == SCMType.get_name():
if SCMType.is_repo(path):
return SCMType(path)
return None
def _write_version_file(version_file, version):
with open(version_file, "w+") as fh:
fh.write(version)
| toumorokoshi/vcver-python | vcver/version.py | Python | mit | 2,963 | 0.000337 |
import json
import logging
import re
from html.parser import HTMLParser
import jsonschema
from django.conf import settings
logger = logging.getLogger(__name__)
class ParserBase:
"""
Base class for all parsers.
"""
def __init__(self, name):
"""Setup the artifact to hold the extracted data."""
self.name = name
self.clear()
def clear(self):
"""Reset this parser's values for another run."""
self.artifact = []
self.complete = False
def parse_line(self, line, lineno):
"""Parse a single line of the log"""
raise NotImplementedError # pragma no cover
def finish_parse(self, last_lineno_seen):
"""Clean-up/summary tasks run at the end of parsing."""
pass
def get_artifact(self):
"""By default, just return the artifact as-is."""
return self.artifact
class StepParser(ParserBase):
"""
Parse out individual job steps within a log.
Step format:
"steps": [
{
"errors": [],
"name": "set props: master", # the name of the process on start line
"started": "2013-06-05 12:39:57.838527",
"started_linenumber": 8,
"finished_linenumber": 10,
"finished": "2013-06-05 12:39:57.839226",
"result": 0
},
...
]
"""
# Matches the half-dozen 'key: value' header lines printed at the start of each
# Buildbot job log. The list of keys are taken from:
# https://hg.mozilla.org/build/buildbotcustom/file/644c3860300a/bin/log_uploader.py#l126
RE_HEADER_LINE = re.compile(r'(?:builder|slave|starttime|results|buildid|builduid|revision): .*')
# Step marker lines, eg:
# ========= Started foo (results: 0, elapsed: 0 secs) (at 2015-08-17 02:33:56.353866) =========
# ========= Finished foo (results: 0, elapsed: 0 secs) (at 2015-08-17 02:33:56.354301) =========
RE_STEP_MARKER = re.compile(r'={9} (?P<marker_type>Started|Finished) (?P<name>.*?) '
r'\(results: (?P<result_code>\d+), elapsed: .*?\) '
r'\(at (?P<timestamp>.*?)\)')
# Legacy result code to name mapping inherited from buildbot (duplicated in TextLogStep)
# TODO: Likely remove this and step handling entirely now that Taskcluster doesn't have steps.
RESULT_DICT = {
0: "success",
1: "testfailed",
2: "busted",
3: "skipped",
4: "exception",
5: "retry",
6: "usercancel",
7: "superseded",
}
STATES = {
# The initial state until we record the first step.
"awaiting_first_step": 0,
# We've started a step, but not yet seen the end of it.
"step_in_progress": 1,
# We've seen the end of the previous step.
"step_finished": 2,
}
# date format in a step started/finished header
DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
def __init__(self):
"""Setup the artifact to hold the header lines."""
super().__init__("step_data")
self.stepnum = -1
self.artifact = {
"steps": [],
"errors_truncated": False
}
self.sub_parser = ErrorParser()
self.state = self.STATES['awaiting_first_step']
def parse_line(self, line, lineno):
"""Parse a single line of the log.
We have to handle both buildbot style logs as well as Taskcluster logs. The latter
attempt to emulate the buildbot logs, but don't accurately do so, partly due
to the way logs are generated in Taskcluster (ie: on the workers themselves).
Buildbot logs:
builder: ...
slave: ...
starttime: ...
results: ...
buildid: ...
builduid: ...
revision: ...
======= <step START marker> =======
<step log output>
======= <step FINISH marker> =======
======= <step START marker> =======
<step log output>
======= <step FINISH marker> =======
Taskcluster logs (a worst-case example):
<log output outside a step>
======= <step START marker> =======
<step log output>
======= <step FINISH marker> =======
<log output outside a step>
======= <step START marker> =======
<step log output with no following finish marker>
As can be seen above, Taskcluster logs can have (a) log output that falls between
step markers, and (b) content at the end of the log, that is not followed by a
final finish step marker. We handle this by creating generic placeholder steps to
hold the log output that is not enclosed by step markers, and then by cleaning up
the final step in finish_parse() once all lines have been parsed.
"""
if not line.strip():
# Skip whitespace-only lines, since they will never contain an error line,
# so are not of interest. This also avoids creating spurious unnamed steps
# (which occurs when we find content outside of step markers) for the
# newlines that separate the steps in Buildbot logs.
return
if self.state == self.STATES['awaiting_first_step'] and self.RE_HEADER_LINE.match(line):
# The "key: value" job metadata header lines that appear at the top of
# Buildbot logs would result in the creation of an unnamed step at the
# start of the job, unless we skip them. (Which is not desired, since
# the lines are metadata and not test/build output.)
return
step_marker_match = self.RE_STEP_MARKER.match(line)
if not step_marker_match:
# This is a normal log line, rather than a step marker. (The common case.)
if self.state != self.STATES['step_in_progress']:
# We don't have an in-progress step, so need to start one, even though this
# isn't a "step started" marker line. We therefore create a new generic step,
# since we have no way of finding out the step metadata. This case occurs
# for the Taskcluster logs where content can fall between step markers.
self.start_step(lineno)
# Parse the line for errors, which if found, will be associated with the current step.
self.sub_parser.parse_line(line, lineno)
return
# This is either a "step started" or "step finished" marker line, eg:
# ========= Started foo (results: 0, elapsed: 0 secs) (at 2015-08-17 02:33:56.353866) =========
# ========= Finished foo (results: 0, elapsed: 0 secs) (at 2015-08-17 02:33:56.354301) =========
if step_marker_match.group('marker_type') == 'Started':
if self.state == self.STATES['step_in_progress']:
# We're partway through a step (ie: haven't seen a "step finished" marker line),
# but have now reached the "step started" marker for the next step. Before we
# can start the new step, we have to clean up the previous one - albeit using
# generic step metadata, since there was no "step finished" marker. This occurs
# in Taskcluster's logs when content falls between the step marker lines.
self.end_step(lineno)
# Start a new step using the extracted step metadata.
self.start_step(lineno,
name=step_marker_match.group('name'),
timestamp=step_marker_match.group('timestamp'))
return
# This is a "step finished" marker line.
if self.state != self.STATES['step_in_progress']:
# We're not in the middle of a step, so can't finish one. Just ignore the marker line.
return
# Close out the current step using the extracted step metadata.
self.end_step(lineno,
timestamp=step_marker_match.group('timestamp'),
result_code=int(step_marker_match.group('result_code')))
def start_step(self, lineno, name="Unnamed step", timestamp=None):
"""Create a new step and update the state to reflect we're now in the middle of a step."""
self.state = self.STATES['step_in_progress']
self.stepnum += 1
self.steps.append({
"name": name,
"started": timestamp,
"started_linenumber": lineno,
"errors": [],
})
def end_step(self, lineno, timestamp=None, result_code=None):
"""Fill in the current step's summary and update the state to show the current step has ended."""
self.state = self.STATES['step_finished']
step_errors = self.sub_parser.get_artifact()
step_error_count = len(step_errors)
if step_error_count > settings.PARSER_MAX_STEP_ERROR_LINES:
step_errors = step_errors[:settings.PARSER_MAX_STEP_ERROR_LINES]
self.artifact["errors_truncated"] = True
self.current_step.update({
"finished": timestamp,
"finished_linenumber": lineno,
# Whilst the result code is present on both the start and end buildbot-style step
# markers, for Taskcluster logs the start marker line lies about the result, since
# the log output is unbuffered, so Taskcluster does not know the real result at
# that point. As such, we only set the result when ending a step.
"result": self.RESULT_DICT.get(result_code, "unknown"),
"errors": step_errors
})
# reset the sub_parser for the next step
self.sub_parser.clear()
def finish_parse(self, last_lineno_seen):
"""Clean-up/summary tasks run at the end of parsing."""
if self.state == self.STATES['step_in_progress']:
# We've reached the end of the log without seeing the final "step finish"
# marker, which would normally have triggered updating the step. As such we
# must manually close out the current step, so things like result, finish
# time are set for it. This ensures that the error summary for Taskcluster
# infra failures actually lists the error that occurs at the
# end of the log.
self.end_step(last_lineno_seen)
@property
def steps(self):
"""Return the list of steps in the artifact"""
return self.artifact["steps"]
@property
def current_step(self):
"""Return the current step in the artifact"""
return self.steps[self.stepnum]
class TinderboxPrintParser(ParserBase):
RE_TINDERBOXPRINT = re.compile(r'.*TinderboxPrint: ?(?P<line>.*)$')
RE_UPLOADED_TO = re.compile(
r"<a href=['\"](?P<url>http(s)?://.*)['\"]>(?P<value>.+)</a>: uploaded"
)
RE_LINK_HTML = re.compile(
(r"((?P<title>[A-Za-z/\.0-9\-_ ]+): )?"
r"<a .*href=['\"](?P<url>http(s)?://.+)['\"].*>(?P<value>.+)</a>")
)
RE_LINK_TEXT = re.compile(
r"((?P<title>[A-Za-z/\.0-9\-_ ]+): )?(?P<url>http(s)?://.*)"
)
TINDERBOX_REGEXP_TUPLE = (
{
're': RE_UPLOADED_TO,
'base_dict': {
"content_type": "link",
"title": "artifact uploaded"
},
'duplicates_fields': {}
},
{
're': RE_LINK_HTML,
'base_dict': {
"content_type": "link"
},
'duplicates_fields': {}
},
{
're': RE_LINK_TEXT,
'base_dict': {
"content_type": "link"
},
'duplicates_fields': {'value': 'url'}
}
)
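    # Illustrative TinderboxPrint payloads matched by the patterns above
    # (hypothetical URLs):
    #   <a href='http://example.com/build.zip'>build.zip</a>: uploaded
    #   artifact: <a href='http://example.com/log.txt'>log</a>
    #   docs: http://example.com/docs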
def __init__(self):
"""Setup the artifact to hold the job details."""
super().__init__("job_details")
def parse_line(self, line, lineno):
"""Parse a single line of the log"""
match = self.RE_TINDERBOXPRINT.match(line) if line else None
if match:
line = match.group('line')
for regexp_item in self.TINDERBOX_REGEXP_TUPLE:
match = regexp_item['re'].match(line)
if match:
artifact = match.groupdict()
# handle duplicate fields
for to_field, from_field in regexp_item['duplicates_fields'].items():
                        # if to_field is not present or is None, copy from from_field
if to_field not in artifact or artifact[to_field] is None:
artifact[to_field] = artifact[from_field]
artifact.update(regexp_item['base_dict'])
self.artifact.append(artifact)
return
# default case: consider it html content
# try to detect title/value splitting on <br/>
artifact = {"content_type": "raw_html", }
if "<br/>" in line:
title, value = line.split("<br/>", 1)
artifact["title"] = title
artifact["value"] = value
# or similar long lines if they contain a url
elif "href" in line and "title" in line:
def parse_url_line(line_data):
class TpLineParser(HTMLParser):
def handle_starttag(self, tag, attrs):
d = dict(attrs)
artifact["url"] = d['href']
artifact["title"] = d['title']
def handle_data(self, data):
artifact["value"] = data
p = TpLineParser()
p.feed(line_data)
p.close()
# strip ^M returns on windows lines otherwise
# handle_data will yield no data 'value'
parse_url_line(line.replace('\r', ''))
else:
artifact["value"] = line
self.artifact.append(artifact)
class ErrorParser(ParserBase):
"""A generic error detection sub-parser"""
IN_SEARCH_TERMS = (
"TEST-UNEXPECTED-",
"fatal error",
"FATAL ERROR",
"REFTEST ERROR",
"PROCESS-CRASH",
"Assertion failure:",
"Assertion failed:",
"###!!! ABORT:",
"E/GeckoLinker",
"SUMMARY: AddressSanitizer",
"SUMMARY: LeakSanitizer",
"SUMMARY: ThreadSanitizer",
"Automation Error:",
"command timed out:",
"wget: unable ",
"TEST-VALGRIND-ERROR",
"[ FAILED ] ",
"bash.exe: *** ",
"bash: fork: Resource temporarily unavailable",
)
RE_ERR_MATCH = re.compile((
r"^error: TEST FAILED"
r"|^g?make(?:\[\d+\])?: \*\*\*"
r"|^Remote Device Error:"
r"|^[A-Za-z.]+Error: "
r"|^[A-Za-z.]*Exception: "
r"|^remoteFailed:"
r"|^rm: cannot "
r"|^abort:"
r"|^Output exceeded \d+ bytes"
r"|^The web-page 'stop build' button was pressed"
r"|.*\.js: line \d+, col \d+, Error -"
r"|^\[taskcluster\] Error:"
r"|^\[[\w._-]+:(?:error|exception)\]"
))
RE_ERR_SEARCH = re.compile((
r" error\(\d*\):"
r"|:\d+: error:"
r"| error R?C\d*:"
r"|ERROR [45]\d\d:"
r"|mozmake\.(?:exe|EXE)(?:\[\d+\])?: \*\*\*"
))
RE_EXCLUDE_1_SEARCH = re.compile(r"TEST-(?:INFO|PASS) ")
RE_EXCLUDE_2_SEARCH = re.compile(
r"I[ /](Gecko|Robocop|TestRunner).*TEST-UNEXPECTED-"
r"|^TimeoutException: "
r"|^ImportError: No module named pygtk$"
)
RE_ERR_1_MATCH = re.compile(r"^\d+:\d+:\d+ +(?:ERROR|CRITICAL|FATAL) - ")
# Looks for a leading value inside square brackets containing a "YYYY-"
# year pattern but isn't a TaskCluster error indicator (like
# ``taskcluster:error``.
#
# This matches the following:
# [task 2016-08-18T17:50:56.955523Z]
# [2016- task]
#
# But not:
# [taskcluster:error]
# [taskcluster:something 2016-]
RE_TASKCLUSTER_NORMAL_PREFIX = re.compile(r"^\[(?!taskcluster:)[^\]]*20\d{2}-[^\]]+\]\s")
RE_MOZHARNESS_PREFIX = re.compile(r"^\d+:\d+:\d+ +(?:DEBUG|INFO|WARNING) - +")
def __init__(self):
"""A simple error detection sub-parser"""
super().__init__("errors")
self.is_taskcluster = False
def add(self, line, lineno):
self.artifact.append({
"linenumber": lineno,
"line": line.rstrip()
})
def parse_line(self, line, lineno):
"""Check a single line for an error. Keeps track of the linenumber"""
# TaskCluster logs are a bit wonky.
#
# TaskCluster logs begin with output coming from TaskCluster itself,
# before it has transitioned control of the task to the configured
# process. These "internal" logs look like the following:
#
# [taskcluster 2016-09-09 17:41:43.544Z] Worker Group: us-west-2b
#
# If an error occurs during this "setup" phase, TaskCluster may emit
# lines beginning with ``[taskcluster:error]``.
#
# Once control has transitioned from TaskCluster to the configured
# task process, lines can be whatever the configured process emits.
# The popular ``run-task`` wrapper prefixes output to emulate
# TaskCluster's "internal" logs. e.g.
#
# [vcs 2016-09-09T17:45:02.842230Z] adding changesets
#
# This prefixing can confuse error parsing. So, we strip it.
#
# Because regular expression matching and string manipulation can be
# expensive when performed on every line, we only strip the TaskCluster
# log prefix if we know we're in a TaskCluster log.
# First line of TaskCluster logs almost certainly has this.
if line.startswith('[taskcluster '):
self.is_taskcluster = True
# For performance reasons, only do this if we have identified as
# a TC task.
if self.is_taskcluster:
line = re.sub(self.RE_TASKCLUSTER_NORMAL_PREFIX, "", line)
if self.is_error_line(line):
self.add(line, lineno)
def is_error_line(self, line):
if self.RE_EXCLUDE_1_SEARCH.search(line):
return False
if self.RE_ERR_1_MATCH.match(line):
return True
# Remove mozharness prefixes prior to matching
trimline = re.sub(self.RE_MOZHARNESS_PREFIX, "", line).rstrip()
if self.RE_EXCLUDE_2_SEARCH.search(trimline):
return False
return bool(any(term for term in self.IN_SEARCH_TERMS if term in trimline) or
self.RE_ERR_MATCH.match(trimline) or self.RE_ERR_SEARCH.search(trimline))
with open('schemas/performance-artifact.json') as f:
PERF_SCHEMA = json.load(f)
class PerformanceParser(ParserBase):
"""a sub-parser to find generic performance data"""
# Using $ in the regex as an end of line bounds causes the
# regex to fail on windows logs. This is likely due to the
# ^M character representation of the windows end of line.
RE_PERFORMANCE = re.compile(r'.*?PERFHERDER_DATA:\s+({.*})')
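    # Illustrative line this matches (hypothetical payload):
    #   PERFHERDER_DATA: {"framework": {"name": "talos"}, "suites": []}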
def __init__(self):
super().__init__("performance_data")
def parse_line(self, line, lineno):
match = self.RE_PERFORMANCE.match(line)
if match:
try:
dict = json.loads(match.group(1))
jsonschema.validate(dict, PERF_SCHEMA)
self.artifact.append(dict)
except ValueError:
logger.warning("Unable to parse Perfherder data from line: %s",
line)
except jsonschema.ValidationError as e:
logger.warning("Perfherder line '%s' does not comply with "
"json schema: %s", line, e)
# Don't mark the parser as complete, in case there are multiple performance artifacts.
| edmorley/treeherder | treeherder/log_parser/parsers.py | Python | mpl-2.0 | 20,165 | 0.002628 |
# ybrowserauth installation script
#
from distutils.core import setup
setup(name='ybrowserauth',
version='1.2',
py_modules=['ybrowserauth'],
license='http://www.opensource.org/licenses/bsd-license.php',
url='http://developer.yahoo.com/auth',
description='Lets you add Yahoo! Browser-Based authentication to your applications',
author='Jason Levitt',
contact='http://developer.yahoo.com/blog',
) | pombreda/django-hotclub | libs/external_libs/ybrowserauth/setup.py | Python | mit | 455 | 0.004396 |
from typing import List, Tuple, Sequence
import logging
logger = logging.getLogger(__name__)
from .lock_tables import lock_tables
from .uiformatters import UIFormatter, get_uiformatters, AutonumberOverflowException
def autonumber_and_save(collection, user, obj) -> None:
uiformatters = get_uiformatters(collection, user, obj.__class__.__name__)
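    # Collect (formatter, parsed values) pairs for every formatted field
    # whose current value still needs an autonumber filled in.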
autonumber_fields = [(formatter, vals)
for formatter in uiformatters
for value in [getattr(obj, formatter.field_name.lower())]
if value is not None
for vals in [formatter.parse(value)]
if formatter.needs_autonumber(vals)]
if len(autonumber_fields) > 0:
do_autonumbering(collection, obj, autonumber_fields)
else:
logger.debug("no fields to autonumber for %s", obj)
obj.save()
def do_autonumbering(collection, obj, fields: List[Tuple[UIFormatter, Sequence[str]]]) -> None:
logger.debug("autonumbering %s fields: %s", obj, fields)
# The autonumber action is prepared and thunked outside the locked table
# context since it looks at other tables and that is not allowed by mysql
# if those tables are not also locked.
thunks = [
formatter.prepare_autonumber_thunk(collection, obj.__class__, vals)
for formatter, vals in fields
]
with lock_tables(obj._meta.db_table):
for apply_autonumbering_to in thunks:
apply_autonumbering_to(obj)
obj.save()
| specify/specify7 | specifyweb/specify/autonumbering.py | Python | gpl-2.0 | 1,532 | 0.004569 |
"""
Test compiling and executing using the dmd tool.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
from Common.singleStringCannotBeMultipleOptions import testForTool
testForTool('dmd')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| timj/scons | test/D/HSTeoh/sconstest-singleStringCannotBeMultipleOptions_dmd.py | Python | mit | 1,396 | 0 |
# Copyright 2016 Facundo Batista, Nicolás Demarchi
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check https://github.com/PyAr/fades
"""Tests for file_options."""
import argparse
import unittest
from configparser import ConfigParser
from unittest.mock import patch
from fades import file_options
class OptionsFileTestCase(unittest.TestCase):
"""Check file_options.options_from_file()."""
def setUp(self):
self.argparser = argparse.ArgumentParser()
self.argparser.add_argument('-f', '--foo', action='store_true')
self.argparser.add_argument('-b', '--bar', action='store')
self.argparser.add_argument('-d', '--dependency', action='append')
self.argparser.add_argument('positional', nargs='?', default=None)
def build_parser(self, args):
config_parser = ConfigParser()
config_parser['fades'] = args
return config_parser
@patch("fades.file_options.CONFIG_FILES", ('/foo/none', '/dev/null'))
def test_no_config_files(self):
args = self.argparser.parse_args([])
result = file_options.options_from_file(args)
self.assertEqual(args, result)
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini',))
@patch("configparser.ConfigParser.items")
def test_single_config_file_no_cli(self, mocked_parser):
mocked_parser.return_value = [('foo', 'true'), ('bar', 'hux')]
args = self.argparser.parse_args(['positional'])
result = file_options.options_from_file(args)
self.assertTrue(result.foo)
self.assertEqual(result.bar, 'hux')
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini',))
@patch("configparser.ConfigParser.items")
def test_single_config_file_with_cli(self, mocked_parser):
mocked_parser.return_value = [('foo', 'false'), ('bar', 'hux'), ('no_in_cli', 'testing')]
args = self.argparser.parse_args(['--foo', '--bar', 'other', 'positional'])
result = file_options.options_from_file(args)
self.assertTrue(result.foo)
self.assertEqual(result.bar, 'other')
self.assertEqual(result.no_in_cli, 'testing')
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini',))
@patch("configparser.ConfigParser.items")
def test_single_config_file_with_mergeable(self, mocked_parser):
mocked_parser.return_value = [('dependency', 'two')]
args = self.argparser.parse_args(
['--foo', '--bar', 'other', '--dependency', 'one', 'positional'])
result = file_options.options_from_file(args)
self.assertTrue(result.foo)
self.assertEqual(result.bar, 'other')
self.assertEqual(result.dependency, ['one', 'two'])
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini',))
@patch("configparser.ConfigParser.items")
def test_single_config_file_complex_mergeable(self, mocked_parser):
mocked_parser.return_value = [('dependency', 'requests>=2.1,<2.8,!=2.6.5')]
args = self.argparser.parse_args(
['--foo', '--bar', 'other', '--dependency', 'one', 'positional'])
result = file_options.options_from_file(args)
self.assertTrue(result.foo)
self.assertEqual(result.bar, 'other')
self.assertEqual(result.dependency, ['one', 'requests>=2.1,<2.8,!=2.6.5'])
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini', 'mock2.ini'))
@patch("configparser.ConfigParser.items")
def test_two_config_file_with_mergeable(self, mocked_parser):
mocked_parser.side_effect = [
[('dependency', 'two')],
[('dependency', 'three')],
]
args = self.argparser.parse_args(
['--foo', '--bar', 'other', '--dependency', 'one', 'positional'])
result = file_options.options_from_file(args)
self.assertTrue(result.foo)
self.assertEqual(result.bar, 'other')
self.assertEqual(result.dependency, ['one', 'two', 'three'])
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini', 'mock2.ini'))
@patch("configparser.ConfigParser.items")
def test_two_config_file_with_booleans(self, mocked_parser):
mocked_parser.side_effect = [
[('foo', 'true')],
[('foo', 'false')],
]
args = self.argparser.parse_args([])
result = file_options.options_from_file(args)
self.assertFalse(result.foo)
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini', 'mock2.ini'))
@patch("configparser.ConfigParser.items")
def test_two_config_file_override_by_cli(self, mocked_parser):
mocked_parser.side_effect = [
[('bar', 'no_this')],
[('bar', 'no_this_b')],
]
args = self.argparser.parse_args(['--bar', 'this'])
result = file_options.options_from_file(args)
self.assertEqual(result.bar, 'this')
self.assertIsInstance(args, argparse.Namespace)
@patch("fades.file_options.CONFIG_FILES", ('mock.ini', 'mock2.ini', 'mock3.ini'))
@patch("configparser.ConfigParser.items")
def test_three_config_file_override(self, mocked_parser):
mocked_parser.side_effect = [
[('bar', 'no_this')],
[('bar', 'neither_this')],
[('bar', 'this')],
]
args = self.argparser.parse_args([])
result = file_options.options_from_file(args)
self.assertEqual(result.bar, 'this')
self.assertIsInstance(args, argparse.Namespace)
| jcabdala/fades | tests/test_file_options.py | Python | gpl-3.0 | 6,427 | 0.000778 |
import copy
import unittest
from pyrake.utils.datatypes import CaselessDict
__doctests__ = ['pyrake.utils.datatypes']
class CaselessDictTest(unittest.TestCase):
def test_init(self):
seq = {'red': 1, 'black': 3}
d = CaselessDict(seq)
self.assertEqual(d['red'], 1)
self.assertEqual(d['black'], 3)
seq = (('red', 1), ('black', 3))
d = CaselessDict(seq)
self.assertEqual(d['red'], 1)
self.assertEqual(d['black'], 3)
def test_caseless(self):
d = CaselessDict()
d['key_Lower'] = 1
self.assertEqual(d['KEy_loWer'], 1)
self.assertEqual(d.get('KEy_loWer'), 1)
d['KEY_LOWER'] = 3
self.assertEqual(d['key_Lower'], 3)
self.assertEqual(d.get('key_Lower'), 3)
def test_delete(self):
d = CaselessDict({'key_lower': 1})
del d['key_LOWER']
self.assertRaises(KeyError, d.__getitem__, 'key_LOWER')
self.assertRaises(KeyError, d.__getitem__, 'key_lower')
def test_getdefault(self):
d = CaselessDict()
self.assertEqual(d.get('c', 5), 5)
d['c'] = 10
self.assertEqual(d.get('c', 5), 10)
def test_setdefault(self):
d = CaselessDict({'a': 1, 'b': 2})
r = d.setdefault('A', 5)
self.assertEqual(r, 1)
self.assertEqual(d['A'], 1)
r = d.setdefault('c', 5)
self.assertEqual(r, 5)
self.assertEqual(d['C'], 5)
def test_fromkeys(self):
keys = ('a', 'b')
d = CaselessDict.fromkeys(keys)
self.assertEqual(d['A'], None)
self.assertEqual(d['B'], None)
d = CaselessDict.fromkeys(keys, 1)
self.assertEqual(d['A'], 1)
self.assertEqual(d['B'], 1)
instance = CaselessDict()
d = instance.fromkeys(keys)
self.assertEqual(d['A'], None)
self.assertEqual(d['B'], None)
d = instance.fromkeys(keys, 1)
self.assertEqual(d['A'], 1)
self.assertEqual(d['B'], 1)
def test_contains(self):
d = CaselessDict()
d['a'] = 1
assert 'a' in d
def test_pop(self):
d = CaselessDict()
d['a'] = 1
self.assertEqual(d.pop('A'), 1)
self.assertRaises(KeyError, d.pop, 'A')
def test_normkey(self):
class MyDict(CaselessDict):
def normkey(self, key):
return key.title()
d = MyDict()
d['key-one'] = 2
self.assertEqual(list(d.keys()), ['Key-One'])
def test_normvalue(self):
class MyDict(CaselessDict):
def normvalue(self, value):
if value is not None:
return value + 1
d = MyDict({'key': 1})
self.assertEqual(d['key'], 2)
self.assertEqual(d.get('key'), 2)
d = MyDict()
d['key'] = 1
self.assertEqual(d['key'], 2)
self.assertEqual(d.get('key'), 2)
d = MyDict()
d.setdefault('key', 1)
self.assertEqual(d['key'], 2)
self.assertEqual(d.get('key'), 2)
d = MyDict()
d.update({'key': 1})
self.assertEqual(d['key'], 2)
self.assertEqual(d.get('key'), 2)
d = MyDict.fromkeys(('key',), 1)
self.assertEqual(d['key'], 2)
self.assertEqual(d.get('key'), 2)
def test_copy(self):
h1 = CaselessDict({'header1': 'value'})
h2 = copy.copy(h1)
self.assertEqual(h1, h2)
self.assertEqual(h1.get('header1'), h2.get('header1'))
assert isinstance(h2, CaselessDict)
if __name__ == "__main__":
unittest.main()
| elkingtowa/pyrake | tests/test_utils_datatypes.py | Python | mit | 3,592 | 0.000557 |
'''
offsets = [[[originalx,originaly], [511,709],[498,707]],\
[[522,711], [508,709],[493,706]],\
[[522,714], [503,708],[488,705]]]
'''
def offsetter(length, dim, dx, dy, sx, sy, fx, fy):
    # Build a dim x length grid of [x, y] coordinates, stepping by
    # (dx, dy) from the start point (sx, sy) for each row and column.
    x0 = sx
    y0 = sy
    arr = []
    for i in range(dim):
        arr.append([])
        for j in range(length):
            x = int(x0 + dx * i + dx * (j + 1))
            y = int(y0 + dy * i + dy * (j + 1))
            arr[i].append([x, y])
    # Apply the additional per-row offsets (fx, fy), scaled by row index.
    for i in range(dim):
        for j in range(len(arr[i])):
            arr[i][j][0] += int(fx * i)
            arr[i][j][1] += int(fy * i)
    return arr
#print offsetter(3,3,-4,-1,532,713)
| luciencd/astrophotograpython | library/offsetter.py | Python | mit | 667 | 0.014993 |
"""Generate config flow file."""
import json
from typing import Dict
from .model import Config, Integration
BASE = """
\"\"\"Automatically generated by hassfest.
To update, run python3 -m script.hassfest
\"\"\"
# fmt: off
FLOWS = {}
""".strip()
def validate_integration(integration: Integration):
"""Validate we can load config flow without installing requirements."""
if not (integration.path / "config_flow.py").is_file():
integration.add_error(
"config_flow", "Config flows need to be defined in the file config_flow.py"
)
# Currently not require being able to load config flow without
# installing requirements.
# try:
# integration.import_pkg('config_flow')
# except ImportError as err:
# integration.add_error(
# 'config_flow',
# "Unable to import config flow: {}. Config flows should be able "
# "to be imported without installing requirements.".format(err))
# return
# if integration.domain not in config_entries.HANDLERS:
# integration.add_error(
# 'config_flow',
# "Importing the config flow platform did not register a config "
# "flow handler.")
def generate_and_validate(integrations: Dict[str, Integration]):
"""Validate and generate config flow data."""
domains = []
for domain in sorted(integrations):
integration = integrations[domain]
if not integration.manifest:
continue
config_flow = integration.manifest.get("config_flow")
if not config_flow:
continue
validate_integration(integration)
domains.append(domain)
return BASE.format(json.dumps(domains, indent=4))
def validate(integrations: Dict[str, Integration], config: Config):
"""Validate config flow file."""
config_flow_path = config.root / "homeassistant/generated/config_flows.py"
config.cache["config_flow"] = content = generate_and_validate(integrations)
with open(str(config_flow_path), "r") as fp:
if fp.read().strip() != content:
config.add_error(
"config_flow",
"File config_flows.py is not up to date. "
"Run python3 -m script.hassfest",
fixable=True,
)
return
def generate(integrations: Dict[str, Integration], config: Config):
"""Generate config flow file."""
config_flow_path = config.root / "homeassistant/generated/config_flows.py"
with open(str(config_flow_path), "w") as fp:
fp.write(config.cache["config_flow"] + "\n")
| leppa/home-assistant | script/hassfest/config_flow.py | Python | apache-2.0 | 2,621 | 0.000382 |
from test_base import BaseTest, load_msg
from mock import patch
from smtplib import SMTP
from deliver.send import Sender
class SendTest(BaseTest):
def setUp(self):
super(SendTest,self).setUp()
self.sender = Sender(self.config)
@patch('smtplib.SMTP')
@patch.object(SMTP, 'sendmail')
def test_send(self, smtp, sendmail):
msg = load_msg('sample')
self.sender.send(msg, u'email@address.com')
self.assertEqual(sendmail.call_count, 1)
self.assertEqual(msg['To'], u'email@address.com')
self.assertEqual(msg['From'], self.sender.get_address())
self.assertEqual(msg['Reply-To'], self.sender.get_address())
self.assertEqual(msg['Subject'], u'[Test] BETA 2.0')
def test_get_address(self):
self.assertEqual(self.sender.get_address(),self.config['sender'])
| sirech/deliver | deliver/tests/test_send.py | Python | mit | 852 | 0.004695 |
import os
import logging
from django.core.management.base import BaseCommand
from django.core.mail import send_mail
from django.template.loader import get_template
from workshops.models import Badge, Person, Role
logger = logging.getLogger()
class Command(BaseCommand):
help = 'Report instructors activity.'
def add_arguments(self, parser):
'--send-out-for-real', action='store_true', default=False,
help='Send information to the instructors.',
)
parser.add_argument(
'--no-may-contact-only', action='store_true', default=False,
help='Include instructors not willing to be contacted.',
)
parser.add_argument(
'--django-mailing', action='store_true', default=False,
help='Use Django mailing system. This requires some environmental '
'variables to be set, see `settings.py`.',
)
parser.add_argument(
'-s', '--sender', action='store',
default='workshops@carpentries.org',
help='E-mail used in "from:" field.',
)
def foreign_tasks(self, tasks, person, roles):
"""List of other instructors' tasks, per event."""
return [
task.event.task_set.filter(role__in=roles)
.exclude(person=person)
.select_related('person')
for task in tasks
]
def fetch_activity(self, may_contact_only=True):
roles = Role.objects.filter(name__in=['instructor', 'helper'])
instructor_badges = Badge.objects.instructor_badges()
instructors = Person.objects.filter(badges__in=instructor_badges)
instructors = instructors.exclude(email__isnull=True)
if may_contact_only:
instructors = instructors.exclude(may_contact=False)
# let's get some things faster
instructors = instructors.select_related('airport') \
.prefetch_related('task_set', 'lessons',
'award_set', 'badges')
# don't repeat the records
instructors = instructors.distinct()
result = []
for person in instructors:
tasks = person.task_set.filter(role__in=roles) \
.select_related('event', 'role')
record = {
'person': person,
'lessons': person.lessons.all(),
'instructor_awards': person.award_set.filter(
badge__in=person.badges.instructor_badges()
),
'tasks': zip(tasks,
self.foreign_tasks(tasks, person, roles)),
}
result.append(record)
return result
def make_message(self, record):
tmplt = get_template('mailing/instructor_activity.txt')
return tmplt.render(context=record)
def subject(self, record):
# in future we can vary the subject depending on the record details
return 'Updating your Software Carpentry information'
def recipient(self, record):
return record['person'].email
def send_message(self, subject, message, sender, recipient, for_real=False,
django_mailing=False):
if for_real:
if django_mailing:
send_mail(subject, message, sender, [recipient])
else:
command = 'mail -s "{subject}" -r {sender} {recipient}'.format(
subject=subject,
sender=sender,
recipient=recipient,
)
writer = os.popen(command, 'w')
writer.write(message)
writer.close()
if self.verbosity >= 2:
# write only a header
self.stdout.write('-' * 40 + '\n')
self.stdout.write('To: {}\n'.format(recipient))
self.stdout.write('Subject: {}\n'.format(subject))
self.stdout.write('From: {}\n'.format(sender))
if self.verbosity >= 3:
# write whole message out
self.stdout.write(message + '\n')
def handle(self, *args, **options):
# default is dummy run - only actually send mail if told to
send_for_real = options['send_out_for_real']
# by default include only instructors who have `may_contact==True`
no_may_contact_only = options['no_may_contact_only']
# use mailing options from settings.py or the `mail` system command?
django_mailing = options['django_mailing']
# verbosity option is added by Django
self.verbosity = int(options['verbosity'])
sender = options['sender']
results = self.fetch_activity(not no_may_contact_only)
for result in results:
message = self.make_message(result)
subject = self.subject(result)
recipient = self.recipient(result)
self.send_message(subject, message, sender, recipient,
for_real=send_for_real,
django_mailing=django_mailing)
if self.verbosity >= 1:
self.stdout.write('Sent {} emails.\n'.format(len(results)))
| swcarpentry/amy | amy/workshops/management/commands/instructors_activity.py | Python | mit | 5,305 | 0 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import os
import json
import shutil
from.error import Error
class StoreLockVersionResolver(object):
def __init__(self,resolver,lock_cache,dependency):
self.resolver=resolver
self.lock_cache=lock_cache
self.dependency=dependency
def resolve(self):
path=self.resolver.resolve()
checkout=None
if self.dependency.git_tag:
checkout=self.dependency.git_tag
elif self.dependency.git_commit:
checkout=self.dependency.git_commit
else:
			raise Error('No stable checkout information found.')
self.lock_cache.add_checkout(dependency=self.dependency,checkout=checkout)
return path
| looopTools/sw9-source | .waf-1.9.8-6657823688b736c1d1a4e2c4e8e198b4/waflib/extras/wurf/store_lock_version_resolver.py | Python | mit | 719 | 0.038943 |
# -*- coding: utf-8 -*-
'''
Tests for the Git state
'''
# Import python libs
from __future__ import absolute_import
import os
import shutil
import socket
import subprocess
import tempfile
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath, skip_if_binaries_missing
ensure_in_syspath('../../')
# Import salt libs
import integration
import salt.utils
class GitTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
'''
Validate the git state
'''
def setUp(self):
super(GitTest, self).setUp()
self.__domain = 'github.com'
try:
if hasattr(socket, 'setdefaulttimeout'):
# 10 second dns timeout
socket.setdefaulttimeout(10)
socket.gethostbyname(self.__domain)
except socket.error:
msg = 'error resolving {0}, possible network issue?'
self.skipTest(msg.format(self.__domain))
def test_latest(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_with_rev_and_submodules(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_failure(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://youSpelledGitHubWrong.com/saltstack/salt-test-repo.git',
rev='develop',
target=name,
submodules=True
)
self.assertSaltFalseReturn(ret)
self.assertFalse(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_empty_dir(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_unless_no_cwd_issue_6800(self):
'''
cwd=target was being passed to _run_check which blew up if
target dir did not already exist.
'''
name = os.path.join(integration.TMP, 'salt_repo')
if os.path.isdir(name):
shutil.rmtree(name)
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
unless='test -e {0}'.format(name),
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_numeric_rev(self):
'''
git.latest with numeric revision
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev=0.11,
target=name,
submodules=True,
timeout=120
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_with_local_changes(self):
'''
Ensure that we fail the state when there are local changes and succeed
when force_reset is True.
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
# Clone repo
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
# Make change to LICENSE file.
with salt.utils.fopen(os.path.join(name, 'LICENSE'), 'a') as fp_:
fp_.write('Lorem ipsum dolor blah blah blah....\n')
# Make sure that we now have uncommitted changes
self.assertTrue(self.run_function('git.diff', [name, 'HEAD']))
# Re-run state with force_reset=False, this should fail
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name,
force_reset=False
)
self.assertSaltFalseReturn(ret)
# Now run the state with force_reset=True, this should succeed
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name,
force_reset=True
)
self.assertSaltTrueReturn(ret)
# Make sure that we no longer have uncommitted changes
self.assertFalse(self.run_function('git.diff', [name, 'HEAD']))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present_failure(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
fname = os.path.join(name, 'stoptheprocess')
with salt.utils.fopen(fname, 'a') as fh_:
fh_.write('')
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltFalseReturn(ret)
self.assertFalse(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present_empty_dir(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
@skip_if_binaries_missing('git')
def test_config_set_value_with_space_character(self):
'''
git.config
'''
name = tempfile.mkdtemp(dir=integration.TMP)
self.addCleanup(shutil.rmtree, name, ignore_errors=True)
subprocess.check_call(['git', 'init', '--quiet', name])
ret = self.run_state(
'git.config_set',
name='user.name',
value='foo bar',
repo=name,
**{'global': False})
self.assertSaltTrueReturn(ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(GitTest)
| stephane-martin/salt-debian-packaging | salt-2016.3.2/tests/integration/states/git.py | Python | apache-2.0 | 8,866 | 0.001241 |
""" Run tests on photo models. """
| brianmay/spud | spud/tests/a_unit/photos/test_models.py | Python | gpl-3.0 | 35 | 0 |
import soco
from collections import namedtuple
SonosTrack = namedtuple('SonosTrack', [
'title', 'artist', 'album', 'album_art_uri', 'position',
'playlist_position', 'duration', 'uri', 'resources', 'album_art',
'metadata'
])
SonosTrack.__new__.__defaults__ = (None,) * len(SonosTrack._fields)
class Track(SonosTrack):
def get_unique_id(self):
from hashlib import sha256
h = sha256()
h.update(str(self.artist).encode('utf-8') + str(self.album).encode('utf-8') + str(self.title).encode('utf-8'))
return h.hexdigest()
Resources = namedtuple('Resources', [
'bitrate', 'bits_per_sample', 'color_depth', 'duration', 'import_uri',
'nr_audio_channels', 'protection', 'protocol_info', 'resolution',
'sample_frequency', 'size', 'uri'
])
Resources.__new__.__defaults__ = (None,) * len(Resources._fields)
class SonosWrapper(object):
""" A wrapper around some SoCo calls to simplify things. """
debug = False
speakers = None
sonos = None
def __init__(self, speakers):
self.speakers = speakers
def toggle_debug(self):
self.debug = not(self.debug)
def get_speakers(self):
return self.speakers
def get_current_track_info(self, ip):
if self.debug:
return Track(**{
'title': '99',
'artist': 'Toto',
'album': 'The Essential Toto',
'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a4oz7fKT4bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
'position': '0:00:11',
'playlist_position': '0',
'duration': '0:05:12',
'resources': [Resources(uri='x-sonos-spotify:spotify%3atrack%3a4oz7fKT4bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')],
})
else:
return Track(**self.speakers[ip].get_current_track_info())
def get_queue(self, ip):
songs = []
if self.debug:
songs.extend([
Track(**{
'title': '99',
'artist': 'Toto',
'album': 'The Essential Toto',
'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a4oz7fKT4bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
'position': '0:00:11',
'playlist_position': '0',
'duration': '0:05:12',
'resources': [
Resources(uri='x-sonos-spotify:spotify%3atrack%3a4oz7fKT4bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')
],
}),
Track(**{
'title': 'Africa',
'artist': 'Toto',
'album': 'The Essential Toto',
'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a5ob66YV6bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
'position': '0:00:11',
'playlist_position': '2',
'duration': '0:05:12',
'resources': [Resources(uri='x-sonos-spotify:spotify%3atrack%3a5ob66YV6bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')],
})
])
else:
sonos_songs = self.speakers[ip].get_queue()
for song in sonos_songs:
s = {
'title': song.title,
'artist': song.creator,
'album': song.album,
'album_art_uri': song.album_art_uri,
'resources': song.resources
}
songs.append(Track(**s))
return songs
def __getattr__(self, name):
def wrapper(*args, **kwargs):
return getattr(self.sonos, name)(*args, **kwargs)
return wrapper | andpe/minos | minos/sonos/__init__.py | Python | bsd-3-clause | 3,903 | 0.003587 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from github3.models import GitHubCore
from github3.repos import Repository
class CodeSearchResult(GitHubCore):
def __init__(self, data, session=None):
super(CodeSearchResult, self).__init__(data, session)
self._api = data.get('url')
#: Filename the match occurs in
self.name = data.get('name')
#: Path in the repository to the file
self.path = data.get('path')
#: SHA in which the code can be found
self.sha = data.get('sha')
#: URL to the Git blob endpoint
self.git_url = data.get('git_url')
#: URL to the HTML view of the blob
self.html_url = data.get('html_url')
#: Repository the code snippet belongs to
self.repository = Repository(data.get('repository', {}), self)
#: Score of the result
self.score = data.get('score')
#: Text matches
self.text_matches = data.get('text_matches', [])
def _repr(self):
return '<CodeSearchResult [{0}]>'.format(self.path)
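# Usage sketch (hedged): results of this type are normally yielded by the
# client's code-search iterator rather than constructed directly; the
# ``search_code`` call below is assumed to exist on the session object.
#
#     import github3
#     gh = github3.GitHub()
#     for result in gh.search_code('readBody language:python'):
#         print(result.path, result.repository)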
| liresearchgroup/submtr | submtr/lib/github3/search/code.py | Python | mit | 1,087 | 0 |
#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
Test cases for the logging framework
============================================================================
"""
import sys
import logging
import logging.handlers
import unittest
import dragonfly.log as log
#===========================================================================
class OutputCapturer(object):
def __init__(self):
self.blocks = []
def write(self, data):
self.blocks.append(data)
def flush(self):
pass
def clear(self):
self.blocks = []
@property
def lines(self, prefix=""):
if not self.blocks:
return ()
else:
text = "".join(self.blocks).splitlines()
text = prefix + ("\n" + prefix).join(text)
return text.splitlines()
#---------------------------------------------------------------------------
class LogTestCase(unittest.TestCase):
""" Test behavior of logging system. """
def setUp(self):
self._original_stdout = sys.stdout
self._output = OutputCapturer()
sys.stdout = self._output
self._original_stderr = sys.stderr
self._error = OutputCapturer()
sys.stderr = self._error
def tearDown(self):
sys.stdout = self._original_stdout
sys.stderr = self._original_stderr
# if self._output.blocks:
# prefix = "Output: "
# output = "".join(self._output.blocks).splitlines()
# output = prefix + ("\n" + prefix).join(output)
# print output
# if self._error.blocks:
# prefix = "Error: "
# text = "".join(self._error.blocks).splitlines()
# text = prefix + ("\n" + prefix).join(text)
# print text
self._output = None
self._error = None
def test_filtering(self):
""" Verify that log messages are filtered according to level. """
log.setup_log()
logger = logging.getLogger("grammar")
logger.debug("test_filtering - debug")
logger.info("test_filtering - info")
logger.warning("test_filtering - warning")
logger.error("test_filtering - error")
expected = ["grammar (WARNING): test_filtering - warning",
"grammar (ERROR): test_filtering - error"]
self.assertEqual(self._error.lines, expected)
self._error.clear()
logger = logging.getLogger("grammar.begin")
logger.debug("test_filtering - debug")
logger.info("test_filtering - info")
logger.warning("test_filtering - warning")
logger.error("test_filtering - error")
expected = ["grammar.begin (INFO): test_filtering - info",
"grammar.begin (WARNING): test_filtering - warning",
"grammar.begin (ERROR): test_filtering - error"]
self.assertEqual(self._error.lines, expected)
self._error.clear()
logger = logging.getLogger("grammar.load")
logger.debug("test_filtering - debug")
logger.info("test_filtering - info")
logger.warning("test_filtering - warning")
logger.error("test_filtering - error")
expected = ["grammar.load (WARNING): test_filtering - warning",
"grammar.load (ERROR): test_filtering - error"]
self.assertEqual(self._error.lines, expected)
def _new_lines(self):
# Path of the log file to read; must point at the configured log
# output before this helper can work (left unset in this snapshot).
filename = None
if not hasattr(self, "_previous_line_count"):
self._previous_line_count = 0
lines = open(filename).readlines()
new_lines = lines[self._previous_line_count:]
self._previous_line_count = len(lines)
return new_lines
#===========================================================================
if __name__ == "__main__":
unittest.main()
| Versatilus/dragonfly | dragonfly/test/test_log.py | Python | lgpl-3.0 | 4,558 | 0.000658 |
# encoding: utf-8
"""
Core trade API for simulated and live trading.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .trade_api import TradeApi
__all__ = ['TradeApi']
| mumuwoyou/vnpy-master | vnpy/trader/gateway/tkproGateway/TradeApi/__init__.py | Python | mit | 279 | 0 |
# -*- coding: utf-8 -*-
"""
flask.cli
~~~~~~~~~
A simple command line application to run flask apps.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from threading import Lock, Thread
from functools import update_wrapper
import click
from ._compat import iteritems, reraise
from .helpers import get_debug_flag
from . import __version__
class NoAppException(click.UsageError):
"""Raised if an application cannot be found or loaded."""
def find_best_app(module):
"""Given a module instance this tries to find the best possible
application in the module or raises an exception.
"""
from . import Flask
# Search for the most common names first.
for attr_name in 'app', 'application':
app = getattr(module, attr_name, None)
if app is not None and isinstance(app, Flask):
return app
# Otherwise find the only object that is a Flask instance.
matches = [v for k, v in iteritems(module.__dict__)
if isinstance(v, Flask)]
if len(matches) == 1:
return matches[0]
raise NoAppException('Failed to find application in module "%s". Are '
'you sure it contains a Flask application? Maybe '
'you wrapped it in a WSGI middleware or you are '
'using a factory function.' % module.__name__)
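# Illustrative sketch: a module such as the hypothetical ``hello.py`` below
# is found via the conventional ``app`` attribute checked first above.
#
#     # hello.py
#     from flask import Flask
#     app = Flask(__name__)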
def prepare_exec_for_file(filename):
"""Given a filename this will try to calculate the python path, add it
to the search path and return the actual module name that is expected.
"""
module = []
# Chop off file extensions or package markers
if os.path.split(filename)[1] == '__init__.py':
filename = os.path.dirname(filename)
elif filename.endswith('.py'):
filename = filename[:-3]
else:
raise NoAppException('The file provided (%s) does exist but is not a '
'valid Python file. This means that it cannot '
'be used as an application. Please change the '
'extension to .py' % filename)
filename = os.path.realpath(filename)
dirpath = filename
while 1:
dirpath, extra = os.path.split(dirpath)
module.append(extra)
if not os.path.isfile(os.path.join(dirpath, '__init__.py')):
break
sys.path.insert(0, dirpath)
return '.'.join(module[::-1])
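# Example of the path arithmetic above (paths are hypothetical): for
# ``/srv/proj/pkg/sub/__init__.py`` the walk stops at ``/srv/proj`` because
# it contains no ``__init__.py``, that directory is prepended to sys.path,
# and the call returns the dotted module name:
#
#     prepare_exec_for_file('/srv/proj/pkg/sub/__init__.py')  # -> 'pkg.sub'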
def locate_app(app_id):
"""Attempts to locate the application."""
__traceback_hide__ = True
if ':' in app_id:
module, app_obj = app_id.split(':', 1)
else:
module = app_id
app_obj = None
try:
__import__(module)
except ImportError:
raise NoAppException('The file/path provided (%s) does not appear to '
'exist. Please verify the path is correct. If '
'app is not on PYTHONPATH, ensure the extension '
'is .py' % module)
mod = sys.modules[module]
if app_obj is None:
app = find_best_app(mod)
else:
app = getattr(mod, app_obj, None)
if app is None:
raise RuntimeError('Failed to find application in module "%s"'
% module)
return app
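# Hedged examples of accepted app ids (module names are made up):
#
#     locate_app('hello')         # imports hello, then find_best_app(hello)
#     locate_app('hello:my_app')  # imports hello, returns hello.my_app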
def find_default_import_path():
app = os.environ.get('FLASK_APP')
if app is None:
return
if os.path.isfile(app):
return prepare_exec_for_file(app)
return app
def get_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
message = 'Flask %(version)s\nPython %(python_version)s'
click.echo(message % {
'version': __version__,
'python_version': sys.version,
}, color=ctx.color)
ctx.exit()
version_option = click.Option(['--version'],
help='Show the flask version',
expose_value=False,
callback=get_version,
is_flag=True, is_eager=True)
class DispatchingApp(object):
"""Special application that dispatches to a flask application which
is imported by name in a background thread. If an error happens
it is recorded and shown as part of the WSGI handling, which in the case
of the Werkzeug debugger means that it shows up in the browser.
"""
def __init__(self, loader, use_eager_loading=False):
self.loader = loader
self._app = None
self._lock = Lock()
self._bg_loading_exc_info = None
if use_eager_loading:
self._load_unlocked()
else:
self._load_in_background()
def _load_in_background(self):
def _load_app():
__traceback_hide__ = True
with self._lock:
try:
self._load_unlocked()
except Exception:
self._bg_loading_exc_info = sys.exc_info()
t = Thread(target=_load_app, args=())
t.start()
def _flush_bg_loading_exception(self):
__traceback_hide__ = True
exc_info = self._bg_loading_exc_info
if exc_info is not None:
self._bg_loading_exc_info = None
reraise(*exc_info)
def _load_unlocked(self):
__traceback_hide__ = True
self._app = rv = self.loader()
self._bg_loading_exc_info = None
return rv
def __call__(self, environ, start_response):
__traceback_hide__ = True
if self._app is not None:
return self._app(environ, start_response)
self._flush_bg_loading_exception()
with self._lock:
if self._app is not None:
rv = self._app
else:
rv = self._load_unlocked()
return rv(environ, start_response)
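# Minimal sketch of how this class is wired up in ``run_command`` below:
# loading is deferred so that import errors surface during WSGI handling
# (where the debugger can show them) instead of killing the process.
#
#     app = DispatchingApp(info.load_app, use_eager_loading=False)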
class ScriptInfo(object):
"""Help object to deal with Flask applications. This is usually not
necessary to interface with as it's used internally in the dispatching
to click. In future versions of Flask this object will most likely play
a bigger role. Typically it's created automatically by the
:class:`FlaskGroup` but you can also manually create it and pass it
onwards as click object.
"""
def __init__(self, app_import_path=None, create_app=None):
if create_app is None:
if app_import_path is None:
app_import_path = find_default_import_path()
self.app_import_path = app_import_path
else:
app_import_path = None
#: Optionally the import path for the Flask application.
self.app_import_path = app_import_path
#: Optionally a function that is passed the script info to create
#: the instance of the application.
self.create_app = create_app
#: A dictionary with arbitrary data that can be associated with
#: this script info.
self.data = {}
self._loaded_app = None
def load_app(self):
"""Loads the Flask app (if not yet loaded) and returns it. Calling
this multiple times will just result in the already loaded app to
be returned.
"""
__traceback_hide__ = True
if self._loaded_app is not None:
return self._loaded_app
if self.create_app is not None:
rv = self.create_app(self)
else:
if not self.app_import_path:
raise NoAppException(
'Could not locate Flask application. You did not provide '
'the FLASK_APP environment variable.\n\nFor more '
'information see '
'http://flask.pocoo.org/docs/latest/quickstart/')
rv = locate_app(self.app_import_path)
debug = get_debug_flag()
if debug is not None:
rv.debug = debug
self._loaded_app = rv
return rv
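# Usage sketch: a ScriptInfo can also be built by hand; the import path
# here is an assumption for illustration.
#
#     info = ScriptInfo(app_import_path='hello:app')
#     flask_app = info.load_app()   # cached on subsequent calls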
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
def with_appcontext(f):
"""Wraps a callback so that it's guaranteed to be executed with the
script's application context. If callbacks are registered directly
to the ``app.cli`` object then they are wrapped with this function
by default unless it's disabled.
"""
@click.pass_context
def decorator(__ctx, *args, **kwargs):
with __ctx.ensure_object(ScriptInfo).load_app().app_context():
return __ctx.invoke(f, *args, **kwargs)
return update_wrapper(decorator, f)
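# Hedged example of the decorator on a plain click command; running it
# standalone assumes FLASK_APP is set so the default ScriptInfo can load
# the application.
#
#     import click
#
#     @click.command()
#     @with_appcontext
#     def show_name():
#         from flask import current_app
#         click.echo(current_app.name)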
class AppGroup(click.Group):
"""This works similar to a regular click :class:`~click.Group` but it
changes the behavior of the :meth:`command` decorator so that it
automatically wraps the functions in :func:`with_appcontext`.
Not to be confused with :class:`FlaskGroup`.
"""
def command(self, *args, **kwargs):
"""This works exactly like the method of the same name on a regular
:class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
unless it's disabled by passing ``with_appcontext=False``.
"""
wrap_for_ctx = kwargs.pop('with_appcontext', True)
def decorator(f):
if wrap_for_ctx:
f = with_appcontext(f)
return click.Group.command(self, *args, **kwargs)(f)
return decorator
def group(self, *args, **kwargs):
"""This works exactly like the method of the same name on a regular
:class:`click.Group` but it defaults the group class to
:class:`AppGroup`.
"""
kwargs.setdefault('cls', AppGroup)
return click.Group.group(self, *args, **kwargs)
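# Sketch: commands registered on an AppGroup run inside the application
# context automatically unless opted out (names below are examples).
#
#     db_cli = AppGroup('db')
#
#     @db_cli.command()
#     def migrate():
#         pass  # runs with an application context
#
#     @db_cli.command(with_appcontext=False)
#     def version():
#         pass  # plain click command, no app context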
class FlaskGroup(AppGroup):
"""Special subclass of the :class:`AppGroup` group that supports
loading more commands from the configured Flask app. Normally a
developer does not have to interface with this class but there are
some very advanced use cases for which it makes sense to create an
instance of this.
For information as of why this is useful see :ref:`custom-scripts`.
:param add_default_commands: if this is True then the default run and
shell commands will be added.
:param add_version_option: adds the ``--version`` option.
:param create_app: an optional callback that is passed the script info
and returns the loaded app.
"""
def __init__(self, add_default_commands=True, create_app=None,
add_version_option=True, **extra):
params = list(extra.pop('params', None) or ())
if add_version_option:
params.append(version_option)
AppGroup.__init__(self, params=params, **extra)
self.create_app = create_app
if add_default_commands:
self.add_command(run_command)
self.add_command(shell_command)
self._loaded_plugin_commands = False
def _load_plugin_commands(self):
if self._loaded_plugin_commands:
return
try:
import pkg_resources
except ImportError:
self._loaded_plugin_commands = True
return
for ep in pkg_resources.iter_entry_points('flask.commands'):
self.add_command(ep.load(), ep.name)
self._loaded_plugin_commands = True
def get_command(self, ctx, name):
self._load_plugin_commands()
# We load built-in commands first as these should always be the
# same no matter what the app does. If the app does want to
# override this it needs to make a custom instance of this group
# and not attach the default commands.
#
# This also means that the script stays functional in case the
# application completely fails.
rv = AppGroup.get_command(self, ctx, name)
if rv is not None:
return rv
info = ctx.ensure_object(ScriptInfo)
try:
rv = info.load_app().cli.get_command(ctx, name)
if rv is not None:
return rv
except NoAppException:
pass
def list_commands(self, ctx):
self._load_plugin_commands()
# The commands available is the list of both the application (if
# available) plus the builtin commands.
rv = set(click.Group.list_commands(self, ctx))
info = ctx.ensure_object(ScriptInfo)
try:
rv.update(info.load_app().cli.list_commands(ctx))
except Exception:
# Here we intentionally swallow all exceptions as we don't
# want the help page to break if the app does not exist.
# If someone attempts to use the command we try to create
# the app again and this will give us the error.
pass
return sorted(rv)
def main(self, *args, **kwargs):
obj = kwargs.get('obj')
if obj is None:
obj = ScriptInfo(create_app=self.create_app)
kwargs['obj'] = obj
kwargs.setdefault('auto_envvar_prefix', 'FLASK')
return AppGroup.main(self, *args, **kwargs)
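# Custom-script sketch (see :ref:`custom-scripts`): a factory-based CLI
# where ``make_app`` is a hypothetical application factory receiving the
# script info.
#
#     def make_app(info):
#         from flask import Flask
#         return Flask('myapp')
#
#     my_cli = FlaskGroup(create_app=make_app)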
@click.command('run', short_help='Runs a development server.')
@click.option('--host', '-h', default='127.0.0.1',
help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
help='The port to bind to.')
@click.option('--reload/--no-reload', default=None,
help='Enable or disable the reloader. By default the reloader '
'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
help='Enable or disable the debugger. By default the debugger '
'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
help='Enable or disable eager loading. By default eager '
'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=False,
help='Enable or disable multithreading.')
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
with_threads):
"""Runs a local development server for the Flask application.
This local server is recommended for development purposes only but it
can also be used for simple intranet deployments. By default it will
not support any sort of concurrency at all to simplify debugging. This
can be changed with the --with-threads option which will enable basic
multithreading.
The reloader and debugger are by default enabled if the debug flag of
Flask is enabled and disabled otherwise.
"""
from werkzeug.serving import run_simple
debug = get_debug_flag()
if reload is None:
reload = bool(debug)
if debugger is None:
debugger = bool(debug)
if eager_loading is None:
eager_loading = not reload
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
# Extra startup messages. This depends a bit on Werkzeug internals to
# not double execute when the reloader kicks in.
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
# If we have an import path we can print it out now which can help
# people understand what's being served. If we do not have an
# import path because the app was loaded through a callback then
# we won't print anything.
if info.app_import_path is not None:
print(' * Serving Flask app "%s"' % info.app_import_path)
if debug is not None:
print(' * Forcing debug mode %s' % (debug and 'on' or 'off'))
run_simple(host, port, app, use_reloader=reload,
use_debugger=debugger, threaded=with_threads,
passthrough_errors=True)
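# Typical invocations (shell commands; host/port values are examples):
#
#     $ export FLASK_APP=hello.py
#     $ flask run --host 0.0.0.0 --port 8000 --with-threads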
@click.command('shell', short_help='Runs a shell in the app context.')
@with_appcontext
def shell_command():
"""Runs an interactive Python shell in the context of a given
Flask application. The application will populate the default
namespace of this shell according to its configuration.
This is useful for executing small snippets of management code
without having to manually configure the application.
"""
import code
from flask.globals import _app_ctx_stack
app = _app_ctx_stack.top.app
banner = 'Python %s on %s\nApp: %s%s\nInstance: %s' % (
sys.version,
sys.platform,
app.import_name,
app.debug and ' [debug]' or '',
app.instance_path,
)
ctx = {}
# Support the regular Python interpreter startup script if someone
# is using it.
startup = os.environ.get('PYTHONSTARTUP')
if startup and os.path.isfile(startup):
with open(startup, 'r') as f:
eval(compile(f.read(), startup, 'exec'), ctx)
ctx.update(app.make_shell_context())
code.interact(banner=banner, local=ctx)
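# Typical invocation (shell command; the app path is an example):
#
#     $ FLASK_APP=hello.py flask shell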
cli = FlaskGroup(help="""\
This shell command acts as a general utility script for Flask applications.
It loads the application configured (through the FLASK_APP environment
variable) and then provides commands either provided by the application or
Flask itself.
The most useful commands are the "run" and "shell" commands.
Example usage:
\b
%(prefix)s%(cmd)s FLASK_APP=hello.py
%(prefix)s%(cmd)s FLASK_DEBUG=1
%(prefix)sflask run
""" % {
'cmd': os.name == 'posix' and 'export' or 'set',
'prefix': os.name == 'posix' and '$ ' or '',
})
def main(as_module=False):
this_module = __package__ + '.cli'
args = sys.argv[1:]
if as_module:
if sys.version_info >= (2, 7):
name = 'python -m ' + this_module.rsplit('.', 1)[0]
else:
name = 'python -m ' + this_module
# This module is always executed as "python -m flask.run" and as such
# we need to ensure that we restore the actual command line so that
# the reloader can properly operate.
sys.argv = ['-m', this_module] + sys.argv[1:]
else:
name = None
cli.main(args=args, prog_name=name)
if __name__ == '__main__':
main(as_module=True)
| adrianmoisey/cptdevops | flask/cli.py | Python | bsd-3-clause | 18,141 | 0.00022 |
"""Utils for time travel testings."""
def _t(rel=0.0):
"""Return an absolute time from the relative time given.
The minimal allowed time on Windows is 86400 seconds, for some reason.
Instead of doing the arithmetic in the tests themselves, this function
should be used.
The value `86400` is exported as `time_travel.MIN_START_TIME`, but I shan't
use it, for it is forbidden to test the code using the code that is being
tested.
"""
return 86400.0 + rel
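# A couple of concrete values under the convention above:
#
#     assert _t() == 86400.0
#     assert _t(5.5) == 86405.5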
| snudler6/time-travel | src/tests/utils.py | Python | mit | 492 | 0 |
#!/usr/bin/python
import argparse
from vmipl_communication.network_connection import NetworkConnection
def main():
parser = setup_options_parser()
# parse given arguments
args = parser.parse_args()
validate_input(parser, args)
port = args.port
script_content = args.vmipl_script.read()
args.vmipl_script.close()
if args.description_file is not None:
start_vm(args.description_file, script_content, port)
elif args.vm_id is not None:
reconfigure_vm(args.vm_id, script_content, port)
def setup_options_parser():
descr = ("Communicate with execution environment to start virtual" +
" machine or reconfigure already running one")
# initialize options parser
parser = argparse.ArgumentParser(description=descr)
parser.add_argument("-s", "--start",
help= "start virtual machine given by <VM " +
" description file> using the monitoring" +
" configuration given in <VMI-PL script>",
dest="description_file",
metavar="<VM description file>")
parser.add_argument("-r", "--reconfig",
help= "reconfigure virtual machine given by <VM Id>"+
" using the monitoring configuration given in"+
" <VMI-PL script>", dest="vm_id",
metavar="<VM Id>")
parser.add_argument("vmipl_script", help="path to VMI-PL script",
type=file, metavar="<VMI-PL script>")
parser.add_argument("-p", "--port", type=int, default=5555, dest="port",
help= "network port to connect to execution" +
" environment (default: 5555)")
return parser
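# Example invocations (file names and VM id are hypothetical):
#
#     python client.py --start vm.cfg monitor.vmipl
#     python client.py --reconfig 42 monitor.vmipl --port 6000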
def validate_input(parser, args):
if args.description_file is not None and args.vm_id is not None:
parser.error("only one mode can be chosen at a time")
if args.description_file is None and args.vm_id is None:
parser.error("at least one mode has to be chosen")
def start_vm(description_file_path, script_content, port):
conn = NetworkConnection()
conn.connect('127.0.0.1', port)
conn.start_vm(description_file_path, script_content)
response = conn.receive_server_response()
print response
conn.close()
def reconfigure_vm(vm_id, script_content, port):
raise NotImplementedError()
if __name__ == '__main__':
main()
| FlorianWestphal/VMI-PL | front_end/client.py | Python | mit | 2,163 | 0.039297 |
import unittest
from hamlpy.parser.core import (
ParseException,
Stream,
peek_indentation,
read_line,
read_number,
read_quoted_string,
read_symbol,
read_whitespace,
read_word,
)
from hamlpy.parser.utils import html_escape
class ParserTest(unittest.TestCase):
def test_read_whitespace(self):
stream = Stream(" \t foo \n bar ")
assert read_whitespace(stream) == " \t "
assert stream.text[stream.ptr :] == "foo \n bar "
stream.ptr += 3 # skip over foo
assert read_whitespace(stream) == " "
assert stream.text[stream.ptr :] == "\n bar "
assert read_whitespace(stream, include_newlines=True) == "\n "
assert stream.text[stream.ptr :] == "bar "
stream.ptr += 3 # skip over bar
assert read_whitespace(stream) == " "
assert stream.text[stream.ptr :] == ""
def test_peek_indentation(self):
assert peek_indentation(Stream("content")) == 0
assert peek_indentation(Stream(" content")) == 2
assert peek_indentation(Stream("\n")) is None
assert peek_indentation(Stream(" \n")) is None
def test_quoted_string(self):
stream = Stream("'hello'---")
assert read_quoted_string(stream) == "hello"
assert stream.text[stream.ptr :] == "---"
stream = Stream('"this don\'t \\"x\\" hmm" not in string')
assert read_quoted_string(stream) == 'this don\'t "x" hmm'
assert stream.text[stream.ptr :] == " not in string"
self.assertRaises(ParseException, read_quoted_string, Stream('"no end quote...'))
def test_read_line(self):
stream = Stream("line1\n line2\n\nline4\n\n")
assert read_line(stream) == "line1"
assert read_line(stream) == " line2"
assert read_line(stream) == ""
assert read_line(stream) == "line4"
assert read_line(stream) == ""
assert read_line(stream) is None
assert read_line(Stream("last line ")) == "last line "
def test_read_number(self):
stream = Stream('123"')
assert read_number(stream) == "123"
assert stream.text[stream.ptr :] == '"'
stream = Stream("123.4xx")
assert read_number(stream) == "123.4"
assert stream.text[stream.ptr :] == "xx"
stream = Stream("0.0001 ")
assert read_number(stream) == "0.0001"
assert stream.text[stream.ptr :] == " "
def test_read_symbol(self):
stream = Stream("=> bar")
assert read_symbol(stream, ["=>", ":"]) == "=>"
assert stream.text[stream.ptr :] == " bar"
self.assertRaises(ParseException, read_symbol, Stream("foo"), ["=>"])
def test_read_word(self):
stream = Stream("foo_bar")
assert read_word(stream) == "foo_bar"
assert stream.text[stream.ptr :] == ""
stream = Stream("foo_bar ")
assert read_word(stream) == "foo_bar"
assert stream.text[stream.ptr :] == " "
stream = Stream("ng-repeat(")
assert read_word(stream) == "ng"
assert stream.text[stream.ptr :] == "-repeat("
stream = Stream("ng-repeat(")
assert read_word(stream, ("-",)) == "ng-repeat"
assert stream.text[stream.ptr :] == "("
stream = Stream("これはテストです...")
assert read_word(stream) == "これはテストです"
assert stream.text[stream.ptr :] == "..."
class UtilsTest(unittest.TestCase):
def test_html_escape(self):
assert html_escape("") == ""
assert html_escape("&<>\"'") == "&<>"'"
assert html_escape('{% trans "hello" %}') == '{% trans "hello" %}'
assert html_escape('{{ foo|default:"hello" }}') == '{{ foo|default:"hello" }}'
assert html_escape("{% }} & %}") == "{% }} & %}"
result = html_escape('<>{% trans "hello" %}<>{{ foo|default:"hello" }}<>')
assert result == '<>{% trans "hello" %}<>{{ foo|default:"hello" }}<>'
| nyaruka/django-hamlpy | hamlpy/test/test_parser.py | Python | mit | 4,020 | 0.004764 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'P3Talk.sub_community'
db.alter_column(u'p3_p3talk', 'sub_community', self.gf('django.db.models.fields.CharField')(max_length=20))
def backwards(self, orm):
# Changing field 'P3Talk.sub_community'
db.alter_column(u'p3_p3talk', 'sub_community', self.gf('django.db.models.fields.TextField')())
models = {
u'assopy.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'numcode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'vat_company': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'vat_company_verify': ('django.db.models.fields.CharField', [], {'default': "'-'", 'max_length': '1'}),
'vat_person': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'assopy.user': {
'Meta': {'object_name': 'User'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'assopy_id': ('django.db.models.fields.CharField', [], {'max_length': '22', 'unique': 'True', 'null': 'True'}),
'card_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'cf_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['assopy.Country']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '36', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'assopy_user'", 'unique': 'True', 'to': u"orm['auth.User']"}),
'vat_number': ('django.db.models.fields.CharField', [], {'max_length': '22', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'conference.attendeeprofile': {
'Meta': {'object_name': 'AttendeeProfile'},
'birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'company_homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'job_title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'personal_homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '6'}),
'visibility': ('django.db.models.fields.CharField', [], {'default': "'x'", 'max_length': '1'})
},
u'conference.conference': {
'Meta': {'object_name': 'Conference'},
'cfp_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cfp_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'primary_key': 'True'}),
'conference_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'conference_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'voting_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'voting_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'conference.conferencetag': {
'Meta': {'object_name': 'ConferenceTag'},
'category': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'conference.conferencetaggeditem': {
'Meta': {'object_name': 'ConferenceTaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'conference_conferencetaggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'conference_conferencetaggeditem_items'", 'to': u"orm['conference.ConferenceTag']"})
},
u'conference.fare': {
'Meta': {'unique_together': "(('conference', 'code'),)", 'object_name': 'Fare'},
'blob': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'conference': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_validity': ('django.db.models.fields.DateField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'payment_type': ('django.db.models.fields.CharField', [], {'default': "'p'", 'max_length': '1'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'}),
'recipient_type': ('django.db.models.fields.CharField', [], {'default': "'p'", 'max_length': '1'}),
'start_validity': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'ticket_type': ('django.db.models.fields.CharField', [], {'default': "'conference'", 'max_length': '10', 'db_index': 'True'})
},
u'conference.multilingualcontent': {
'Meta': {'object_name': 'MultilingualContent'},
'body': ('django.db.models.fields.TextField', [], {}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
u'conference.speaker': {
'Meta': {'object_name': 'Speaker'},
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
u'conference.talk': {
'Meta': {'ordering': "['title']", 'object_name': 'Talk'},
'conference': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'level': ('django.db.models.fields.CharField', [], {'default': "'beginner'", 'max_length': '12'}),
'qa_duration': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slides': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'speakers': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['conference.Speaker']", 'through': u"orm['conference.TalkSpeaker']", 'symmetrical': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'suggested_tags': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'teaser_video': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'training_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}),
'video_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'video_type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'video_url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'conference.talkspeaker': {
'Meta': {'unique_together': "(('talk', 'speaker'),)", 'object_name': 'TalkSpeaker'},
'helper': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Speaker']"}),
'talk': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Talk']"})
},
u'conference.ticket': {
'Meta': {'object_name': 'Ticket'},
'fare': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Fare']"}),
'frozen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'ticket_type': ('django.db.models.fields.CharField', [], {'default': "'standard'", 'max_length': '8'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'p3.donation': {
'Meta': {'object_name': 'Donation'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['assopy.User']"})
},
u'p3.hotelroom': {
'Meta': {'unique_together': "(('conference', 'room_type'),)", 'object_name': 'HotelRoom'},
'amount': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'conference': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Conference']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {}),
'room_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
u'p3.p3profile': {
'Meta': {'object_name': 'P3Profile'},
'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2', 'db_index': 'True', 'blank': 'True'}),
'image_gravatar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '500'}),
'profile': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_profile'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['conference.AttendeeProfile']"}),
'spam_recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'spam_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'spam_user_message': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'})
},
u'p3.p3talk': {
'Meta': {'object_name': 'P3Talk'},
'sub_community': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20'}),
'talk': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_talk'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['conference.Talk']"})
},
u'p3.speakerconference': {
'Meta': {'object_name': 'SpeakerConference'},
'first_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'speaker': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_speaker'", 'unique': 'True', 'to': u"orm['conference.Speaker']"})
},
u'p3.sprint': {
'Meta': {'object_name': 'Sprint'},
'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'conference': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Conference']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['assopy.User']"})
},
u'p3.sprintpresence': {
'Meta': {'object_name': 'SprintPresence'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sprint': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['p3.Sprint']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['assopy.User']"})
},
u'p3.ticketconference': {
'Meta': {'object_name': 'TicketConference'},
'assigned_to': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'badge_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'days': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'diet': ('django.db.models.fields.CharField', [], {'default': "'omnivorous'", 'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'python_experience': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'shirt_size': ('django.db.models.fields.CharField', [], {'default': "'l'", 'max_length': '4'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'ticket': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_conference'", 'unique': 'True', 'to': u"orm['conference.Ticket']"})
},
u'p3.ticketroom': {
'Meta': {'object_name': 'TicketRoom'},
'checkin': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'checkout': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'document': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'room_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['p3.HotelRoom']"}),
'ticket': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_conference_room'", 'unique': 'True', 'to': u"orm['conference.Ticket']"}),
'ticket_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'unused': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'p3.ticketsim': {
'Meta': {'object_name': 'TicketSIM'},
'document': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'plan_type': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '3'}),
'sim_type': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '5'}),
'ticket': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'p3_conference_sim'", 'unique': 'True', 'to': u"orm['conference.Ticket']"})
}
}
complete_apps = ['p3'] | leriomaggio/pycon_site | p3/migrations/0004_auto__chg_field_p3talk_sub_community.py | Python | bsd-2-clause | 21,667 | 0.008123 |
from itertools import repeat
from xmodule.course_module import CourseDescriptor
from .exceptions import (ItemNotFoundError, NoPathToItem)
from . import Location
def path_to_location(modulestore, course_id, location):
'''
Try to find a course_id/chapter/section[/position] path to location in
modulestore. The courseware insists that the first level in the course is
chapter, but any kind of module can be a "section".
location: something that can be passed to Location
course_id: Search for paths in this course.
raise ItemNotFoundError if the location doesn't exist.
raise NoPathToItem if the location exists, but isn't accessible via
a chapter/section path in the course(s) being searched.
Return a tuple (course_id, chapter, section, position) suitable for the
courseware index view.
A location may be accessible via many paths. This method may
return any valid path.
If the section is a sequential or vertical, position will be the position
of this location in that sequence. Otherwise, position will
be None. TODO (vshnayder): Not true yet.
'''
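# Hedged call sketch (the course id and location are made-up examples):
#
#     course_id, chapter, section, position = path_to_location(
#         store, 'Org/Course/Run', 'i4x://Org/Course/problem/p1')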
def flatten(xs):
'''Convert lisp-style (a, (b, (c, ()))) list into a python list.
Not a general flatten function. '''
p = []
while xs != ():
p.append(xs[0])
xs = xs[1]
return p
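# Concrete example of the shape handled above:
#     flatten((1, (2, (3, ())))) == [1, 2, 3]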
def find_path_to_course():
'''Find a path up the location graph to a node with the
specified category.
If no path exists, return None.
If a path exists, return it as a list with target location first, and
the starting location last.
'''
# Standard DFS
# To keep track of where we came from, the work queue has
# tuples (location, path-so-far). To avoid lots of
# copying, the path-so-far is stored as a lisp-style
# list--nested hd::tl tuples, and flattened at the end.
queue = [(location, ())]
while len(queue) > 0:
(loc, path) = queue.pop() # Takes from the end
loc = Location(loc)
# get_parent_locations should raise ItemNotFoundError if location
# isn't found so we don't have to do it explicitly. Call this
# first to make sure the location is there (even if it's a course, and
# we would otherwise immediately exit).
parents = modulestore.get_parent_locations(loc, course_id)
# print 'Processing loc={0}, path={1}'.format(loc, path)
if loc.category == "course":
# confirm that this is the right course
if course_id == CourseDescriptor.location_to_id(loc):
# Found it!
path = (loc, path)
return flatten(path)
# otherwise, add parent locations at the end
newpath = (loc, path)
queue.extend(zip(parents, repeat(newpath)))
# If we're here, there is no path
return None
if not modulestore.has_item(location):
raise ItemNotFoundError
path = find_path_to_course()
if path is None:
raise NoPathToItem(location)
n = len(path)
course_id = CourseDescriptor.location_to_id(path[0])
# pull out the location names
chapter = path[1].name if n > 1 else None
section = path[2].name if n > 2 else None
# Figure out the position
position = None
# This block of code will find the position of a module within a nested tree
# of modules. If a problem is on tab 2 of a sequence that's on tab 3 of a
# sequence, the resulting position is 3_2. However, no positional modules
# (e.g. sequential and videosequence) currently deal with this form of
# representing nested positions. This needs to happen before jumping to a
# module nested in more than one positional module will work.
if n > 3:
position_list = []
for path_index in range(2, n - 1):
category = path[path_index].category
if category == 'sequential' or category == 'videosequence':
section_desc = modulestore.get_instance(course_id, path[path_index])
child_locs = [c.location for c in section_desc.get_children()]
# positions are 1-indexed, and should be strings to be consistent with
# url parsing.
position_list.append(str(child_locs.index(path[path_index + 1]) + 1))
position = "_".join(position_list)
return (course_id, chapter, section, position)
| abhinavp13/IITBX-edx-platform-dev | common/lib/xmodule/xmodule/modulestore/search.py | Python | agpl-3.0 | 4,563 | 0.001096 |
from __future__ import absolute_import, print_function, division
from netlib.http import decoded
from .connections import ClientConnection, ServerConnection
from .flow import Flow, Error
from .http import (
HTTPFlow, HTTPRequest, HTTPResponse, Headers,
make_error_response, make_connect_request, make_connect_response, expect_continue_response
)
from .tcp import TCPFlow
FLOW_TYPES = dict(
http=HTTPFlow,
tcp=TCPFlow,
)
__all__ = [
"HTTPFlow", "HTTPRequest", "HTTPResponse", "Headers", "decoded",
"make_error_response", "make_connect_request",
"make_connect_response", "expect_continue_response",
"ClientConnection", "ServerConnection",
"Flow", "Error",
"TCPFlow",
"FLOW_TYPES",
]
| x2Ident/x2Ident_test | mitmproxy/mitmproxy/models/__init__.py | Python | gpl-3.0 | 753 | 0.001328 |
# Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Flask, abort, request, render_template, redirect, url_for
from uuid import uuid4
import requests
import requests.auth
import urllib
import json
import operator
import string
import random
import os
SPOTIFY_APP_ID = '9bf2a8a4ade04e6f83b1f80b6f671fff'
SPOTIFY_REDIRECT_URI = 'http://TopSpotifyApp.mybluemix.net/callback'
SPOTIFY_CLIENT_SECRET = 'fbe3a1d865e04fefa7e31b87dab6f04b'
ALL_GENRES = ['acoustic', 'afrobeat', 'alt-rock', 'alternative', 'ambient', 'black-metal', 'bluegrass', 'blues', 'british', 'chill', 'classical', 'club', 'country', 'dance', 'deep-house', 'disco', 'disney', 'dubstep', 'edm', 'electro', 'electronic', 'folk', 'french', 'grunge', 'happy', 'hard-rock', 'heavy-metal', 'hip-hop', 'holidays', 'house', 'indie', 'indie-pop', 'jazz', 'k-pop', 'latin', 'metal', 'minimal-techno', 'new-age', 'new-release', 'party', 'piano', 'pop', 'progressive-house', 'psych-rock', 'punk', 'rainy-day', 'reggae','road-trip', 'rock', 'salsa', 'sleep', 'soul', 'soundtracks', 'spanish', 'study', 'summer', 'synth-pop', 'techno', 'trance', 'work-out']
TOKEN = ''
RANDOM_STATE = ''
SEEDS = ''
RECS = []
app = Flask(__name__)
# Main page: prompt Spotify login
@app.route('/')
def homepage():
return render_template("index.html", requestURL=make_authorization_url())
# Callback page
@app.route('/callback')
def callback():
code = request.args.get('code')
state = request.args.get('state', '')
# Check state, get access token if valid
if state == RANDOM_STATE:
# Request access and refresh tokens
token_json = get_token(code)
global TOKEN
TOKEN = token_json['access_token']
return redirect(url_for('profile'))
# invalid state, abort
else:
abort(403)
@app.route('/profile')
def profile():
resp_json = get_userInfo(TOKEN)
# resp_json holds (user info, top artists) fetched with the access token
user_info = resp_json[0]
top_json = resp_json[1]['items']
profile_img = user_info['images']
# Extract name, links, pictures, ids, top_genres
artists = extract_artist(top_json)
rec_artists = get_recommendations(artists[3])
artist_recs = rec_artists[0]
artist_recs_info = rec_artists[1]
return render_template("profile.html", user_json=user_info, artists=artists,profile_img=profile_img, top_json=top_json, artist_recs=artist_recs, artist_recs_info=artist_recs_info,all_genres=ALL_GENRES)
# Browse genre page
@app.route('/genre/<genre>')
def genre(genre):
headers = {'Authorization': 'Bearer ' + TOKEN}
genre_str = 'genre:' + '"' + genre + '"'
params = {"q":genre_str, "market":"from_token", "type":"artist"}
url = "https://api.spotify.com/v1/search?" + urllib.urlencode(params)
response = requests.get(url, headers=headers)
artists = response.json()['artists']['items']
return render_template("genres.html",genre=genre, artists=artists)
@app.route('/hipster', methods=['POST'])
def hipster():
data = request.form.to_dict()
genres = data.keys()
seed_genres = ','.join(genres)
# Retrieve tracks with seed_genres max popularity 50
headers = {'Authorization': 'Bearer ' + TOKEN}
params = {"seed_genres":seed_genres, "min_popularity":20, "max_popularity":50, "market":"US"}
url = "https://api.spotify.com/v1/recommendations?" + urllib.urlencode(params)
response = requests.get(url, headers=headers)
recs_json = response.json()['tracks']
global SEEDS, RECS
SEEDS = seed_genres
RECS = recs_json
return render_template("hipster.html", seed_genres=SEEDS, recs_json=RECS)
@app.route('/hipster', methods=['GET'])
def hipster2():
return render_template("hipster.html", seed_genres=SEEDS, recs_json=RECS)
# Build the Spotify authorization request URL
def make_authorization_url():
global RANDOM_STATE
RANDOM_STATE = generateRandomString(16)
params = {"client_id": SPOTIFY_APP_ID,
"response_type": "code",
"redirect_uri": SPOTIFY_REDIRECT_URI,
"state": RANDOM_STATE,
"scope":"user-top-read"
}
url = "https://accounts.spotify.com/authorize?" + urllib.urlencode(params)
return url
# Return random string for state
def generateRandomString(len):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(len))
# Extract name, link, pic, id, and top genres from user top artist data
def extract_artist(artists):
names = []
links = []
pics = []
ids = []
top_genres = {}
for i in range(0, len(artists)):
names.append(artists[i]['name'])
links.append(artists[i]['external_urls']['spotify'])
pics.append(artists[i]['images'][0]['url'])
ids.append(artists[i]['id'])
genres = artists[i]['genres']
for genre in genres:
if genre in top_genres.keys():
top_genres[genre] = top_genres[genre]+1
else:
top_genres[genre] = 1
sorted_x = sorted(top_genres.items(), key=operator.itemgetter(1), reverse=True)
return names, links, pics, ids, sorted_x
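# Hedged sketch of the return shape, with made-up values (``items`` is the
# artist list from the top-artists response):
#
#     names, links, pics, ids, top = extract_artist(items)
#     # names -> ['Toto', ...]; top -> [('soft rock', 7), ('rock', 5), ...]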
# Find top related artists using the user's top artist data
# Input: artist_ids
def get_recommendations(artist_ids):
# key artist_name, value: num time recommended
rec_artists = {}
# Key artist_name, value: (spotify_url, image link)
rec_artists_info = {}
for i in range(0,len(artist_ids)):
rec_url = "https://api.spotify.com/v1/artists/" + artist_ids[i] + "/related-artists"
rec_response = requests.get(rec_url)
rec_json = rec_response.json()['artists']
for j in range(0,len(rec_json)):
artist = rec_json[j]
artist_name = artist['name']
artist_id = artist['id']
# Add recommended artist
            if artist_id not in artist_ids:
                if artist_name in rec_artists:
                    rec_artists[artist_name] = rec_artists[artist_name] + 1
                else:
                    artist_images = artist['images']
                    artist_spotify_url = artist['external_urls']['spotify']
                    if len(artist_images) == 0:
                        rec_artists_info[artist_name] = 'NA'
                    else:
                        rec_artists_info[artist_name] = (artist_spotify_url, artist_images[0]['url'])
                    rec_artists[artist_name] = 1
sorted_x = sorted(rec_artists.items(), key=operator.itemgetter(1), reverse=True)
if len(sorted_x) < 20:
return sorted_x, rec_artists_info
else:
return sorted_x[:20], rec_artists_info
# Use code to obtain access token, refresh token
def get_token(code):
client_auth = requests.auth.HTTPBasicAuth(SPOTIFY_APP_ID, SPOTIFY_CLIENT_SECRET)
headers = {'Authorization': 'Basic '}
post_data = {"grant_type": "authorization_code",
"code": code,
"redirect_uri": SPOTIFY_REDIRECT_URI}
response = requests.post("https://accounts.spotify.com/api/token",
auth=client_auth,
headers=headers,
data=post_data)
token_json = response.json()
return token_json
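# Note (hedged, per the OAuth2 spec rather than this code): token_json is
# expected to carry "access_token", "refresh_token", "token_type" and
# "expires_in"; only "access_token" is consumed above, in callback().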
# Use access token to get user info
# Return user info and top artists
def get_userInfo(access_token):
headers = {'Authorization': 'Bearer ' + access_token}
response = requests.get("https://api.spotify.com/v1/me", headers=headers)
me_json = response.json()
# Get user top artists
    num_artists = 20
top_params = {"limit": num_artists}
top_url = "https://api.spotify.com/v1/me/top/artists?" + urllib.urlencode(top_params)
top_response = requests.get(top_url, headers=headers)
top_json = top_response.json()
return me_json, top_json
port = os.getenv('PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))
| ecliu110/SpotifyApp | welcome.py | Python | apache-2.0 | 8,683 | 0.004607 |
from gps import *
import sys
import time
import threading
import math
class GpsController(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.gpsd = gps(mode=WATCH_ENABLE) #starting the stream of info
self.running = False
def run(self):
self.running = True
while self.running:
# grab EACH set of gpsd info to clear the buffer
self.gpsd.next()
def stopController(self):
self.running = False
@property
def fix(self):
return self.gpsd.fix
@property
def utc(self):
return self.gpsd.utc
@property
def satellites(self):
return self.gpsd.satellites
if __name__ == '__main__':
# create the controller
gpsc = GpsController()
try:
# start controller
gpsc.start()
while True:
print "latitude ", gpsc.fix.latitude
print "longitude ", gpsc.fix.longitude
print "time utc ", gpsc.utc, " + ", gpsc.fix.time
print "altitude (m)", gpsc.fix.altitude
print "eps ", gpsc.fix.eps
print "epx ", gpsc.fix.epx
print "epv ", gpsc.fix.epv
print "ept ", gpsc.gpsd.fix.ept
print "speed (m/s) ", gpsc.fix.speed
print "climb ", gpsc.fix.climb
print "track ", gpsc.fix.track
print "mode ", gpsc.fix.mode
print "sats ", gpsc.satellites
time.sleep(0.5)
#Ctrl C
except KeyboardInterrupt:
print "User cancelled"
#Error
except:
print "Unexpected error:", sys.exc_info()[0]
raise
finally:
print "Stopping gps controller"
gpsc.stopController()
        # wait for the thread to finish
gpsc.join()
print "Done" | linusluotsinen/RPiAntiTheft | util/gps_handler/gps_controller.py | Python | mit | 1,812 | 0.007174 |
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import logging
import os
import threading
from collections import OrderedDict
# fsutils, , misc filesystem utils, internal
import fsutils
# Ordered JSON, , read & write json, internal
import ordered_json
# folders, , get places to install things, internal
import folders
#
# yotta's settings always written to ~/.yotta/config.json, but are read, in
# order from:
#
# 1. environment variables (YOTTA_{section_name.upper()}_{variable_name.upper()})
# 2. ~/.yotta/config.json
# 3. /usr/local/etc/yottaconfig.json
# 4. /etc/yottaconfig.json
#
# As soon as a value is found for a variable, the search is stopped.
#
#
# constants
user_config_file = os.path.join(folders.userSettingsDirectory(), 'config.json')
dir_config_file = os.path.join('.','.yotta.json')
config_files = [
dir_config_file,
user_config_file,
]
if os.name == 'nt':
config_files += [
os.path.expanduser(os.path.join(folders.prefix(),'yotta.json'))
]
else:
config_files += [
os.path.expanduser(os.path.join(folders.prefix(),'etc','yotta.json')),
os.path.join('etc','yotta.json')
]
# private state
parser = None
parser_lock = threading.Lock()
# private API
# class for reading JSON config files,
class _JSONConfigParser(object):
def __init__(self):
self.configs = OrderedDict()
def read(self, filenames):
        ''' Read a list of files. Their configuration values are merged, with
preference to values from files earlier in the list.
'''
for fn in filenames:
try:
self.configs[fn] = ordered_json.load(fn)
except IOError:
self.configs[fn] = OrderedDict()
def get(self, path):
''' return a configuration value
usage:
get('section.property')
Note that currently array indexes are not supported. You must
get the whole array.
returns None if any path element or the property is missing
'''
path = _splitPath([path])
for config in self.configs.values():
cur = config
for el in path:
if el in cur:
cur = cur[el]
else:
cur = None
break
if cur is not None:
return cur
return None
def set(self, path, value=None, filename=None):
''' Set a configuration value. If no filename is specified, the
property is set in the first configuration file. Note that if a
filename is specified and the property path is present in an
earlier filename then set property will be hidden.
usage:
set('section.property', value='somevalue')
Note that currently array indexes are not supported. You must
set the whole array.
'''
if filename is None:
config = self._firstConfig()[1]
else:
config = self.configs[filename]
path = _splitPath([path])
for el in path[:-1]:
if el in config:
config = config[el]
else:
config[el] = OrderedDict()
config = config[el]
config[path[-1]] = value
def write(self, filename=None):
if filename is None:
filename, data = self._firstConfig()
elif filename in self.configs:
data = self.configs[filename]
else:
raise ValueError('No such file.')
dirname = os.path.dirname(filename)
fsutils.mkDirP(dirname)
ordered_json.dump(filename, data)
def _firstConfig(self):
for fn, data in self.configs.items():
return fn, data
raise ValueError('No configs available.')
def _splitPath(path):
r = []
for p in path:
r += p.split('.')
if not len(p):
raise ValueError('A path must be specified.')
return r
def _ensureParser():
global parser
with parser_lock:
if not parser:
parser = _JSONConfigParser()
parser.read(config_files)
def _checkEnv(path):
env_key = '_'.join(['YOTTA'] + [x.upper() for x in _splitPath(path)])
try:
return os.environ[env_key]
except KeyError:
return None
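# Example (illustrative): get('github.authtoken') first checks the environment
# variable YOTTA_GITHUB_AUTHTOKEN, then falls back to the config files in order.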
# public API
def get(path):
value = _checkEnv(path)
if value:
logging.debug('read property from environment: %s', path)
return value
_ensureParser()
with parser_lock:
return parser.get(path)
def getProperty(section, name):
return get(section + '.' + name)
def set(path, value, save_locally=False):
if save_locally:
filename = dir_config_file
else:
filename = user_config_file
logging.debug('setProperty: %s %s:%s', path, type(value), value)
_ensureParser()
with parser_lock:
parser.set(path, value=value, filename=filename)
parser.write(filename)
def setProperty(section, name, value, save_locally=False):
set(section+'.'+name, value, save_locally)
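# Minimal usage sketch (added; the module import path depends on packaging --
# yotta.lib.settings in this tree):
#
#   from yotta.lib import settings
#   settings.set('github.authtoken', 'token-value')   # persisted to config.json
#   settings.get('github.authtoken')                  # -> 'token-value'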
| BlackstoneEngineering/yotta | yotta/lib/settings.py | Python | apache-2.0 | 5,218 | 0.003066 |
# coding=utf-8
import logging
import rarfile
import os
from subliminal.exceptions import ConfigurationError
from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, \
LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize, region, type_map, \
raise_for_status, json, SHOW_EXPIRATION_TIME, title_re, season_re, datetime, pytz, NO_VALUE, releases_key, \
SUBTITLE_EXTENSIONS, language_converters
from subzero.language import Language
logger = logging.getLogger(__name__)
class LegendasTVSubtitle(_LegendasTVSubtitle):
def __init__(self, language, type, title, year, imdb_id, season, archive, name):
super(LegendasTVSubtitle, self).__init__(language, type, title, year, imdb_id, season, archive, name)
self.archive.content = None
self.release_info = archive.name
self.page_link = archive.link
def make_picklable(self):
self.archive.content = None
return self
def get_matches(self, video, hearing_impaired=False):
matches = set()
# episode
if isinstance(video, Episode) and self.type == 'episode':
# series
if video.series and (sanitize(self.title) in (
sanitize(name) for name in [video.series] + video.alternative_series)):
matches.add('series')
# year
if video.original_series and self.year is None or video.year and video.year == self.year:
matches.add('year')
# imdb_id
if video.series_imdb_id and self.imdb_id == video.series_imdb_id:
matches.add('series_imdb_id')
# movie
elif isinstance(video, Movie) and self.type == 'movie':
# title
if video.title and (sanitize(self.title) in (
sanitize(name) for name in [video.title] + video.alternative_titles)):
matches.add('title')
# year
if video.year and self.year == video.year:
matches.add('year')
# imdb_id
if video.imdb_id and self.imdb_id == video.imdb_id:
matches.add('imdb_id')
# name
matches |= guess_matches(video, guessit(self.name, {'type': self.type, 'single_value': True}))
return matches
class LegendasTVProvider(_LegendasTVProvider):
languages = {Language(*l) for l in language_converters['legendastv'].to_legendastv.keys()}
subtitle_class = LegendasTVSubtitle
def __init__(self, username=None, password=None):
# Provider needs UNRAR installed. If not available raise ConfigurationError
try:
rarfile.custom_check([rarfile.UNRAR_TOOL], True)
except rarfile.RarExecError:
raise ConfigurationError('UNRAR tool not available')
if any((username, password)) and not all((username, password)):
raise ConfigurationError('Username and password must be specified')
self.username = username
self.password = password
self.logged_in = False
self.session = None
@staticmethod
def is_valid_title(title, title_id, sanitized_title, season, year, imdb_id):
"""Check if is a valid title."""
if title["imdb_id"] and title["imdb_id"] == imdb_id:
logger.debug(u'Matched title "%s" as IMDB ID %s', sanitized_title, title["imdb_id"])
return True
if title["title2"] and sanitize(title['title2']) == sanitized_title:
logger.debug(u'Matched title "%s" as "%s"', sanitized_title, title["title2"])
return True
return _LegendasTVProvider.is_valid_title(title, title_id, sanitized_title, season, year)
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
def search_titles(self, title, season, title_year, imdb_id):
"""Search for titles matching the `title`.
For episodes, each season has it own title
:param str title: the title to search for.
:param int season: season of the title
:param int title_year: year of the title
:return: found titles.
:rtype: dict
"""
titles = {}
sanitized_titles = [sanitize(title)]
ignore_characters = {'\'', '.'}
if any(c in title for c in ignore_characters):
sanitized_titles.append(sanitize(title, ignore_characters=ignore_characters))
for sanitized_title in sanitized_titles:
# make the query
if season:
logger.info('Searching episode title %r for season %r', sanitized_title, season)
else:
logger.info('Searching movie title %r', sanitized_title)
r = self.session.get(self.server_url + 'legenda/sugestao/{}'.format(sanitized_title), timeout=10)
raise_for_status(r)
results = json.loads(r.text)
# loop over results
for result in results:
source = result['_source']
# extract id
title_id = int(source['id_filme'])
# extract type
title = {'type': type_map[source['tipo']], 'title2': None, 'imdb_id': None}
# extract title, year and country
name, year, country = title_re.match(source['dsc_nome']).groups()
title['title'] = name
if "dsc_nome_br" in source:
                    name2, _, _ = title_re.match(source['dsc_nome_br']).groups()
title['title2'] = name2
# extract imdb_id
if source['id_imdb'] != '0':
if not source['id_imdb'].startswith('tt'):
title['imdb_id'] = 'tt' + source['id_imdb'].zfill(7)
else:
title['imdb_id'] = source['id_imdb']
# extract season
if title['type'] == 'episode':
if source['temporada'] and source['temporada'].isdigit():
title['season'] = int(source['temporada'])
else:
match = season_re.search(source['dsc_nome_br'])
if match:
title['season'] = int(match.group('season'))
else:
logger.debug('No season detected for title %d (%s)', title_id, name)
# extract year
if year:
title['year'] = int(year)
elif source['dsc_data_lancamento'] and source['dsc_data_lancamento'].isdigit():
# year is based on season air date hence the adjustment
title['year'] = int(source['dsc_data_lancamento']) - title.get('season', 1) + 1
# add title only if is valid
# Check against title without ignored chars
if self.is_valid_title(title, title_id, sanitized_titles[0], season, title_year, imdb_id):
logger.debug(u'Found title: %s', title)
titles[title_id] = title
logger.debug('Found %d titles', len(titles))
return titles
def query(self, language, title, season=None, episode=None, year=None, imdb_id=None):
# search for titles
titles = self.search_titles(title, season, year, imdb_id)
subtitles = []
# iterate over titles
for title_id, t in titles.items():
logger.info('Getting archives for title %d and language %d', title_id, language.legendastv)
archives = self.get_archives(title_id, language.legendastv, t['type'], season, episode)
if not archives:
logger.info('No archives found for title %d and language %d', title_id, language.legendastv)
# iterate over title's archives
for a in archives:
# compute an expiration time based on the archive timestamp
expiration_time = (datetime.utcnow().replace(tzinfo=pytz.utc) - a.timestamp).total_seconds()
# attempt to get the releases from the cache
cache_key = releases_key.format(archive_id=a.id, archive_name=a.name)
releases = region.get(cache_key, expiration_time=expiration_time)
# the releases are not in cache or cache is expired
if releases == NO_VALUE:
logger.info('Releases not found in cache')
# download archive
self.download_archive(a)
# extract the releases
releases = []
for name in a.content.namelist():
# discard the legendastv file
if name.startswith('Legendas.tv'):
continue
# discard hidden files
if os.path.split(name)[-1].startswith('.'):
continue
# discard non-subtitle files
if not name.lower().endswith(SUBTITLE_EXTENSIONS):
continue
releases.append(name)
# cache the releases
region.set(cache_key, releases)
# iterate over releases
for r in releases:
subtitle = self.subtitle_class(language, t['type'], t['title'], t.get('year'), t.get('imdb_id'),
t.get('season'), a, r)
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
season = episode = None
if isinstance(video, Episode):
titles = [video.series] + video.alternative_series
season = video.season
episode = video.episode
else:
titles = [video.title] + video.alternative_titles
for title in titles:
subtitles = [s for l in languages for s in
self.query(l, title, season=season, episode=episode, year=video.year, imdb_id=video.imdb_id)]
if subtitles:
return subtitles
return []
def download_subtitle(self, subtitle):
super(LegendasTVProvider, self).download_subtitle(subtitle)
subtitle.archive.content = None
def get_archives(self, title_id, language_code, title_type, season, episode):
return super(LegendasTVProvider, self).get_archives.original(self, title_id, language_code, title_type,
season, episode)
| dantebarba/docker-media-server | plex/Sub-Zero.bundle/Contents/Libraries/Shared/subliminal_patch/providers/legendastv.py | Python | gpl-3.0 | 10,819 | 0.003697 |
#
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import logging
import time
import six
from vdsm.storage import constants as sc
from vdsm.storage import exception
# SIZE property was deprecated in metadata v5, but we still need this key to
# read and write legacy metadata. To make sure no other code use it and it's
# used only by metadata code, move it here and make it private.
_SIZE = "SIZE"
ATTRIBUTES = {
sc.DOMAIN: ("domain", str),
sc.IMAGE: ("image", str),
sc.PUUID: ("parent", str),
sc.CAPACITY: ("capacity", int),
sc.FORMAT: ("format", str),
sc.TYPE: ("type", str),
sc.VOLTYPE: ("voltype", str),
sc.DISKTYPE: ("disktype", str),
sc.DESCRIPTION: ("description", str),
sc.LEGALITY: ("legality", str),
sc.CTIME: ("ctime", int),
sc.GENERATION: ("generation", int),
sc.SEQUENCE: ("sequence", int),
}
def _lines_to_dict(lines):
md = {}
errors = []
for line in lines:
# Skip a line if there is invalid value.
try:
line = line.decode("utf-8")
except UnicodeDecodeError as e:
errors.append("Invalid line '{}': {}".format(line, e))
continue
if line.startswith("EOF"):
break
if '=' not in line:
continue
key, value = line.split('=', 1)
md[key.strip()] = value.strip()
return md, errors
def parse(lines):
md, errors = _lines_to_dict(lines)
metadata = {}
if "NONE" in md:
# Before 4.20.34-1 (ovirt 4.2.5) volume metadata could be
# cleared by writing invalid metadata when deleting a volume.
# See https://bugzilla.redhat.com/1574631.
errors.append(str(exception.MetadataCleared()))
return {}, errors
# We work internally in bytes, even if old format store
# value in blocks, we will read SIZE instead of CAPACITY
# from non-converted volumes and use it
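    # e.g. a legacy volume with SIZE=2048 (512-byte blocks) parses to
    # CAPACITY=1048576 bytes.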
if _SIZE in md and sc.CAPACITY not in md:
try:
md[sc.CAPACITY] = int(md[_SIZE]) * sc.BLOCK_SIZE_512
except ValueError as e:
errors.append(str(e))
if sc.GENERATION not in md:
md[sc.GENERATION] = sc.DEFAULT_GENERATION
if sc.SEQUENCE not in md:
md[sc.SEQUENCE] = sc.DEFAULT_SEQUENCE
for key, (name, validate) in ATTRIBUTES.items():
try:
# FIXME: remove pylint skip when bug fixed:
# https://github.com/PyCQA/pylint/issues/5113
metadata[name] = validate(md[key]) # pylint: disable=not-callable
except KeyError:
errors.append("Required key '{}' is missing.".format(name))
except ValueError as e:
errors.append("Invalid '{}' value: {}".format(name, str(e)))
return metadata, errors
def dump(lines):
md, errors = parse(lines)
if errors:
logging.warning(
"Invalid metadata found errors=%s", errors)
md["status"] = sc.VOL_STATUS_INVALID
else:
md["status"] = sc.VOL_STATUS_OK
# Do not include domain in dump output.
md.pop("domain", None)
return md
class VolumeMetadata(object):
log = logging.getLogger('storage.volumemetadata')
def __init__(self, domain, image, parent, capacity, format, type, voltype,
disktype, description="", legality=sc.ILLEGAL_VOL, ctime=None,
generation=sc.DEFAULT_GENERATION,
sequence=sc.DEFAULT_SEQUENCE):
# Storage domain UUID
self.domain = domain
# Image UUID
self.image = image
# UUID of the parent volume or BLANK_UUID
self.parent = parent
# Volume capacity in bytes
self.capacity = capacity
# Format (RAW or COW)
self.format = format
# Allocation policy (PREALLOCATED or SPARSE)
self.type = type
# Relationship to other volumes (LEAF, INTERNAL or SHARED)
self.voltype = voltype
# Intended usage of this volume (unused)
self.disktype = disktype
# Free-form description and may be used to store extra metadata
self.description = description
# Indicates if the volume contents should be considered valid
self.legality = legality
# Volume creation time (in seconds since the epoch)
self.ctime = int(time.time()) if ctime is None else ctime
# Generation increments each time certain operations complete
self.generation = generation
# Sequence number of the volume, increased every time a new volume is
# created in an image.
self.sequence = sequence
@classmethod
def from_lines(cls, lines):
'''
Instantiates a VolumeMetadata object from storage read bytes.
Args:
lines: list of key=value entries given as bytes read from storage
metadata section. "EOF" entry terminates parsing.
'''
metadata, errors = parse(lines)
if errors:
raise exception.InvalidMetadata(
"lines={} errors={}".format(lines, errors))
return cls(**metadata)
@property
def description(self):
return self._description
@description.setter
def description(self, desc):
self._description = self.validate_description(desc)
@property
def capacity(self):
return self._capacity
@capacity.setter
def capacity(self, value):
self._capacity = self._validate_integer("capacity", value)
@property
def ctime(self):
return self._ctime
@ctime.setter
def ctime(self, value):
self._ctime = self._validate_integer("ctime", value)
@property
def generation(self):
return self._generation
@generation.setter
def generation(self, value):
self._generation = self._validate_integer("generation", value)
@property
def sequence(self):
return self._sequence
@sequence.setter
def sequence(self, value):
self._sequence = self._validate_integer("sequence", value)
@classmethod
def _validate_integer(cls, property, value):
if not isinstance(value, six.integer_types):
raise AssertionError(
"Invalid value for metadata property {!r}: {!r}".format(
property, value))
return value
@classmethod
def validate_description(cls, desc):
desc = str(desc)
# We cannot fail when the description is too long, since we must
# support older engine that may send such values, or old disks
# with long description.
if len(desc) > sc.DESCRIPTION_SIZE:
cls.log.warning("Description is too long, truncating to %d bytes",
sc.DESCRIPTION_SIZE)
desc = desc[:sc.DESCRIPTION_SIZE]
return desc
def storage_format(self, domain_version, **overrides):
"""
Format metadata parameters into storage format bytes.
VolumeMetadata is quite restrictive and does not allow
you to make an invalid metadata, but sometimes, for example
for a format conversion, you need some additional fields to
be written to the storage. Those fields can be added using
overrides dict.
Raises MetadataOverflowError if formatted metadata is too long.
"""
info = {
sc.CTIME: str(self.ctime),
sc.DESCRIPTION: self.description,
sc.DISKTYPE: self.disktype,
sc.DOMAIN: self.domain,
sc.FORMAT: self.format,
sc.GENERATION: self.generation,
sc.IMAGE: self.image,
sc.LEGALITY: self.legality,
sc.PUUID: self.parent,
sc.TYPE: self.type,
sc.VOLTYPE: self.voltype,
}
if domain_version < 5:
# Always zero on pre v5 domains
# We need to keep MTIME available on pre v5
# domains, as other code is expecting that
# field to exists and will fail without it.
info[sc.MTIME] = 0
# Pre v5 domains should have SIZE in blocks
# instead of CAPACITY in bytes
info[_SIZE] = self.capacity // sc.BLOCK_SIZE_512
else:
info[sc.CAPACITY] = self.capacity
info[sc.SEQUENCE] = self.sequence
info.update(overrides)
keys = sorted(info.keys())
lines = ["%s=%s\n" % (key, info[key]) for key in keys]
lines.append("EOF\n")
data = "".join(lines).encode("utf-8")
if len(data) > sc.METADATA_SIZE:
raise exception.MetadataOverflowError(data)
return data
    # The three defs below allow us to imitate a dictionary.
    # So instead of providing a method to return a dictionary
    # with values, we return self and mimic dict behaviour.
    # In the fieldmap we keep the mapping between metadata
    # field names and our internal field names.
#
# TODO: All dict specific code below should be removed, when rest of VDSM
# will be refactored, to use VolumeMetadata properties, instead of dict
_fieldmap = {
sc.FORMAT: 'format',
sc.TYPE: 'type',
sc.VOLTYPE: 'voltype',
sc.DISKTYPE: 'disktype',
sc.CAPACITY: 'capacity',
sc.CTIME: 'ctime',
sc.DOMAIN: 'domain',
sc.IMAGE: 'image',
sc.DESCRIPTION: 'description',
sc.PUUID: 'parent',
sc.LEGALITY: 'legality',
sc.GENERATION: 'generation',
sc.SEQUENCE: "sequence",
}
def __getitem__(self, item):
try:
value = getattr(self, self._fieldmap[item])
except AttributeError:
raise KeyError(item)
# Some fields needs to be converted to string
if item in (sc.CAPACITY, sc.CTIME):
value = str(value)
return value
def __setitem__(self, item, value):
setattr(self, self._fieldmap[item], value)
def get(self, item, default=None):
try:
return self[item]
except KeyError:
return default
def dump(self):
return {
"capacity": self.capacity,
"ctime": self.ctime,
"description": self.description,
"disktype": self.disktype,
"format": self.format,
"generation": self.generation,
"sequence": self.sequence,
"image": self.image,
"legality": self.legality,
"parent": self.parent,
"type": self.type,
"voltype": self.voltype,
}
| oVirt/vdsm | lib/vdsm/storage/volumemetadata.py | Python | gpl-2.0 | 11,350 | 0 |
# This function runs a .bat file that job handles multiple GridLAB-D files
import subprocess
#C:\Projects\GridLAB-D_Builds\trunk\test\input\batch test\13_node_fault2.glm
def create_batch_file(glm_folder,batch_name):
    batch_file = open(batch_name, 'w')
batch_file.write('gridlabd.exe -T 0 --job\n')
#batch_file.write('pause\n')
batch_file.close()
return None
def run_batch_file(glm_folder,batch_name):
    p = subprocess.Popen(batch_name, cwd=glm_folder)
code = p.wait()
#print(code)
return None
def main():
#tests here
glm_folder = 'C:\\Projects\\GridLAB-D_Builds\\trunk\\test\\input\\batch_test'
batch_name = 'C:\\Projects\\GridLAB-D_Builds\\trunk\\test\\input\\batch_test\\calibration_batch_file.bat'
create_batch_file(glm_folder,batch_name)
    run_batch_file(glm_folder, batch_name)
if __name__ == '__main__':
main()
| NREL/glmgen | glmgen/run_gridlabd_batch_file.py | Python | gpl-2.0 | 850 | 0.04 |
# -*- coding: utf-8 -*-
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://ete.cgenomics.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2011).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. BMC
# Bioinformatics 2010, 11:24. doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit are available in the documentation.
#
# More info at http://ete.cgenomics.org
#
#
# #END_LICENSE#############################################################
__VERSION__="ete2-2.2rev1056"
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'open_newick.ui'
#
# Created: Tue Jan 10 15:56:56 2012
# by: PyQt4 UI code generator 4.7.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_OpenNewick(object):
def setupUi(self, OpenNewick):
OpenNewick.setObjectName("OpenNewick")
OpenNewick.resize(569, 353)
self.comboBox = QtGui.QComboBox(OpenNewick)
self.comboBox.setGeometry(QtCore.QRect(460, 300, 81, 23))
self.comboBox.setObjectName("comboBox")
self.widget = QtGui.QWidget(OpenNewick)
self.widget.setGeometry(QtCore.QRect(30, 10, 371, 321))
self.widget.setObjectName("widget")
self.retranslateUi(OpenNewick)
QtCore.QMetaObject.connectSlotsByName(OpenNewick)
def retranslateUi(self, OpenNewick):
OpenNewick.setWindowTitle(QtGui.QApplication.translate("OpenNewick", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
| csc8630Spring2014/Clusterizer | ete2/treeview/_open_newick.py | Python | mit | 2,493 | 0.006418 |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DocumentHtmlCollapsibleDisplaySettings(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'arrow_closed': 'str',
'arrow_color': 'str',
'arrow_location': 'str',
'arrow_open': 'str',
'arrow_size': 'str',
'arrow_style': 'str',
'container_style': 'str',
'label_style': 'str',
'only_arrow_is_clickable': 'bool',
'outer_label_and_arrow_style': 'str'
}
attribute_map = {
'arrow_closed': 'arrowClosed',
'arrow_color': 'arrowColor',
'arrow_location': 'arrowLocation',
'arrow_open': 'arrowOpen',
'arrow_size': 'arrowSize',
'arrow_style': 'arrowStyle',
'container_style': 'containerStyle',
'label_style': 'labelStyle',
'only_arrow_is_clickable': 'onlyArrowIsClickable',
'outer_label_and_arrow_style': 'outerLabelAndArrowStyle'
}
def __init__(self, arrow_closed=None, arrow_color=None, arrow_location=None, arrow_open=None, arrow_size=None, arrow_style=None, container_style=None, label_style=None, only_arrow_is_clickable=None, outer_label_and_arrow_style=None): # noqa: E501
"""DocumentHtmlCollapsibleDisplaySettings - a model defined in Swagger""" # noqa: E501
self._arrow_closed = None
self._arrow_color = None
self._arrow_location = None
self._arrow_open = None
self._arrow_size = None
self._arrow_style = None
self._container_style = None
self._label_style = None
self._only_arrow_is_clickable = None
self._outer_label_and_arrow_style = None
self.discriminator = None
if arrow_closed is not None:
self.arrow_closed = arrow_closed
if arrow_color is not None:
self.arrow_color = arrow_color
if arrow_location is not None:
self.arrow_location = arrow_location
if arrow_open is not None:
self.arrow_open = arrow_open
if arrow_size is not None:
self.arrow_size = arrow_size
if arrow_style is not None:
self.arrow_style = arrow_style
if container_style is not None:
self.container_style = container_style
if label_style is not None:
self.label_style = label_style
if only_arrow_is_clickable is not None:
self.only_arrow_is_clickable = only_arrow_is_clickable
if outer_label_and_arrow_style is not None:
self.outer_label_and_arrow_style = outer_label_and_arrow_style
@property
def arrow_closed(self):
"""Gets the arrow_closed of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_closed of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_closed
@arrow_closed.setter
def arrow_closed(self, arrow_closed):
"""Sets the arrow_closed of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_closed: The arrow_closed of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_closed = arrow_closed
@property
def arrow_color(self):
"""Gets the arrow_color of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_color of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_color
@arrow_color.setter
def arrow_color(self, arrow_color):
"""Sets the arrow_color of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_color: The arrow_color of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_color = arrow_color
@property
def arrow_location(self):
"""Gets the arrow_location of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_location of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_location
@arrow_location.setter
def arrow_location(self, arrow_location):
"""Sets the arrow_location of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_location: The arrow_location of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_location = arrow_location
@property
def arrow_open(self):
"""Gets the arrow_open of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_open of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_open
@arrow_open.setter
def arrow_open(self, arrow_open):
"""Sets the arrow_open of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_open: The arrow_open of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_open = arrow_open
@property
def arrow_size(self):
"""Gets the arrow_size of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_size of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_size
@arrow_size.setter
def arrow_size(self, arrow_size):
"""Sets the arrow_size of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_size: The arrow_size of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_size = arrow_size
@property
def arrow_style(self):
"""Gets the arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._arrow_style
@arrow_style.setter
def arrow_style(self, arrow_style):
"""Sets the arrow_style of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param arrow_style: The arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._arrow_style = arrow_style
@property
def container_style(self):
"""Gets the container_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The container_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._container_style
@container_style.setter
def container_style(self, container_style):
"""Sets the container_style of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param container_style: The container_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._container_style = container_style
@property
def label_style(self):
"""Gets the label_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The label_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._label_style
@label_style.setter
def label_style(self, label_style):
"""Sets the label_style of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param label_style: The label_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._label_style = label_style
@property
def only_arrow_is_clickable(self):
"""Gets the only_arrow_is_clickable of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The only_arrow_is_clickable of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: bool
"""
return self._only_arrow_is_clickable
@only_arrow_is_clickable.setter
def only_arrow_is_clickable(self, only_arrow_is_clickable):
"""Sets the only_arrow_is_clickable of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param only_arrow_is_clickable: The only_arrow_is_clickable of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: bool
"""
self._only_arrow_is_clickable = only_arrow_is_clickable
@property
def outer_label_and_arrow_style(self):
"""Gets the outer_label_and_arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
# noqa: E501
:return: The outer_label_and_arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:rtype: str
"""
return self._outer_label_and_arrow_style
@outer_label_and_arrow_style.setter
def outer_label_and_arrow_style(self, outer_label_and_arrow_style):
"""Sets the outer_label_and_arrow_style of this DocumentHtmlCollapsibleDisplaySettings.
# noqa: E501
:param outer_label_and_arrow_style: The outer_label_and_arrow_style of this DocumentHtmlCollapsibleDisplaySettings. # noqa: E501
:type: str
"""
self._outer_label_and_arrow_style = outer_label_and_arrow_style
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DocumentHtmlCollapsibleDisplaySettings, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DocumentHtmlCollapsibleDisplaySettings):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
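# Usage sketch (added; the field values are illustrative placeholders):
#
#   settings = DocumentHtmlCollapsibleDisplaySettings(
#       arrow_color='#333333', arrow_location='left', arrow_size='small')
#   settings.to_dict()  # dict keyed by the snake_case attribute names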
| docusign/docusign-python-client | docusign_esign/models/document_html_collapsible_display_settings.py | Python | mit | 11,943 | 0.000084 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import os
from libmozdata import utils as lmdutils
from auto_nag import utils
class Cache(object):
def __init__(self, name, max_days, add_once=True):
super(Cache, self).__init__()
self.name = name
self.max_days = max_days
self.add_once = add_once
self.added = False
self.dryrun = True
self.data = None
def set_dry_run(self, dryrun):
self.dryrun = dryrun or self.max_days < 1
def get_path(self):
cache_path = utils.get_config("common", "cache")
if not os.path.exists(cache_path):
os.mkdir(cache_path)
return "{}/{}.json".format(cache_path, self.name)
def get_data(self):
if self.data is None:
path = self.get_path()
self.data = {}
if os.path.exists(path):
with open(path, "r") as In:
data = json.load(In)
for bugid, date in data.items():
delta = lmdutils.get_date_ymd("today") - lmdutils.get_date_ymd(
date
)
if delta.days < self.max_days:
self.data[str(bugid)] = date
return self.data
def add(self, bugids):
if self.dryrun or (self.add_once and self.added):
return
data = self.get_data()
today = lmdutils.get_today()
for bugid in bugids:
data[str(bugid)] = today
with open(self.get_path(), "w") as Out:
json.dump(data, Out)
self.added = True
def __contains__(self, key):
return not self.dryrun and str(key) in self.get_data()
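# Usage sketch (added; the tool name and retention window are hypothetical):
#
#   cache = Cache('my_tool', max_days=7)
#   cache.set_dry_run(False)
#   if 12345 not in cache:
#       ...                  # act on bug 12345
#       cache.add([12345])   # remembered for up to max_days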
| mozilla/relman-auto-nag | auto_nag/cache.py | Python | bsd-3-clause | 1,902 | 0.000526 |
"""
Provides functionality for mailboxes.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mailbox/
"""
import asyncio
import logging
from contextlib import suppress
from datetime import timedelta
import async_timeout
from aiohttp import web
from aiohttp.web_exceptions import HTTPNotFound
from homeassistant.core import callback
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import Entity
from homeassistant.components.http import HomeAssistantView
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_prepare_setup_platform
DEPENDENCIES = ['http']
DOMAIN = 'mailbox'
EVENT = 'mailbox_updated'
CONTENT_TYPE_MPEG = 'audio/mpeg'
SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
@asyncio.coroutine
def async_setup(hass, config):
"""Track states and offer events for mailboxes."""
mailboxes = []
hass.components.frontend.register_built_in_panel(
'mailbox', 'Mailbox', 'mdi:mailbox')
hass.http.register_view(MailboxPlatformsView(mailboxes))
hass.http.register_view(MailboxMessageView(mailboxes))
hass.http.register_view(MailboxMediaView(mailboxes))
hass.http.register_view(MailboxDeleteView(mailboxes))
@asyncio.coroutine
def async_setup_platform(p_type, p_config=None, discovery_info=None):
"""Set up a mailbox platform."""
if p_config is None:
p_config = {}
if discovery_info is None:
discovery_info = {}
platform = yield from async_prepare_setup_platform(
hass, config, DOMAIN, p_type)
if platform is None:
_LOGGER.error("Unknown mailbox platform specified")
return
_LOGGER.info("Setting up %s.%s", DOMAIN, p_type)
mailbox = None
try:
if hasattr(platform, 'async_get_handler'):
mailbox = yield from \
platform.async_get_handler(hass, p_config, discovery_info)
elif hasattr(platform, 'get_handler'):
mailbox = yield from hass.async_add_job(
platform.get_handler, hass, p_config, discovery_info)
else:
raise HomeAssistantError("Invalid mailbox platform.")
if mailbox is None:
_LOGGER.error(
"Failed to initialize mailbox platform %s", p_type)
return
except Exception: # pylint: disable=broad-except
_LOGGER.exception('Error setting up platform %s', p_type)
return
mailboxes.append(mailbox)
mailbox_entity = MailboxEntity(hass, mailbox)
component = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
yield from component.async_add_entity(mailbox_entity)
setup_tasks = [async_setup_platform(p_type, p_config) for p_type, p_config
in config_per_platform(config, DOMAIN)]
if setup_tasks:
yield from asyncio.wait(setup_tasks, loop=hass.loop)
@asyncio.coroutine
def async_platform_discovered(platform, info):
"""Handle for discovered platform."""
yield from async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
return True
class MailboxEntity(Entity):
"""Entity for each mailbox platform."""
def __init__(self, hass, mailbox):
"""Initialize mailbox entity."""
self.mailbox = mailbox
self.hass = hass
self.message_count = 0
@callback
def _mailbox_updated(event):
self.hass.async_add_job(self.async_update_ha_state(True))
hass.bus.async_listen(EVENT, _mailbox_updated)
@property
def state(self):
"""Return the state of the binary sensor."""
return str(self.message_count)
@property
def name(self):
"""Return the name of the entity."""
return self.mailbox.name
@asyncio.coroutine
def async_update(self):
"""Retrieve messages from platform."""
messages = yield from self.mailbox.async_get_messages()
self.message_count = len(messages)
class Mailbox(object):
"""Represent an mailbox device."""
def __init__(self, hass, name):
"""Initialize mailbox object."""
self.hass = hass
self.name = name
def async_update(self):
"""Send event notification of updated mailbox."""
self.hass.bus.async_fire(EVENT)
@property
def media_type(self):
"""Return the supported media type."""
raise NotImplementedError()
@asyncio.coroutine
def async_get_media(self, msgid):
"""Return the media blob for the msgid."""
raise NotImplementedError()
@asyncio.coroutine
def async_get_messages(self):
"""Return a list of the current messages."""
raise NotImplementedError()
def async_delete(self, msgid):
"""Delete the specified messages."""
raise NotImplementedError()
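# Sketch of a concrete platform (illustrative only, not a shipped platform);
# an implementation overrides the abstract members above, e.g.:
#
#   class DemoMailbox(Mailbox):
#       @property
#       def media_type(self):
#           return CONTENT_TYPE_MPEG
#
#       @asyncio.coroutine
#       def async_get_messages(self):
#           return []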
class StreamError(Exception):
"""Media streaming exception."""
pass
class MailboxView(HomeAssistantView):
"""Base mailbox view."""
def __init__(self, mailboxes):
"""Initialize a basic mailbox view."""
self.mailboxes = mailboxes
def get_mailbox(self, platform):
"""Retrieve the specified mailbox."""
for mailbox in self.mailboxes:
if mailbox.name == platform:
return mailbox
raise HTTPNotFound
class MailboxPlatformsView(MailboxView):
"""View to return the list of mailbox platforms."""
url = "/api/mailbox/platforms"
name = "api:mailbox:platforms"
@asyncio.coroutine
def get(self, request):
"""Retrieve list of platforms."""
platforms = []
for mailbox in self.mailboxes:
platforms.append(mailbox.name)
return self.json(platforms)
class MailboxMessageView(MailboxView):
"""View to return the list of messages."""
url = "/api/mailbox/messages/{platform}"
name = "api:mailbox:messages"
@asyncio.coroutine
def get(self, request, platform):
"""Retrieve messages."""
mailbox = self.get_mailbox(platform)
messages = yield from mailbox.async_get_messages()
return self.json(messages)
class MailboxDeleteView(MailboxView):
"""View to delete selected messages."""
url = "/api/mailbox/delete/{platform}/{msgid}"
name = "api:mailbox:delete"
@asyncio.coroutine
def delete(self, request, platform, msgid):
"""Delete items."""
mailbox = self.get_mailbox(platform)
mailbox.async_delete(msgid)
class MailboxMediaView(MailboxView):
"""View to return a media file."""
url = r"/api/mailbox/media/{platform}/{msgid}"
name = "api:asteriskmbox:media"
@asyncio.coroutine
def get(self, request, platform, msgid):
"""Retrieve media."""
mailbox = self.get_mailbox(platform)
hass = request.app['hass']
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
with async_timeout.timeout(10, loop=hass.loop):
try:
stream = yield from mailbox.async_get_media(msgid)
except StreamError as err:
error_msg = "Error getting media: %s" % (err)
_LOGGER.error(error_msg)
return web.Response(status=500)
if stream:
return web.Response(body=stream,
content_type=mailbox.media_type)
return web.Response(status=500)
| MungoRae/home-assistant | homeassistant/components/mailbox/__init__.py | Python | apache-2.0 | 7,809 | 0 |
#!/usr/bin/env python
'''Batch several changes to Pocket'''
__author__ = 'Felipe Borges'
import sys
sys.path.append("..")
import getopt
import pocket
USAGE = '''Usage: batch_actions [options] array_of_actions
This script adds an Item to Pocket.
Options:
-h --help: print this help
--consumer_key : the Pocket API consumer key
--access_token : the user's Pocket Access Token
'''
def print_usage_and_exit():
print USAGE
sys.exit(2)
def main():
try:
shortflags = 'h'
longflags = ['help', 'consumer_key=', 'access_token=']
opts, args = getopt.gnu_getopt(sys.argv[1:], shortflags, longflags)
except getopt.GetoptError:
print_usage_and_exit()
consumer_key = None
access_token = None
for o, a in opts:
if o in ('-h', '--help'):
print_usage_and_exit()
        if o == '--consumer_key':
consumer_key = a
        if o == '--access_token':
access_token = a
actions = ' '.join(args)
if not actions or not consumer_key or not access_token:
print_usage_and_exit()
api = pocket.Api(consumer_key = consumer_key, access_token = access_token)
try:
actionsResult = api.send(actions)
for result in actionsResult:
print (result),
except Exception, e:
print e
sys.exit(2)
if __name__ == "__main__":
main() | felipecorrea/python-pocket | examples/batch_actions.py | Python | apache-2.0 | 1,253 | 0.03352 |
import binascii
import socket
import struct
import sys
def ten_digit_to_comma_format(badge):
"""Returns the comma-format RFID number (without the comma) from the
10-digit RFID number.
Explanation:
*On an EM4100/4001 spec RFID card, there will generally be two sets of
numbers like this: 0015362878 234,27454
*The part of the number before the comma represents the first hex byte of
the "10 digit" number, and the second part is the last 2 hex bytes of the
"10 digit" card number.
*15362878 = EA6B3E
*Splitting EA and 6B3E and converting them to decimal numbers will give
you 234 and 27454 (the number with the comma on the card).
*The comma is excluded in the return value because the controller does not
need the comma.
:param badge: 10-digit RFID card number, must be integer
"""
# only the last 8 digits are the ID
# the 8 digits correspond to only 6 hex values, so the max is FFFFFF
if badge > 16777215:
raise Exception("Error: Invalid RFID Number")
formatted_id = str("{0:x}".format(badge)).zfill(6) # converts to hex
# splits the hex at first two and last 4, converts to dec,
# then combines into string
id_section_1 = str(int(formatted_id[:2], 16)).zfill(3)
id_section_2 = str(int(formatted_id[-4:], 16)).zfill(5)
return int(id_section_1 + id_section_2)
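# Worked example (matches the docstring above):
#   ten_digit_to_comma_format(15362878) -> 23427454   # 0xEA6B3E -> 234 / 27454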
def comma_format_to_ten_digit(badge):
"""Returns the 10-digit number from the comma-format RFID number (without
the comma)
Explanation:
*On an EM4100/4001 spec RFID card, there will generally be two sets of
numbers like this: 0015362878 234,27454
*This function turns the number with the comma (but excluding the comma)
into the 10-digit number which is generally next to it.
*The part of the number before the comma represents the first hex byte of
the "10 digit" number, and the second part is the last 2 hex bytes of the
"10 digit" card number.
**234 = EA
**27454 = 6B3E
**Combining EA and 6B3E and converting it to a decimal number will give you
15362878 (the first 10-digit number on the card).
:param badge: comma-format RFID card number, must be integer with the comma
removed
"""
# the 8 digits correspond to a set of two and four hex values,
# so the max is the decimal version of FF and FFFF concatenated
if badge > 25565535:
raise Exception("Error: Invalid RFID Number")
badge = str(badge).zfill(8)
# splits dec at last 5 digits and everything except last 5,
# converts each section to hex, then combines
id_section_1 = "{0:x}".format(int(badge[:-5])).zfill(2)
id_section_2 = "{0:x}".format(int(badge[-5:])).zfill(4)
formatted_id = id_section_1 + id_section_2
# convert combined hex string to int
return int(formatted_id, 16)
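# Worked example (the inverse; round-trips the same card as above):
#   comma_format_to_ten_digit(23427454) -> 15362878   # 234 / 27454 -> 0xEA6B3E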
class RFIDClient(object):
# part of the byte string replaced by the CRC, not required to be valid
source_port = "0000"
# these bytes form the packet that starts a transaction with the RFID controller
start_transaction = (
b"\r\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
)
def __init__(self, ip, serial):
"""
:param ip: IP address of the controller.
:param serial: Serial number written on the controller, also
"Device NO" on the web interface's configuration page.
"""
self.check_valid_ipv4_address(ip)
if not isinstance(serial, int):
raise TypeError("Serial must be set to an integer")
# pack controller serial as little endian integer
self.controller_serial = self.little_endian_hex(serial)
self.s = self.connect(ip)
@staticmethod
def little_endian_hex(val):
"""Convert integer to little-endian hex string."""
endian = struct.pack("<I", val)
return binascii.hexlify(endian).decode("utf8")
@staticmethod
def check_valid_ipv4_address(ip):
try:
socket.inet_aton(ip)
except socket.error:
raise TypeError("IP Address is not valid")
@staticmethod
def connect(ip, timeout=5, port=60000):
"""
:param ip: IP address of the controller
:param timeout: settimeout value for the sockets connection
:param port: the destination port of the socket, should always be 60000
"""
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
s.connect((ip, port))
s.settimeout(timeout)
except Exception as e:
print(e)
sys.exit(1)
return s
@staticmethod
def crc_16_ibm(data):
"""Returns hex byte string with CRC values added to positions 4 through 8.
This CRC value is required by the controller or it will not process the
request.
:param data: original hex string which needs the CRC values added to it
"""
hex_data = bytearray.fromhex(data)
byte_list = list(hex_data)
num1 = 0
for i in range(0, len(byte_list)):
num2 = byte_list[i]
if i == 2 or i == 3:
num2 = 0
num1 ^= num2
for j in range(0, 8):
if num1 & 1 > 0:
num1 = (num1 >> 1) ^ 40961
else:
num1 >>= 1
code = num1 & 65535 # integer returned from CRC function
# change hex string to list to support assignment
data_list = list(data)
# switch order to little endian and return unsigned short, then
# replace characters in list with the CRC values
endian = struct.pack("<H", code)
data_list[4:8] = binascii.hexlify(endian).decode("utf8")
return bytearray.fromhex("".join(data_list))
def add_user(self, badge, doors):
if not isinstance(badge, int):
raise TypeError("RFID number must be set to an integer")
if not isinstance(doors, list):
raise Exception("doors must be set to a list")
# create a list of "01"'s (enabled) and "00"'s (disabled)
# then later join to create "01000000" (which is only door 1 enabled)
doors_enabled = ""
for door in [1, 2, 3, 4]:
if door in doors:
doors_enabled += "01"
else:
doors_enabled += "00"
# pack badge number as little endian integer
badge = self.little_endian_hex(badge)
add_packet1 = self.crc_16_ibm(
"2010"
+ self.source_port
+ "2800000000000000"
+ self.controller_serial
+ "00000200ffffffff"
)
self.s.send(self.start_transaction)
self.s.send(add_packet1)
binary_response_1 = self.s.recv(1024)
if binary_response_1[:2] != b" \x11":
msg = "Unexpected Result Received: {}".format(binary_response_1)
raise Exception(msg)
add_packet2 = self.crc_16_ibm(
"2320"
+ self.source_port
+ "2900000000000000"
+ self.controller_serial
+ "00000200"
+ badge
+ "00000000a04e4605"
+ "87"
+ "1c9f3b"
+ doors_enabled
+ "00000000"
)
self.s.send(self.start_transaction)
self.s.send(add_packet2)
binary_response_2 = self.s.recv(1024)
if binary_response_2[:2] != b"#!":
msg = "Unexpected Result Received: {}".format(binary_response_2)
raise Exception(msg)
def remove_user(self, badge):
if not isinstance(badge, int):
raise TypeError("RFID number must be set to an integer")
# pack badge number as little endian integer
badge = self.little_endian_hex(badge)
remove_packet = self.crc_16_ibm(
"2320"
+ self.source_port
+ "2200000000000000"
+ self.controller_serial
+ "00000200"
+ badge
+ "00000000204e460521149f3b0000000000000000"
)
self.s.send(self.start_transaction)
self.s.send(remove_packet)
binary_response = self.s.recv(1024)
if binary_response[:2] != b"#!":
msg = "Unexpected Result Received: {}".format(binary_response)
raise Exception(msg)
def open_door(self, door_number):
if not isinstance(door_number, int):
raise TypeError("RFID number must be set to an integer")
if not (1 <= door_number <= 4):
raise Exception("door_number must be 1 to 4")
door_number = str(door_number - 1).zfill(2)
open_door_packet = self.crc_16_ibm(
"2040"
+ self.source_port
+ "0500000000000000"
+ self.controller_serial
+ "0000020001000000ffffffffffffffff"
+ door_number
+ "000000"
)
self.s.send(self.start_transaction)
self.s.send(open_door_packet)
binary_response = self.s.recv(1024)
if binary_response[:2] != b" A":
msg = "Unexpected Result Received: {}".format(binary_response)
raise Exception(msg)
def __del__(self):
"""Closes the socket connection."""
if hasattr(self, "s"):
self.s.close()
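# Usage sketch (added; IP, serial and badge values are hypothetical):
#
#   client = RFIDClient('192.168.1.20', 123456)
#   client.add_user(23427454, [1, 3])   # comma-format badge, doors 1 and 3
#   client.open_door(1)
#   client.remove_user(23427454)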
| pawl/Chinese-RFID-Access-Control-Library | rfid.py | Python | mit | 9,417 | 0.000212 |
"""
rohmu - google cloud object store interface
Copyright (c) 2016 Ohmu Ltd
See LICENSE for details
"""
# pylint: disable=import-error, no-name-in-module
# NOTE: this import is not needed per-se, but it's imported here first to point the
# user to the most important possible missing dependency
import googleapiclient # noqa pylint: disable=unused-import
from contextlib import contextmanager
from io import BytesIO, FileIO
import dateutil.parser
import httplib2
import json
import logging
import os
import time
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaFileUpload, MediaIoBaseUpload, MediaIoBaseDownload
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client.client import GoogleCredentials
try:
from oauth2client.service_account import ServiceAccountCredentials
except ImportError:
from oauth2client.service_account import _ServiceAccountCredentials as ServiceAccountCredentials
from ..errors import FileNotFoundFromStorageError, InvalidConfigurationError
from .base import BaseTransfer
logging.getLogger("googleapiclient").setLevel(logging.WARNING)
logging.getLogger("oauth2client").setLevel(logging.WARNING)
CHUNK_SIZE = 1024 * 1024 * 5
def unpaginate(domain, initial_op):
"""Iterate thru the request pages until all items have been processed"""
request = initial_op(domain)
while request is not None:
result = request.execute()
for item in result.get("items", []):
yield item
request = domain.list_next(request, result)
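# Illustrative use of unpaginate (mirrors list_path further below); the bucket
# and prefix are placeholders:
#
#   for item in unpaginate(gs.objects(),
#                          lambda o: o.list(bucket="my-bucket", prefix="a/")):
#       print(item["name"])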
def get_credentials(credential_file=None, credentials=None):
if credential_file:
return GoogleCredentials.from_stream(credential_file)
if credentials and credentials["type"] == "service_account":
return ServiceAccountCredentials(
service_account_id=credentials["client_id"],
service_account_email=credentials["client_email"],
private_key_id=credentials["private_key_id"],
private_key_pkcs8_text=credentials["private_key"],
scopes=[])
if credentials and credentials["type"] == "authorized_user":
return GoogleCredentials(
access_token=None,
client_id=credentials["client_id"],
client_secret=credentials["client_secret"],
refresh_token=credentials["refresh_token"],
token_expiry=None,
token_uri=GOOGLE_TOKEN_URI,
user_agent="pghoard")
return GoogleCredentials.get_application_default()
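# Illustrative input for the service_account branch above; all field values
# are placeholders, not real credentials:
#
#   creds = get_credentials(credentials={
#       "type": "service_account",
#       "client_id": "1234567890",
#       "client_email": "svc@my-project.iam.gserviceaccount.com",
#       "private_key_id": "abcdef0123",
#       "private_key": "-----BEGIN PRIVATE KEY-----\n...",
#   })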
class GoogleTransfer(BaseTransfer):
def __init__(self, project_id, bucket_name, credential_file=None, credentials=None, prefix=None):
super().__init__(prefix=prefix)
self.project_id = project_id
self.google_creds = get_credentials(credential_file=credential_file, credentials=credentials)
self.gs = self._init_google_client()
self.gs_object_client = None
self.bucket_name = self.get_or_create_bucket(bucket_name)
self.log.debug("GoogleTransfer initialized")
def _init_google_client(self):
start_time = time.monotonic()
while True:
try:
# sometimes fails: httplib2.ServerNotFoundError: Unable to find the server at www.googleapis.com
return build("storage", "v1", credentials=self.google_creds)
except httplib2.ServerNotFoundError:
if time.monotonic() - start_time > 40.0:
raise
# retry on DNS issues
time.sleep(1.0)
@contextmanager
def _object_client(self, *, not_found=None):
"""(Re-)initialize object client if required, handle 404 errors gracefully and reset the client on
server errors. Server errors have been shown to be caused by invalid state in the client and do not
seem to be resolved without resetting."""
if self.gs_object_client is None:
if self.gs is None:
self.gs = self._init_google_client()
self.gs_object_client = self.gs.objects() # pylint: disable=no-member
try:
yield self.gs_object_client
except HttpError as ex:
if ex.resp["status"] == "404" and not_found is not None:
raise FileNotFoundFromStorageError(not_found)
if ex.resp["status"] >= "500" and ex.resp["status"] <= "599":
self.log.error("Received server error %r, resetting Google API client", ex.resp["status"])
self.gs = None
self.gs_object_client = None
raise
def get_metadata_for_key(self, key):
key = self.format_key_for_backend(key)
with self._object_client(not_found=key) as clob:
return self._metadata_for_key(clob, key)
def _metadata_for_key(self, clob, key):
req = clob.get(bucket=self.bucket_name, object=key)
obj = req.execute()
return obj.get("metadata", {})
def list_path(self, key):
path = self.format_key_for_backend(key, trailing_slash=True)
self.log.debug("Listing path %r", path)
return_list = []
with self._object_client() as clob:
for item in unpaginate(clob, lambda o: o.list(bucket=self.bucket_name, delimiter="/", prefix=path)):
if item["name"].endswith("/"):
continue # skip directory level objects
return_list.append({
"name": self.format_key_from_backend(item["name"]),
"size": int(item["size"]),
"last_modified": dateutil.parser.parse(item["updated"]),
"metadata": item.get("metadata", {}),
})
return return_list
def delete_key(self, key):
key = self.format_key_for_backend(key)
self.log.debug("Deleting key: %r", key)
with self._object_client(not_found=key) as clob:
req = clob.delete(bucket=self.bucket_name, object=key)
req.execute()
def get_contents_to_file(self, key, filepath_to_store_to):
fileobj = FileIO(filepath_to_store_to, mode="wb")
done = False
metadata = {}
try:
metadata = self.get_contents_to_fileobj(key, fileobj)
done = True
finally:
fileobj.close()
if not done:
os.unlink(filepath_to_store_to)
return metadata
def get_contents_to_fileobj(self, key, fileobj_to_store_to):
key = self.format_key_for_backend(key)
self.log.debug("Starting to fetch the contents of: %r to %r", key, fileobj_to_store_to)
with self._object_client(not_found=key) as clob:
req = clob.get_media(bucket=self.bucket_name, object=key)
download = MediaIoBaseDownload(fileobj_to_store_to, req, chunksize=CHUNK_SIZE)
done = False
while not done:
status, done = download.next_chunk()
if status:
self.log.debug("Download of %r: %d%%", key, status.progress() * 100)
return self._metadata_for_key(clob, key)
def get_contents_to_string(self, key):
key = self.format_key_for_backend(key)
self.log.debug("Starting to fetch the contents of: %r", key)
with self._object_client(not_found=key) as clob:
req = clob.get_media(bucket=self.bucket_name, object=key)
data = req.execute()
return data, self._metadata_for_key(clob, key)
def _upload(self, upload_type, local_object, key, metadata, extra_props):
key = self.format_key_for_backend(key)
self.log.debug("Starting to upload %r", key)
upload = upload_type(local_object, mimetype="application/octet-stream",
resumable=True, chunksize=CHUNK_SIZE)
body = {"metadata": metadata}
if extra_props:
body.update(extra_props)
with self._object_client() as clob:
req = clob.insert(bucket=self.bucket_name, name=key, media_body=upload, body=body)
response = None
while response is None:
status, response = req.next_chunk()
if status:
self.log.debug("Upload of %r to %r: %d%%", local_object, key, status.progress() * 100)
def store_file_from_memory(self, key, memstring, metadata=None, extra_props=None): # pylint: disable=arguments-differ
return self._upload(MediaIoBaseUpload, BytesIO(memstring), key,
self.sanitize_metadata(metadata), extra_props)
def store_file_from_disk(self, key, filepath, metadata=None, # pylint: disable=arguments-differ, unused-variable
*, multipart=None, extra_props=None): # pylint: disable=arguments-differ, unused-variable
return self._upload(MediaFileUpload, filepath, key, self.sanitize_metadata(metadata), extra_props)
def get_or_create_bucket(self, bucket_name):
"""Look up the bucket if it already exists and try to create the
bucket in case it doesn't. Note that we can't just always try to
unconditionally create the bucket as Google imposes a strict rate
limit on bucket creation operations, even if it doesn't result in a
new bucket.
Quietly handle the case where the bucket already exists to avoid
race conditions. Note that we'll get a 400 Bad Request response for
invalid bucket names ("Invalid bucket name") as well as for invalid
project ("Invalid argument"), try to handle both gracefully."""
start_time = time.time()
gs_buckets = self.gs.buckets() # pylint: disable=no-member
try:
gs_buckets.get(bucket=bucket_name).execute()
self.log.debug("Bucket: %r already exists, took: %.3fs", bucket_name, time.time() - start_time)
except HttpError as ex:
if ex.resp["status"] == "404":
pass # we need to create it
elif ex.resp["status"] == "403":
raise InvalidConfigurationError("Bucket {0!r} exists but isn't accessible".format(bucket_name))
else:
raise
else:
return bucket_name
try:
req = gs_buckets.insert(project=self.project_id, body={"name": bucket_name})
req.execute()
self.log.debug("Created bucket: %r successfully, took: %.3fs", bucket_name, time.time() - start_time)
except HttpError as ex:
error = json.loads(ex.content.decode("utf-8"))["error"]
if error["message"].startswith("You already own this bucket"):
self.log.debug("Bucket: %r already exists, took: %.3fs", bucket_name, time.time() - start_time)
elif error["message"] == "Invalid argument.":
raise InvalidConfigurationError("Invalid project id {0!r}".format(self.project_id))
elif error["message"].startswith("Invalid bucket name"):
raise InvalidConfigurationError("Invalid bucket name {0!r}".format(bucket_name))
else:
raise
return bucket_name
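# Hypothetical end-to-end usage (project and bucket names are placeholders;
# actually running this requires valid Google credentials):
#
#   transfer = GoogleTransfer(project_id="my-project", bucket_name="my-bucket")
#   transfer.store_file_from_memory("backups/key1", b"hello", metadata={"a": "1"})
#   data, metadata = transfer.get_contents_to_string("backups/key1")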
| saaros/pghoard | pghoard/rohmu/object_storage/google.py | Python | apache-2.0 | 11,184 | 0.002414 |
import os
config = {
# mozconfig file to use, it depends on branch and platform names
"platform": "macosx64",
"update_platform": "Darwin_x86_64-gcc3",
"mozconfig": "%(branch)s/browser/config/mozconfigs/macosx-universal/l10n-mozconfig",
"bootstrap_env": {
"SHELL": '/bin/bash',
"MOZ_OBJDIR": "obj-l10n",
"EN_US_BINARY_URL": "%(en_us_binary_url)s",
"MOZ_UPDATE_CHANNEL": "%(update_channel)s",
"MOZ_SYMBOLS_EXTRA_BUILDID": "macosx64",
"MOZ_PKG_PLATFORM": "mac",
# "IS_NIGHTLY": "yes",
"DIST": "%(abs_objdir)s",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"L10NBASEDIR": "../../l10n",
"MOZ_MAKE_COMPLETE_MAR": "1",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
},
"ssh_key_dir": "~/.ssh",
"log_name": "single_locale",
"objdir": "obj-l10n",
"js_src_dir": "js/src",
"make_dirs": ['config'],
"vcs_share_base": "/builds/hg-shared",
"upload_env_extra": {
"MOZ_PKG_PLATFORM": "mac",
},
# tooltool
'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
'tooltool_script': ["/builds/tooltool.py"],
'tooltool_bootstrap': "setup.sh",
'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/releng.manifest',
# balrog credential file:
'balrog_credentials_file': 'oauth.txt',
# l10n
"ignore_locales": ["en-US"],
"l10n_dir": "l10n",
"locales_file": "%(branch)s/browser/locales/all-locales",
"locales_dir": "browser/locales",
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
"hg_l10n_tag": "default",
"merge_locales": True,
# MAR
"previous_mar_dir": "dist/previous",
"current_mar_dir": "dist/current",
"update_mar_dir": "dist/update", # sure?
"previous_mar_filename": "previous.mar",
"current_work_mar_dir": "current.work",
"package_base_dir": "dist/l10n-stage",
"application_ini": "Contents/Resources/application.ini",
"buildid_section": 'App',
"buildid_option": "BuildID",
"unpack_script": "tools/update-packaging/unwrap_full_update.pl",
"incremental_update_script": "tools/update-packaging/make_incremental_update.sh",
"balrog_release_pusher_script": "scripts/updates/balrog-release-pusher.py",
"update_packaging_dir": "tools/update-packaging",
"local_mar_tool_dir": "dist/host/bin",
"mar": "mar",
"mbsdiff": "mbsdiff",
"current_mar_filename": "firefox-%(version)s.%(locale)s.mac.complete.mar",
"complete_mar": "firefox-%(version)s.en-US.mac.complete.mar",
"localized_mar": "firefox-%(version)s.%(locale)s.mac.complete.mar",
"partial_mar": "firefox-%(version)s.%(locale)s.mac.partial.%(from_buildid)s-%(to_buildid)s.mar",
'installer_file': "firefox-%(version)s.en-US.mac.dmg",
'exes': {
'hgtool.py': os.path.join(
os.getcwd(), 'build', 'tools', 'buildfarm', 'utils', 'hgtool.py'
),
},
}
| armenzg/build-mozharness | configs/single_locale/macosx64.py | Python | mpl-2.0 | 2,942 | 0.00136 |
#!/usr/bin/env python
'''
Lucas-Kanade tracker
====================
Lucas-Kanade sparse optical flow demo. Uses goodFeaturesToTrack
for track initialization and back-tracking for match verification
between frames.
Usage
-----
lk_track.py [<video_source>]
Keys
----
ESC - exit
'''
# Python 2/3 compatibility
from __future__ import print_function
import numpy as np
import cv2
import video
import math
from common import anorm2, draw_str
from mavros_msgs.msg import OpticalFlowRad
import rospy
lk_params = dict( winSize = (15, 15),
maxLevel = 2,
criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))
feature_params = dict( maxCorners = 500,
qualityLevel = 0.3,
minDistance = 7,
blockSize = 7 )
class App:
def __init__(self, video_src):
self.track_len = 10
self.detect_interval = 5
self.tracks = []
self.cam = video.create_capture(video_src)
self.frame_idx = 0
self.DistanceTravelledX = 0
self.DistanceTravelledY = 0
self.pub = rospy.Publisher('OpticalFlowXY', OpticalFlowRad, queue_size=10)
self.msg = OpticalFlowRad()
rospy.init_node('OpticalFlowXYNode')
self.rate = rospy.Rate(1000)
def run(self):
while True:
TickCountBefore = cv2.getTickCount()
            ret, frame = self.cam.read()
            if not ret:
                break
TimePeriod = 1/cv2.getTickFrequency()
frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
dst = cv2.medianBlur(frame_gray, 5)
abc = dst
frame_gray = dst
vis = frame.copy()
VelocityX = 0
VelocityY = 0
if len(self.tracks) > 0:
img0, img1 = self.prev_gray, frame_gray
p0 = np.float32([tr[-1] for tr in self.tracks]).reshape(-1, 1, 2)
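                # Back-track check: flow the features forward (img0 -> img1)
                # and back again; keep only those whose round trip returns
                # within one pixel of the starting point.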
p1, st, err = cv2.calcOpticalFlowPyrLK(img0, img1, p0, None, **lk_params)
p0r, st, err = cv2.calcOpticalFlowPyrLK(img1, img0, p1, None, **lk_params)
d = abs(p0-p0r).reshape(-1, 2).max(-1)
good = d < 1
new_tracks = []
                index = 0
                distance = 10000
for tr, (x, y), good_flag in zip(self.tracks, p1.reshape(-1, 2), good):
if not good_flag:
continue
tr.append((x, y))
if len(tr) > self.track_len:
del tr[0]
new_tracks.append(tr)
cv2.circle(vis, (x, y), 2, (0, 255, 0), -1)
cv2.circle(abc, (x, y), 2, (0, 255, 0), -1)
                # p0/p0r are flat sequences of (x, y) pairs, so point i lives
                # at flat indices 2*i and 2*i + 1; track the feature with the
                # smallest forward-backward error.
                for i in range(len(self.tracks)):
                    localdistance = math.hypot(p0.item(2 * i) - p0r.item(2 * i),
                                               p0.item(2 * i + 1) - p0r.item(2 * i + 1))
                    if localdistance < distance:
                        distance = localdistance
                        index = 2 * i
TickCountAfter = cv2.getTickCount()
TimeElapsed = (TickCountAfter-TickCountBefore)*TimePeriod
draw_str(vis, (20, 110), 'Time Elapsed %f' % TimeElapsed)
draw_str(vis, (20, 130), 'TimePeriod %f' % TimePeriod)
#VelocityX = (Average_X_Velocity_P1 - Average_X_Velocity_P0)/(TimeElapsed *379.05)
#VelocityY = (Average_Y_Velocity_P1 - Average_Y_Velocity_P0)/(TimeElapsed *366.6)
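                # The divisors 379.05 and 366.6 are presumably the camera's
                # focal lengths in pixels (fx, fy), and the *100 factor
                # rescales the result; both constants appear specific to the
                # original hardware setup.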
VelocityX = (p1.item(index) - p0.item(index))*100 /(TimeElapsed *379.05)
VelocityY = (p1.item(index+1) - p0.item(index+1))*100/(TimeElapsed*366.6)
                self.msg.integrated_x = VelocityX
                self.msg.integrated_y = VelocityY
                self.msg.integration_time_us = TimeElapsed * 1000000
self.msg.header.stamp = rospy.Time.now()
self.pub.publish(self.msg)
self.rate.sleep()
self.DistanceTravelledX = self.DistanceTravelledX + (VelocityX*TimeElapsed)
self.DistanceTravelledY = self.DistanceTravelledY + (VelocityY*TimeElapsed)
index1 = " Item 1 x" + str(p1.item(index)) + " " + str(p1.item(index+1))
index2 = " Item 0 x " + str(p0.item(index)) + " " + str(p0.item(index+1))
index3 = " Item 0r x " + str(p0r.item(index)) + " " + str(p0r.item(index+1))
print(index1)
print(index2)
print(index3)
self.tracks = new_tracks
cv2.polylines(vis, [np.int32(tr) for tr in self.tracks], False, (0, 255, 0))
cv2.polylines(abc, [np.int32(tr) for tr in self.tracks], False, (0, 255, 0))
draw_str(vis, (20, 20), 'track count: %d' % len(self.tracks))
draw_str(vis, (20, 50), 'Distance x %f' % self.DistanceTravelledX)
draw_str(vis, (20, 80), 'Distance y %f' % self.DistanceTravelledY)
#draw_str(vis, (20, 50), 'Velocity x %f' % VelocityX)
#draw_str(vis, (20, 80), 'Velocity y: %f' % VelocityY)
if self.frame_idx % self.detect_interval == 0:
mask = np.zeros_like(frame_gray)
mask[:] = 255
for x, y in [np.int32(tr[-1]) for tr in self.tracks]:
cv2.circle(mask, (x, y), 5, 0, -1)
p = cv2.goodFeaturesToTrack(frame_gray, mask = mask, **feature_params)
if p is not None:
for x, y in np.float32(p).reshape(-1, 2):
self.tracks.append([(x, y)])
self.frame_idx += 1
self.prev_gray = frame_gray
cv2.imshow('lk_track', vis)
cv2.imshow('lk_track2', abc)
ch = 0xFF & cv2.waitKey(1)
if ch == 27:
break
def main():
import sys
try:
video_src = sys.argv[1]
    except IndexError:
video_src = 0
print(__doc__)
App(video_src).run()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
| lkumar93/Deep_Learning_Crazyflie | src/deep_learning_crazyflie/src/lk_track.py | Python | mit | 5,660 | 0.030035 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#-------------------------------------------------------------------------------
"""pyzombie HTTP RESTful handler test cases."""
__author__ = ('Lance Finn Helsten',)
__version__ = '1.0.1'
__copyright__ = """Copyright 2009 Lance Finn Helsten (helsten@acm.org)"""
__license__ = """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__docformat__ = "reStructuredText en"
import sys
import os
import io
import re
import random
import threading
import unittest
from time import sleep
import http.client
from pyzombie.Executable import Executable
from pyzombie.Instance import Instance, DELTA_T
from pyzombie.handlers import HandlerInstanceStdout
from MockRequest import MockRequest
from HTTPResponse import HTTPResponse
import TestSourceCopy
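# 4096 characters of random printable ASCII used to exercise the stdout pipe.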
BUFFER = [random.randint(ord(' '), ord('~')) for i in range(4096)]
BUFFER = [chr(i) for i in BUFFER]
BUFFER = ''.join(BUFFER)
class StdinFeeder():
def __call__(self, *args, **kwargs):
test = kwargs["Test"]
test.inst.stdin.write(BUFFER.encode("UTF-8"))
sleep(0.01)
test.inst.stdin.close()
class HandlerInstanceStdoutGetTest(unittest.TestCase):
def setUp(self):
self.ex = Executable.getcached(__name__, mediatype="text/x-python")
self.ex.writeimage(open(TestSourceCopy.__file__, "r"))
self.inst = Instance(self.ex, self.__class__.__name__)
        self.thread = threading.Thread(target=StdinFeeder(), kwargs={"Test": self})
        self.thread.daemon = True
        self.thread.start()
def tearDown(self):
self.thread.join(0.5)
# self.ex.delete()
def makeRequest(self, chunked=False):
req = MockRequest()
req.headers["Accept"] = "spam/eggs; q=1.0, application/json; q=0.5, text/html;q=0.1, text/plain"
hndlr = HandlerInstanceStdout(req, {'execname':__name__, 'instname':self.__class__.__name__})
self.assertEqual(hndlr.executable, self.ex)
urlself = hndlr.serverurl(path="{0}/instances/{1}/stdout".format(__name__, self.__class__.__name__))
hndlr.get()
resp = HTTPResponse(req.wfile.getvalue())
self.assertEqual(resp.protocol, "HTTP/1.1")
self.assertEqual(resp.code, str(http.client.OK))
self.assertEqual(resp.header["Content-Type"], "text/plain;UTF-8")
self.assertEqual(resp.md5, resp.header["ETag"])
return resp
def runTest(self):
resp = self.makeRequest()
self.assertEqual(resp.body, BUFFER)
| lanhel/pyzombie | test/pyzombie/handlers/HandlerInstanceStdoutTestCase.py | Python | apache-2.0 | 2,987 | 0.005022 |
# coding: utf-8
from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy import *
from sqlalchemy import sql, exc, schema
from sqlalchemy.util import u
from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
from sqlalchemy import testing
import datetime
import decimal
class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
"Test MySQL column types"
__dialect__ = mysql.dialect()
__only_on__ = 'mysql'
__backend__ = True
def test_numeric(self):
"Exercise type specification and options for numeric types."
columns = [
# column type, args, kwargs, expected ddl
# e.g. Column(Integer(10, unsigned=True)) ==
# 'INTEGER(10) UNSIGNED'
(mysql.MSNumeric, [], {},
'NUMERIC'),
(mysql.MSNumeric, [None], {},
'NUMERIC'),
(mysql.MSNumeric, [12], {},
'NUMERIC(12)'),
(mysql.MSNumeric, [12, 4], {'unsigned':True},
'NUMERIC(12, 4) UNSIGNED'),
(mysql.MSNumeric, [12, 4], {'zerofill':True},
'NUMERIC(12, 4) ZEROFILL'),
(mysql.MSNumeric, [12, 4], {'zerofill':True, 'unsigned':True},
'NUMERIC(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSDecimal, [], {},
'DECIMAL'),
(mysql.MSDecimal, [None], {},
'DECIMAL'),
(mysql.MSDecimal, [12], {},
'DECIMAL(12)'),
(mysql.MSDecimal, [12, None], {},
'DECIMAL(12)'),
(mysql.MSDecimal, [12, 4], {'unsigned':True},
'DECIMAL(12, 4) UNSIGNED'),
(mysql.MSDecimal, [12, 4], {'zerofill':True},
'DECIMAL(12, 4) ZEROFILL'),
(mysql.MSDecimal, [12, 4], {'zerofill':True, 'unsigned':True},
'DECIMAL(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSDouble, [None, None], {},
'DOUBLE'),
(mysql.MSDouble, [12, 4], {'unsigned':True},
'DOUBLE(12, 4) UNSIGNED'),
(mysql.MSDouble, [12, 4], {'zerofill':True},
'DOUBLE(12, 4) ZEROFILL'),
(mysql.MSDouble, [12, 4], {'zerofill':True, 'unsigned':True},
'DOUBLE(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSReal, [None, None], {},
'REAL'),
(mysql.MSReal, [12, 4], {'unsigned':True},
'REAL(12, 4) UNSIGNED'),
(mysql.MSReal, [12, 4], {'zerofill':True},
'REAL(12, 4) ZEROFILL'),
(mysql.MSReal, [12, 4], {'zerofill':True, 'unsigned':True},
'REAL(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSFloat, [], {},
'FLOAT'),
(mysql.MSFloat, [None], {},
'FLOAT'),
(mysql.MSFloat, [12], {},
'FLOAT(12)'),
(mysql.MSFloat, [12, 4], {},
'FLOAT(12, 4)'),
(mysql.MSFloat, [12, 4], {'unsigned':True},
'FLOAT(12, 4) UNSIGNED'),
(mysql.MSFloat, [12, 4], {'zerofill':True},
'FLOAT(12, 4) ZEROFILL'),
(mysql.MSFloat, [12, 4], {'zerofill':True, 'unsigned':True},
'FLOAT(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSInteger, [], {},
'INTEGER'),
(mysql.MSInteger, [4], {},
'INTEGER(4)'),
(mysql.MSInteger, [4], {'unsigned':True},
'INTEGER(4) UNSIGNED'),
(mysql.MSInteger, [4], {'zerofill':True},
'INTEGER(4) ZEROFILL'),
(mysql.MSInteger, [4], {'zerofill':True, 'unsigned':True},
'INTEGER(4) UNSIGNED ZEROFILL'),
(mysql.MSBigInteger, [], {},
'BIGINT'),
(mysql.MSBigInteger, [4], {},
'BIGINT(4)'),
(mysql.MSBigInteger, [4], {'unsigned':True},
'BIGINT(4) UNSIGNED'),
(mysql.MSBigInteger, [4], {'zerofill':True},
'BIGINT(4) ZEROFILL'),
(mysql.MSBigInteger, [4], {'zerofill':True, 'unsigned':True},
'BIGINT(4) UNSIGNED ZEROFILL'),
(mysql.MSMediumInteger, [], {},
'MEDIUMINT'),
(mysql.MSMediumInteger, [4], {},
'MEDIUMINT(4)'),
(mysql.MSMediumInteger, [4], {'unsigned':True},
'MEDIUMINT(4) UNSIGNED'),
(mysql.MSMediumInteger, [4], {'zerofill':True},
'MEDIUMINT(4) ZEROFILL'),
(mysql.MSMediumInteger, [4], {'zerofill':True, 'unsigned':True},
'MEDIUMINT(4) UNSIGNED ZEROFILL'),
(mysql.MSTinyInteger, [], {},
'TINYINT'),
(mysql.MSTinyInteger, [1], {},
'TINYINT(1)'),
(mysql.MSTinyInteger, [1], {'unsigned':True},
'TINYINT(1) UNSIGNED'),
(mysql.MSTinyInteger, [1], {'zerofill':True},
'TINYINT(1) ZEROFILL'),
(mysql.MSTinyInteger, [1], {'zerofill':True, 'unsigned':True},
'TINYINT(1) UNSIGNED ZEROFILL'),
(mysql.MSSmallInteger, [], {},
'SMALLINT'),
(mysql.MSSmallInteger, [4], {},
'SMALLINT(4)'),
(mysql.MSSmallInteger, [4], {'unsigned':True},
'SMALLINT(4) UNSIGNED'),
(mysql.MSSmallInteger, [4], {'zerofill':True},
'SMALLINT(4) ZEROFILL'),
(mysql.MSSmallInteger, [4], {'zerofill':True, 'unsigned':True},
'SMALLINT(4) UNSIGNED ZEROFILL'),
]
for type_, args, kw, res in columns:
type_inst = type_(*args, **kw)
self.assert_compile(
type_inst,
res
)
# test that repr() copies out all arguments
self.assert_compile(
eval("mysql.%r" % type_inst),
res
)
# fixed in mysql-connector as of 2.0.1,
# see http://bugs.mysql.com/bug.php?id=73266
@testing.provide_metadata
def test_precision_float_roundtrip(self):
t = Table('t', self.metadata,
Column('scale_value', mysql.DOUBLE(
precision=15, scale=12, asdecimal=True)),
Column('unscale_value', mysql.DOUBLE(
decimal_return_scale=12, asdecimal=True))
)
t.create(testing.db)
testing.db.execute(
t.insert(), scale_value=45.768392065789,
unscale_value=45.768392065789
)
result = testing.db.scalar(select([t.c.scale_value]))
eq_(result, decimal.Decimal("45.768392065789"))
result = testing.db.scalar(select([t.c.unscale_value]))
eq_(result, decimal.Decimal("45.768392065789"))
@testing.exclude('mysql', '<', (4, 1, 1), 'no charset support')
def test_charset(self):
"""Exercise CHARACTER SET and COLLATE-ish options on string types."""
columns = [
(mysql.MSChar, [1], {},
'CHAR(1)'),
(mysql.NCHAR, [1], {},
'NATIONAL CHAR(1)'),
(mysql.MSChar, [1], {'binary':True},
'CHAR(1) BINARY'),
(mysql.MSChar, [1], {'ascii':True},
'CHAR(1) ASCII'),
(mysql.MSChar, [1], {'unicode':True},
'CHAR(1) UNICODE'),
(mysql.MSChar, [1], {'ascii':True, 'binary':True},
'CHAR(1) ASCII BINARY'),
(mysql.MSChar, [1], {'unicode':True, 'binary':True},
'CHAR(1) UNICODE BINARY'),
(mysql.MSChar, [1], {'charset':'utf8'},
'CHAR(1) CHARACTER SET utf8'),
(mysql.MSChar, [1], {'charset':'utf8', 'binary':True},
'CHAR(1) CHARACTER SET utf8 BINARY'),
(mysql.MSChar, [1], {'charset':'utf8', 'unicode':True},
'CHAR(1) CHARACTER SET utf8'),
(mysql.MSChar, [1], {'charset':'utf8', 'ascii':True},
'CHAR(1) CHARACTER SET utf8'),
(mysql.MSChar, [1], {'collation': 'utf8_bin'},
'CHAR(1) COLLATE utf8_bin'),
(mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin'},
'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
(mysql.MSChar, [1], {'charset': 'utf8', 'binary': True},
'CHAR(1) CHARACTER SET utf8 BINARY'),
(mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin',
'binary': True},
'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
(mysql.MSChar, [1], {'national':True},
'NATIONAL CHAR(1)'),
(mysql.MSChar, [1], {'national':True, 'charset':'utf8'},
'NATIONAL CHAR(1)'),
(mysql.MSChar, [1], {'national':True, 'charset':'utf8',
'binary':True},
'NATIONAL CHAR(1) BINARY'),
(mysql.MSChar, [1], {'national':True, 'binary':True,
'unicode':True},
'NATIONAL CHAR(1) BINARY'),
(mysql.MSChar, [1], {'national':True, 'collation':'utf8_bin'},
'NATIONAL CHAR(1) COLLATE utf8_bin'),
(mysql.MSString, [1], {'charset':'utf8', 'collation':'utf8_bin'},
'VARCHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
(mysql.MSString, [1], {'national':True, 'collation':'utf8_bin'},
'NATIONAL VARCHAR(1) COLLATE utf8_bin'),
(mysql.MSTinyText, [], {'charset':'utf8', 'collation':'utf8_bin'},
'TINYTEXT CHARACTER SET utf8 COLLATE utf8_bin'),
(mysql.MSMediumText, [], {'charset':'utf8', 'binary':True},
'MEDIUMTEXT CHARACTER SET utf8 BINARY'),
(mysql.MSLongText, [], {'ascii':True},
'LONGTEXT ASCII'),
(mysql.ENUM, ["foo", "bar"], {'unicode':True},
'''ENUM('foo','bar') UNICODE'''),
(String, [20], {"collation": "utf8"}, 'VARCHAR(20) COLLATE utf8')
]
for type_, args, kw, res in columns:
type_inst = type_(*args, **kw)
self.assert_compile(
type_inst,
res
)
# test that repr() copies out all arguments
self.assert_compile(
eval("mysql.%r" % type_inst)
if type_ is not String
else eval("%r" % type_inst),
res
)
@testing.fails_on('mysql+mysqlconnector', "different unicode behavior")
@testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
@testing.provide_metadata
def test_charset_collate_table(self):
t = Table('foo', self.metadata,
Column('id', Integer),
Column('data', UnicodeText),
mysql_default_charset='utf8',
mysql_collate='utf8_bin'
)
t.create()
m2 = MetaData(testing.db)
t2 = Table('foo', m2, autoload=True)
eq_(t2.kwargs['mysql_collate'], 'utf8_bin')
eq_(t2.kwargs['mysql_default charset'], 'utf8')
# test [ticket:2906]
# in order to test the condition here, need to use
# MySQLdb 1.2.3 and also need to pass either use_unicode=1
# or charset=utf8 to the URL.
t.insert().execute(id=1, data=u('some text'))
assert isinstance(testing.db.scalar(select([t.c.data])), util.text_type)
def test_bit_50(self):
"""Exercise BIT types on 5.0+ (not valid for all engine types)"""
for type_, expected in [
(mysql.MSBit(), "BIT"),
(mysql.MSBit(1), "BIT(1)"),
(mysql.MSBit(63), "BIT(63)"),
]:
self.assert_compile(type_, expected)
@testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
@testing.fails_if(
lambda: testing.against("mysql+oursql") and util.py3k,
'some round trips fail, oursql bug ?')
@testing.provide_metadata
def test_bit_50_roundtrip(self):
bit_table = Table('mysql_bits', self.metadata,
Column('b1', mysql.MSBit),
Column('b2', mysql.MSBit()),
Column('b3', mysql.MSBit(), nullable=False),
Column('b4', mysql.MSBit(1)),
Column('b5', mysql.MSBit(8)),
Column('b6', mysql.MSBit(32)),
Column('b7', mysql.MSBit(63)),
Column('b8', mysql.MSBit(64)))
self.metadata.create_all()
meta2 = MetaData(testing.db)
reflected = Table('mysql_bits', meta2, autoload=True)
for table in bit_table, reflected:
def roundtrip(store, expected=None):
expected = expected or store
table.insert(store).execute()
row = table.select().execute().first()
try:
self.assert_(list(row) == expected)
except:
print("Storing %s" % store)
print("Expected %s" % expected)
print("Found %s" % list(row))
raise
table.delete().execute().close()
roundtrip([0] * 8)
roundtrip([None, None, 0, None, None, None, None, None])
roundtrip([1] * 8)
roundtrip([sql.text("b'1'")] * 8, [1] * 8)
i = 255
roundtrip([0, 0, 0, 0, i, i, i, i])
i = 2 ** 32 - 1
roundtrip([0, 0, 0, 0, 0, i, i, i])
i = 2 ** 63 - 1
roundtrip([0, 0, 0, 0, 0, 0, i, i])
i = 2 ** 64 - 1
roundtrip([0, 0, 0, 0, 0, 0, 0, i])
def test_boolean(self):
for type_, expected in [
(BOOLEAN(), "BOOL"),
(Boolean(), "BOOL"),
(mysql.TINYINT(1), "TINYINT(1)"),
(mysql.TINYINT(1, unsigned=True), "TINYINT(1) UNSIGNED")
]:
self.assert_compile(type_, expected)
@testing.provide_metadata
def test_boolean_roundtrip(self):
bool_table = Table(
'mysql_bool',
self.metadata,
Column('b1', BOOLEAN),
Column('b2', Boolean),
Column('b3', mysql.MSTinyInteger(1)),
Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
Column('b5', mysql.MSTinyInteger),
)
self.metadata.create_all()
table = bool_table
def roundtrip(store, expected=None):
expected = expected or store
table.insert(store).execute()
row = table.select().execute().first()
self.assert_(list(row) == expected)
for i, val in enumerate(expected):
if isinstance(val, bool):
self.assert_(val is row[i])
table.delete().execute()
roundtrip([None, None, None, None, None])
roundtrip([True, True, 1, 1, 1])
roundtrip([False, False, 0, 0, 0])
roundtrip([True, True, True, True, True], [True, True, 1,
1, 1])
roundtrip([False, False, 0, 0, 0], [False, False, 0, 0, 0])
meta2 = MetaData(testing.db)
table = Table('mysql_bool', meta2, autoload=True)
eq_(colspec(table.c.b3), 'b3 TINYINT(1)')
eq_(colspec(table.c.b4), 'b4 TINYINT(1) UNSIGNED')
meta2 = MetaData(testing.db)
table = Table(
'mysql_bool',
meta2,
Column('b1', BOOLEAN),
Column('b2', Boolean),
Column('b3', BOOLEAN),
Column('b4', BOOLEAN),
autoload=True,
)
eq_(colspec(table.c.b3), 'b3 BOOL')
eq_(colspec(table.c.b4), 'b4 BOOL')
roundtrip([None, None, None, None, None])
roundtrip([True, True, 1, 1, 1], [True, True, True, True,
1])
roundtrip([False, False, 0, 0, 0], [False, False, False,
False, 0])
roundtrip([True, True, True, True, True], [True, True,
True, True, 1])
roundtrip([False, False, 0, 0, 0], [False, False, False,
False, 0])
def test_timestamp_fsp(self):
self.assert_compile(
mysql.TIMESTAMP(fsp=5),
"TIMESTAMP(5)"
)
def test_timestamp_defaults(self):
"""Exercise funky TIMESTAMP default syntax when used in columns."""
columns = [
([TIMESTAMP],
'TIMESTAMP NULL'),
([mysql.MSTimeStamp],
'TIMESTAMP NULL'),
([mysql.MSTimeStamp,
DefaultClause(sql.text('CURRENT_TIMESTAMP'))],
"TIMESTAMP DEFAULT CURRENT_TIMESTAMP"),
([mysql.MSTimeStamp,
DefaultClause(sql.text("'1999-09-09 09:09:09'"))],
"TIMESTAMP DEFAULT '1999-09-09 09:09:09'"),
([mysql.MSTimeStamp,
DefaultClause(sql.text("'1999-09-09 09:09:09' "
"ON UPDATE CURRENT_TIMESTAMP"))],
"TIMESTAMP DEFAULT '1999-09-09 09:09:09' "
"ON UPDATE CURRENT_TIMESTAMP"),
([mysql.MSTimeStamp,
DefaultClause(sql.text("CURRENT_TIMESTAMP "
"ON UPDATE CURRENT_TIMESTAMP"))],
"TIMESTAMP DEFAULT CURRENT_TIMESTAMP "
"ON UPDATE CURRENT_TIMESTAMP"),
]
for spec, expected in columns:
c = Column('t', *spec)
Table('t', MetaData(), c)
self.assert_compile(
schema.CreateColumn(c),
"t %s" % expected
)
@testing.provide_metadata
def test_timestamp_nullable(self):
ts_table = Table('mysql_timestamp', self.metadata,
Column('t1', TIMESTAMP),
Column('t2', TIMESTAMP, nullable=False),
)
self.metadata.create_all()
now = testing.db.execute("select now()").scalar()
        # TIMESTAMP without NULL inserts current time when passed
        # NULL. When not passed, it generates 0000-00-00, quite
        # annoyingly.
ts_table.insert().execute({'t1': now, 't2': None})
ts_table.insert().execute({'t1': None, 't2': None})
# normalize dates that are over the second boundary
def normalize(dt):
if dt is None:
return None
elif (dt - now).seconds < 5:
return now
else:
return dt
eq_(
[tuple([normalize(dt) for dt in row])
for row in ts_table.select().execute()],
[(now, now), (None, now)]
)
def test_datetime_generic(self):
self.assert_compile(
mysql.DATETIME(),
"DATETIME"
)
def test_datetime_fsp(self):
self.assert_compile(
mysql.DATETIME(fsp=4),
"DATETIME(4)"
)
def test_time_generic(self):
""""Exercise TIME."""
self.assert_compile(
mysql.TIME(),
"TIME"
)
def test_time_fsp(self):
self.assert_compile(
mysql.TIME(fsp=5),
"TIME(5)"
)
def test_time_result_processor(self):
eq_(
mysql.TIME().result_processor(None, None)(
datetime.timedelta(seconds=35, minutes=517,
microseconds=450
)),
datetime.time(8, 37, 35, 450)
)
@testing.fails_on("mysql+oursql", "TODO: probable OurSQL bug")
@testing.provide_metadata
def test_time_roundtrip(self):
t = Table('mysql_time', self.metadata,
Column('t1', mysql.TIME())
)
t.create()
t.insert().values(t1=datetime.time(8, 37, 35)).execute()
eq_(select([t.c.t1]).scalar(), datetime.time(8, 37, 35))
@testing.provide_metadata
def test_year(self):
"""Exercise YEAR."""
year_table = Table('mysql_year', self.metadata,
Column('y1', mysql.MSYear),
Column('y2', mysql.MSYear),
Column('y3', mysql.MSYear),
Column('y5', mysql.MSYear(4)))
for col in year_table.c:
self.assert_(repr(col))
year_table.create()
reflected = Table('mysql_year', MetaData(testing.db),
autoload=True)
for table in year_table, reflected:
table.insert(['1950', '50', None, 1950]).execute()
row = table.select().execute().first()
eq_(list(row), [1950, 2050, None, 1950])
table.delete().execute()
self.assert_(colspec(table.c.y1).startswith('y1 YEAR'))
eq_(colspec(table.c.y5), 'y5 YEAR(4)')
class EnumSetTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
__only_on__ = 'mysql'
__dialect__ = mysql.dialect()
@testing.provide_metadata
def test_enum(self):
"""Exercise the ENUM type."""
with testing.expect_deprecated('Manually quoting ENUM value literals'):
e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'")
enum_table = Table('mysql_enum', self.metadata,
Column('e1', e1),
Column('e2', e2, nullable=False),
Column('e2generic', Enum("a", "b"), nullable=False),
Column('e3', mysql.ENUM("'a'", "'b'", strict=True)),
Column('e4', mysql.ENUM("'a'", "'b'", strict=True),
nullable=False),
Column('e5', mysql.ENUM("a", "b")),
Column('e5generic', Enum("a", "b")),
Column('e6', mysql.ENUM("'a'", "b")),
)
eq_(colspec(enum_table.c.e1),
"e1 ENUM('a','b')")
eq_(colspec(enum_table.c.e2),
"e2 ENUM('a','b') NOT NULL")
eq_(colspec(enum_table.c.e2generic),
"e2generic ENUM('a','b') NOT NULL")
eq_(colspec(enum_table.c.e3),
"e3 ENUM('a','b')")
eq_(colspec(enum_table.c.e4),
"e4 ENUM('a','b') NOT NULL")
eq_(colspec(enum_table.c.e5),
"e5 ENUM('a','b')")
eq_(colspec(enum_table.c.e5generic),
"e5generic ENUM('a','b')")
eq_(colspec(enum_table.c.e6),
"e6 ENUM('''a''','b')")
enum_table.create()
assert_raises(exc.DBAPIError, enum_table.insert().execute,
e1=None, e2=None, e3=None, e4=None)
assert_raises(exc.StatementError, enum_table.insert().execute,
e1='c', e2='c', e2generic='c', e3='c',
e4='c', e5='c', e5generic='c', e6='c')
enum_table.insert().execute()
enum_table.insert().execute(e1='a', e2='a', e2generic='a', e3='a',
e4='a', e5='a', e5generic='a', e6="'a'")
enum_table.insert().execute(e1='b', e2='b', e2generic='b', e3='b',
e4='b', e5='b', e5generic='b', e6='b')
res = enum_table.select().execute().fetchall()
expected = [(None, 'a', 'a', None, 'a', None, None, None),
('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
eq_(res, expected)
@testing.provide_metadata
def test_set(self):
with testing.expect_deprecated('Manually quoting SET value literals'):
e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'")
set_table = Table('mysql_set', self.metadata,
Column('e1', e1),
Column('e2', e2, nullable=False),
Column('e3', mysql.SET("a", "b")),
Column('e4', mysql.SET("'a'", "b")),
Column('e5', mysql.SET("'a'", "'b'", quoting="quoted"))
)
eq_(colspec(set_table.c.e1),
"e1 SET('a','b')")
eq_(colspec(set_table.c.e2),
"e2 SET('a','b') NOT NULL")
eq_(colspec(set_table.c.e3),
"e3 SET('a','b')")
eq_(colspec(set_table.c.e4),
"e4 SET('''a''','b')")
eq_(colspec(set_table.c.e5),
"e5 SET('a','b')")
set_table.create()
assert_raises(exc.DBAPIError, set_table.insert().execute,
e1=None, e2=None, e3=None, e4=None)
if testing.against("+oursql"):
assert_raises(exc.StatementError, set_table.insert().execute,
e1='c', e2='c', e3='c', e4='c')
set_table.insert().execute(e1='a', e2='a', e3='a', e4="'a'", e5="a,b")
set_table.insert().execute(e1='b', e2='b', e3='b', e4='b', e5="a,b")
res = set_table.select().execute().fetchall()
if testing.against("+oursql"):
expected = [
# 1st row with all c's, data truncated
(set(['']), set(['']), set(['']), set(['']), None),
]
else:
expected = []
expected.extend([
(set(['a']), set(['a']), set(['a']), set(["'a'"]), set(['a', 'b'])),
(set(['b']), set(['b']), set(['b']), set(['b']), set(['a', 'b']))
])
eq_(res, expected)
@testing.provide_metadata
def test_set_roundtrip_plus_reflection(self):
set_table = Table('mysql_set', self.metadata,
Column('s1',
mysql.SET("dq", "sq")),
Column('s2', mysql.SET("a")),
Column('s3', mysql.SET("5", "7", "9")))
eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
eq_(colspec(set_table.c.s2), "s2 SET('a')")
eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
set_table.create()
reflected = Table('mysql_set', MetaData(testing.db),
autoload=True)
for table in set_table, reflected:
def roundtrip(store, expected=None):
expected = expected or store
table.insert(store).execute()
row = table.select().execute().first()
self.assert_(list(row) == expected)
table.delete().execute()
roundtrip([None, None, None], [None] * 3)
roundtrip(['', '', ''], [set([''])] * 3)
roundtrip([set(['dq']), set(['a']), set(['5'])])
roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']),
set(['5'])])
            roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5'])])
            roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'])])
set_table.insert().execute({'s3': set(['5'])},
{'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
{'s3': set(['7', '9'])})
# NOTE: the string sent to MySQL here is sensitive to ordering.
# for some reason the set ordering is always "5, 7" when we test on
# MySQLdb but in Py3K this is not guaranteed. So basically our
# SET type doesn't do ordering correctly (not sure how it can,
# as we don't know how the SET was configured in the first place.)
rows = select([set_table.c.s3],
set_table.c.s3.in_([set(['5']), ['5', '7']])
).execute().fetchall()
found = set([frozenset(row[0]) for row in rows])
eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
@testing.provide_metadata
def test_unicode_enum(self):
metadata = self.metadata
t1 = Table('table', metadata,
Column('id', Integer, primary_key=True),
Column('value', Enum(u('réveillé'), u('drôle'), u('S’il'))),
Column('value2', mysql.ENUM(u('réveillé'), u('drôle'), u('S’il')))
)
metadata.create_all()
t1.insert().execute(value=u('drôle'), value2=u('drôle'))
t1.insert().execute(value=u('réveillé'), value2=u('réveillé'))
t1.insert().execute(value=u('S’il'), value2=u('S’il'))
eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
[(1, u('drôle'), u('drôle')), (2, u('réveillé'), u('réveillé')),
(3, u('S’il'), u('S’il'))]
)
# test reflection of the enum labels
m2 = MetaData(testing.db)
t2 = Table('table', m2, autoload=True)
# TODO: what's wrong with the last element ? is there
# latin-1 stuff forcing its way in ?
assert t2.c.value.type.enums[0:2] == \
(u('réveillé'), u('drôle')) # u'S’il') # eh ?
assert t2.c.value2.type.enums[0:2] == \
(u('réveillé'), u('drôle')) # u'S’il') # eh ?
def test_enum_compile(self):
e1 = Enum('x', 'y', 'z', name='somename')
t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
self.assert_compile(schema.CreateTable(t1),
"CREATE TABLE sometable (somecolumn "
"ENUM('x','y','z'))")
t1 = Table('sometable', MetaData(), Column('somecolumn',
Enum('x', 'y', 'z', native_enum=False)))
self.assert_compile(schema.CreateTable(t1),
"CREATE TABLE sometable (somecolumn "
"VARCHAR(1), CHECK (somecolumn IN ('x', "
"'y', 'z')))")
@testing.provide_metadata
@testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''")
def test_enum_parse(self):
with testing.expect_deprecated('Manually quoting ENUM value literals'):
enum_table = Table('mysql_enum', self.metadata,
Column('e1', mysql.ENUM("'a'")),
Column('e2', mysql.ENUM("''")),
Column('e3', mysql.ENUM('a')),
Column('e4', mysql.ENUM('')),
Column('e5', mysql.ENUM("'a'", "''")),
Column('e6', mysql.ENUM("''", "'a'")),
Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''")))
for col in enum_table.c:
self.assert_(repr(col))
enum_table.create()
reflected = Table('mysql_enum', MetaData(testing.db),
autoload=True)
for t in enum_table, reflected:
eq_(t.c.e1.type.enums, ("a",))
eq_(t.c.e2.type.enums, ("",))
eq_(t.c.e3.type.enums, ("a",))
eq_(t.c.e4.type.enums, ("",))
eq_(t.c.e5.type.enums, ("a", ""))
eq_(t.c.e6.type.enums, ("", "a"))
eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
@testing.provide_metadata
@testing.exclude('mysql', '<', (5,))
def test_set_parse(self):
with testing.expect_deprecated('Manually quoting SET value literals'):
set_table = Table('mysql_set', self.metadata,
Column('e1', mysql.SET("'a'")),
Column('e2', mysql.SET("''")),
Column('e3', mysql.SET('a')),
Column('e4', mysql.SET('')),
Column('e5', mysql.SET("'a'", "''")),
Column('e6', mysql.SET("''", "'a'")),
Column('e7', mysql.SET("''", "'''a'''", "'b''b'", "''''")))
for col in set_table.c:
self.assert_(repr(col))
set_table.create()
# don't want any warnings on reflection
reflected = Table('mysql_set', MetaData(testing.db),
autoload=True)
for t in set_table, reflected:
eq_(t.c.e1.type.values, ("a",))
eq_(t.c.e2.type.values, ("",))
eq_(t.c.e3.type.values, ("a",))
eq_(t.c.e4.type.values, ("",))
eq_(t.c.e5.type.values, ("a", ""))
eq_(t.c.e6.type.values, ("", "a"))
eq_(t.c.e7.type.values, ("", "'a'", "b'b", "'"))
def colspec(c):
return testing.db.dialect.ddl_compiler(
testing.db.dialect, None).get_column_specification(c)
| Abi1ity/uniclust2.0 | SQLAlchemy-0.9.9/test/dialect/mysql/test_types.py | Python | bsd-3-clause | 32,246 | 0.004626 |
#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Dubstep
#Problem level: 6 kyu
def song_decoder(song):
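    # Example: "WUBAWUBBWUB".split('WUB') -> ['', 'A', '', 'B', ''];
    # joining on spaces and re-splitting collapses the empty pieces,
    # so the result is "A B".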
return " ".join(" ".join(song.split('WUB')).split())
| Kunalpod/codewars | dubstep.py | Python | mit | 163 | 0.02454 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Prueba',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
]
| HenryGBC/landing_company | landing/migrations/0001_initial.py | Python | mit | 556 | 0.001799 |
import wpilib
from wpilib.command.commandgroup import CommandGroup
# NOTE: assumed import paths for the command classes used below; the actual
# module layout in this repo may differ.
from commands.claw_grab import ClawGrab
from commands.move_lift import MoveLift
from commands.turn_to_specified_angle import TurnToSpecifiedAngle
from commands.arcade_drive import ArcadeDrive
class Autonomous(CommandGroup):
def __init__(self, drive, grabber_lift):
super().__init__()
self.drive = drive
self.grabber_lift = grabber_lift
self.addSequential(ClawGrab(grabber_lift))
self.addSequential(MoveLift(grabber_lift, .5), 1.5)
self.addParallel(TurnToSpecifiedAngle(drive, 180))
        self.addSequential(ArcadeDrive(drive, 0, 1))
| frc2423/2015 | recycle_rush/commands/autonomous.py | Python | gpl-2.0 | 485 | 0.010309 |