text (string, 6-947k chars) | repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) |
---|---|---|---|---|---|---|
from allauth.socialaccount.tests import OAuth2TestsMixin
from allauth.tests import MockedResponse, TestCase
from .provider import MicrosoftGraphProvider
class MicrosoftGraphTests(OAuth2TestsMixin, TestCase):
provider_id = MicrosoftGraphProvider.id
def get_mocked_response(self):
response_data = """
{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users/$entity",
"id": "16f5a7b6-5a15-4568-aa5a-31bb117e9967",
"businessPhones": [],
"displayName": "Anne Weiler",
"givenName": "Anne",
"jobTitle": "Manufacturing Lead",
"mail": "annew@CIE493742.onmicrosoft.com",
"mobilePhone": "+1 3528700812",
"officeLocation": null,
"preferredLanguage": "en-US",
"surname": "Weiler",
"userPrincipalName": "annew@CIE493742.onmicrosoft.com"
}
""" # noqa
return MockedResponse(200, response_data)
| AltSchool/django-allauth | allauth/socialaccount/providers/microsoft/tests.py | Python | mit | 988 | 0.001012 |
"""
Python environments and packages
================================
This module provides tools for using Python `virtual environments`_
and installing Python packages using the `pip`_ installer.
.. _virtual environments: http://www.virtualenv.org/
.. _pip: http://www.pip-installer.org/
"""
from __future__ import with_statement
from contextlib import contextmanager
from distutils.version import StrictVersion as V
from pipes import quote
import os
import posixpath
import re
from fabric.api import cd, hide, prefix, run, settings, sudo
from fabric.utils import puts
from fabtools.files import is_file
from fabtools.utils import abspath, download, run_as_root
GET_PIP_URL = 'https://raw.githubusercontent.com/pypa/pip/master/contrib/get-pip.py'
def is_pip_installed(version=None, pip_cmd='pip'):
"""
Check if `pip`_ is installed.
.. _pip: http://www.pip-installer.org/
"""
with settings(hide('running', 'warnings', 'stderr', 'stdout'), warn_only=True):
res = run('%(pip_cmd)s --version 2>/dev/null' % locals())
if res.failed:
return False
if version is None:
return res.succeeded
else:
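            # `pip --version` prints e.g. "pip 1.5.6 from /usr/lib/python2.7/site-packages (python 2.7)"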
m = re.search(r'pip (?P<version>.*) from', res)
if m is None:
return False
installed = m.group('version')
if V(installed) < V(version):
puts("pip %s found (version >= %s required)" % (installed, version))
return False
else:
return True
def install_pip(python_cmd='python', use_sudo=True):
"""
Install the latest version of `pip`_, using the given Python
interpreter.
::
import fabtools
if not fabtools.python.is_pip_installed():
fabtools.python.install_pip()
.. note::
pip is automatically installed inside a virtualenv, so there
is no need to install it yourself in this case.
.. _pip: http://www.pip-installer.org/
"""
with cd('/tmp'):
download(GET_PIP_URL)
command = '%(python_cmd)s get-pip.py' % locals()
if use_sudo:
run_as_root(command, pty=False)
else:
run(command, pty=False)
run('rm -f get-pip.py')
def is_installed(package, pip_cmd='pip'):
"""
Check if a Python package is installed (using pip).
Package names are case insensitive.
Example::
from fabtools.python import virtualenv
import fabtools
with virtualenv('/path/to/venv'):
fabtools.python.install('Flask')
assert fabtools.python.is_installed('flask')
.. _pip: http://www.pip-installer.org/
"""
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = run('%(pip_cmd)s freeze' % locals())
packages = [line.split('==')[0].lower() for line in res.splitlines()]
return (package.lower() in packages)
def install(packages, upgrade=False, download_cache=None, allow_external=None,
allow_unverified=None, quiet=False, pip_cmd='pip', use_sudo=False,
user=None, exists_action=None):
"""
Install Python package(s) using `pip`_.
Package names are case insensitive.
Starting with version 1.5, pip no longer scrapes insecure external
urls by default and no longer installs externally hosted files by
default. Use ``allow_external=['foo', 'bar']`` or
``allow_unverified=['bar', 'baz']`` to change these behaviours
for specific packages.
Examples::
import fabtools
# Install a single package
fabtools.python.install('package', use_sudo=True)
# Install a list of packages
fabtools.python.install(['pkg1', 'pkg2'], use_sudo=True)
.. _pip: http://www.pip-installer.org/
"""
if isinstance(packages, basestring):
packages = [packages]
if allow_external in (None, False):
allow_external = []
elif allow_external == True:
allow_external = packages
if allow_unverified in (None, False):
allow_unverified = []
elif allow_unverified == True:
allow_unverified = packages
options = []
if upgrade:
options.append('--upgrade')
if download_cache:
options.append('--download-cache="%s"' % download_cache)
if quiet:
options.append('--quiet')
for package in allow_external:
options.append('--allow-external="%s"' % package)
for package in allow_unverified:
options.append('--allow-unverified="%s"' % package)
if exists_action:
options.append('--exists-action=%s' % exists_action)
options = ' '.join(options)
packages = ' '.join(packages)
command = '%(pip_cmd)s install %(options)s %(packages)s' % locals()
if use_sudo:
sudo(command, user=user, pty=False)
else:
run(command, pty=False)
def install_requirements(filename, upgrade=False, download_cache=None,
allow_external=None, allow_unverified=None,
quiet=False, pip_cmd='pip', use_sudo=False,
user=None, exists_action=None):
"""
Install Python packages from a pip `requirements file`_.
::
import fabtools
fabtools.python.install_requirements('project/requirements.txt')
.. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
"""
if allow_external is None:
allow_external = []
if allow_unverified is None:
allow_unverified = []
options = []
if upgrade:
options.append('--upgrade')
if download_cache:
options.append('--download-cache="%s"' % download_cache)
for package in allow_external:
options.append('--allow-external="%s"' % package)
for package in allow_unverified:
options.append('--allow-unverified="%s"' % package)
if quiet:
options.append('--quiet')
if exists_action:
options.append('--exists-action=%s' % exists_action)
options = ' '.join(options)
command = '%(pip_cmd)s install %(options)s -r %(filename)s' % locals()
if use_sudo:
sudo(command, user=user, pty=False)
else:
run(command, pty=False)
def create_virtualenv(directory, system_site_packages=False, venv_python=None,
use_sudo=False, user=None, clear=False, prompt=None,
virtualenv_cmd='virtualenv'):
"""
Create a Python `virtual environment`_.
::
import fabtools
fabtools.python.create_virtualenv('/path/to/venv')
.. _virtual environment: http://www.virtualenv.org/
"""
options = ['--quiet']
if system_site_packages:
options.append('--system-site-packages')
if venv_python:
options.append('--python=%s' % quote(venv_python))
if clear:
options.append('--clear')
if prompt:
options.append('--prompt=%s' % quote(prompt))
options = ' '.join(options)
directory = quote(directory)
command = '%(virtualenv_cmd)s %(options)s %(directory)s' % locals()
if use_sudo:
sudo(command, user=user)
else:
run(command)
def virtualenv_exists(directory):
"""
Check if a Python `virtual environment`_ exists.
.. _virtual environment: http://www.virtualenv.org/
"""
return is_file(posixpath.join(directory, 'bin', 'python'))
@contextmanager
def virtualenv(directory, local=False):
"""
Context manager to activate an existing Python `virtual environment`_.
::
from fabric.api import run
from fabtools.python import virtualenv
with virtualenv('/path/to/virtualenv'):
run('python -V')
.. _virtual environment: http://www.virtualenv.org/
"""
path_mod = os.path if local else posixpath
# Build absolute path to the virtualenv activation script
venv_path = abspath(directory)
activate_path = path_mod.join(venv_path, 'bin', 'activate')
# Source the activation script
with prefix('. %s' % quote(activate_path)):
yield
| juanantoniofm/accesible-moodle | fabtools/python.py | Python | gpl-2.0 | 8,076 | 0.001114 |
# testing: to be called by nosetests
import os
from stetl.etl import ETL
from tests.stetl_test_case import StetlTestCase
from stetl.main import parse_args
class ConfigTest(StetlTestCase):
"""Basic configuration tests"""
def setUp(self):
super(ConfigTest, self).setUp()
# Initialize Stetl
self.curr_dir = os.path.dirname(os.path.realpath(__file__))
self.cfg_dict = {'config_file': os.path.join(self.curr_dir, 'configs/copy_in_out_file.cfg')}
def clear_stetl_env(self):
        # Restore the old environment
try:
del os.environ['stetl_out_file']
del os.environ['stetl_in_file']
        except KeyError:
pass
def tearDown(self):
super(ConfigTest, self).tearDown()
self.clear_stetl_env()
def test_config_args_file_single(self):
"""
Test single -a argsfile option
:return:
"""
args_default = os.path.join(self.curr_dir, 'configs/copy_in_out_file_default.args')
args_parsed = parse_args(['-a', args_default])
# Test args substitution from args_dict
config_args = args_parsed.config_args
self.assertEqual(config_args['in_file'], 'default_infile.txt')
self.assertEqual(config_args['out_file'], 'default_outfile.txt')
def test_config_args_explicit_single(self):
"""
Test single -a "arg1=x arg2=y" option
:return:
"""
args_default = os.path.join(self.curr_dir, 'configs/copy_in_out_file_default.args')
args_parsed = parse_args(['-a', 'in_file=default_infile.txt out_file=default_outfile.txt'])
# Test args substitution from args_dict
config_args = args_parsed.config_args
self.assertEqual(config_args['in_file'], 'default_infile.txt')
self.assertEqual(config_args['out_file'], 'default_outfile.txt')
def test_config_args_file_multi(self):
"""
Test multiple: -a argsfile1 -a argsfile2 option with override
:return:
"""
args_default = os.path.join(self.curr_dir, 'configs/copy_in_out_file_default.args')
args_my = os.path.join(self.curr_dir, 'configs/copy_in_out_file_my.args')
args_parsed = parse_args(['-a', args_default, '-a', args_my])
# Test args substitution from args_dict
config_args = args_parsed.config_args
self.assertEqual(config_args['in_file'], 'my_infile.txt')
self.assertEqual(config_args['out_file'], 'default_outfile.txt')
def test_config_args_file_explicit_multi(self):
"""
Test multiple: -a argsfile1 -a arg=myarg option with override
:return:
"""
args_default = os.path.join(self.curr_dir, 'configs/copy_in_out_file_default.args')
args_parsed = parse_args(['-a', args_default, '-a', 'in_file=my_infile.txt'])
# Test args substitution from args_dict
config_args = args_parsed.config_args
self.assertEqual(config_args['in_file'], 'my_infile.txt')
self.assertEqual(config_args['out_file'], 'default_outfile.txt')
def test_args_dict(self):
args_dict = {'in_file': 'infile.txt', 'out_file': 'outfile.txt'}
etl = ETL(self.cfg_dict, args_dict)
# Test args substitution from args_dict
self.assertEqual(etl.configdict.get('input_file', 'file_path'), 'infile.txt')
self.assertEqual(etl.configdict.get('output_file', 'file_path'), 'outfile.txt')
def test_args_dict_env_override(self):
args_dict = {'in_file': 'infile.txt', 'out_file': 'outfile.txt'}
# Override in OS env
os.environ['stetl_in_file'] = 'env_infile.txt'
etl = ETL(self.cfg_dict, args_dict)
# Test args substitution from args_dict
self.assertEqual(etl.configdict.get('input_file', 'file_path'), os.environ['stetl_in_file'])
self.assertEqual(etl.configdict.get('output_file', 'file_path'), 'outfile.txt')
def test_args_dict_env_all(self):
"""
Substitute ALL args from OS env.
:return:
"""
# Set all args in in OS env
os.environ['stetl_in_file'] = 'env_infile.txt'
os.environ['stetl_out_file'] = 'env_outfile.txt'
args_dict = None
etl = ETL(self.cfg_dict, args_dict)
# Test args substitution from args_dict
self.assertEqual(etl.configdict.get('input_file', 'file_path'), os.environ['stetl_in_file'])
self.assertEqual(etl.configdict.get('output_file', 'file_path'), os.environ['stetl_out_file'])
| justb4/stetl | tests/test_args.py | Python | gpl-3.0 | 4,523 | 0.003537 |
#!/usr/bin/env python
import os.path
programName = os.path.splitext(os.path.basename(__file__))[0]
from sys import argv,stdin,stderr,exit
from pazookle.shred import zook,Shreduler
from pazookle.ugen import UGen,Mixer
from pazookle.generate import SinOsc
from pazookle.output import WavOut
from pazookle.parse import float_or_fraction
def usage(s=None):
message = """
usage: %s [options]
--freq1=<value> set the first frequency
--freq2=<value> set the second frequency
--dry=<value> set dry level (mix for second oscillator)
--wet=<value> set wet level (mix for second oscillator)
--channels=<1|2> number of output channels
--duration=<seconds> length of the test""" \
% programName
if (s == None): exit (message)
else: exit ("%s\n%s" % (s,message))
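# Example invocation (hypothetical values; options as listed in the usage text above):
#   ./try_Mixer.py --freq1=440 --freq2=660 --dry=0.5 --wet=0.2 --channels=2 --duration=3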
def main():
global debug
global freq1,freq2,dry,wet,numChannels
# parse the command line
freq1 = 440.0
freq2 = 660.0
dry = 0.5
wet = 0.2
numChannels = 1
duration = 3.0
debug = []
for arg in argv[1:]:
if ("=" in arg):
argVal = arg.split("=",1)[1]
if (arg.startswith("F1=")) or (arg.startswith("--freq1=")) or (arg.startswith("--frequency1=")):
freq1 = float_or_fraction(argVal)
elif (arg.startswith("F2=")) or (arg.startswith("--freq2=")) or (arg.startswith("--frequency2=")):
freq2 = float_or_fraction(argVal)
elif (arg.startswith("D=")) or (arg.startswith("--dry=")):
dry = float_or_fraction(argVal)
elif (arg.startswith("W=")) or (arg.startswith("--wet=")):
wet = float_or_fraction(argVal)
elif (arg.startswith("--channels=")):
numChannels = int(argVal)
elif (arg.startswith("T=")) or (arg.startswith("--dur=")) or (arg.startswith("--duration=")):
duration = float_or_fraction(argVal)
elif (arg == "--help"):
usage()
elif (arg.startswith("--debug=")):
debug += argVal.split(",")
elif (arg.startswith("--")):
usage("unrecognized option: %s" % arg)
else:
usage("unrecognized option: %s" % arg)
# run the test
UGen.set_debug(debug)
Shreduler.set_debug(debug)
zook.spork(mixer_test(duration*zook.sec))
zook.run()
def mixer_test(duration):
filename = programName + ".wav"
print >>stderr, "writing audio output to %s" % filename
output = WavOut(filename=filename,channels=numChannels)
osc1 = SinOsc(gain=1.0,freq=freq1)
osc2 = SinOsc(gain=1.0,freq=freq2)
mixer = Mixer(dry=dry,wet=wet,channels=numChannels)
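    # Patch the signal graph with pazookle's `>>` operator: both oscillators
    # feed the mixer, and the mixer feeds the wav output.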
osc1 >> mixer >> output
osc2 >> mixer
yield duration
output.close()
if __name__ == "__main__": main()
| zacko-belsch/pazookle | tests/try_Mixer.py | Python | gpl-3.0 | 2,580 | 0.037209 |
# -*- coding:utf-8; mode:python -*-
"""
Implements a queue efficiently using only two stacks.
"""
from helpers import SingleNode
from stack import Stack
class QueueOf2Stacks:
def __init__(self):
self.stack_1 = Stack()
self.stack_2 = Stack()
def enqueue(self, value):
self.stack_1.push(value)
def dequeue(self):
self.transfer_if_necessary()
if self.stack_2:
return self.stack_2.pop()
def peek(self):
self.transfer_if_necessary()
if self.stack_2:
return self.stack_2.peek()
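    # Each element is moved from stack_1 to stack_2 at most once, so
    # enqueue/dequeue run in amortized O(1) time even though a single
    # dequeue may trigger a full transfer.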
def transfer_if_necessary(self):
if not self.stack_2:
while self.stack_1:
self.stack_2.push(self.stack_1.pop())
def __len__(self):
return len(self.stack_1) + len(self.stack_2)
def main():
queue = QueueOf2Stacks()
print()
for i in range(10):
queue.enqueue(i)
print(i)
print("---")
for i in range(len(queue)):
print(queue.dequeue())
if __name__ == "__main__":
main()
main()
| wkmanire/StructuresAndAlgorithms | pythonpractice/queueoftwostacks.py | Python | gpl-3.0 | 1,054 | 0 |
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Copyright (c) 2014 Dyffy, Inc.
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class SidecoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
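# Endianness helpers: the hex-encoded `getwork` block data is handled as
# 32-bit words; bufreverse swaps the bytes inside each word, and
# wordreverse reverses the order of the words in the buffer.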
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = SidecoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 8332
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| joeykrug/sidecoin | contrib/pyminer/pyminer.py | Python | mit | 6,470 | 0.036321 |
# example for using Python with cython bindings as a [HAL]HALFILE
# in the ini file, add as last HALFILE:
#[HAL]
#HALFILE = haltest.py
from machinekit.halfile import rt, hal
rt.loadrt("supply")
hal.addf("supply.0.update","servo-thread")
| ArcEye/machinekit-testing | configs/sim/axis/haltest.py | Python | lgpl-2.1 | 242 | 0.012397 |
from typing import TypeVar, Dict, Any
from django import template
from django.conf import settings
from util.getters import reverse_model_url
register = template.Library()
T = TypeVar('T')
@register.filter(name='get_from_dict')
def get_from_dict(dictionary: Dict[T, Any], key: T):
value = dictionary.get(key)
if settings.DEBUG and value is None:
print(key, dictionary)
return value
@register.filter(name='url_finder')
def url_finder(obj):
return reverse_model_url(obj)
@register.filter(name="get_model_url")
def get_model_url(obj):
return url_finder(obj)
@register.filter(name='getattr')
def getattrfilter(obj, attr):
try:
return getattr(obj, attr)
except AttributeError:
return None
| FRC-RS/FRS | TBAW/templatetags/getters.py | Python | mit | 750 | 0 |
from setuptools import find_packages, setup
version = '0.0.1'
setup(
name='alerta-transient',
version=version,
description='Example Alerta plugin for transient flapping alerts',
url='https://github.com/alerta/alerta-contrib',
license='Apache License 2.0',
author='Your name',
author_email='your.name@example.com',
packages=find_packages(),
py_modules=['alerta_transient'],
install_requires=[],
include_package_data=True,
zip_safe=True,
entry_points={
'alerta.plugins': [
'transient = alerta_transient:TransientAlert'
]
}
)
| guardian/alerta | examples/plugins/transient/setup.py | Python | apache-2.0 | 609 | 0 |
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_port
----------------------------------
Test port resource (managed by neutron)
"""
from openstack.cloud.exc import OpenStackCloudException
from openstack.tests.unit import base
class TestPort(base.TestCase):
mock_neutron_port_create_rep = {
'port': {
'status': 'DOWN',
'binding:host_id': '',
'name': 'test-port-name',
'allowed_address_pairs': [],
'admin_state_up': True,
'network_id': 'test-net-id',
'tenant_id': 'test-tenant-id',
'binding:vif_details': {},
'binding:vnic_type': 'normal',
'binding:vif_type': 'unbound',
'device_owner': '',
'mac_address': '50:1c:0d:e4:f0:0d',
'binding:profile': {},
'fixed_ips': [
{
'subnet_id': 'test-subnet-id',
'ip_address': '29.29.29.29'
}
],
'id': 'test-port-id',
'security_groups': [],
'device_id': ''
}
}
mock_neutron_port_update_rep = {
'port': {
'status': 'DOWN',
'binding:host_id': '',
'name': 'test-port-name-updated',
'allowed_address_pairs': [],
'admin_state_up': True,
'network_id': 'test-net-id',
'tenant_id': 'test-tenant-id',
'binding:vif_details': {},
'binding:vnic_type': 'normal',
'binding:vif_type': 'unbound',
'device_owner': '',
'mac_address': '50:1c:0d:e4:f0:0d',
'binding:profile': {},
'fixed_ips': [
{
'subnet_id': 'test-subnet-id',
'ip_address': '29.29.29.29'
}
],
'id': 'test-port-id',
'security_groups': [],
'device_id': ''
}
}
mock_neutron_port_list_rep = {
'ports': [
{
'status': 'ACTIVE',
'binding:host_id': 'devstack',
'name': 'first-port',
'allowed_address_pairs': [],
'admin_state_up': True,
'network_id': '70c1db1f-b701-45bd-96e0-a313ee3430b3',
'tenant_id': '',
'extra_dhcp_opts': [],
'binding:vif_details': {
'port_filter': True,
'ovs_hybrid_plug': True
},
'binding:vif_type': 'ovs',
'device_owner': 'network:router_gateway',
'mac_address': 'fa:16:3e:58:42:ed',
'binding:profile': {},
'binding:vnic_type': 'normal',
'fixed_ips': [
{
'subnet_id': '008ba151-0b8c-4a67-98b5-0d2b87666062',
'ip_address': '172.24.4.2'
}
],
'id': 'd80b1a3b-4fc1-49f3-952e-1e2ab7081d8b',
'security_groups': [],
'device_id': '9ae135f4-b6e0-4dad-9e91-3c223e385824'
},
{
'status': 'ACTIVE',
'binding:host_id': 'devstack',
'name': '',
'allowed_address_pairs': [],
'admin_state_up': True,
'network_id': 'f27aa545-cbdd-4907-b0c6-c9e8b039dcc2',
'tenant_id': 'd397de8a63f341818f198abb0966f6f3',
'extra_dhcp_opts': [],
'binding:vif_details': {
'port_filter': True,
'ovs_hybrid_plug': True
},
'binding:vif_type': 'ovs',
'device_owner': 'network:router_interface',
'mac_address': 'fa:16:3e:bb:3c:e4',
'binding:profile': {},
'binding:vnic_type': 'normal',
'fixed_ips': [
{
'subnet_id': '288bf4a1-51ba-43b6-9d0a-520e9005db17',
'ip_address': '10.0.0.1'
}
],
'id': 'f71a6703-d6de-4be1-a91a-a570ede1d159',
'security_groups': [],
'device_id': '9ae135f4-b6e0-4dad-9e91-3c223e385824'
}
]
}
def test_create_port(self):
self.register_uris([
dict(method="POST",
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_create_rep,
validate=dict(
json={'port': {
'network_id': 'test-net-id',
'name': 'test-port-name',
'admin_state_up': True}}))
])
port = self.cloud.create_port(
network_id='test-net-id', name='test-port-name',
admin_state_up=True)
self.assertEqual(self.mock_neutron_port_create_rep['port'], port)
self.assert_calls()
def test_create_port_parameters(self):
"""Test that we detect invalid arguments passed to create_port"""
self.assertRaises(
TypeError, self.cloud.create_port,
network_id='test-net-id', nome='test-port-name',
stato_amministrativo_porta=True)
def test_create_port_exception(self):
self.register_uris([
dict(method="POST",
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
status_code=500,
validate=dict(
json={'port': {
'network_id': 'test-net-id',
'name': 'test-port-name',
'admin_state_up': True}}))
])
self.assertRaises(
OpenStackCloudException, self.cloud.create_port,
network_id='test-net-id', name='test-port-name',
admin_state_up=True)
self.assert_calls()
def test_update_port(self):
port_id = 'd80b1a3b-4fc1-49f3-952e-1e2ab7081d8b'
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep),
dict(method='PUT',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'ports', '%s.json' % port_id]),
json=self.mock_neutron_port_update_rep,
validate=dict(
json={'port': {'name': 'test-port-name-updated'}}))
])
port = self.cloud.update_port(
name_or_id=port_id, name='test-port-name-updated')
self.assertEqual(self.mock_neutron_port_update_rep['port'], port)
self.assert_calls()
def test_update_port_parameters(self):
"""Test that we detect invalid arguments passed to update_port"""
self.assertRaises(
TypeError, self.cloud.update_port,
name_or_id='test-port-id', nome='test-port-name-updated')
def test_update_port_exception(self):
port_id = 'd80b1a3b-4fc1-49f3-952e-1e2ab7081d8b'
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep),
dict(method='PUT',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'ports', '%s.json' % port_id]),
status_code=500,
validate=dict(
json={'port': {'name': 'test-port-name-updated'}}))
])
self.assertRaises(
OpenStackCloudException, self.cloud.update_port,
name_or_id='d80b1a3b-4fc1-49f3-952e-1e2ab7081d8b',
name='test-port-name-updated')
self.assert_calls()
def test_list_ports(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep)
])
ports = self.cloud.list_ports()
self.assertItemsEqual(self.mock_neutron_port_list_rep['ports'], ports)
self.assert_calls()
def test_list_ports_filtered(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json'],
qs_elements=['status=DOWN']),
json=self.mock_neutron_port_list_rep)
])
ports = self.cloud.list_ports(filters={'status': 'DOWN'})
self.assertItemsEqual(self.mock_neutron_port_list_rep['ports'], ports)
self.assert_calls()
def test_list_ports_exception(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
status_code=500)
])
self.assertRaises(OpenStackCloudException, self.cloud.list_ports)
def test_search_ports_by_id(self):
port_id = 'f71a6703-d6de-4be1-a91a-a570ede1d159'
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep)
])
ports = self.cloud.search_ports(name_or_id=port_id)
self.assertEqual(1, len(ports))
self.assertEqual('fa:16:3e:bb:3c:e4', ports[0]['mac_address'])
self.assert_calls()
def test_search_ports_by_name(self):
port_name = "first-port"
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep)
])
ports = self.cloud.search_ports(name_or_id=port_name)
self.assertEqual(1, len(ports))
self.assertEqual('fa:16:3e:58:42:ed', ports[0]['mac_address'])
self.assert_calls()
def test_search_ports_not_found(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep)
])
ports = self.cloud.search_ports(name_or_id='non-existent')
self.assertEqual(0, len(ports))
self.assert_calls()
def test_delete_port(self):
port_id = 'd80b1a3b-4fc1-49f3-952e-1e2ab7081d8b'
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'ports', '%s.json' % port_id]),
json={})
])
self.assertTrue(self.cloud.delete_port(name_or_id='first-port'))
def test_delete_port_not_found(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json=self.mock_neutron_port_list_rep)
])
self.assertFalse(self.cloud.delete_port(name_or_id='non-existent'))
self.assert_calls()
def test_delete_subnet_multiple_found(self):
port_name = "port-name"
port1 = dict(id='123', name=port_name)
port2 = dict(id='456', name=port_name)
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json={'ports': [port1, port2]})
])
self.assertRaises(OpenStackCloudException,
self.cloud.delete_port, port_name)
self.assert_calls()
def test_delete_subnet_multiple_using_id(self):
port_name = "port-name"
port1 = dict(id='123', name=port_name)
port2 = dict(id='456', name=port_name)
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports.json']),
json={'ports': [port1, port2]}),
dict(method='DELETE',
uri=self.get_mock_url(
'network', 'public',
append=['v2.0', 'ports', '%s.json' % port1['id']]),
json={})
])
self.assertTrue(self.cloud.delete_port(name_or_id=port1['id']))
self.assert_calls()
def test_get_port_by_id(self):
fake_port = dict(id='123', name='456')
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0',
'ports',
fake_port['id']]),
json={'port': fake_port})
])
r = self.cloud.get_port_by_id(fake_port['id'])
self.assertIsNotNone(r)
self.assertDictEqual(fake_port, r)
self.assert_calls()
| ctrlaltdel/neutrinator | vendor/openstack/tests/unit/cloud/test_port.py | Python | gpl-3.0 | 14,285 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from world import world
from bigml.api import HTTP_OK
def i_get_the_project(step, resource):
resource = world.api.get_project(resource)
world.status = resource['code']
assert world.status == HTTP_OK
world.project = resource['object']
| ShaguptaS/python | bigml/tests/read_project_steps.py | Python | apache-2.0 | 869 | 0.002301 |
"""
@author: Gaetan Hadjeres
"""
import torch
from torch.autograd import Variable
def cuda_variable(tensor, volatile=False):
if torch.cuda.is_available():
return Variable(tensor.cuda(), volatile=volatile)
else:
return Variable(tensor, volatile=volatile)
def to_numpy(variable: Variable):
if torch.cuda.is_available():
return variable.data.cpu().numpy()
else:
return variable.data.numpy()
def init_hidden(num_layers, batch_size, lstm_hidden_size,
volatile=False):
hidden = (
cuda_variable(
torch.randn(num_layers, batch_size, lstm_hidden_size),
volatile=volatile),
cuda_variable(
torch.randn(num_layers, batch_size, lstm_hidden_size),
volatile=volatile)
)
return hidden
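# Example (hypothetical sizes): build the (h_0, c_0) pair for a 2-layer LSTM.
#   hidden = init_hidden(num_layers=2, batch_size=8, lstm_hidden_size=256)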
| Ghadjeres/DeepBach | DeepBach/helpers.py | Python | mit | 816 | 0 |
class Resource(object):
"""Abstract class representing a versioned object"""
def __init__(self, identity, revision, repo, isTree):
super(Resource, self).__init__()
self._id = identity # hash of this object
self._revision = revision # revision this object belongs to
self._repo = repo # repository this object belongs to
def get_latest_revision(self):
"""Return the last revision this was modified"""
raise NotImplementedError
def get_properties(self):
"""Return the properties of a resource"""
raise NotImplementedError
latest_revision = property(get_latest_revision)
properties = property(get_properties)
| hbussell/pinax-tracker | apps/pyvcal/git_wrapper/resource.py | Python | mit | 719 | 0.011127 |
#!/usr/bin/env python
# Copyright (c) 2016 Lyft Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
| lyft/bandit-high-entropy-string | setup.py | Python | apache-2.0 | 680 | 0 |
import sys
import re
#
# You must include the following class definition at the top of
# your method specification file.
#
class MethodSpec(object):
def __init__(self, name='', source='', class_names='',
class_names_compiled=None):
"""MethodSpec -- A specification of a method.
Member variables:
name -- The method name
source -- The source code for the method. Must be
indented to fit in a class definition.
class_names -- A regular expression that must match the
class names in which the method is to be inserted.
class_names_compiled -- The compiled class names.
generateDS.py will do this compile for you.
"""
self.name = name
self.source = source
if class_names is None:
self.class_names = ('.*', )
else:
self.class_names = class_names
if class_names_compiled is None:
self.class_names_compiled = re.compile(self.class_names)
else:
self.class_names_compiled = class_names_compiled
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
def get_source(self):
return self.source
def set_source(self, source):
self.source = source
def get_class_names(self):
return self.class_names
def set_class_names(self, class_names):
self.class_names = class_names
self.class_names_compiled = re.compile(class_names)
def get_class_names_compiled(self):
return self.class_names_compiled
def set_class_names_compiled(self, class_names_compiled):
self.class_names_compiled = class_names_compiled
def match_name(self, class_name):
"""Match against the name of the class currently being generated.
If this method returns True, the method will be inserted in
the generated class.
"""
if self.class_names_compiled.search(class_name):
return True
else:
return False
def get_interpolated_source(self, values_dict):
"""Get the method source code, interpolating values from values_dict
into it. The source returned by this method is inserted into
the generated class.
"""
source = self.source % values_dict
return source
def show(self):
print 'specification:'
print ' name: %s' % (self.name, )
print self.source
print ' class_names: %s' % (self.class_names, )
print ' names pat : %s' % (self.class_names_compiled.pattern, )
#
# Provide one or more method specification such as the following.
# Notes:
# - Each generated class contains a class variable _member_data_items.
# This variable contains a list of instances of class _MemberSpec.
# See the definition of class _MemberSpec near the top of the
# generated superclass file and also section "User Methods" in
# the documentation, as well as the examples below.
num_segments = MethodSpec(name='num_segments',
source='''\
@property
def num_segments(self):
return len(self.segments)
''',
class_names=("Morphology")
)
length = MethodSpec(name='length',
source='''\
@property
def length(self):
prox_x = self.proximal.x
prox_y = self.proximal.y
prox_z = self.proximal.z
dist_x = self.distal.x
dist_y = self.distal.y
dist_z = self.distal.z
length = ((prox_x-dist_x)**2 + (prox_y-dist_y)**2 + (prox_z-dist_z)**2)**(0.5)
return length
''',
class_names=("Segment")
)
volume = MethodSpec(name='volume',
source='''\
@property
def volume(self):
from math import pi
prox_diam = self.proximal.diameter
dist_diam = self.distal.diameter
length = self.length
volume = (pi/3)*length*(prox_diam**2+dist_diam**2+prox_diam*dist_diam)
return volume
''',
class_names=("Segment")
)
area = MethodSpec(name='area',
source='''\
@property
def area(self):
from math import pi
from math import sqrt
prox_diam = self.proximal.diameter
dist_diam = self.distal.diameter
length = self.length
area = pi*(prox_diam+dist_diam)*sqrt((prox_diam-dist_diam)**2+length**2)
return area
''',
class_names=("Segment")
)
#
# Provide a list of your method specifications.
# This list of specifications must be named METHOD_SPECS.
#
METHOD_SPECS=(length,
volume,
area,
num_segments,
)
def test():
for spec in METHOD_SPECS:
spec.show()
def main():
test()
if __name__ == '__main__':
main()
| joebowen/movement_validation_cloud | djangodev/lib/python2.7/site-packages/neuroml/nml/helper_methods.py | Python | mit | 4,806 | 0.005826 |
# -*- coding: utf-8 -*-
## \package custom
# MIT licensing
# See: LICENSE.txt
| AntumDeluge/desktop_recorder | source/custom/__init__.py | Python | mit | 80 | 0.0125 |
#!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
from ctypes import c_char_p, c_int, c_long, byref,\
create_string_buffer, c_void_p
from shadowsocks import common
from shadowsocks.crypto import util
__all__ = ['ciphers']
libcrypto = None
loaded = False
buf_size = 2048
def load_openssl():
global loaded, libcrypto, buf
libcrypto = util.find_library(('crypto', 'eay32'),
'EVP_get_cipherbyname',
'libcrypto')
if libcrypto is None:
raise Exception('libcrypto(OpenSSL) not found')
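    # Declare return/argument types explicitly so ctypes does not truncate
    # the EVP context/cipher pointers to C int on 64-bit platforms.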
libcrypto.EVP_get_cipherbyname.restype = c_void_p
libcrypto.EVP_CIPHER_CTX_new.restype = c_void_p
libcrypto.EVP_CipherInit_ex.argtypes = (c_void_p, c_void_p, c_char_p,
c_char_p, c_char_p, c_int)
libcrypto.EVP_CipherUpdate.argtypes = (c_void_p, c_void_p, c_void_p,
c_char_p, c_int)
libcrypto.EVP_CIPHER_CTX_cleanup.argtypes = (c_void_p,)
libcrypto.EVP_CIPHER_CTX_free.argtypes = (c_void_p,)
if hasattr(libcrypto, 'OpenSSL_add_all_ciphers'):
libcrypto.OpenSSL_add_all_ciphers()
buf = create_string_buffer(buf_size)
loaded = True
def load_cipher(cipher_name):
func_name = 'EVP_' + cipher_name.replace('-', '_')
if bytes != str:
func_name = str(func_name, 'utf-8')
cipher = getattr(libcrypto, func_name, None)
if cipher:
cipher.restype = c_void_p
return cipher()
return None
class OpenSSLCrypto(object):
def __init__(self, cipher_name, key, iv, op):
self._ctx = None
if not loaded:
load_openssl()
cipher_name = common.to_bytes(cipher_name)
cipher = libcrypto.EVP_get_cipherbyname(cipher_name)
if not cipher:
cipher = load_cipher(cipher_name)
if not cipher:
raise Exception('cipher %s not found in libcrypto' % cipher_name)
key_ptr = c_char_p(key)
iv_ptr = c_char_p(iv)
self._ctx = libcrypto.EVP_CIPHER_CTX_new()
if not self._ctx:
raise Exception('can not create cipher context')
r = libcrypto.EVP_CipherInit_ex(self._ctx, cipher, None,
key_ptr, iv_ptr, c_int(op))
if not r:
self.clean()
raise Exception('can not initialize cipher context')
def update(self, data):
global buf_size, buf
cipher_out_len = c_long(0)
l = len(data)
if buf_size < l:
buf_size = l * 2
buf = create_string_buffer(buf_size)
libcrypto.EVP_CipherUpdate(self._ctx, byref(buf),
byref(cipher_out_len), c_char_p(data), l)
# buf is copied to a str object when we access buf.raw
return buf.raw[:cipher_out_len.value]
def __del__(self):
self.clean()
def clean(self):
if self._ctx:
libcrypto.EVP_CIPHER_CTX_cleanup(self._ctx)
libcrypto.EVP_CIPHER_CTX_free(self._ctx)
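# Each entry maps a cipher name to (key length, IV length, crypto class).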
ciphers = {
'aes-128-cfb': (16, 16, OpenSSLCrypto),
'aes-192-cfb': (24, 16, OpenSSLCrypto),
'aes-256-cfb': (32, 16, OpenSSLCrypto),
'aes-128-ofb': (16, 16, OpenSSLCrypto),
'aes-192-ofb': (24, 16, OpenSSLCrypto),
'aes-256-ofb': (32, 16, OpenSSLCrypto),
'aes-128-ctr': (16, 16, OpenSSLCrypto),
'aes-192-ctr': (24, 16, OpenSSLCrypto),
'aes-256-ctr': (32, 16, OpenSSLCrypto),
'aes-128-cfb8': (16, 16, OpenSSLCrypto),
'aes-192-cfb8': (24, 16, OpenSSLCrypto),
'aes-256-cfb8': (32, 16, OpenSSLCrypto),
'aes-128-cfb1': (16, 16, OpenSSLCrypto),
'aes-192-cfb1': (24, 16, OpenSSLCrypto),
'aes-256-cfb1': (32, 16, OpenSSLCrypto),
'bf-cfb': (16, 8, OpenSSLCrypto),
'camellia-128-cfb': (16, 16, OpenSSLCrypto),
'camellia-192-cfb': (24, 16, OpenSSLCrypto),
'camellia-256-cfb': (32, 16, OpenSSLCrypto),
'cast5-cfb': (16, 8, OpenSSLCrypto),
'des-cfb': (8, 8, OpenSSLCrypto),
'idea-cfb': (16, 8, OpenSSLCrypto),
'rc2-cfb': (16, 8, OpenSSLCrypto),
'rc4': (16, 0, OpenSSLCrypto),
'seed-cfb': (16, 16, OpenSSLCrypto),
}
def run_method(method):
cipher = OpenSSLCrypto(method, b'k' * 32, b'i' * 16, 1)
decipher = OpenSSLCrypto(method, b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
def test_aes_128_cfb():
run_method('aes-128-cfb')
def test_aes_256_cfb():
run_method('aes-256-cfb')
def test_aes_128_cfb8():
run_method('aes-128-cfb8')
def test_aes_256_ofb():
run_method('aes-256-ofb')
def test_aes_256_ctr():
run_method('aes-256-ctr')
def test_bf_cfb():
run_method('bf-cfb')
def test_rc4():
run_method('rc4')
if __name__ == '__main__':
test_aes_128_cfb()
| Ju2ender/CSharp-Exercise | shadowsocks/shadowsocks/crypto/openssl.py | Python | mit | 5,414 | 0.000185 |
'''
Created on Oct 18, 2016
@author: Vitoc
'''
import copy
class undoModuleClass:
def __init__(self):
return
    '''
    Function that undoes the operations you have made
    '''
def saveTheFunction(self,l,undo):
undo.append(copy.deepcopy(l))
def undoIntoPast(self,l,undo):
try:
maximus = len(undo) -1
l = copy.deepcopy(undo[maximus])
del undo[maximus]
except IndexError:
print("There are no more undos , sry")
return l
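# Example usage (hypothetical values):
#   undo = []
#   u = undoModuleClass()
#   l = [1, 2]
#   u.saveTheFunction(l, undo)   # snapshot the current state
#   l.append(3)
#   l = u.undoIntoPast(l, undo)  # l is [1, 2] again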
| valecs97/Contest | Contest/iteration_3/undoModule.py | Python | gpl-3.0 | 593 | 0.026981 |
# -*- coding: utf-8 -*-
# This file is part of Knitlib.
#
# Knitlib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Knitlib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Knitlib. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2015 Sebastian Oliva <http://github.com/fashiontec/knitlib>
import logging
import time
import knitting_plugin
class DummyKnittingPlugin(knitting_plugin.BaseKnittingPlugin):
"""Implements a sample knitting plugin that allows for simple operation emulation."""
__PLUGIN_NAME__ = u"dummy"
def __init__(self):
super(DummyKnittingPlugin, self).__init__()
base_log_string = u"{} has been called on dummy knitting plugin."
def onknit(self, e):
logging.debug(DummyKnittingPlugin.base_log_string.format("onknit"))
# In order to simulate blocking we make it sleep.
total = 5
for i in range(total):
time.sleep(1)
self.interactive_callbacks["progress"](i / float(total), i, total)
self.finish()
def onfinish(self, e):
logging.debug(DummyKnittingPlugin.base_log_string.format("onfinish"))
def onconfigure(self, e):
logging.debug(DummyKnittingPlugin.base_log_string.format("onconfigure"))
def set_port(self, *args, **kwargs):
pass
@staticmethod
def supported_config_features():
return {"$schema": "http://json-schema.org/schema#", "type": "object"}
| fashiontec/knitlib | src/knitlib/plugins/dummy_plugin.py | Python | lgpl-3.0 | 1,842 | 0.007058 |
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class State:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
}
| liosha2007/temporary-groupdocs-python3-sdk | groupdocs/models/State.py | Python | apache-2.0 | 851 | 0.005875 |
# encoding: utf-8
"""
Rules system.
"""
import copy
from operator import itemgetter
class RuleBase(object):
"""
    All rules inherit from RuleBase. Every rule needs a condition and a response.
    RuleBase is the base model for all rules. Through this class, a rule can
    access the main class (Gozokia), the sentence (the input), and/or the
    analyzer (if it is active).
"""
completed = False
reload = True
response_output = ""
print_output = ""
def __init__(self):
self.set_reload(False)
def condition_raise(self, *args, **kwargs):
self.gozokia = kwargs.get('gozokia')
self.analyzer = self.gozokia.analyzer
self.sentence = self.gozokia.sentence
def condition_completed(self, *args, **kwargs):
self.gozokia = kwargs.get('gozokia')
self.analyzer = self.gozokia.analyzer
self.sentence = self.gozokia.sentence
def response(self, *args, **kwargs):
raise NotImplementedError(__class__.__name__ + ": response not defined")
def get_response(self, *args, **kwargs):
self.response(*args, **kwargs)
return self.response_output, self.print_output
def is_completed(self, *args, **kwargs):
return self.completed
def set_completed(self, status=True):
self.completed = status
def set_reload(self, reload):
self.reload = reload
def reload_rule(self):
if self.reload:
self.set_completed(False)
return True
else:
return False
def __str__(self):
return self.__class__.__name__
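# A minimal illustrative subclass (hypothetical, not part of the package;
# assumes `sentence` is a plain string):
#
# class GreetingRule(RuleBase):
#     def condition_raise(self, *args, **kwargs):
#         super(GreetingRule, self).condition_raise(*args, **kwargs)
#         return "hello" in self.sentence
#
#     def response(self, *args, **kwargs):
#         self.response_output = "Hi there!"
#         self.set_completed()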
class Rules(object):
__rules_pool = []
__rules_map = {}
__rules_qeue = []
__rules_qeue_completed = []
__active_rule = None
_STATUS_RULES_KEY = "status"
_STATUS_RULES = (0, 1, 2)
_STATUS_RULE_COMPLETED = 0
_STATUS_RULE_PENDING = 1
_STATUS_RULE_ACTIVE = 2
_RAISE_COND = 1
_OBJETIVE_COND = 2
__RULE_KEY_CLASS = "class"
__RULE_KEY_NAME = "rule"
def __init__(self, * args, **kwargs):
self.session_id = kwargs['sessionid']
        # Set the session id on every pooled rule
for rule_pool in self.__rules_pool:
rule_pool['session'] = self.session_id
self.__rules_map[self.session_id] = []
self._rules_completed = self.__rules_map[self.session_id]
self.__rules_map[self.session_id] = self.__rules_pool
self.__rules_qeue = self.__rules_map[self.session_id]
"""
self.__rules_qeue = copy.copy(self.__rules_pool)
"""
def add(self, rule_class, **options):
rank = 10
type_rule = None
rule_object = rule_class()
if 'rank' in options and type(options['rank']) is int:
rank = options['rank']
if 'type' in options and type(options['type']) is int:
type_rule = options['type']
if 'name' in options and type(options['name']) is str:
rule_name = options['name']
else:
rule_name = str(rule_object)
        # Session is None because the "add" method is used as a decorator:
        # when it runs, __init__ has not been executed yet.
if rule_name not in set(r['rule'] for r in self.__rules_pool):
self.__rules_pool.append({'session': None, 'rule': rule_name, self.__RULE_KEY_CLASS: copy.copy(rule_object),
'rank': rank, 'type': type_rule,
self._STATUS_RULES_KEY: self._STATUS_RULE_PENDING})
def get_rules(self, type_rule=None):
"""
        Return a dictionary of rules, ordered by rank and filtered by rule type
"""
f = lambda x: True
if type_rule in [self._RAISE_COND, self._OBJETIVE_COND]:
f = lambda x: x['type'] == type_rule and x[self.__RULE_KEY_CLASS].completed == False
return sorted(filter(f, self.__rules_qeue), key=itemgetter('rank'))
def get_rules_completed(self):
return sorted(self.__rules_qeue_completed, key=itemgetter('rank'))
def get_raises(self):
for rule in self.get_rules(type_rule=self._RAISE_COND):
yield rule
def get_objetives(self):
for rule in self.get_rules(type_rule=self._OBJETIVE_COND):
yield rule
def get_rule(self, gozokia):
"""
Get the active rule or find one.
"""
if self.exist_active_rule():
active_rule_object = self.get_active_rule().get(self.__RULE_KEY_CLASS)
active_rule_object.condition_completed(gozokia=gozokia)
if active_rule_object.is_completed():
self.complete_active_rule()
self.get_rule(gozokia=gozokia)
else:
for r in self:
if r.get(self.__RULE_KEY_CLASS).condition_raise(gozokia=gozokia):
self.set_active_rule(r)
break
return self.__active_rule
def eval(self, gozokia):
response_output = None
print_output = None
rule = self.get_rule(gozokia)
if rule:
active_rule_object = rule.get(self.__RULE_KEY_CLASS)
response_output, print_output = active_rule_object.get_response()
active_rule_object.condition_completed(gozokia=gozokia)
if active_rule_object.is_completed():
self.complete_active_rule()
return rule, response_output, print_output
def set_rule_status_active(self, rule):
print("RULE {} start".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_ACTIVE
self.set_active_rule(None)
def set_rule_status_pending(self, rule):
print("RULE {} pending".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_PENDING
def set_rule_status_completed(self, rule):
print("RULE {} completed".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_COMPLETED
def complete_active_rule(self):
rule = self.get_active_rule()
self.set_rule_completed(rule)
self.set_active_rule(None)
def set_rule_completed(self, rule):
self.set_rule_status_completed(rule)
if rule.get(self.__RULE_KEY_CLASS).reload_rule() is False:
self.pop(rule)
def set_rule_pending(self, rule):
self.set_rule_status_pending(rule)
def get_active_rule(self, key=None):
if key is None:
rule = self.__active_rule
else:
rule = self.__active_rule[key]
return rule
def set_active_rule(self, rule=None):
if rule:
self.set_rule_status_active(rule)
self.__active_rule = rule
def stop_active_rule(self):
self.set_rule_status_pending(self.__active_rule)
self.set_active_rule(None)
def exist_active_rule(self):
return self.__active_rule is not None
def pop(self, rule):
# Pop rule from main queue
self.__rules_qeue = [r for r in self if r.get(self.__RULE_KEY_CLASS) != rule.get(self.__RULE_KEY_CLASS)]
# Add rule to completed queue
        if rule.get(self.__RULE_KEY_CLASS) not in set(r.get(self.__RULE_KEY_CLASS) for r in self.__rules_qeue_completed):
self.__rules_qeue_completed.append(rule)
def __getitem__(self, key):
if key in self.__rules_qeue:
return self.__rules_qeue[key]
raise KeyError
def __iter__(self):
for rule in self.get_rules():
yield rule
| avara1986/gozokia | gozokia/core/rules.py | Python | mit | 7,566 | 0.001718 |
# Copyright (c) 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_utils.fixture import uuidsentinel
from oslo_utils import uuidutils
import six
import webob
from nova.api.openstack import api_version_request as avr
from nova.api.openstack.compute import server_groups as sg_v21
from nova import context
from nova import exception
from nova import objects
from nova.policies import server_groups as sg_policies
from nova import test
from nova.tests import fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import policy_fixture
class AttrDict(dict):
def __getattr__(self, k):
return self[k]
def server_group_template(**kwargs):
sgroup = kwargs.copy()
sgroup.setdefault('name', 'test')
return sgroup
def server_group_resp_template(**kwargs):
sgroup = kwargs.copy()
sgroup.setdefault('name', 'test')
if 'policy' not in kwargs:
sgroup.setdefault('policies', [])
sgroup.setdefault('members', [])
return sgroup
def server_group_db(sg):
attrs = copy.deepcopy(sg)
if 'id' in attrs:
attrs['uuid'] = attrs.pop('id')
if 'policies' in attrs:
policies = attrs.pop('policies')
attrs['policies'] = policies
else:
attrs['policies'] = []
if 'policy' in attrs:
del attrs['policies']
if 'members' in attrs:
members = attrs.pop('members')
attrs['members'] = members
else:
attrs['members'] = []
attrs['deleted'] = 0
attrs['deleted_at'] = None
attrs['created_at'] = None
attrs['updated_at'] = None
if 'user_id' not in attrs:
attrs['user_id'] = fakes.FAKE_USER_ID
if 'project_id' not in attrs:
attrs['project_id'] = fakes.FAKE_PROJECT_ID
attrs['id'] = 7
return AttrDict(attrs)
class ServerGroupTestV21(test.NoDBTestCase):
USES_DB_SELF = True
validation_error = exception.ValidationError
def setUp(self):
super(ServerGroupTestV21, self).setUp()
self._setup_controller()
self.req = fakes.HTTPRequest.blank('')
self.admin_req = fakes.HTTPRequest.blank('', use_admin_context=True)
self.foo_req = fakes.HTTPRequest.blank('', project_id='foo')
self.policy = self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(fixtures.Database(database='api'))
cells = fixtures.CellDatabases()
cells.add_cell_database(uuidsentinel.cell1)
cells.add_cell_database(uuidsentinel.cell2)
self.useFixture(cells)
ctxt = context.get_admin_context()
self.cells = {}
for uuid in (uuidsentinel.cell1, uuidsentinel.cell2):
cm = objects.CellMapping(context=ctxt,
uuid=uuid,
database_connection=uuid,
transport_url=uuid)
cm.create()
self.cells[cm.uuid] = cm
def _setup_controller(self):
self.controller = sg_v21.ServerGroupController()
def test_create_server_group_with_no_policies(self):
sgroup = server_group_template()
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def _create_server_group_normal(self, policies=None, policy=None,
rules=None):
sgroup = server_group_template()
sgroup['policies'] = policies
res_dict = self.controller.create(self.req,
body={'server_group': sgroup})
self.assertEqual(res_dict['server_group']['name'], 'test')
self.assertTrue(uuidutils.is_uuid_like(res_dict['server_group']['id']))
self.assertEqual(res_dict['server_group']['policies'], policies)
def test_create_server_group_with_new_policy_before_264(self):
req = fakes.HTTPRequest.blank('', version='2.63')
policy = 'anti-affinity'
rules = {'max_server_per_host': 3}
# 'policy' isn't an acceptable request key before 2.64
sgroup = server_group_template(policy=policy)
result = self.assertRaises(
self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
self.assertIn(
"Invalid input for field/attribute server_group",
six.text_type(result)
)
# 'rules' isn't an acceptable request key before 2.64
sgroup = server_group_template(rules=rules)
result = self.assertRaises(
self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
self.assertIn(
"Invalid input for field/attribute server_group",
six.text_type(result)
)
def test_create_server_group(self):
policies = ['affinity', 'anti-affinity']
for policy in policies:
self._create_server_group_normal(policies=[policy])
def test_create_server_group_rbac_default(self):
sgroup = server_group_template()
sgroup['policies'] = ['affinity']
# test as admin
self.controller.create(self.admin_req, body={'server_group': sgroup})
# test as non-admin
self.controller.create(self.req, body={'server_group': sgroup})
def test_create_server_group_rbac_admin_only(self):
sgroup = server_group_template()
sgroup['policies'] = ['affinity']
# override policy to restrict to admin
rule_name = sg_policies.POLICY_ROOT % 'create'
rules = {rule_name: 'is_admin:True'}
self.policy.set_rules(rules, overwrite=False)
# check for success as admin
self.controller.create(self.admin_req, body={'server_group': sgroup})
# check for failure as non-admin
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create, self.req,
body={'server_group': sgroup})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def _create_instance(self, ctx, cell):
with context.target_cell(ctx, cell) as cctx:
instance = objects.Instance(context=cctx,
image_ref=uuidsentinel.fake_image_ref,
node='node1', reservation_id='a',
host='host1', project_id='fake',
vm_state='fake',
system_metadata={'key': 'value'})
instance.create()
im = objects.InstanceMapping(context=ctx,
project_id=ctx.project_id,
user_id=ctx.user_id,
cell_mapping=cell,
instance_uuid=instance.uuid)
im.create()
return instance
def _create_instance_group(self, context, members):
ig = objects.InstanceGroup(context=context, name='fake_name',
user_id='fake_user', project_id='fake',
members=members)
ig.create()
return ig.uuid
def _create_groups_and_instances(self, ctx):
cell1 = self.cells[uuidsentinel.cell1]
cell2 = self.cells[uuidsentinel.cell2]
instances = [self._create_instance(ctx, cell=cell1),
self._create_instance(ctx, cell=cell2),
self._create_instance(ctx, cell=None)]
members = [instance.uuid for instance in instances]
ig_uuid = self._create_instance_group(ctx, members)
return (ig_uuid, instances, members)
def _test_list_server_group_all(self, api_version='2.1'):
self._test_list_server_group(api_version=api_version,
limited='',
path='/os-server-groups?all_projects=True')
def _test_list_server_group_offset_and_limit(self, api_version='2.1'):
self._test_list_server_group(api_version=api_version,
limited='&offset=1&limit=1',
path='/os-server-groups?all_projects=True')
@mock.patch('nova.objects.InstanceGroupList.get_by_project_id')
@mock.patch('nova.objects.InstanceGroupList.get_all')
def _test_list_server_group(self, mock_get_all, mock_get_by_project,
path, api_version='2.1', limited=None):
policies = ['anti-affinity']
policy = "anti-affinity"
members = []
metadata = {} # always empty
names = ['default-x', 'test']
p_id = fakes.FAKE_PROJECT_ID
u_id = fakes.FAKE_USER_ID
ver = avr.APIVersionRequest(api_version)
if ver >= avr.APIVersionRequest("2.64"):
sg1 = server_group_resp_template(id=uuidsentinel.sg1_id,
name=names[0],
policy=policy,
rules={},
members=members,
project_id=p_id,
user_id=u_id)
sg2 = server_group_resp_template(id=uuidsentinel.sg2_id,
name=names[1],
policy=policy,
rules={},
members=members,
project_id=p_id,
user_id=u_id)
elif ver >= avr.APIVersionRequest("2.13"):
sg1 = server_group_resp_template(id=uuidsentinel.sg1_id,
name=names[0],
policies=policies,
members=members,
metadata=metadata,
project_id=p_id,
user_id=u_id)
sg2 = server_group_resp_template(id=uuidsentinel.sg2_id,
name=names[1],
policies=policies,
members=members,
metadata=metadata,
project_id=p_id,
user_id=u_id)
else:
sg1 = server_group_resp_template(id=uuidsentinel.sg1_id,
name=names[0],
policies=policies,
members=members,
metadata=metadata)
sg2 = server_group_resp_template(id=uuidsentinel.sg2_id,
name=names[1],
policies=policies,
members=members,
metadata=metadata)
tenant_groups = [sg2]
all_groups = [sg1, sg2]
if limited:
all = {'server_groups': [sg2]}
tenant_specific = {'server_groups': []}
else:
all = {'server_groups': all_groups}
tenant_specific = {'server_groups': tenant_groups}
def return_all_server_groups():
return objects.InstanceGroupList(
objects=[objects.InstanceGroup(
**server_group_db(sg)) for sg in all_groups])
mock_get_all.return_value = return_all_server_groups()
def return_tenant_server_groups():
return objects.InstanceGroupList(
objects=[objects.InstanceGroup(
**server_group_db(sg)) for sg in tenant_groups])
mock_get_by_project.return_value = return_tenant_server_groups()
        # use the path supplied by the caller; append paging params if limited
        if limited:
            path += limited
req = fakes.HTTPRequest.blank(path, version=api_version)
admin_req = fakes.HTTPRequest.blank(path, use_admin_context=True,
version=api_version)
# test as admin
res_dict = self.controller.index(admin_req)
self.assertEqual(all, res_dict)
# test as non-admin
res_dict = self.controller.index(req)
self.assertEqual(tenant_specific, res_dict)
@mock.patch('nova.objects.InstanceGroupList.get_by_project_id')
def _test_list_server_group_by_tenant(self, mock_get_by_project,
api_version='2.1'):
policies = ['anti-affinity']
members = []
metadata = {} # always empty
names = ['default-x', 'test']
p_id = fakes.FAKE_PROJECT_ID
u_id = fakes.FAKE_USER_ID
        if avr.APIVersionRequest(api_version) >= avr.APIVersionRequest("2.13"):
sg1 = server_group_resp_template(id=uuidsentinel.sg1_id,
name=names[0],
policies=policies,
members=members,
metadata=metadata,
project_id=p_id,
user_id=u_id)
sg2 = server_group_resp_template(id=uuidsentinel.sg2_id,
name=names[1],
policies=policies,
members=members,
metadata=metadata,
project_id=p_id,
user_id=u_id)
else:
sg1 = server_group_resp_template(id=uuidsentinel.sg1_id,
name=names[0],
policies=policies,
members=members,
metadata=metadata)
sg2 = server_group_resp_template(id=uuidsentinel.sg2_id,
name=names[1],
policies=policies,
members=members,
metadata=metadata)
groups = [sg1, sg2]
expected = {'server_groups': groups}
def return_server_groups():
return objects.InstanceGroupList(
objects=[objects.InstanceGroup(
**server_group_db(sg)) for sg in groups])
return_get_by_project = return_server_groups()
mock_get_by_project.return_value = return_get_by_project
path = '/os-server-groups'
req = fakes.HTTPRequest.blank(path, version=api_version)
res_dict = self.controller.index(req)
self.assertEqual(expected, res_dict)
def test_display_members(self):
ctx = context.RequestContext('fake_user', 'fake')
(ig_uuid, instances, members) = self._create_groups_and_instances(ctx)
res_dict = self.controller.show(self.req, ig_uuid)
result_members = res_dict['server_group']['members']
self.assertEqual(3, len(result_members))
for member in members:
self.assertIn(member, result_members)
def test_display_members_with_nonexistent_group(self):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.req, uuidsentinel.group)
def test_display_active_members_only(self):
ctx = context.RequestContext('fake_user', 'fake')
(ig_uuid, instances, members) = self._create_groups_and_instances(ctx)
# delete an instance
im = objects.InstanceMapping.get_by_instance_uuid(ctx,
instances[1].uuid)
with context.target_cell(ctx, im.cell_mapping) as cctxt:
instances[1]._context = cctxt
instances[1].destroy()
# check that the instance does not exist
self.assertRaises(exception.InstanceNotFound,
objects.Instance.get_by_uuid,
ctx, instances[1].uuid)
res_dict = self.controller.show(self.req, ig_uuid)
result_members = res_dict['server_group']['members']
# check that only the active instance is displayed
self.assertEqual(2, len(result_members))
self.assertIn(instances[0].uuid, result_members)
def test_display_members_rbac_default(self):
ctx = context.RequestContext('fake_user', 'fake')
ig_uuid = self._create_groups_and_instances(ctx)[0]
# test as admin
self.controller.show(self.admin_req, ig_uuid)
# test as non-admin, same project
self.controller.show(self.req, ig_uuid)
# test as non-admin, different project
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.foo_req, ig_uuid)
def test_display_members_rbac_admin_only(self):
ctx = context.RequestContext('fake_user', 'fake')
ig_uuid = self._create_groups_and_instances(ctx)[0]
# override policy to restrict to admin
rule_name = sg_policies.POLICY_ROOT % 'show'
rules = {rule_name: 'is_admin:True'}
self.policy.set_rules(rules, overwrite=False)
# check for success as admin
self.controller.show(self.admin_req, ig_uuid)
# check for failure as non-admin
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, self.req, ig_uuid)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_create_server_group_with_non_alphanumeric_in_name(self):
# The fix for bug #1434335 expanded the allowable character set
# for server group names to include non-alphanumeric characters
# if they are printable.
sgroup = server_group_template(name='good* $%name',
policies=['affinity'])
res_dict = self.controller.create(self.req,
body={'server_group': sgroup})
self.assertEqual(res_dict['server_group']['name'], 'good* $%name')
def test_create_server_group_with_illegal_name(self):
# blank name
sgroup = server_group_template(name='', policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with length 256
sgroup = server_group_template(name='1234567890' * 26,
policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# non-string name
sgroup = server_group_template(name=12, policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with leading spaces
sgroup = server_group_template(name=' leading spaces',
policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with trailing spaces
sgroup = server_group_template(name='trailing space ',
policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with all spaces
sgroup = server_group_template(name=' ',
policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with unprintable character
sgroup = server_group_template(name='bad\x00name',
policies=['test_policy'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# name with out of range char U0001F4A9
sgroup = server_group_template(name=u"\U0001F4A9",
policies=['affinity'])
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def test_create_server_group_with_illegal_policies(self):
# blank policy
sgroup = server_group_template(name='fake-name', policies='')
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# policy as integer
sgroup = server_group_template(name='fake-name', policies=7)
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# policy as string
sgroup = server_group_template(name='fake-name', policies='invalid')
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
# policy as None
sgroup = server_group_template(name='fake-name', policies=None)
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def test_create_server_group_conflicting_policies(self):
sgroup = server_group_template()
policies = ['anti-affinity', 'affinity']
sgroup['policies'] = policies
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def test_create_server_group_with_duplicate_policies(self):
sgroup = server_group_template()
policies = ['affinity', 'affinity']
sgroup['policies'] = policies
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def test_create_server_group_not_supported(self):
sgroup = server_group_template()
policies = ['storage-affinity', 'anti-affinity', 'rack-affinity']
sgroup['policies'] = policies
self.assertRaises(self.validation_error, self.controller.create,
self.req, body={'server_group': sgroup})
def test_create_server_group_with_no_body(self):
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=None)
def test_create_server_group_with_no_server_group(self):
body = {'no-instanceGroup': None}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_list_server_group_by_tenant(self):
self._test_list_server_group_by_tenant(api_version='2.1')
def test_list_server_group_all_v20(self):
self._test_list_server_group_all(api_version='2.0')
def test_list_server_group_all(self):
self._test_list_server_group_all(api_version='2.1')
def test_list_server_group_offset_and_limit(self):
self._test_list_server_group_offset_and_limit(api_version='2.1')
def test_list_server_groups_rbac_default(self):
# test as admin
self.controller.index(self.admin_req)
# test as non-admin
self.controller.index(self.req)
def test_list_server_group_multiple_param(self):
self._test_list_server_group(api_version='2.1',
limited='&offset=2&limit=2&limit=1&offset=1',
path='/os-server-groups?all_projects=False&all_projects=True')
def test_list_server_group_additional_param(self):
self._test_list_server_group(api_version='2.1',
limited='&offset=1&limit=1',
path='/os-server-groups?dummy=False&all_projects=True')
def test_list_server_group_param_as_int(self):
self._test_list_server_group(api_version='2.1',
limited='&offset=1&limit=1',
path='/os-server-groups?all_projects=1')
def test_list_server_group_negative_int_as_offset(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&offset=-1',
path='/os-server-groups?all_projects=1')
def test_list_server_group_string_int_as_offset(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&offset=dummy',
path='/os-server-groups?all_projects=1')
def test_list_server_group_multiparam_string_as_offset(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&offset=dummy&offset=1',
path='/os-server-groups?all_projects=1')
def test_list_server_group_negative_int_as_limit(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&limit=-1',
path='/os-server-groups?all_projects=1')
def test_list_server_group_string_int_as_limit(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&limit=dummy',
path='/os-server-groups?all_projects=1')
def test_list_server_group_multiparam_string_as_limit(self):
self.assertRaises(exception.ValidationError,
self._test_list_server_group,
api_version='2.1',
limited='&limit=dummy&limit=1',
path='/os-server-groups?all_projects=1')
def test_list_server_groups_rbac_admin_only(self):
# override policy to restrict to admin
rule_name = sg_policies.POLICY_ROOT % 'index'
rules = {rule_name: 'is_admin:True'}
self.policy.set_rules(rules, overwrite=False)
# check for success as admin
self.controller.index(self.admin_req)
# check for failure as non-admin
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
@mock.patch('nova.objects.InstanceGroup.destroy')
def test_delete_server_group_by_id(self, mock_destroy):
sg = server_group_template(id=uuidsentinel.sg1_id)
def return_server_group(_cls, context, group_id):
self.assertEqual(sg['id'], group_id)
return objects.InstanceGroup(**server_group_db(sg))
self.stub_out('nova.objects.InstanceGroup.get_by_uuid',
return_server_group)
resp = self.controller.delete(self.req, uuidsentinel.sg1_id)
mock_destroy.assert_called_once_with()
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.controller, sg_v21.ServerGroupController):
status_int = self.controller.delete.wsgi_code
else:
status_int = resp.status_int
self.assertEqual(204, status_int)
def test_delete_non_existing_server_group(self):
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
self.req, 'invalid')
def test_delete_server_group_rbac_default(self):
ctx = context.RequestContext('fake_user', 'fake')
# test as admin
ig_uuid = self._create_groups_and_instances(ctx)[0]
self.controller.delete(self.admin_req, ig_uuid)
# test as non-admin
ig_uuid = self._create_groups_and_instances(ctx)[0]
self.controller.delete(self.req, ig_uuid)
def test_delete_server_group_rbac_admin_only(self):
ctx = context.RequestContext('fake_user', 'fake')
# override policy to restrict to admin
rule_name = sg_policies.POLICY_ROOT % 'delete'
rules = {rule_name: 'is_admin:True'}
self.policy.set_rules(rules, overwrite=False)
# check for success as admin
ig_uuid = self._create_groups_and_instances(ctx)[0]
self.controller.delete(self.admin_req, ig_uuid)
# check for failure as non-admin
ig_uuid = self._create_groups_and_instances(ctx)[0]
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller.delete, self.req, ig_uuid)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class ServerGroupTestV213(ServerGroupTestV21):
wsgi_api_version = '2.13'
def _setup_controller(self):
self.controller = sg_v21.ServerGroupController()
def test_list_server_group_all(self):
self._test_list_server_group_all(api_version='2.13')
def test_list_server_group_offset_and_limit(self):
self._test_list_server_group_offset_and_limit(api_version='2.13')
def test_list_server_group_by_tenant(self):
self._test_list_server_group_by_tenant(api_version='2.13')
class ServerGroupTestV264(ServerGroupTestV213):
wsgi_api_version = '2.64'
def _setup_controller(self):
self.controller = sg_v21.ServerGroupController()
def _create_server_group_normal(self, policies=None, policy=None,
rules=None):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
sgroup = server_group_template()
sgroup['rules'] = rules or {}
sgroup['policy'] = policy
res_dict = self.controller.create(req,
body={'server_group': sgroup})
self.assertEqual(res_dict['server_group']['name'], 'test')
self.assertTrue(uuidutils.is_uuid_like(res_dict['server_group']['id']))
self.assertEqual(res_dict['server_group']['policy'], policy)
self.assertEqual(res_dict['server_group']['rules'], rules or {})
return res_dict['server_group']['id']
def test_list_server_group_all(self):
self._test_list_server_group_all(api_version=self.wsgi_api_version)
def test_create_and_show_server_group(self):
policies = ['affinity', 'anti-affinity']
for policy in policies:
g_uuid = self._create_server_group_normal(
policy=policy)
res_dict = self._display_server_group(g_uuid)
self.assertEqual(res_dict['server_group']['policy'], policy)
self.assertEqual(res_dict['server_group']['rules'], {})
def _display_server_group(self, uuid):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
group = self.controller.show(req, uuid)
return group
@mock.patch('nova.objects.service.get_minimum_version_all_cells',
return_value=33)
def test_create_and_show_server_group_with_rules(self, mock_get_v):
policy = 'anti-affinity'
rules = {'max_server_per_host': 3}
g_uuid = self._create_server_group_normal(
policy=policy, rules=rules)
res_dict = self._display_server_group(g_uuid)
self.assertEqual(res_dict['server_group']['policy'], policy)
self.assertEqual(res_dict['server_group']['rules'], rules)
def test_create_affinity_server_group_with_invalid_policy(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
sgroup = server_group_template(policy='affinity',
rules={'max_server_per_host': 3})
result = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body={'server_group': sgroup})
self.assertIn("Only anti-affinity policy supports rules",
six.text_type(result))
def test_create_anti_affinity_server_group_with_invalid_rules(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
        # Negative tests: an unknown rule key, a non-positive value and a
        # non-integer value must all be rejected
invalid_rules = [{'unknown_key': '3'},
{'max_server_per_host': 0},
{'max_server_per_host': 'foo'}]
for r in invalid_rules:
sgroup = server_group_template(policy='anti-affinity', rules=r)
result = self.assertRaises(
self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
self.assertIn(
"Invalid input for field/attribute", six.text_type(result)
)
@mock.patch('nova.objects.service.get_minimum_version_all_cells',
return_value=32)
def test_create_server_group_with_low_version_compute_service(self,
mock_get_v):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
sgroup = server_group_template(policy='anti-affinity',
rules={'max_server_per_host': 3})
result = self.assertRaises(
webob.exc.HTTPConflict,
self.controller.create, req, body={'server_group': sgroup})
self.assertIn("Creating an anti-affinity group with rule "
"max_server_per_host > 1 is not yet supported.",
six.text_type(result))
def test_create_server_group(self):
policies = ['affinity', 'anti-affinity']
for policy in policies:
self._create_server_group_normal(policy=policy)
def test_policies_since_264(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
# 'policies' isn't allowed in request >= 2.64
sgroup = server_group_template(policies=['anti-affinity'])
self.assertRaises(
self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
def test_create_server_group_without_policy(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
# 'policy' is required request key in request >= 2.64
sgroup = server_group_template()
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
def test_create_server_group_with_illegal_policies(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
# blank policy
sgroup = server_group_template(policy='')
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
# policy as integer
sgroup = server_group_template(policy=7)
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
# policy as string
sgroup = server_group_template(policy='invalid')
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
# policy as None
sgroup = server_group_template(policy=None)
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
def test_additional_params(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
sgroup = server_group_template(unknown='unknown')
self.assertRaises(self.validation_error, self.controller.create,
req, body={'server_group': sgroup})
|
mikalstill/nova
|
nova/tests/unit/api/openstack/compute/test_server_groups.py
|
Python
|
apache-2.0
| 36,927 | 0.002194 |
"""User-friendly public interface to polynomial functions. """
from __future__ import print_function, division
from sympy.core import (
S, Basic, Expr, I, Integer, Add, Mul, Dummy, Tuple
)
from sympy.core.mul import _keep_coeff
from sympy.core.symbol import Symbol
from sympy.core.basic import preorder_traversal
from sympy.core.relational import Relational
from sympy.core.sympify import sympify
from sympy.core.decorators import _sympifyit
from sympy.logic.boolalg import BooleanAtom
from sympy.polys.polyclasses import DMP
from sympy.polys.polyutils import (
basic_from_dict,
_sort_gens,
_unify_gens,
_dict_reorder,
_dict_from_expr,
_parallel_dict_from_expr,
)
from sympy.polys.rationaltools import together
from sympy.polys.rootisolation import dup_isolate_real_roots_list
from sympy.polys.groebnertools import groebner as _groebner
from sympy.polys.fglmtools import matrix_fglm
from sympy.polys.monomials import Monomial
from sympy.polys.orderings import monomial_key
from sympy.polys.polyerrors import (
OperationNotSupported, DomainError,
CoercionFailed, UnificationFailed,
GeneratorsNeeded, PolynomialError,
MultivariatePolynomialError,
ExactQuotientFailed,
PolificationFailed,
ComputationFailed,
GeneratorsError,
)
from sympy.utilities import group, sift, public
import sympy.polys
import sympy.mpmath
from sympy.mpmath.libmp.libhyper import NoConvergence
from sympy.polys.domains import FF, QQ, ZZ
from sympy.polys.constructor import construct_domain
from sympy.polys import polyoptions as options
from sympy.core.compatibility import iterable
@public
class Poly(Expr):
"""Generic class for representing polynomial expressions. """
__slots__ = ['rep', 'gens']
is_commutative = True
is_Poly = True
def __new__(cls, rep, *gens, **args):
"""Create a new polynomial instance out of something useful. """
opt = options.build_options(gens, args)
if 'order' in opt:
raise NotImplementedError("'order' keyword is not implemented yet")
if iterable(rep, exclude=str):
if isinstance(rep, dict):
return cls._from_dict(rep, opt)
else:
return cls._from_list(list(rep), opt)
else:
rep = sympify(rep)
if rep.is_Poly:
return cls._from_poly(rep, opt)
else:
return cls._from_expr(rep, opt)
@classmethod
def new(cls, rep, *gens):
"""Construct :class:`Poly` instance from raw representation. """
if not isinstance(rep, DMP):
raise PolynomialError(
"invalid polynomial representation: %s" % rep)
elif rep.lev != len(gens) - 1:
raise PolynomialError("invalid arguments: %s, %s" % (rep, gens))
obj = Basic.__new__(cls)
obj.rep = rep
obj.gens = gens
return obj
@classmethod
def from_dict(cls, rep, *gens, **args):
"""Construct a polynomial from a ``dict``. """
opt = options.build_options(gens, args)
return cls._from_dict(rep, opt)
@classmethod
def from_list(cls, rep, *gens, **args):
"""Construct a polynomial from a ``list``. """
opt = options.build_options(gens, args)
return cls._from_list(rep, opt)
@classmethod
def from_poly(cls, rep, *gens, **args):
"""Construct a polynomial from a polynomial. """
opt = options.build_options(gens, args)
return cls._from_poly(rep, opt)
@classmethod
def from_expr(cls, rep, *gens, **args):
"""Construct a polynomial from an expression. """
opt = options.build_options(gens, args)
return cls._from_expr(rep, opt)
@classmethod
def _from_dict(cls, rep, opt):
"""Construct a polynomial from a ``dict``. """
gens = opt.gens
if not gens:
raise GeneratorsNeeded(
"can't initialize from 'dict' without generators")
level = len(gens) - 1
domain = opt.domain
if domain is None:
domain, rep = construct_domain(rep, opt=opt)
else:
for monom, coeff in rep.items():
rep[monom] = domain.convert(coeff)
return cls.new(DMP.from_dict(rep, level, domain), *gens)
@classmethod
def _from_list(cls, rep, opt):
"""Construct a polynomial from a ``list``. """
gens = opt.gens
if not gens:
raise GeneratorsNeeded(
"can't initialize from 'list' without generators")
elif len(gens) != 1:
raise MultivariatePolynomialError(
"'list' representation not supported")
level = len(gens) - 1
domain = opt.domain
if domain is None:
domain, rep = construct_domain(rep, opt=opt)
else:
rep = list(map(domain.convert, rep))
return cls.new(DMP.from_list(rep, level, domain), *gens)
@classmethod
def _from_poly(cls, rep, opt):
"""Construct a polynomial from a polynomial. """
if cls != rep.__class__:
rep = cls.new(rep.rep, *rep.gens)
gens = opt.gens
field = opt.field
domain = opt.domain
if gens and rep.gens != gens:
if set(rep.gens) != set(gens):
return cls._from_expr(rep.as_expr(), opt)
else:
rep = rep.reorder(*gens)
if 'domain' in opt and domain:
rep = rep.set_domain(domain)
elif field is True:
rep = rep.to_field()
return rep
@classmethod
def _from_expr(cls, rep, opt):
"""Construct a polynomial from an expression. """
rep, opt = _dict_from_expr(rep, opt)
return cls._from_dict(rep, opt)
def _hashable_content(self):
"""Allow SymPy to hash Poly instances. """
return (self.rep, self.gens)
def __hash__(self):
return super(Poly, self).__hash__()
@property
def free_symbols(self):
"""
Free symbols of a polynomial expression.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 1).free_symbols
set([x])
>>> Poly(x**2 + y).free_symbols
set([x, y])
>>> Poly(x**2 + y, x).free_symbols
set([x, y])
"""
symbols = set([])
for gen in self.gens:
symbols |= gen.free_symbols
return symbols | self.free_symbols_in_domain
@property
def free_symbols_in_domain(self):
"""
Free symbols of the domain of ``self``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 1).free_symbols_in_domain
set()
>>> Poly(x**2 + y).free_symbols_in_domain
set()
>>> Poly(x**2 + y, x).free_symbols_in_domain
set([y])
"""
domain, symbols = self.rep.dom, set()
if domain.is_Composite:
for gen in domain.symbols:
symbols |= gen.free_symbols
elif domain.is_EX:
for coeff in self.coeffs():
symbols |= coeff.free_symbols
return symbols
@property
def args(self):
"""
        Don't mess with the core.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).args
(x**2 + 1,)
"""
return (self.as_expr(),)
@property
def gen(self):
"""
Return the principal generator.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).gen
x
"""
return self.gens[0]
@property
def domain(self):
"""Get the ground domain of ``self``. """
return self.get_domain()
@property
def zero(self):
"""Return zero polynomial with ``self``'s properties. """
return self.new(self.rep.zero(self.rep.lev, self.rep.dom), *self.gens)
@property
def one(self):
"""Return one polynomial with ``self``'s properties. """
return self.new(self.rep.one(self.rep.lev, self.rep.dom), *self.gens)
@property
def unit(self):
"""Return unit polynomial with ``self``'s properties. """
return self.new(self.rep.unit(self.rep.lev, self.rep.dom), *self.gens)
def unify(f, g):
"""
Make ``f`` and ``g`` belong to the same domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f, g = Poly(x/2 + 1), Poly(2*x + 1)
>>> f
Poly(1/2*x + 1, x, domain='QQ')
>>> g
Poly(2*x + 1, x, domain='ZZ')
>>> F, G = f.unify(g)
>>> F
Poly(1/2*x + 1, x, domain='QQ')
>>> G
Poly(2*x + 1, x, domain='QQ')
"""
_, per, F, G = f._unify(g)
return per(F), per(G)
def _unify(f, g):
g = sympify(g)
if not g.is_Poly:
try:
return f.rep.dom, f.per, f.rep, f.rep.per(f.rep.dom.from_sympy(g))
except CoercionFailed:
raise UnificationFailed("can't unify %s with %s" % (f, g))
if isinstance(f.rep, DMP) and isinstance(g.rep, DMP):
gens = _unify_gens(f.gens, g.gens)
dom, lev = f.rep.dom.unify(g.rep.dom, gens), len(gens) - 1
if f.gens != gens:
f_monoms, f_coeffs = _dict_reorder(
f.rep.to_dict(), f.gens, gens)
if f.rep.dom != dom:
f_coeffs = [dom.convert(c, f.rep.dom) for c in f_coeffs]
F = DMP(dict(list(zip(f_monoms, f_coeffs))), dom, lev)
else:
F = f.rep.convert(dom)
if g.gens != gens:
g_monoms, g_coeffs = _dict_reorder(
g.rep.to_dict(), g.gens, gens)
if g.rep.dom != dom:
g_coeffs = [dom.convert(c, g.rep.dom) for c in g_coeffs]
G = DMP(dict(list(zip(g_monoms, g_coeffs))), dom, lev)
else:
G = g.rep.convert(dom)
else:
raise UnificationFailed("can't unify %s with %s" % (f, g))
cls = f.__class__
def per(rep, dom=dom, gens=gens, remove=None):
if remove is not None:
gens = gens[:remove] + gens[remove + 1:]
if not gens:
return dom.to_sympy(rep)
return cls.new(rep, *gens)
return dom, per, F, G
def per(f, rep, gens=None, remove=None):
"""
Create a Poly out of the given representation.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x, y
>>> from sympy.polys.polyclasses import DMP
>>> a = Poly(x**2 + 1)
>>> a.per(DMP([ZZ(1), ZZ(1)], ZZ), gens=[y])
Poly(y + 1, y, domain='ZZ')
"""
if gens is None:
gens = f.gens
if remove is not None:
gens = gens[:remove] + gens[remove + 1:]
if not gens:
return f.rep.dom.to_sympy(rep)
return f.__class__.new(rep, *gens)
def set_domain(f, domain):
"""Set the ground domain of ``f``. """
opt = options.build_options(f.gens, {'domain': domain})
return f.per(f.rep.convert(opt.domain))
def get_domain(f):
"""Get the ground domain of ``f``. """
return f.rep.dom
def set_modulus(f, modulus):
"""
Set the modulus of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(5*x**2 + 2*x - 1, x).set_modulus(2)
Poly(x**2 + 1, x, modulus=2)
"""
modulus = options.Modulus.preprocess(modulus)
return f.set_domain(FF(modulus))
def get_modulus(f):
"""
Get the modulus of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, modulus=2).get_modulus()
2
"""
domain = f.get_domain()
if domain.is_FiniteField:
return Integer(domain.characteristic())
else:
raise PolynomialError("not a polynomial over a Galois field")
def _eval_subs(f, old, new):
"""Internal implementation of :func:`subs`. """
if old in f.gens:
if new.is_number:
return f.eval(old, new)
else:
try:
return f.replace(old, new)
except PolynomialError:
pass
return f.as_expr().subs(old, new)
def exclude(f):
"""
Remove unnecessary generators from ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import a, b, c, d, x
>>> Poly(a + x, a, b, c, d, x).exclude()
Poly(a + x, a, x, domain='ZZ')
"""
J, new = f.rep.exclude()
gens = []
for j in range(len(f.gens)):
if j not in J:
gens.append(f.gens[j])
return f.per(new, gens=gens)
def replace(f, x, y=None):
"""
Replace ``x`` with ``y`` in generators list.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 1, x).replace(x, y)
Poly(y**2 + 1, y, domain='ZZ')
"""
if y is None:
if f.is_univariate:
x, y = f.gen, x
else:
raise PolynomialError(
"syntax supported only in univariate case")
if x == y:
return f
if x in f.gens and y not in f.gens:
dom = f.get_domain()
if not dom.is_Composite or y not in dom.symbols:
gens = list(f.gens)
gens[gens.index(x)] = y
return f.per(f.rep, gens=gens)
raise PolynomialError("can't replace %s with %s in %s" % (x, y, f))
def reorder(f, *gens, **args):
"""
Efficiently apply new order of generators.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + x*y**2, x, y).reorder(y, x)
Poly(y**2*x + x**2, y, x, domain='ZZ')
"""
opt = options.Options((), args)
if not gens:
gens = _sort_gens(f.gens, opt=opt)
elif set(f.gens) != set(gens):
raise PolynomialError(
"generators list can differ only up to order of elements")
rep = dict(list(zip(*_dict_reorder(f.rep.to_dict(), f.gens, gens))))
return f.per(DMP(rep, f.rep.dom, len(gens) - 1), gens=gens)
def ltrim(f, gen):
"""
Remove dummy generators from the "left" of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> Poly(y**2 + y*z**2, x, y, z).ltrim(y)
Poly(y**2 + y*z**2, y, z, domain='ZZ')
"""
rep = f.as_dict(native=True)
j = f._gen_to_level(gen)
terms = {}
for monom, coeff in rep.items():
monom = monom[j:]
if monom not in terms:
terms[monom] = coeff
else:
raise PolynomialError("can't left trim %s" % f)
gens = f.gens[j:]
return f.new(DMP.from_dict(terms, len(gens) - 1, f.rep.dom), *gens)
def has_only_gens(f, *gens):
"""
Return ``True`` if ``Poly(f, *gens)`` retains ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> Poly(x*y + 1, x, y, z).has_only_gens(x, y)
True
>>> Poly(x*y + z, x, y, z).has_only_gens(x, y)
False
"""
indices = set([])
for gen in gens:
try:
index = f.gens.index(gen)
except ValueError:
raise GeneratorsError(
"%s doesn't have %s as generator" % (f, gen))
else:
indices.add(index)
for monom in f.monoms():
for i, elt in enumerate(monom):
if i not in indices and elt:
return False
return True
def to_ring(f):
"""
Make the ground domain a ring.
Examples
========
>>> from sympy import Poly, QQ
>>> from sympy.abc import x
>>> Poly(x**2 + 1, domain=QQ).to_ring()
Poly(x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'to_ring'):
result = f.rep.to_ring()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_ring')
return f.per(result)
def to_field(f):
"""
Make the ground domain a field.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x, domain=ZZ).to_field()
Poly(x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'to_field'):
result = f.rep.to_field()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_field')
return f.per(result)
def to_exact(f):
"""
Make the ground domain exact.
Examples
========
>>> from sympy import Poly, RR
>>> from sympy.abc import x
>>> Poly(x**2 + 1.0, x, domain=RR).to_exact()
Poly(x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'to_exact'):
result = f.rep.to_exact()
else: # pragma: no cover
raise OperationNotSupported(f, 'to_exact')
return f.per(result)
def retract(f, field=None):
"""
Recalculate the ground domain of a polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = Poly(x**2 + 1, x, domain='QQ[y]')
>>> f
Poly(x**2 + 1, x, domain='QQ[y]')
>>> f.retract()
Poly(x**2 + 1, x, domain='ZZ')
>>> f.retract(field=True)
Poly(x**2 + 1, x, domain='QQ')
"""
dom, rep = construct_domain(f.as_dict(zero=True),
field=field, composite=f.domain.is_Composite or None)
return f.from_dict(rep, f.gens, domain=dom)
def slice(f, x, m, n=None):
"""Take a continuous subsequence of terms of ``f``. """
if n is None:
j, m, n = 0, x, m
else:
j = f._gen_to_level(x)
m, n = int(m), int(n)
if hasattr(f.rep, 'slice'):
result = f.rep.slice(m, n, j)
else: # pragma: no cover
raise OperationNotSupported(f, 'slice')
return f.per(result)
def coeffs(f, order=None):
"""
Returns all non-zero coefficients from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x + 3, x).coeffs()
[1, 2, 3]
See Also
========
all_coeffs
coeff_monomial
nth
"""
return [f.rep.dom.to_sympy(c) for c in f.rep.coeffs(order=order)]
def monoms(f, order=None):
"""
Returns all non-zero monomials from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).monoms()
[(2, 0), (1, 2), (1, 1), (0, 1)]
See Also
========
all_monoms
"""
return f.rep.monoms(order=order)
def terms(f, order=None):
"""
Returns all non-zero terms from ``f`` in lex order.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).terms()
[((2, 0), 1), ((1, 2), 2), ((1, 1), 1), ((0, 1), 3)]
See Also
========
all_terms
"""
return [(m, f.rep.dom.to_sympy(c)) for m, c in f.rep.terms(order=order)]
def all_coeffs(f):
"""
Returns all coefficients from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_coeffs()
[1, 0, 2, -1]
"""
return [f.rep.dom.to_sympy(c) for c in f.rep.all_coeffs()]
def all_monoms(f):
"""
Returns all monomials from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_monoms()
[(3,), (2,), (1,), (0,)]
See Also
========
all_terms
"""
return f.rep.all_monoms()
def all_terms(f):
"""
Returns all terms from a univariate polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x - 1, x).all_terms()
[((3,), 1), ((2,), 0), ((1,), 2), ((0,), -1)]
"""
return [(m, f.rep.dom.to_sympy(c)) for m, c in f.rep.all_terms()]
def termwise(f, func, *gens, **args):
"""
Apply a function to all terms of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> def func(k, coeff):
... k = k[0]
... return coeff//10**(2-k)
>>> Poly(x**2 + 20*x + 400).termwise(func)
Poly(x**2 + 2*x + 4, x, domain='ZZ')
"""
terms = {}
for monom, coeff in f.terms():
result = func(monom, coeff)
if isinstance(result, tuple):
monom, coeff = result
else:
coeff = result
if coeff:
if monom not in terms:
terms[monom] = coeff
else:
raise PolynomialError(
"%s monomial was generated twice" % monom)
return f.from_dict(terms, *(gens or f.gens), **args)
def length(f):
"""
Returns the number of non-zero terms in ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 2*x - 1).length()
3
"""
return len(f.as_dict())
def as_dict(f, native=False, zero=False):
"""
Switch to a ``dict`` representation.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x*y**2 - y, x, y).as_dict()
{(0, 1): -1, (1, 2): 2, (2, 0): 1}
"""
if native:
return f.rep.to_dict(zero=zero)
else:
return f.rep.to_sympy_dict(zero=zero)
def as_list(f, native=False):
"""Switch to a ``list`` representation. """
if native:
return f.rep.to_list()
else:
return f.rep.to_sympy_list()
def as_expr(f, *gens):
"""
Convert a Poly instance to an Expr instance.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2 + 2*x*y**2 - y, x, y)
>>> f.as_expr()
x**2 + 2*x*y**2 - y
>>> f.as_expr({x: 5})
10*y**2 - y + 25
>>> f.as_expr(5, 6)
379
"""
if not gens:
gens = f.gens
elif len(gens) == 1 and isinstance(gens[0], dict):
mapping = gens[0]
gens = list(f.gens)
for gen, value in mapping.items():
try:
index = gens.index(gen)
except ValueError:
raise GeneratorsError(
"%s doesn't have %s as generator" % (f, gen))
else:
gens[index] = value
return basic_from_dict(f.rep.to_sympy_dict(), *gens)
def lift(f):
"""
Convert algebraic coefficients to rationals.
Examples
========
>>> from sympy import Poly, I
>>> from sympy.abc import x
>>> Poly(x**2 + I*x + 1, x, extension=I).lift()
Poly(x**4 + 3*x**2 + 1, x, domain='QQ')
"""
if hasattr(f.rep, 'lift'):
result = f.rep.lift()
else: # pragma: no cover
raise OperationNotSupported(f, 'lift')
return f.per(result)
def deflate(f):
"""
Reduce degree of ``f`` by mapping ``x_i**m`` to ``y_i``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**6*y**2 + x**3 + 1, x, y).deflate()
((3, 2), Poly(x**2*y + x + 1, x, y, domain='ZZ'))
"""
if hasattr(f.rep, 'deflate'):
J, result = f.rep.deflate()
else: # pragma: no cover
raise OperationNotSupported(f, 'deflate')
return J, f.per(result)
def inject(f, front=False):
"""
Inject ground domain generators into ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2*y + x*y**3 + x*y + 1, x)
>>> f.inject()
Poly(x**2*y + x*y**3 + x*y + 1, x, y, domain='ZZ')
>>> f.inject(front=True)
Poly(y**3*x + y*x**2 + y*x + 1, y, x, domain='ZZ')
"""
dom = f.rep.dom
if dom.is_Numerical:
return f
elif not dom.is_Poly:
raise DomainError("can't inject generators over %s" % dom)
if hasattr(f.rep, 'inject'):
result = f.rep.inject(front=front)
else: # pragma: no cover
raise OperationNotSupported(f, 'inject')
if front:
gens = dom.symbols + f.gens
else:
gens = f.gens + dom.symbols
return f.new(result, *gens)
def eject(f, *gens):
"""
Eject selected generators into the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2*y + x*y**3 + x*y + 1, x, y)
>>> f.eject(x)
Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
>>> f.eject(y)
Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')
"""
dom = f.rep.dom
if not dom.is_Numerical:
raise DomainError("can't eject generators over %s" % dom)
n, k = len(f.gens), len(gens)
if f.gens[:k] == gens:
_gens, front = f.gens[k:], True
elif f.gens[-k:] == gens:
_gens, front = f.gens[:-k], False
else:
raise NotImplementedError(
"can only eject front or back generators")
dom = dom.inject(*gens)
if hasattr(f.rep, 'eject'):
result = f.rep.eject(dom, front=front)
else: # pragma: no cover
raise OperationNotSupported(f, 'eject')
return f.new(result, *_gens)
def terms_gcd(f):
"""
Remove GCD of terms from the polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**6*y**2 + x**3*y, x, y).terms_gcd()
((3, 1), Poly(x**3*y + 1, x, y, domain='ZZ'))
"""
if hasattr(f.rep, 'terms_gcd'):
J, result = f.rep.terms_gcd()
else: # pragma: no cover
raise OperationNotSupported(f, 'terms_gcd')
return J, f.per(result)
def add_ground(f, coeff):
"""
Add an element of the ground domain to ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).add_ground(2)
Poly(x + 3, x, domain='ZZ')
"""
if hasattr(f.rep, 'add_ground'):
result = f.rep.add_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'add_ground')
return f.per(result)
def sub_ground(f, coeff):
"""
Subtract an element of the ground domain from ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).sub_ground(2)
Poly(x - 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'sub_ground'):
result = f.rep.sub_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'sub_ground')
return f.per(result)
def mul_ground(f, coeff):
"""
        Multiply ``f`` by an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 1).mul_ground(2)
Poly(2*x + 2, x, domain='ZZ')
"""
if hasattr(f.rep, 'mul_ground'):
result = f.rep.mul_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'mul_ground')
return f.per(result)
def quo_ground(f, coeff):
"""
        Quotient of ``f`` by an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x + 4).quo_ground(2)
Poly(x + 2, x, domain='ZZ')
>>> Poly(2*x + 3).quo_ground(2)
Poly(x + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'quo_ground'):
result = f.rep.quo_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'quo_ground')
return f.per(result)
def exquo_ground(f, coeff):
"""
        Exact quotient of ``f`` by an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x + 4).exquo_ground(2)
Poly(x + 2, x, domain='ZZ')
>>> Poly(2*x + 3).exquo_ground(2)
Traceback (most recent call last):
...
ExactQuotientFailed: 2 does not divide 3 in ZZ
"""
if hasattr(f.rep, 'exquo_ground'):
result = f.rep.exquo_ground(coeff)
else: # pragma: no cover
raise OperationNotSupported(f, 'exquo_ground')
return f.per(result)
def abs(f):
"""
Make all coefficients in ``f`` positive.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).abs()
Poly(x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'abs'):
result = f.rep.abs()
else: # pragma: no cover
raise OperationNotSupported(f, 'abs')
return f.per(result)
def neg(f):
"""
Negate all coefficients in ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).neg()
Poly(-x**2 + 1, x, domain='ZZ')
>>> -Poly(x**2 - 1, x)
Poly(-x**2 + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'neg'):
result = f.rep.neg()
else: # pragma: no cover
raise OperationNotSupported(f, 'neg')
return f.per(result)
def add(f, g):
"""
Add two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).add(Poly(x - 2, x))
Poly(x**2 + x - 1, x, domain='ZZ')
>>> Poly(x**2 + 1, x) + Poly(x - 2, x)
Poly(x**2 + x - 1, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.add_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'add'):
result = F.add(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'add')
return per(result)
def sub(f, g):
"""
Subtract two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).sub(Poly(x - 2, x))
Poly(x**2 - x + 3, x, domain='ZZ')
>>> Poly(x**2 + 1, x) - Poly(x - 2, x)
Poly(x**2 - x + 3, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.sub_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'sub'):
result = F.sub(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'sub')
return per(result)
def mul(f, g):
"""
Multiply two polynomials ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).mul(Poly(x - 2, x))
Poly(x**3 - 2*x**2 + x - 2, x, domain='ZZ')
>>> Poly(x**2 + 1, x)*Poly(x - 2, x)
Poly(x**3 - 2*x**2 + x - 2, x, domain='ZZ')
"""
g = sympify(g)
if not g.is_Poly:
return f.mul_ground(g)
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'mul'):
result = F.mul(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'mul')
return per(result)
def sqr(f):
"""
Square a polynomial ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x - 2, x).sqr()
Poly(x**2 - 4*x + 4, x, domain='ZZ')
>>> Poly(x - 2, x)**2
Poly(x**2 - 4*x + 4, x, domain='ZZ')
"""
if hasattr(f.rep, 'sqr'):
result = f.rep.sqr()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqr')
return f.per(result)
def pow(f, n):
"""
Raise ``f`` to a non-negative power ``n``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x - 2, x).pow(3)
Poly(x**3 - 6*x**2 + 12*x - 8, x, domain='ZZ')
>>> Poly(x - 2, x)**3
Poly(x**3 - 6*x**2 + 12*x - 8, x, domain='ZZ')
"""
n = int(n)
if hasattr(f.rep, 'pow'):
result = f.rep.pow(n)
else: # pragma: no cover
raise OperationNotSupported(f, 'pow')
return f.per(result)
def pdiv(f, g):
"""
Polynomial pseudo-division of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).pdiv(Poly(2*x - 4, x))
(Poly(2*x + 4, x, domain='ZZ'), Poly(20, x, domain='ZZ'))
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pdiv'):
q, r = F.pdiv(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'pdiv')
return per(q), per(r)
def prem(f, g):
"""
Polynomial pseudo-remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).prem(Poly(2*x - 4, x))
Poly(20, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'prem'):
result = F.prem(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'prem')
return per(result)
def pquo(f, g):
"""
Polynomial pseudo-quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).pquo(Poly(2*x - 4, x))
Poly(2*x + 4, x, domain='ZZ')
>>> Poly(x**2 - 1, x).pquo(Poly(2*x - 2, x))
Poly(2*x + 2, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pquo'):
result = F.pquo(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'pquo')
return per(result)
def pexquo(f, g):
"""
Polynomial exact pseudo-quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).pexquo(Poly(2*x - 2, x))
Poly(2*x + 2, x, domain='ZZ')
>>> Poly(x**2 + 1, x).pexquo(Poly(2*x - 4, x))
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'pexquo'):
try:
result = F.pexquo(G)
except ExactQuotientFailed as exc:
raise exc.new(f.as_expr(), g.as_expr())
else: # pragma: no cover
raise OperationNotSupported(f, 'pexquo')
return per(result)
def div(f, g, auto=True):
"""
Polynomial division with remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x))
(Poly(1/2*x + 1, x, domain='QQ'), Poly(5, x, domain='QQ'))
>>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x), auto=False)
(Poly(0, x, domain='ZZ'), Poly(x**2 + 1, x, domain='ZZ'))
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'div'):
q, r = F.div(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'div')
if retract:
try:
Q, R = q.to_ring(), r.to_ring()
except CoercionFailed:
pass
else:
q, r = Q, R
return per(q), per(r)
def rem(f, g, auto=True):
"""
Computes the polynomial remainder of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x))
Poly(5, x, domain='ZZ')
>>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x), auto=False)
Poly(x**2 + 1, x, domain='ZZ')
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'rem'):
r = F.rem(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'rem')
if retract:
try:
r = r.to_ring()
except CoercionFailed:
pass
return per(r)
def quo(f, g, auto=True):
"""
Computes polynomial quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).quo(Poly(2*x - 4, x))
Poly(1/2*x + 1, x, domain='QQ')
>>> Poly(x**2 - 1, x).quo(Poly(x - 1, x))
Poly(x + 1, x, domain='ZZ')
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'quo'):
q = F.quo(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'quo')
if retract:
try:
q = q.to_ring()
except CoercionFailed:
pass
return per(q)
def exquo(f, g, auto=True):
"""
Computes polynomial exact quotient of ``f`` by ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).exquo(Poly(x - 1, x))
Poly(x + 1, x, domain='ZZ')
>>> Poly(x**2 + 1, x).exquo(Poly(2*x - 4, x))
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
dom, per, F, G = f._unify(g)
retract = False
if auto and dom.has_Ring and not dom.has_Field:
F, G = F.to_field(), G.to_field()
retract = True
if hasattr(f.rep, 'exquo'):
try:
q = F.exquo(G)
except ExactQuotientFailed as exc:
raise exc.new(f.as_expr(), g.as_expr())
else: # pragma: no cover
raise OperationNotSupported(f, 'exquo')
if retract:
try:
q = q.to_ring()
except CoercionFailed:
pass
return per(q)
def _gen_to_level(f, gen):
"""Returns level associated with the given generator. """
if isinstance(gen, int):
length = len(f.gens)
if -length <= gen < length:
if gen < 0:
return length + gen
else:
return gen
else:
raise PolynomialError("-%s <= gen < %s expected, got %s" %
(length, length, gen))
else:
try:
return f.gens.index(sympify(gen))
except ValueError:
raise PolynomialError(
"a valid generator expected, got %s" % gen)
def degree(f, gen=0):
"""
Returns degree of ``f`` in ``x_j``.
The degree of 0 is negative infinity.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).degree()
2
>>> Poly(x**2 + y*x + y, x, y).degree(y)
1
>>> Poly(0, x).degree()
-oo
"""
j = f._gen_to_level(gen)
if hasattr(f.rep, 'degree'):
return f.rep.degree(j)
else: # pragma: no cover
raise OperationNotSupported(f, 'degree')
def degree_list(f):
"""
Returns a list of degrees of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).degree_list()
(2, 1)
"""
if hasattr(f.rep, 'degree_list'):
return f.rep.degree_list()
else: # pragma: no cover
raise OperationNotSupported(f, 'degree_list')
def total_degree(f):
"""
Returns the total degree of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + y*x + 1, x, y).total_degree()
2
>>> Poly(x + y**5, x, y).total_degree()
5
"""
if hasattr(f.rep, 'total_degree'):
return f.rep.total_degree()
else: # pragma: no cover
raise OperationNotSupported(f, 'total_degree')
def homogenize(f, s):
"""
Returns the homogeneous polynomial of ``f``.
        A homogeneous polynomial is a polynomial whose monomials with
        non-zero coefficients all have the same total degree. If you only
want to check if a polynomial is homogeneous, then use
:func:`Poly.is_homogeneous`. If you want not only to check if a
polynomial is homogeneous but also compute its homogeneous order,
then use :func:`Poly.homogeneous_order`.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> f = Poly(x**5 + 2*x**2*y**2 + 9*x*y**3)
>>> f.homogenize(z)
Poly(x**5 + 2*x**2*y**2*z + 9*x*y**3*z, x, y, z, domain='ZZ')
"""
if not isinstance(s, Symbol):
raise TypeError("``Symbol`` expected, got %s" % type(s))
if s in f.gens:
i = f.gens.index(s)
gens = f.gens
else:
i = len(f.gens)
gens = f.gens + (s,)
if hasattr(f.rep, 'homogenize'):
return f.per(f.rep.homogenize(i), gens=gens)
        raise OperationNotSupported(f, 'homogenize')
def homogeneous_order(f):
"""
Returns the homogeneous order of ``f``.
        A homogeneous polynomial is a polynomial whose monomials with
        non-zero coefficients all have the same total degree. This degree is
the homogeneous order of ``f``. If you only want to check if a
polynomial is homogeneous, then use :func:`Poly.is_homogeneous`.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**5 + 2*x**3*y**2 + 9*x*y**4)
>>> f.homogeneous_order()
5
"""
if hasattr(f.rep, 'homogeneous_order'):
return f.rep.homogeneous_order()
else: # pragma: no cover
raise OperationNotSupported(f, 'homogeneous_order')
def LC(f, order=None):
"""
Returns the leading coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(4*x**3 + 2*x**2 + 3*x, x).LC()
4
"""
if order is not None:
return f.coeffs(order)[0]
if hasattr(f.rep, 'LC'):
result = f.rep.LC()
else: # pragma: no cover
raise OperationNotSupported(f, 'LC')
return f.rep.dom.to_sympy(result)
def TC(f):
"""
Returns the trailing coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x**2 + 3*x, x).TC()
0
"""
if hasattr(f.rep, 'TC'):
result = f.rep.TC()
else: # pragma: no cover
raise OperationNotSupported(f, 'TC')
return f.rep.dom.to_sympy(result)
def EC(f, order=None):
"""
Returns the last non-zero coefficient of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 + 2*x**2 + 3*x, x).EC()
3
"""
if hasattr(f.rep, 'coeffs'):
return f.coeffs(order)[-1]
else: # pragma: no cover
raise OperationNotSupported(f, 'EC')
def coeff_monomial(f, monom):
"""
        Returns the coefficient of ``monom`` in ``f``, or zero if
        that monomial does not appear in ``f``.
Examples
========
>>> from sympy import Poly, exp
>>> from sympy.abc import x, y
>>> p = Poly(24*x*y*exp(8) + 23*x, x, y)
>>> p.coeff_monomial(x)
23
>>> p.coeff_monomial(y)
0
>>> p.coeff_monomial(x*y)
24*exp(8)
        Note that ``Expr.coeff()`` behaves differently: it collects terms
        where possible. To use that method, the Poly must first be
        converted to an Expr:
>>> p.as_expr().coeff(x)
24*y*exp(8) + 23
>>> p.as_expr().coeff(y)
24*x*exp(8)
>>> p.as_expr().coeff(x*y)
24*exp(8)
See Also
========
nth: more efficient query using exponents of the monomial's generators
"""
return f.nth(*Monomial(monom, f.gens).exponents)
def nth(f, *N):
"""
Returns the ``n``-th coefficient of ``f`` where ``N`` are the
exponents of the generators in the term of interest.
Examples
========
>>> from sympy import Poly, sqrt
>>> from sympy.abc import x, y
>>> Poly(x**3 + 2*x**2 + 3*x, x).nth(2)
2
>>> Poly(x**3 + 2*x*y**2 + y**2, x, y).nth(1, 2)
2
>>> Poly(4*sqrt(x)*y)
Poly(4*y*sqrt(x), y, sqrt(x), domain='ZZ')
>>> _.nth(1, 1)
4
See Also
========
coeff_monomial
"""
if hasattr(f.rep, 'nth'):
result = f.rep.nth(*list(map(int, N)))
else: # pragma: no cover
raise OperationNotSupported(f, 'nth')
return f.rep.dom.to_sympy(result)
def coeff(f, x, n=1, right=False):
# the semantics of coeff_monomial and Expr.coeff are different;
# if someone is working with a Poly, they should be aware of the
        # differences and choose the method best suited for the query.
# Alternatively, a pure-polys method could be written here but
# at this time the ``right`` keyword would be ignored because Poly
# doesn't work with non-commutatives.
raise NotImplementedError(
'Either convert to Expr with `as_expr` method '
'to use Expr\'s coeff method or else use the '
'`coeff_monomial` method of Polys.')
def LM(f, order=None):
"""
Returns the leading monomial of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LM()
x**2*y**0
"""
return Monomial(f.monoms(order)[0], f.gens)
def EM(f, order=None):
"""
Returns the last non-zero monomial of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).EM()
x**0*y**1
"""
return Monomial(f.monoms(order)[-1], f.gens)
def LT(f, order=None):
"""
Returns the leading term of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LT()
(x**2*y**0, 4)
"""
monom, coeff = f.terms(order)[0]
return Monomial(monom, f.gens), coeff
def ET(f, order=None):
"""
Returns the last non-zero term of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).ET()
(x**0*y**1, 3)
"""
monom, coeff = f.terms(order)[-1]
return Monomial(monom, f.gens), coeff
def max_norm(f):
"""
Returns maximum norm of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(-x**2 + 2*x - 3, x).max_norm()
3
"""
if hasattr(f.rep, 'max_norm'):
result = f.rep.max_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'max_norm')
return f.rep.dom.to_sympy(result)
def l1_norm(f):
"""
Returns l1 norm of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(-x**2 + 2*x - 3, x).l1_norm()
6
"""
if hasattr(f.rep, 'l1_norm'):
result = f.rep.l1_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'l1_norm')
return f.rep.dom.to_sympy(result)
def clear_denoms(f, convert=False):
"""
Clear denominators, but keep the ground domain.
Examples
========
>>> from sympy import Poly, S, QQ
>>> from sympy.abc import x
>>> f = Poly(x/2 + S(1)/3, x, domain=QQ)
>>> f.clear_denoms()
(6, Poly(3*x + 2, x, domain='QQ'))
>>> f.clear_denoms(convert=True)
(6, Poly(3*x + 2, x, domain='ZZ'))
"""
if not f.rep.dom.has_Field:
return S.One, f
dom = f.get_domain()
if dom.has_assoc_Ring:
dom = f.rep.dom.get_ring()
if hasattr(f.rep, 'clear_denoms'):
coeff, result = f.rep.clear_denoms()
else: # pragma: no cover
raise OperationNotSupported(f, 'clear_denoms')
coeff, f = dom.to_sympy(coeff), f.per(result)
if not convert or not dom.has_assoc_Ring:
return coeff, f
else:
return coeff, f.to_ring()
def rat_clear_denoms(f, g):
"""
Clear denominators in a rational function ``f/g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = Poly(x**2/y + 1, x)
>>> g = Poly(x**3 + y, x)
>>> p, q = f.rat_clear_denoms(g)
>>> p
Poly(x**2 + y, x, domain='ZZ[y]')
>>> q
Poly(y*x**3 + y**2, x, domain='ZZ[y]')
"""
dom, per, f, g = f._unify(g)
f = per(f)
g = per(g)
if not (dom.has_Field and dom.has_assoc_Ring):
return f, g
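        # Cross-multiplying each polynomial by the other's clearing factor
        # keeps the ratio ``f/g`` unchanged while removing all denominators.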
a, f = f.clear_denoms(convert=True)
b, g = g.clear_denoms(convert=True)
f = f.mul_ground(b)
g = g.mul_ground(a)
return f, g
def integrate(f, *specs, **args):
"""
Computes indefinite integral of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x + 1, x).integrate()
Poly(1/3*x**3 + x**2 + x, x, domain='QQ')
>>> Poly(x*y**2 + x, x, y).integrate((0, 1), (1, 0))
Poly(1/2*x**2*y**2 + 1/2*x**2, x, y, domain='QQ')
"""
if args.get('auto', True) and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'integrate'):
if not specs:
return f.per(f.rep.integrate(m=1))
rep = f.rep
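            # Each spec is either a generator (meaning one integration) or
            # a ``(gen, m)`` pair requesting ``m`` integrations in ``gen``.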
for spec in specs:
if type(spec) is tuple:
gen, m = spec
else:
gen, m = spec, 1
rep = rep.integrate(int(m), f._gen_to_level(gen))
return f.per(rep)
else: # pragma: no cover
raise OperationNotSupported(f, 'integrate')
def diff(f, *specs):
"""
Computes partial derivative of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + 2*x + 1, x).diff()
Poly(2*x + 2, x, domain='ZZ')
>>> Poly(x*y**2 + x, x, y).diff((0, 0), (1, 1))
Poly(2*x*y, x, y, domain='ZZ')
"""
if hasattr(f.rep, 'diff'):
if not specs:
return f.per(f.rep.diff(m=1))
rep = f.rep
for spec in specs:
if type(spec) is tuple:
gen, m = spec
else:
gen, m = spec, 1
rep = rep.diff(int(m), f._gen_to_level(gen))
return f.per(rep)
else: # pragma: no cover
raise OperationNotSupported(f, 'diff')
def eval(f, x, a=None, auto=True):
"""
Evaluate ``f`` at ``a`` in the given variable.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> Poly(x**2 + 2*x + 3, x).eval(2)
11
>>> Poly(2*x*y + 3*x + y + 2, x, y).eval(x, 2)
Poly(5*y + 8, y, domain='ZZ')
>>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z)
>>> f.eval({x: 2})
Poly(5*y + 2*z + 6, y, z, domain='ZZ')
>>> f.eval({x: 2, y: 5})
Poly(2*z + 31, z, domain='ZZ')
>>> f.eval({x: 2, y: 5, z: 7})
45
>>> f.eval((2, 5))
Poly(2*z + 31, z, domain='ZZ')
>>> f(2, 5)
Poly(2*z + 31, z, domain='ZZ')
"""
if a is None:
if isinstance(x, dict):
mapping = x
for gen, value in mapping.items():
f = f.eval(gen, value)
return f
elif isinstance(x, (tuple, list)):
values = x
if len(values) > len(f.gens):
raise ValueError("too many values provided")
for gen, value in zip(f.gens, values):
f = f.eval(gen, value)
return f
else:
j, a = 0, x
else:
j = f._gen_to_level(x)
if not hasattr(f.rep, 'eval'): # pragma: no cover
raise OperationNotSupported(f, 'eval')
try:
result = f.rep.eval(a, j)
except CoercionFailed:
if not auto:
raise DomainError("can't evaluate at %s in %s" % (a, f.rep.dom))
else:
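                # The evaluation point does not fit into the current ground
                # domain, so construct a domain large enough to hold it,
                # switch ``f`` over to it and retry the evaluation.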
a_domain, [a] = construct_domain([a])
new_domain = f.get_domain().unify_with_symbols(a_domain, f.gens)
f = f.set_domain(new_domain)
a = new_domain.convert(a, a_domain)
result = f.rep.eval(a, j)
return f.per(result, remove=j)
def __call__(f, *values):
"""
        Evaluate ``f`` at the given values.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y, z
>>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z)
>>> f(2)
Poly(5*y + 2*z + 6, y, z, domain='ZZ')
>>> f(2, 5)
Poly(2*z + 31, z, domain='ZZ')
>>> f(2, 5, 7)
45
"""
return f.eval(values)
def half_gcdex(f, g, auto=True):
"""
Half extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15
>>> g = x**3 + x**2 - 4*x - 4
>>> Poly(f).half_gcdex(Poly(g))
(Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(x + 1, x, domain='QQ'))
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'half_gcdex'):
s, h = F.half_gcdex(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'half_gcdex')
return per(s), per(h)
def gcdex(f, g, auto=True):
"""
Extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15
>>> g = x**3 + x**2 - 4*x - 4
>>> Poly(f).gcdex(Poly(g))
(Poly(-1/5*x + 3/5, x, domain='QQ'),
Poly(1/5*x**2 - 6/5*x + 2, x, domain='QQ'),
Poly(x + 1, x, domain='QQ'))
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'gcdex'):
s, t, h = F.gcdex(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'gcdex')
return per(s), per(t), per(h)
def invert(f, g, auto=True):
"""
Invert ``f`` modulo ``g`` when possible.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).invert(Poly(2*x - 1, x))
Poly(-4/3, x, domain='QQ')
>>> Poly(x**2 - 1, x).invert(Poly(x - 1, x))
Traceback (most recent call last):
...
NotInvertible: zero divisor
"""
dom, per, F, G = f._unify(g)
if auto and dom.has_Ring:
F, G = F.to_field(), G.to_field()
if hasattr(f.rep, 'invert'):
result = F.invert(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'invert')
return per(result)
def revert(f, n):
"""Compute ``f**(-1)`` mod ``x**n``. """
if hasattr(f.rep, 'revert'):
result = f.rep.revert(int(n))
else: # pragma: no cover
raise OperationNotSupported(f, 'revert')
return f.per(result)
def subresultants(f, g):
"""
Computes the subresultant PRS of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 1, x).subresultants(Poly(x**2 - 1, x))
[Poly(x**2 + 1, x, domain='ZZ'),
Poly(x**2 - 1, x, domain='ZZ'),
Poly(-2, x, domain='ZZ')]
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'subresultants'):
result = F.subresultants(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'subresultants')
return list(map(per, result))
def resultant(f, g, includePRS=False):
"""
Computes the resultant of ``f`` and ``g`` via PRS.
If includePRS=True, it includes the subresultant PRS in the result.
Because the PRS is used to calculate the resultant, this is more
efficient than calling :func:`subresultants` separately.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = Poly(x**2 + 1, x)
>>> f.resultant(Poly(x**2 - 1, x))
4
>>> f.resultant(Poly(x**2 - 1, x), includePRS=True)
(4, [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'),
Poly(-2, x, domain='ZZ')])
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'resultant'):
if includePRS:
result, R = F.resultant(G, includePRS=includePRS)
else:
result = F.resultant(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'resultant')
if includePRS:
return (per(result, remove=0), list(map(per, R)))
return per(result, remove=0)
def discriminant(f):
"""
Computes the discriminant of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + 2*x + 3, x).discriminant()
-8
"""
if hasattr(f.rep, 'discriminant'):
result = f.rep.discriminant()
else: # pragma: no cover
raise OperationNotSupported(f, 'discriminant')
return f.per(result, remove=0)
def dispersionset(f, g=None):
r"""Compute the *dispersion set* of two polynomials.
For two polynomials `f(x)` and `g(x)` with `\deg f > 0`
and `\deg g > 0` the dispersion set `\operatorname{J}(f, g)` is defined as:
.. math::
\operatorname{J}(f, g)
& := \{a \in \mathbb{N}_0 | \gcd(f(x), g(x+a)) \neq 1\} \\
& = \{a \in \mathbb{N}_0 | \deg \gcd(f(x), g(x+a)) \geq 1\}
For a single polynomial one defines `\operatorname{J}(f) := \operatorname{J}(f, f)`.
Examples
========
>>> from sympy import poly
>>> from sympy.polys.dispersion import dispersion, dispersionset
>>> from sympy.abc import x
Dispersion set and dispersion of a simple polynomial:
>>> fp = poly((x - 3)*(x + 3), x)
>>> sorted(dispersionset(fp))
[0, 6]
>>> dispersion(fp)
6
Note that the definition of the dispersion is not symmetric:
>>> fp = poly(x**4 - 3*x**2 + 1, x)
>>> gp = fp.shift(-3)
>>> sorted(dispersionset(fp, gp))
[2, 3, 4]
>>> dispersion(fp, gp)
4
>>> sorted(dispersionset(gp, fp))
[]
>>> dispersion(gp, fp)
-oo
Computing the dispersion also works over field extensions:
>>> from sympy import sqrt
>>> fp = poly(x**2 + sqrt(5)*x - 1, x, domain='QQ<sqrt(5)>')
>>> gp = poly(x**2 + (2 + sqrt(5))*x + sqrt(5), x, domain='QQ<sqrt(5)>')
>>> sorted(dispersionset(fp, gp))
[2]
>>> sorted(dispersionset(gp, fp))
[1, 4]
We can even perform the computations for polynomials
having symbolic coefficients:
>>> from sympy.abc import a
>>> fp = poly(4*x**4 + (4*a + 8)*x**3 + (a**2 + 6*a + 4)*x**2 + (a**2 + 2*a)*x, x)
>>> sorted(dispersionset(fp))
[0, 1]
See Also
========
dispersion
References
==========
1. [ManWright94]_
2. [Koepf98]_
3. [Abramov71]_
4. [Man93]_
"""
from sympy.polys.dispersion import dispersionset
return dispersionset(f, g)
def dispersion(f, g=None):
r"""Compute the *dispersion* of polynomials.
For two polynomials `f(x)` and `g(x)` with `\deg f > 0`
and `\deg g > 0` the dispersion `\operatorname{dis}(f, g)` is defined as:
.. math::
\operatorname{dis}(f, g)
& := \max\{ J(f,g) \cup \{0\} \} \\
& = \max\{ \{a \in \mathbb{N} | \gcd(f(x), g(x+a)) \neq 1\} \cup \{0\} \}
and for a single polynomial `\operatorname{dis}(f) := \operatorname{dis}(f, f)`.
Examples
========
>>> from sympy import poly
>>> from sympy.polys.dispersion import dispersion, dispersionset
>>> from sympy.abc import x
Dispersion set and dispersion of a simple polynomial:
>>> fp = poly((x - 3)*(x + 3), x)
>>> sorted(dispersionset(fp))
[0, 6]
>>> dispersion(fp)
6
Note that the definition of the dispersion is not symmetric:
>>> fp = poly(x**4 - 3*x**2 + 1, x)
>>> gp = fp.shift(-3)
>>> sorted(dispersionset(fp, gp))
[2, 3, 4]
>>> dispersion(fp, gp)
4
>>> sorted(dispersionset(gp, fp))
[]
>>> dispersion(gp, fp)
-oo
Computing the dispersion also works over field extensions:
>>> from sympy import sqrt
>>> fp = poly(x**2 + sqrt(5)*x - 1, x, domain='QQ<sqrt(5)>')
>>> gp = poly(x**2 + (2 + sqrt(5))*x + sqrt(5), x, domain='QQ<sqrt(5)>')
>>> sorted(dispersionset(fp, gp))
[2]
>>> sorted(dispersionset(gp, fp))
[1, 4]
We can even perform the computations for polynomials
having symbolic coefficients:
>>> from sympy.abc import a
>>> fp = poly(4*x**4 + (4*a + 8)*x**3 + (a**2 + 6*a + 4)*x**2 + (a**2 + 2*a)*x, x)
>>> sorted(dispersionset(fp))
[0, 1]
See Also
========
dispersionset
References
==========
1. [ManWright94]_
2. [Koepf98]_
3. [Abramov71]_
4. [Man93]_
"""
from sympy.polys.dispersion import dispersion
return dispersion(f, g)
def cofactors(f, g):
"""
Returns the GCD of ``f`` and ``g`` and their cofactors.
Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
        ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are the so-called cofactors
of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).cofactors(Poly(x**2 - 3*x + 2, x))
(Poly(x - 1, x, domain='ZZ'),
Poly(x + 1, x, domain='ZZ'),
Poly(x - 2, x, domain='ZZ'))
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'cofactors'):
h, cff, cfg = F.cofactors(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'cofactors')
return per(h), per(cff), per(cfg)
def gcd(f, g):
"""
Returns the polynomial GCD of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).gcd(Poly(x**2 - 3*x + 2, x))
Poly(x - 1, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'gcd'):
result = F.gcd(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'gcd')
return per(result)
def lcm(f, g):
"""
Returns polynomial LCM of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 1, x).lcm(Poly(x**2 - 3*x + 2, x))
Poly(x**3 - 2*x**2 - x + 2, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'lcm'):
result = F.lcm(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'lcm')
return per(result)
def trunc(f, p):
"""
Reduce ``f`` modulo a constant ``p``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**3 + 3*x**2 + 5*x + 7, x).trunc(3)
Poly(-x**3 - x + 1, x, domain='ZZ')
"""
p = f.rep.dom.convert(p)
if hasattr(f.rep, 'trunc'):
result = f.rep.trunc(p)
else: # pragma: no cover
raise OperationNotSupported(f, 'trunc')
return f.per(result)
def monic(f, auto=True):
"""
Divides all coefficients by ``LC(f)``.
Examples
========
>>> from sympy import Poly, ZZ
>>> from sympy.abc import x
>>> Poly(3*x**2 + 6*x + 9, x, domain=ZZ).monic()
Poly(x**2 + 2*x + 3, x, domain='QQ')
>>> Poly(3*x**2 + 4*x + 2, x, domain=ZZ).monic()
Poly(x**2 + 4/3*x + 2/3, x, domain='QQ')
"""
if auto and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'monic'):
result = f.rep.monic()
else: # pragma: no cover
raise OperationNotSupported(f, 'monic')
return f.per(result)
def content(f):
"""
Returns the GCD of polynomial coefficients.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(6*x**2 + 8*x + 12, x).content()
2
"""
if hasattr(f.rep, 'content'):
result = f.rep.content()
else: # pragma: no cover
raise OperationNotSupported(f, 'content')
return f.rep.dom.to_sympy(result)
def primitive(f):
"""
Returns the content and a primitive form of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**2 + 8*x + 12, x).primitive()
(2, Poly(x**2 + 4*x + 6, x, domain='ZZ'))
"""
if hasattr(f.rep, 'primitive'):
cont, result = f.rep.primitive()
else: # pragma: no cover
raise OperationNotSupported(f, 'primitive')
return f.rep.dom.to_sympy(cont), f.per(result)
def compose(f, g):
"""
Computes the functional composition of ``f`` and ``g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + x, x).compose(Poly(x - 1, x))
Poly(x**2 - x, x, domain='ZZ')
"""
_, per, F, G = f._unify(g)
if hasattr(f.rep, 'compose'):
result = F.compose(G)
else: # pragma: no cover
raise OperationNotSupported(f, 'compose')
return per(result)
def decompose(f):
"""
Computes a functional decomposition of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**4 + 2*x**3 - x - 1, x, domain='ZZ').decompose()
[Poly(x**2 - x - 1, x, domain='ZZ'), Poly(x**2 + x, x, domain='ZZ')]
"""
if hasattr(f.rep, 'decompose'):
result = f.rep.decompose()
else: # pragma: no cover
raise OperationNotSupported(f, 'decompose')
return list(map(f.per, result))
def shift(f, a):
"""
Efficiently compute Taylor shift ``f(x + a)``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 2*x + 1, x).shift(2)
Poly(x**2 + 2*x + 1, x, domain='ZZ')
"""
if hasattr(f.rep, 'shift'):
result = f.rep.shift(a)
else: # pragma: no cover
raise OperationNotSupported(f, 'shift')
return f.per(result)
def sturm(f, auto=True):
"""
Computes the Sturm sequence of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 - 2*x**2 + x - 3, x).sturm()
[Poly(x**3 - 2*x**2 + x - 3, x, domain='QQ'),
Poly(3*x**2 - 4*x + 1, x, domain='QQ'),
Poly(2/9*x + 25/9, x, domain='QQ'),
Poly(-2079/4, x, domain='QQ')]
"""
if auto and f.rep.dom.has_Ring:
f = f.to_field()
if hasattr(f.rep, 'sturm'):
result = f.rep.sturm()
else: # pragma: no cover
raise OperationNotSupported(f, 'sturm')
return list(map(f.per, result))
def gff_list(f):
"""
Computes greatest factorial factorization of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**5 + 2*x**4 - x**3 - 2*x**2
>>> Poly(f).gff_list()
[(Poly(x, x, domain='ZZ'), 1), (Poly(x + 2, x, domain='ZZ'), 4)]
"""
if hasattr(f.rep, 'gff_list'):
result = f.rep.gff_list()
else: # pragma: no cover
raise OperationNotSupported(f, 'gff_list')
return [(f.per(g), k) for g, k in result]
def sqf_norm(f):
"""
Computes square-free norm of ``f``.
        Returns ``s``, ``g``, ``r``, such that ``g(x) = f(x - s*a)`` and
        ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
        where ``a`` is the algebraic extension of the ground domain.
Examples
========
>>> from sympy import Poly, sqrt
>>> from sympy.abc import x
        >>> s, g, r = Poly(x**2 + 1, x, extension=[sqrt(3)]).sqf_norm()
>>> s
1
        >>> g
Poly(x**2 - 2*sqrt(3)*x + 4, x, domain='QQ<sqrt(3)>')
>>> r
Poly(x**4 - 4*x**2 + 16, x, domain='QQ')
"""
if hasattr(f.rep, 'sqf_norm'):
s, g, r = f.rep.sqf_norm()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_norm')
return s, f.per(g), f.per(r)
def sqf_part(f):
"""
Computes square-free part of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**3 - 3*x - 2, x).sqf_part()
Poly(x**2 - x - 2, x, domain='ZZ')
"""
if hasattr(f.rep, 'sqf_part'):
result = f.rep.sqf_part()
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_part')
return f.per(result)
def sqf_list(f, all=False):
"""
Returns a list of square-free factors of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = 2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16
>>> Poly(f).sqf_list()
(2, [(Poly(x + 1, x, domain='ZZ'), 2),
(Poly(x + 2, x, domain='ZZ'), 3)])
>>> Poly(f).sqf_list(all=True)
(2, [(Poly(1, x, domain='ZZ'), 1),
(Poly(x + 1, x, domain='ZZ'), 2),
(Poly(x + 2, x, domain='ZZ'), 3)])
"""
if hasattr(f.rep, 'sqf_list'):
coeff, factors = f.rep.sqf_list(all)
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_list')
return f.rep.dom.to_sympy(coeff), [(f.per(g), k) for g, k in factors]
def sqf_list_include(f, all=False):
"""
Returns a list of square-free factors of ``f``.
Examples
========
>>> from sympy import Poly, expand
>>> from sympy.abc import x
>>> f = expand(2*(x + 1)**3*x**4)
>>> f
2*x**7 + 6*x**6 + 6*x**5 + 2*x**4
>>> Poly(f).sqf_list_include()
[(Poly(2, x, domain='ZZ'), 1),
(Poly(x + 1, x, domain='ZZ'), 3),
(Poly(x, x, domain='ZZ'), 4)]
>>> Poly(f).sqf_list_include(all=True)
[(Poly(2, x, domain='ZZ'), 1),
(Poly(1, x, domain='ZZ'), 2),
(Poly(x + 1, x, domain='ZZ'), 3),
(Poly(x, x, domain='ZZ'), 4)]
"""
if hasattr(f.rep, 'sqf_list_include'):
factors = f.rep.sqf_list_include(all)
else: # pragma: no cover
raise OperationNotSupported(f, 'sqf_list_include')
return [(f.per(g), k) for g, k in factors]
def factor_list(f):
"""
Returns a list of irreducible factors of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = 2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y
>>> Poly(f).factor_list()
(2, [(Poly(x + y, x, y, domain='ZZ'), 1),
(Poly(x**2 + 1, x, y, domain='ZZ'), 2)])
"""
if hasattr(f.rep, 'factor_list'):
try:
coeff, factors = f.rep.factor_list()
except DomainError:
return S.One, [(f, 1)]
else: # pragma: no cover
raise OperationNotSupported(f, 'factor_list')
return f.rep.dom.to_sympy(coeff), [(f.per(g), k) for g, k in factors]
def factor_list_include(f):
"""
Returns a list of irreducible factors of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> f = 2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y
>>> Poly(f).factor_list_include()
[(Poly(2*x + 2*y, x, y, domain='ZZ'), 1),
(Poly(x**2 + 1, x, y, domain='ZZ'), 2)]
"""
if hasattr(f.rep, 'factor_list_include'):
try:
factors = f.rep.factor_list_include()
except DomainError:
return [(f, 1)]
else: # pragma: no cover
raise OperationNotSupported(f, 'factor_list_include')
return [(f.per(g), k) for g, k in factors]
def intervals(f, all=False, eps=None, inf=None, sup=None, fast=False, sqf=False):
"""
Compute isolating intervals for roots of ``f``.
For real roots the Vincent-Akritas-Strzebonski (VAS) continued fractions method is used.
        References
        ==========
        1. Alkiviadis G. Akritas and Adam W. Strzebonski: A Comparative Study of Two Real Root
        Isolation Methods. Nonlinear Analysis: Modelling and Control, Vol. 10, No. 4, 297-304, 2005.
        2. Alkiviadis G. Akritas, Adam W. Strzebonski and Panagiotis S. Vigklas: Improving the
        Performance of the Continued Fractions Method Using New Bounds of Positive Roots. Nonlinear
        Analysis: Modelling and Control, Vol. 13, No. 3, 265-279, 2008.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 3, x).intervals()
[((-2, -1), 1), ((1, 2), 1)]
>>> Poly(x**2 - 3, x).intervals(eps=1e-2)
[((-26/15, -19/11), 1), ((19/11, 26/15), 1)]
"""
if eps is not None:
eps = QQ.convert(eps)
if eps <= 0:
raise ValueError("'eps' must be a positive rational")
if inf is not None:
inf = QQ.convert(inf)
if sup is not None:
sup = QQ.convert(sup)
if hasattr(f.rep, 'intervals'):
result = f.rep.intervals(
all=all, eps=eps, inf=inf, sup=sup, fast=fast, sqf=sqf)
else: # pragma: no cover
raise OperationNotSupported(f, 'intervals')
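        # With ``sqf=True`` the intervals carry no multiplicities;
        # otherwise each interval is paired with the multiplicity of
        # the corresponding root.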
if sqf:
def _real(interval):
s, t = interval
return (QQ.to_sympy(s), QQ.to_sympy(t))
if not all:
return list(map(_real, result))
def _complex(rectangle):
(u, v), (s, t) = rectangle
return (QQ.to_sympy(u) + I*QQ.to_sympy(v),
QQ.to_sympy(s) + I*QQ.to_sympy(t))
real_part, complex_part = result
return list(map(_real, real_part)), list(map(_complex, complex_part))
else:
def _real(interval):
(s, t), k = interval
return ((QQ.to_sympy(s), QQ.to_sympy(t)), k)
if not all:
return list(map(_real, result))
def _complex(rectangle):
((u, v), (s, t)), k = rectangle
return ((QQ.to_sympy(u) + I*QQ.to_sympy(v),
QQ.to_sympy(s) + I*QQ.to_sympy(t)), k)
real_part, complex_part = result
return list(map(_real, real_part)), list(map(_complex, complex_part))
def refine_root(f, s, t, eps=None, steps=None, fast=False, check_sqf=False):
"""
Refine an isolating interval of a root to the given precision.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 3, x).refine_root(1, 2, eps=1e-2)
(19/11, 26/15)
"""
if check_sqf and not f.is_sqf:
raise PolynomialError("only square-free polynomials supported")
s, t = QQ.convert(s), QQ.convert(t)
if eps is not None:
eps = QQ.convert(eps)
if eps <= 0:
raise ValueError("'eps' must be a positive rational")
if steps is not None:
steps = int(steps)
elif eps is None:
steps = 1
if hasattr(f.rep, 'refine_root'):
S, T = f.rep.refine_root(s, t, eps=eps, steps=steps, fast=fast)
else: # pragma: no cover
raise OperationNotSupported(f, 'refine_root')
return QQ.to_sympy(S), QQ.to_sympy(T)
def count_roots(f, inf=None, sup=None):
"""
Return the number of roots of ``f`` in ``[inf, sup]`` interval.
Examples
========
>>> from sympy import Poly, I
>>> from sympy.abc import x
>>> Poly(x**4 - 4, x).count_roots(-3, 3)
2
>>> Poly(x**4 - 4, x).count_roots(0, 1 + 3*I)
1
"""
inf_real, sup_real = True, True
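        # Bounds are treated as real unless they have a non-zero imaginary
        # part, in which case roots are counted in a complex rectangle.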
if inf is not None:
inf = sympify(inf)
if inf is S.NegativeInfinity:
inf = None
else:
re, im = inf.as_real_imag()
if not im:
inf = QQ.convert(inf)
else:
inf, inf_real = list(map(QQ.convert, (re, im))), False
if sup is not None:
sup = sympify(sup)
if sup is S.Infinity:
sup = None
else:
re, im = sup.as_real_imag()
if not im:
sup = QQ.convert(sup)
else:
sup, sup_real = list(map(QQ.convert, (re, im))), False
if inf_real and sup_real:
if hasattr(f.rep, 'count_real_roots'):
count = f.rep.count_real_roots(inf=inf, sup=sup)
else: # pragma: no cover
raise OperationNotSupported(f, 'count_real_roots')
else:
if inf_real and inf is not None:
inf = (inf, QQ.zero)
if sup_real and sup is not None:
sup = (sup, QQ.zero)
if hasattr(f.rep, 'count_complex_roots'):
count = f.rep.count_complex_roots(inf=inf, sup=sup)
else: # pragma: no cover
raise OperationNotSupported(f, 'count_complex_roots')
return Integer(count)
def root(f, index, radicals=True):
"""
Get an indexed root of a polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = Poly(2*x**3 - 7*x**2 + 4*x + 4)
>>> f.root(0)
-1/2
>>> f.root(1)
2
>>> f.root(2)
2
>>> f.root(3)
Traceback (most recent call last):
...
IndexError: root index out of [-3, 2] range, got 3
>>> Poly(x**5 + x + 1).root(0)
RootOf(x**3 - x**2 + 1, 0)
"""
return sympy.polys.rootoftools.RootOf(f, index, radicals=radicals)
def real_roots(f, multiple=True, radicals=True):
"""
Return a list of real roots with multiplicities.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**3 - 7*x**2 + 4*x + 4).real_roots()
[-1/2, 2, 2]
>>> Poly(x**3 + x + 1).real_roots()
[RootOf(x**3 + x + 1, 0)]
"""
reals = sympy.polys.rootoftools.RootOf.real_roots(f, radicals=radicals)
if multiple:
return reals
else:
return group(reals, multiple=False)
def all_roots(f, multiple=True, radicals=True):
"""
Return a list of real and complex roots with multiplicities.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**3 - 7*x**2 + 4*x + 4).all_roots()
[-1/2, 2, 2]
>>> Poly(x**3 + x + 1).all_roots()
[RootOf(x**3 + x + 1, 0),
RootOf(x**3 + x + 1, 1),
RootOf(x**3 + x + 1, 2)]
"""
roots = sympy.polys.rootoftools.RootOf.all_roots(f, radicals=radicals)
if multiple:
return roots
else:
return group(roots, multiple=False)
def nroots(f, n=15, maxsteps=50, cleanup=True):
"""
Compute numerical approximations of roots of ``f``.
Parameters
==========
        n ... the number of digits to calculate
        maxsteps ... the maximum number of iterations to perform
        If the desired accuracy ``n`` cannot be reached within ``maxsteps``
        iterations, ``NoConvergence`` is raised; rerun with a larger
        ``maxsteps``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 3).nroots(n=15)
[-1.73205080756888, 1.73205080756888]
>>> Poly(x**2 - 3).nroots(n=30)
[-1.73205080756887729352744634151, 1.73205080756887729352744634151]
"""
if f.is_multivariate:
raise MultivariatePolynomialError(
"can't compute numerical roots of %s" % f)
if f.degree() <= 0:
return []
# For integer and rational coefficients, convert them to integers only
# (for accuracy). Otherwise just try to convert the coefficients to
# mpmath.mpc and raise an exception if the conversion fails.
if f.rep.dom is ZZ:
coeffs = [int(coeff) for coeff in f.all_coeffs()]
elif f.rep.dom is QQ:
denoms = [coeff.q for coeff in f.all_coeffs()]
from sympy.core.numbers import ilcm
fac = ilcm(*denoms)
coeffs = [int(coeff*fac) for coeff in f.all_coeffs()]
else:
coeffs = [coeff.evalf(n=n).as_real_imag()
for coeff in f.all_coeffs()]
try:
coeffs = [sympy.mpmath.mpc(*coeff) for coeff in coeffs]
except TypeError:
raise DomainError("Numerical domain expected, got %s" % \
f.rep.dom)
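        # Temporarily raise mpmath's working precision to ``n`` digits;
        # the original precision is restored in the ``finally`` block.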
dps = sympy.mpmath.mp.dps
sympy.mpmath.mp.dps = n
try:
# We need to add extra precision to guard against losing accuracy.
# 10 times the degree of the polynomial seems to work well.
roots = sympy.mpmath.polyroots(coeffs, maxsteps=maxsteps,
cleanup=cleanup, error=False, extraprec=f.degree()*10)
# Mpmath puts real roots first, then complex ones (as does all_roots)
# so we make sure this convention holds here, too.
roots = list(map(sympify,
sorted(roots, key=lambda r: (1 if r.imag else 0, r.real, r.imag))))
except NoConvergence:
raise NoConvergence(
'convergence to root failed; try n < %s or maxsteps > %s' % (
n, maxsteps))
finally:
sympy.mpmath.mp.dps = dps
return roots
def ground_roots(f):
"""
Compute roots of ``f`` by factorization in the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**6 - 4*x**4 + 4*x**3 - x**2).ground_roots()
{0: 2, 1: 2}
"""
if f.is_multivariate:
raise MultivariatePolynomialError(
"can't compute ground roots of %s" % f)
roots = {}
for factor, k in f.factor_list()[1]:
if factor.is_linear:
a, b = factor.all_coeffs()
roots[-b/a] = k
return roots
def nth_power_roots_poly(f, n):
"""
Construct a polynomial with n-th powers of roots of ``f``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = Poly(x**4 - x**2 + 1)
>>> f.nth_power_roots_poly(2)
Poly(x**4 - 2*x**3 + 3*x**2 - 2*x + 1, x, domain='ZZ')
>>> f.nth_power_roots_poly(3)
Poly(x**4 + 2*x**2 + 1, x, domain='ZZ')
>>> f.nth_power_roots_poly(4)
Poly(x**4 + 2*x**3 + 3*x**2 + 2*x + 1, x, domain='ZZ')
>>> f.nth_power_roots_poly(12)
Poly(x**4 - 4*x**3 + 6*x**2 - 4*x + 1, x, domain='ZZ')
"""
if f.is_multivariate:
raise MultivariatePolynomialError(
"must be a univariate polynomial")
N = sympify(n)
if N.is_Integer and N >= 1:
n = int(N)
else:
raise ValueError("'n' must an integer and n >= 1, got %s" % n)
x = f.gen
t = Dummy('t')
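        # The resultant res_x(f(x), x**n - t), viewed as a polynomial in
        # ``t``, vanishes exactly at t = r**n for each root ``r`` of ``f``,
        # so substituting ``x`` for ``t`` yields the desired polynomial.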
r = f.resultant(f.__class__.from_expr(x**n - t, x, t))
return r.replace(t, x)
def cancel(f, g, include=False):
"""
Cancel common factors in a rational function ``f/g``.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x))
(1, Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))
>>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x), include=True)
(Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))
"""
dom, per, F, G = f._unify(g)
if hasattr(F, 'cancel'):
result = F.cancel(G, include=include)
else: # pragma: no cover
raise OperationNotSupported(f, 'cancel')
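        # With ``include=False`` the extracted content is returned
        # separately as the SymPy ratio ``cp/cq``; with ``include=True``
        # it is absorbed into the returned polynomials.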
if not include:
if dom.has_assoc_Ring:
dom = dom.get_ring()
cp, cq, p, q = result
cp = dom.to_sympy(cp)
cq = dom.to_sympy(cq)
return cp/cq, per(p), per(q)
else:
return tuple(map(per, result))
@property
def is_zero(f):
"""
Returns ``True`` if ``f`` is a zero polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(0, x).is_zero
True
>>> Poly(1, x).is_zero
False
"""
return f.rep.is_zero
@property
def is_one(f):
"""
Returns ``True`` if ``f`` is a unit polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(0, x).is_one
False
>>> Poly(1, x).is_one
True
"""
return f.rep.is_one
@property
def is_sqf(f):
"""
Returns ``True`` if ``f`` is a square-free polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 - 2*x + 1, x).is_sqf
False
>>> Poly(x**2 - 1, x).is_sqf
True
"""
return f.rep.is_sqf
@property
def is_monic(f):
"""
Returns ``True`` if the leading coefficient of ``f`` is one.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x + 2, x).is_monic
True
>>> Poly(2*x + 2, x).is_monic
False
"""
return f.rep.is_monic
@property
def is_primitive(f):
"""
Returns ``True`` if GCD of the coefficients of ``f`` is one.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(2*x**2 + 6*x + 12, x).is_primitive
False
>>> Poly(x**2 + 3*x + 6, x).is_primitive
True
"""
return f.rep.is_primitive
@property
def is_ground(f):
"""
Returns ``True`` if ``f`` is an element of the ground domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x, x).is_ground
False
>>> Poly(2, x).is_ground
True
>>> Poly(y, x).is_ground
True
"""
return f.rep.is_ground
@property
def is_linear(f):
"""
Returns ``True`` if ``f`` is linear in all its variables.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x + y + 2, x, y).is_linear
True
>>> Poly(x*y + 2, x, y).is_linear
False
"""
return f.rep.is_linear
@property
def is_quadratic(f):
"""
Returns ``True`` if ``f`` is quadratic in all its variables.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x*y + 2, x, y).is_quadratic
True
>>> Poly(x*y**2 + 2, x, y).is_quadratic
False
"""
return f.rep.is_quadratic
@property
def is_monomial(f):
"""
Returns ``True`` if ``f`` is zero or has only one term.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(3*x**2, x).is_monomial
True
>>> Poly(3*x**2 + 1, x).is_monomial
False
"""
return f.rep.is_monomial
@property
def is_homogeneous(f):
"""
Returns ``True`` if ``f`` is a homogeneous polynomial.
        A homogeneous polynomial is a polynomial whose monomials with
        non-zero coefficients all have the same total degree. If you want not
only to check if a polynomial is homogeneous but also compute its
homogeneous order, then use :func:`Poly.homogeneous_order`.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + x*y, x, y).is_homogeneous
True
>>> Poly(x**3 + x*y, x, y).is_homogeneous
False
"""
return f.rep.is_homogeneous
@property
def is_irreducible(f):
"""
Returns ``True`` if ``f`` has no factors over its domain.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> Poly(x**2 + x + 1, x, modulus=2).is_irreducible
True
>>> Poly(x**2 + 1, x, modulus=2).is_irreducible
False
"""
return f.rep.is_irreducible
@property
def is_univariate(f):
"""
Returns ``True`` if ``f`` is a univariate polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + x + 1, x).is_univariate
True
>>> Poly(x*y**2 + x*y + 1, x, y).is_univariate
False
>>> Poly(x*y**2 + x*y + 1, x).is_univariate
True
>>> Poly(x**2 + x + 1, x, y).is_univariate
False
"""
return len(f.gens) == 1
@property
def is_multivariate(f):
"""
Returns ``True`` if ``f`` is a multivariate polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x, y
>>> Poly(x**2 + x + 1, x).is_multivariate
False
>>> Poly(x*y**2 + x*y + 1, x, y).is_multivariate
True
>>> Poly(x*y**2 + x*y + 1, x).is_multivariate
False
>>> Poly(x**2 + x + 1, x, y).is_multivariate
True
"""
return len(f.gens) != 1
@property
def is_cyclotomic(f):
"""
        Returns ``True`` if ``f`` is a cyclotomic polynomial.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> f = x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1
>>> Poly(f).is_cyclotomic
False
>>> g = x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1
>>> Poly(g).is_cyclotomic
True
"""
return f.rep.is_cyclotomic
def __abs__(f):
return f.abs()
def __neg__(f):
return f.neg()
@_sympifyit('g', NotImplemented)
def __add__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return f.as_expr() + g
return f.add(g)
@_sympifyit('g', NotImplemented)
def __radd__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return g + f.as_expr()
return g.add(f)
@_sympifyit('g', NotImplemented)
def __sub__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return f.as_expr() - g
return f.sub(g)
@_sympifyit('g', NotImplemented)
def __rsub__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return g - f.as_expr()
return g.sub(f)
@_sympifyit('g', NotImplemented)
def __mul__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return f.as_expr()*g
return f.mul(g)
@_sympifyit('g', NotImplemented)
def __rmul__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, *f.gens)
except PolynomialError:
return g*f.as_expr()
return g.mul(f)
@_sympifyit('n', NotImplemented)
def __pow__(f, n):
if n.is_Integer and n >= 0:
return f.pow(n)
else:
return f.as_expr()**n
@_sympifyit('g', NotImplemented)
def __divmod__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return f.div(g)
@_sympifyit('g', NotImplemented)
def __rdivmod__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return g.div(f)
@_sympifyit('g', NotImplemented)
def __mod__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return f.rem(g)
@_sympifyit('g', NotImplemented)
def __rmod__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return g.rem(f)
@_sympifyit('g', NotImplemented)
def __floordiv__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return f.quo(g)
@_sympifyit('g', NotImplemented)
def __rfloordiv__(f, g):
if not g.is_Poly:
g = f.__class__(g, *f.gens)
return g.quo(f)
@_sympifyit('g', NotImplemented)
def __div__(f, g):
return f.as_expr()/g.as_expr()
@_sympifyit('g', NotImplemented)
def __rdiv__(f, g):
return g.as_expr()/f.as_expr()
__truediv__ = __div__
__rtruediv__ = __rdiv__
@_sympifyit('g', NotImplemented)
def __eq__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, f.gens, domain=f.get_domain())
except (PolynomialError, DomainError, CoercionFailed):
return False
if f.gens != g.gens:
return False
if f.rep.dom != g.rep.dom:
try:
dom = f.rep.dom.unify(g.rep.dom, f.gens)
except UnificationFailed:
return False
f = f.set_domain(dom)
g = g.set_domain(dom)
return f.rep == g.rep
@_sympifyit('g', NotImplemented)
def __ne__(f, g):
return not f.__eq__(g)
def __nonzero__(f):
return not f.is_zero
__bool__ = __nonzero__
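    # ``eq``/``ne`` with ``strict=True`` require identical generators and
    # representation, whereas the default comparison unifies the ground
    # domains first (see ``__eq__`` above).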
def eq(f, g, strict=False):
if not strict:
return f.__eq__(g)
else:
return f._strict_eq(sympify(g))
def ne(f, g, strict=False):
return not f.eq(g, strict=strict)
def _strict_eq(f, g):
return isinstance(g, f.__class__) and f.gens == g.gens and f.rep.eq(g.rep, strict=True)
@public
class PurePoly(Poly):
"""Class for representing pure polynomials. """
def _hashable_content(self):
"""Allow SymPy to hash Poly instances. """
return (self.rep,)
def __hash__(self):
return super(PurePoly, self).__hash__()
@property
def free_symbols(self):
"""
Free symbols of a polynomial.
Examples
========
>>> from sympy import PurePoly
>>> from sympy.abc import x, y
>>> PurePoly(x**2 + 1).free_symbols
set()
>>> PurePoly(x**2 + y).free_symbols
set()
>>> PurePoly(x**2 + y, x).free_symbols
set([y])
"""
return self.free_symbols_in_domain
@_sympifyit('g', NotImplemented)
def __eq__(f, g):
if not g.is_Poly:
try:
g = f.__class__(g, f.gens, domain=f.get_domain())
except (PolynomialError, DomainError, CoercionFailed):
return False
if len(f.gens) != len(g.gens):
return False
if f.rep.dom != g.rep.dom:
try:
dom = f.rep.dom.unify(g.rep.dom, f.gens)
except UnificationFailed:
return False
f = f.set_domain(dom)
g = g.set_domain(dom)
return f.rep == g.rep
def _strict_eq(f, g):
return isinstance(g, f.__class__) and f.rep.eq(g.rep, strict=True)
def _unify(f, g):
g = sympify(g)
if not g.is_Poly:
try:
return f.rep.dom, f.per, f.rep, f.rep.per(f.rep.dom.from_sympy(g))
except CoercionFailed:
raise UnificationFailed("can't unify %s with %s" % (f, g))
if len(f.gens) != len(g.gens):
raise UnificationFailed("can't unify %s with %s" % (f, g))
if not (isinstance(f.rep, DMP) and isinstance(g.rep, DMP)):
raise UnificationFailed("can't unify %s with %s" % (f, g))
cls = f.__class__
gens = f.gens
dom = f.rep.dom.unify(g.rep.dom, gens)
F = f.rep.convert(dom)
G = g.rep.convert(dom)
def per(rep, dom=dom, gens=gens, remove=None):
if remove is not None:
gens = gens[:remove] + gens[remove + 1:]
if not gens:
return dom.to_sympy(rep)
return cls.new(rep, *gens)
return dom, per, F, G
@public
def poly_from_expr(expr, *gens, **args):
"""Construct a polynomial from an expression. """
opt = options.build_options(gens, args)
return _poly_from_expr(expr, opt)
def _poly_from_expr(expr, opt):
"""Construct a polynomial from an expression. """
orig, expr = expr, sympify(expr)
if not isinstance(expr, Basic):
raise PolificationFailed(opt, orig, expr)
elif expr.is_Poly:
poly = expr.__class__._from_poly(expr, opt)
opt.gens = poly.gens
opt.domain = poly.domain
if opt.polys is None:
opt.polys = True
return poly, opt
elif opt.expand:
expr = expr.expand()
try:
rep, opt = _dict_from_expr(expr, opt)
except GeneratorsNeeded:
raise PolificationFailed(opt, orig, expr)
monoms, coeffs = list(zip(*list(rep.items())))
domain = opt.domain
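    # If no domain was given explicitly, pick the smallest domain that
    # can represent all of the coefficients (e.g. ZZ, QQ or EX).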
if domain is None:
opt.domain, coeffs = construct_domain(coeffs, opt=opt)
else:
coeffs = list(map(domain.from_sympy, coeffs))
rep = dict(list(zip(monoms, coeffs)))
poly = Poly._from_dict(rep, opt)
if opt.polys is None:
opt.polys = False
return poly, opt
@public
def parallel_poly_from_expr(exprs, *gens, **args):
"""Construct polynomials from expressions. """
opt = options.build_options(gens, args)
return _parallel_poly_from_expr(exprs, opt)
def _parallel_poly_from_expr(exprs, opt):
"""Construct polynomials from expressions. """
if len(exprs) == 2:
f, g = exprs
if isinstance(f, Poly) and isinstance(g, Poly):
f = f.__class__._from_poly(f, opt)
g = g.__class__._from_poly(g, opt)
f, g = f.unify(g)
opt.gens = f.gens
opt.domain = f.domain
if opt.polys is None:
opt.polys = True
return [f, g], opt
origs, exprs = list(exprs), []
_exprs, _polys = [], []
failed = False
for i, expr in enumerate(origs):
expr = sympify(expr)
if isinstance(expr, Basic):
if expr.is_Poly:
_polys.append(i)
else:
_exprs.append(i)
if opt.expand:
expr = expr.expand()
else:
failed = True
exprs.append(expr)
if failed:
raise PolificationFailed(opt, origs, exprs, True)
if _polys:
# XXX: this is a temporary solution
for i in _polys:
exprs[i] = exprs[i].as_expr()
try:
reps, opt = _parallel_dict_from_expr(exprs, opt)
except GeneratorsNeeded:
raise PolificationFailed(opt, origs, exprs, True)
for k in opt.gens:
if isinstance(k, Piecewise):
raise PolynomialError("Piecewise generators do not make sense")
coeffs_list, lengths = [], []
all_monoms = []
all_coeffs = []
for rep in reps:
monoms, coeffs = list(zip(*list(rep.items())))
coeffs_list.extend(coeffs)
all_monoms.append(monoms)
lengths.append(len(coeffs))
domain = opt.domain
if domain is None:
opt.domain, coeffs_list = construct_domain(coeffs_list, opt=opt)
else:
coeffs_list = list(map(domain.from_sympy, coeffs_list))
for k in lengths:
all_coeffs.append(coeffs_list[:k])
coeffs_list = coeffs_list[k:]
polys = []
for monoms, coeffs in zip(all_monoms, all_coeffs):
rep = dict(list(zip(monoms, coeffs)))
poly = Poly._from_dict(rep, opt)
polys.append(poly)
if opt.polys is None:
opt.polys = bool(_polys)
return polys, opt
def _update_args(args, key, value):
"""Add a new ``(key, value)`` pair to arguments ``dict``. """
args = dict(args)
if key not in args:
args[key] = value
return args
@public
def degree(f, *gens, **args):
"""
Return the degree of ``f`` in the given variable.
The degree of 0 is negative infinity.
Examples
========
>>> from sympy import degree
>>> from sympy.abc import x, y
>>> degree(x**2 + y*x + 1, gen=x)
2
>>> degree(x**2 + y*x + 1, gen=y)
1
>>> degree(0, x)
-oo
"""
options.allowed_flags(args, ['gen', 'polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('degree', 1, exc)
return sympify(F.degree(opt.gen))
@public
def degree_list(f, *gens, **args):
"""
Return a list of degrees of ``f`` in all variables.
Examples
========
>>> from sympy import degree_list
>>> from sympy.abc import x, y
>>> degree_list(x**2 + y*x + 1)
(2, 1)
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('degree_list', 1, exc)
degrees = F.degree_list()
return tuple(map(Integer, degrees))
@public
def LC(f, *gens, **args):
"""
Return the leading coefficient of ``f``.
Examples
========
>>> from sympy import LC
>>> from sympy.abc import x, y
>>> LC(4*x**2 + 2*x*y**2 + x*y + 3*y)
4
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('LC', 1, exc)
return F.LC(order=opt.order)
@public
def LM(f, *gens, **args):
"""
Return the leading monomial of ``f``.
Examples
========
>>> from sympy import LM
>>> from sympy.abc import x, y
>>> LM(4*x**2 + 2*x*y**2 + x*y + 3*y)
x**2
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('LM', 1, exc)
monom = F.LM(order=opt.order)
return monom.as_expr()
@public
def LT(f, *gens, **args):
"""
Return the leading term of ``f``.
Examples
========
>>> from sympy import LT
>>> from sympy.abc import x, y
>>> LT(4*x**2 + 2*x*y**2 + x*y + 3*y)
4*x**2
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('LT', 1, exc)
monom, coeff = F.LT(order=opt.order)
return coeff*monom.as_expr()
@public
def pdiv(f, g, *gens, **args):
"""
Compute polynomial pseudo-division of ``f`` and ``g``.
Examples
========
>>> from sympy import pdiv
>>> from sympy.abc import x
>>> pdiv(x**2 + 1, 2*x - 4)
(2*x + 4, 20)
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('pdiv', 2, exc)
q, r = F.pdiv(G)
if not opt.polys:
return q.as_expr(), r.as_expr()
else:
return q, r
@public
def prem(f, g, *gens, **args):
"""
Compute polynomial pseudo-remainder of ``f`` and ``g``.
Examples
========
>>> from sympy import prem
>>> from sympy.abc import x
>>> prem(x**2 + 1, 2*x - 4)
20
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('prem', 2, exc)
r = F.prem(G)
if not opt.polys:
return r.as_expr()
else:
return r
@public
def pquo(f, g, *gens, **args):
"""
Compute polynomial pseudo-quotient of ``f`` and ``g``.
Examples
========
>>> from sympy import pquo
>>> from sympy.abc import x
>>> pquo(x**2 + 1, 2*x - 4)
2*x + 4
>>> pquo(x**2 - 1, 2*x - 1)
2*x + 1
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('pquo', 2, exc)
try:
q = F.pquo(G)
except ExactQuotientFailed:
raise ExactQuotientFailed(f, g)
if not opt.polys:
return q.as_expr()
else:
return q
@public
def pexquo(f, g, *gens, **args):
"""
Compute polynomial exact pseudo-quotient of ``f`` and ``g``.
Examples
========
>>> from sympy import pexquo
>>> from sympy.abc import x
>>> pexquo(x**2 - 1, 2*x - 2)
2*x + 2
>>> pexquo(x**2 + 1, 2*x - 4)
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('pexquo', 2, exc)
q = F.pexquo(G)
if not opt.polys:
return q.as_expr()
else:
return q
@public
def div(f, g, *gens, **args):
"""
Compute polynomial division of ``f`` and ``g``.
Examples
========
>>> from sympy import div, ZZ, QQ
>>> from sympy.abc import x
>>> div(x**2 + 1, 2*x - 4, domain=ZZ)
(0, x**2 + 1)
>>> div(x**2 + 1, 2*x - 4, domain=QQ)
(x/2 + 1, 5)
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('div', 2, exc)
q, r = F.div(G, auto=opt.auto)
if not opt.polys:
return q.as_expr(), r.as_expr()
else:
return q, r
@public
def rem(f, g, *gens, **args):
"""
Compute polynomial remainder of ``f`` and ``g``.
Examples
========
>>> from sympy import rem, ZZ, QQ
>>> from sympy.abc import x
>>> rem(x**2 + 1, 2*x - 4, domain=ZZ)
x**2 + 1
>>> rem(x**2 + 1, 2*x - 4, domain=QQ)
5
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('rem', 2, exc)
r = F.rem(G, auto=opt.auto)
if not opt.polys:
return r.as_expr()
else:
return r
@public
def quo(f, g, *gens, **args):
"""
Compute polynomial quotient of ``f`` and ``g``.
Examples
========
>>> from sympy import quo
>>> from sympy.abc import x
>>> quo(x**2 + 1, 2*x - 4)
x/2 + 1
>>> quo(x**2 - 1, x - 1)
x + 1
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('quo', 2, exc)
q = F.quo(G, auto=opt.auto)
if not opt.polys:
return q.as_expr()
else:
return q
@public
def exquo(f, g, *gens, **args):
"""
Compute polynomial exact quotient of ``f`` and ``g``.
Examples
========
>>> from sympy import exquo
>>> from sympy.abc import x
>>> exquo(x**2 - 1, x - 1)
x + 1
>>> exquo(x**2 + 1, 2*x - 4)
Traceback (most recent call last):
...
ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('exquo', 2, exc)
q = F.exquo(G, auto=opt.auto)
if not opt.polys:
return q.as_expr()
else:
return q
@public
def half_gcdex(f, g, *gens, **args):
"""
Half extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.
Examples
========
>>> from sympy import half_gcdex
>>> from sympy.abc import x
>>> half_gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
(-x/5 + 3/5, x + 1)
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
s, h = domain.half_gcdex(a, b)
except NotImplementedError:
raise ComputationFailed('half_gcdex', 2, exc)
else:
return domain.to_sympy(s), domain.to_sympy(h)
s, h = F.half_gcdex(G, auto=opt.auto)
if not opt.polys:
return s.as_expr(), h.as_expr()
else:
return s, h
@public
def gcdex(f, g, *gens, **args):
"""
Extended Euclidean algorithm of ``f`` and ``g``.
Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.
Examples
========
>>> from sympy import gcdex
>>> from sympy.abc import x
>>> gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
(-x/5 + 3/5, x**2/5 - 6*x/5 + 2, x + 1)
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
s, t, h = domain.gcdex(a, b)
except NotImplementedError:
raise ComputationFailed('gcdex', 2, exc)
else:
return domain.to_sympy(s), domain.to_sympy(t), domain.to_sympy(h)
s, t, h = F.gcdex(G, auto=opt.auto)
if not opt.polys:
return s.as_expr(), t.as_expr(), h.as_expr()
else:
return s, t, h
@public
def invert(f, g, *gens, **args):
"""
Invert ``f`` modulo ``g`` when possible.
Examples
========
>>> from sympy import invert
>>> from sympy.abc import x
>>> invert(x**2 - 1, 2*x - 1)
-4/3
>>> invert(x**2 - 1, x - 1)
Traceback (most recent call last):
...
NotInvertible: zero divisor
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
return domain.to_sympy(domain.invert(a, b))
except NotImplementedError:
raise ComputationFailed('invert', 2, exc)
h = F.invert(G, auto=opt.auto)
if not opt.polys:
return h.as_expr()
else:
return h
@public
def subresultants(f, g, *gens, **args):
"""
Compute subresultant PRS of ``f`` and ``g``.
Examples
========
>>> from sympy import subresultants
>>> from sympy.abc import x
>>> subresultants(x**2 + 1, x**2 - 1)
[x**2 + 1, x**2 - 1, -2]
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('subresultants', 2, exc)
result = F.subresultants(G)
if not opt.polys:
return [r.as_expr() for r in result]
else:
return result
@public
def resultant(f, g, *gens, **args):
"""
Compute resultant of ``f`` and ``g``.
Examples
========
>>> from sympy import resultant
>>> from sympy.abc import x
>>> resultant(x**2 + 1, x**2 - 1)
4
"""
includePRS = args.pop('includePRS', False)
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('resultant', 2, exc)
if includePRS:
result, R = F.resultant(G, includePRS=includePRS)
else:
result = F.resultant(G)
if not opt.polys:
if includePRS:
return result.as_expr(), [r.as_expr() for r in R]
return result.as_expr()
else:
if includePRS:
return result, R
return result
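# A brief sketch (not part of the original module) of the otherwise
# undocumented ``includePRS`` flag handled above; it additionally
# returns the subresultant PRS used in the computation:
#
#     >>> from sympy import resultant
#     >>> from sympy.abc import x
#     >>> resultant(x**2 + 1, x**2 - 1, includePRS=True)
#     (4, [x**2 + 1, x**2 - 1, -2])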
@public
def discriminant(f, *gens, **args):
"""
Compute discriminant of ``f``.
Examples
========
>>> from sympy import discriminant
>>> from sympy.abc import x
>>> discriminant(x**2 + 2*x + 3)
-8
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('discriminant', 1, exc)
result = F.discriminant()
if not opt.polys:
return result.as_expr()
else:
return result
@public
def cofactors(f, g, *gens, **args):
"""
Compute GCD and cofactors of ``f`` and ``g``.
Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are the so-called cofactors
of ``f`` and ``g``.
Examples
========
>>> from sympy import cofactors
>>> from sympy.abc import x
>>> cofactors(x**2 - 1, x**2 - 3*x + 2)
(x - 1, x + 1, x - 2)
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
h, cff, cfg = domain.cofactors(a, b)
except NotImplementedError:
raise ComputationFailed('cofactors', 2, exc)
else:
return domain.to_sympy(h), domain.to_sympy(cff), domain.to_sympy(cfg)
h, cff, cfg = F.cofactors(G)
if not opt.polys:
return h.as_expr(), cff.as_expr(), cfg.as_expr()
else:
return h, cff, cfg
@public
def gcd_list(seq, *gens, **args):
"""
Compute GCD of a list of polynomials.
Examples
========
>>> from sympy import gcd_list
>>> from sympy.abc import x
>>> gcd_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
x - 1
"""
seq = sympify(seq)
if not gens and not args:
domain, numbers = construct_domain(seq)
if not numbers:
return domain.zero
elif domain.is_Numerical:
result, numbers = numbers[0], numbers[1:]
for number in numbers:
result = domain.gcd(result, number)
if domain.is_one(result):
break
return domain.to_sympy(result)
options.allowed_flags(args, ['polys'])
try:
polys, opt = parallel_poly_from_expr(seq, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('gcd_list', len(seq), exc)
if not polys:
if not opt.polys:
return S.Zero
else:
return Poly(0, opt=opt)
result, polys = polys[0], polys[1:]
for poly in polys:
result = result.gcd(poly)
if result.is_one:
break
if not opt.polys:
return result.as_expr()
else:
return result
@public
def gcd(f, g=None, *gens, **args):
"""
Compute GCD of ``f`` and ``g``.
Examples
========
>>> from sympy import gcd
>>> from sympy.abc import x
>>> gcd(x**2 - 1, x**2 - 3*x + 2)
x - 1
"""
if hasattr(f, '__iter__'):
if g is not None:
gens = (g,) + gens
return gcd_list(f, *gens, **args)
elif g is None:
raise TypeError("gcd() takes 2 arguments or a sequence of arguments")
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
return domain.to_sympy(domain.gcd(a, b))
except NotImplementedError:
raise ComputationFailed('gcd', 2, exc)
result = F.gcd(G)
if not opt.polys:
return result.as_expr()
else:
return result
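# Illustrative sketch (not in the original module): the dispatch above
# lets gcd() accept either two expressions or a single sequence, which
# is routed to gcd_list():
#
#     >>> from sympy import gcd
#     >>> from sympy.abc import x
#     >>> gcd(x**2 - 1, x - 1)          # two-argument form
#     x - 1
#     >>> gcd([x**2 - 1, x**3 - 1])     # sequence form, handled by gcd_list
#     x - 1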
@public
def lcm_list(seq, *gens, **args):
"""
Compute LCM of a list of polynomials.
Examples
========
>>> from sympy import lcm_list
>>> from sympy.abc import x
>>> lcm_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
x**5 - x**4 - 2*x**3 - x**2 + x + 2
"""
seq = sympify(seq)
if not gens and not args:
domain, numbers = construct_domain(seq)
if not numbers:
return domain.one
elif domain.is_Numerical:
result, numbers = numbers[0], numbers[1:]
for number in numbers:
result = domain.lcm(result, number)
return domain.to_sympy(result)
options.allowed_flags(args, ['polys'])
try:
polys, opt = parallel_poly_from_expr(seq, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('lcm_list', len(seq), exc)
if not polys:
if not opt.polys:
return S.One
else:
return Poly(1, opt=opt)
result, polys = polys[0], polys[1:]
for poly in polys:
result = result.lcm(poly)
if not opt.polys:
return result.as_expr()
else:
return result
@public
def lcm(f, g=None, *gens, **args):
"""
Compute LCM of ``f`` and ``g``.
Examples
========
>>> from sympy import lcm
>>> from sympy.abc import x
>>> lcm(x**2 - 1, x**2 - 3*x + 2)
x**3 - 2*x**2 - x + 2
"""
if hasattr(f, '__iter__'):
if g is not None:
gens = (g,) + gens
return lcm_list(f, *gens, **args)
elif g is None:
raise TypeError("lcm() takes 2 arguments or a sequence of arguments")
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
domain, (a, b) = construct_domain(exc.exprs)
try:
return domain.to_sympy(domain.lcm(a, b))
except NotImplementedError:
raise ComputationFailed('lcm', 2, exc)
result = F.lcm(G)
if not opt.polys:
return result.as_expr()
else:
return result
@public
def terms_gcd(f, *gens, **args):
"""
Remove GCD of terms from ``f``.
If the ``deep`` flag is True, then the arguments of ``f`` will have
terms_gcd applied to them.
If a fraction is factored out of ``f`` and ``f`` is an Add, then
an unevaluated Mul will be returned so that automatic simplification
does not redistribute it. The hint ``clear``, when set to False, can be
used to prevent such factoring when all coefficients are not fractions.
Examples
========
>>> from sympy import terms_gcd, cos
>>> from sympy.abc import x, y
>>> terms_gcd(x**6*y**2 + x**3*y, x, y)
x**3*y*(x**3*y + 1)
The default action of polys routines is to expand the expression
given to them. terms_gcd follows this behavior:
>>> terms_gcd((3+3*x)*(x+x*y))
3*x*(x*y + x + y + 1)
If this is not desired then the hint ``expand`` can be set to False.
In this case the expression will be treated as though it were comprised
of one or more terms:
>>> terms_gcd((3+3*x)*(x+x*y), expand=False)
(3*x + 3)*(x*y + x)
In order to traverse factors of a Mul or the arguments of other
functions, the ``deep`` hint can be used:
>>> terms_gcd((3 + 3*x)*(x + x*y), expand=False, deep=True)
3*x*(x + 1)*(y + 1)
>>> terms_gcd(cos(x + x*y), deep=True)
cos(x*(y + 1))
Rationals are factored out by default:
>>> terms_gcd(x + y/2)
(2*x + y)/2
Only the y-term had a coefficient that was a fraction; if one
does not want to factor out the 1/2 in cases like this, the
flag ``clear`` can be set to False:
>>> terms_gcd(x + y/2, clear=False)
x + y/2
>>> terms_gcd(x*y/2 + y**2, clear=False)
y*(x/2 + y)
The ``clear`` flag is ignored if all coefficients are fractions:
>>> terms_gcd(x/3 + y/2, clear=False)
(2*x + 3*y)/6
See Also
========
sympy.core.exprtools.gcd_terms, sympy.core.exprtools.factor_terms
"""
from sympy.core.relational import Equality
orig = sympify(f)
if not isinstance(f, Expr) or f.is_Atom:
return orig
if args.get('deep', False):
new = f.func(*[terms_gcd(a, *gens, **args) for a in f.args])
args.pop('deep')
args['expand'] = False
return terms_gcd(new, *gens, **args)
if isinstance(f, Equality):
return f
clear = args.pop('clear', True)
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
return exc.expr
J, f = F.terms_gcd()
if opt.domain.has_Ring:
if opt.domain.has_Field:
denom, f = f.clear_denoms(convert=True)
coeff, f = f.primitive()
if opt.domain.has_Field:
coeff /= denom
else:
coeff = S.One
term = Mul(*[x**j for x, j in zip(f.gens, J)])
if coeff == 1:
coeff = S.One
if term == 1:
return orig
if clear:
return _keep_coeff(coeff, term*f.as_expr())
# base the clearing on the form of the original expression, not
# the (perhaps) Mul that we have now
coeff, f = _keep_coeff(coeff, f.as_expr(), clear=False).as_coeff_Mul()
return _keep_coeff(coeff, term*f, clear=False)
@public
def trunc(f, p, *gens, **args):
"""
Reduce ``f`` modulo a constant ``p``.
Examples
========
>>> from sympy import trunc
>>> from sympy.abc import x
>>> trunc(2*x**3 + 3*x**2 + 5*x + 7, 3)
-x**3 - x + 1
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('trunc', 1, exc)
result = F.trunc(sympify(p))
if not opt.polys:
return result.as_expr()
else:
return result
@public
def monic(f, *gens, **args):
"""
Divide all coefficients of ``f`` by ``LC(f)``.
Examples
========
>>> from sympy import monic
>>> from sympy.abc import x
>>> monic(3*x**2 + 4*x + 2)
x**2 + 4*x/3 + 2/3
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('monic', 1, exc)
result = F.monic(auto=opt.auto)
if not opt.polys:
return result.as_expr()
else:
return result
@public
def content(f, *gens, **args):
"""
Compute GCD of coefficients of ``f``.
Examples
========
>>> from sympy import content
>>> from sympy.abc import x
>>> content(6*x**2 + 8*x + 12)
2
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('content', 1, exc)
return F.content()
@public
def primitive(f, *gens, **args):
"""
Compute content and the primitive form of ``f``.
Examples
========
>>> from sympy.polys.polytools import primitive
>>> from sympy.abc import x
>>> primitive(6*x**2 + 8*x + 12)
(2, 3*x**2 + 4*x + 6)
>>> eq = (2 + 2*x)*x + 2
Expansion is performed by default:
>>> primitive(eq)
(2, x**2 + x + 1)
Set ``expand`` to False to shut this off. Note that the
extraction will not be recursive; use the as_content_primitive method
for recursive, non-destructive Rational extraction.
>>> primitive(eq, expand=False)
(1, x*(2*x + 2) + 2)
>>> eq.as_content_primitive()
(2, x*(x + 1) + 1)
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('primitive', 1, exc)
cont, result = F.primitive()
if not opt.polys:
return cont, result.as_expr()
else:
return cont, result
@public
def compose(f, g, *gens, **args):
"""
Compute functional composition ``f(g)``.
Examples
========
>>> from sympy import compose
>>> from sympy.abc import x
>>> compose(x**2 + x, x - 1)
x**2 - x
"""
options.allowed_flags(args, ['polys'])
try:
(F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('compose', 2, exc)
result = F.compose(G)
if not opt.polys:
return result.as_expr()
else:
return result
@public
def decompose(f, *gens, **args):
"""
Compute functional decomposition of ``f``.
Examples
========
>>> from sympy import decompose
>>> from sympy.abc import x
>>> decompose(x**4 + 2*x**3 - x - 1)
[x**2 - x - 1, x**2 + x]
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('decompose', 1, exc)
result = F.decompose()
if not opt.polys:
return [r.as_expr() for r in result]
else:
return result
@public
def sturm(f, *gens, **args):
"""
Compute Sturm sequence of ``f``.
Examples
========
>>> from sympy import sturm
>>> from sympy.abc import x
>>> sturm(x**3 - 2*x**2 + x - 3)
[x**3 - 2*x**2 + x - 3, 3*x**2 - 4*x + 1, 2*x/9 + 25/9, -2079/4]
"""
options.allowed_flags(args, ['auto', 'polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('sturm', 1, exc)
result = F.sturm(auto=opt.auto)
if not opt.polys:
return [r.as_expr() for r in result]
else:
return result
@public
def gff_list(f, *gens, **args):
"""
Compute a list of greatest factorial factors of ``f``.
Examples
========
>>> from sympy import gff_list, ff
>>> from sympy.abc import x
>>> f = x**5 + 2*x**4 - x**3 - 2*x**2
>>> gff_list(f)
[(x, 1), (x + 2, 4)]
>>> (ff(x, 1)*ff(x + 2, 4)).expand() == f
True
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('gff_list', 1, exc)
factors = F.gff_list()
if not opt.polys:
return [(g.as_expr(), k) for g, k in factors]
else:
return factors
@public
def gff(f, *gens, **args):
"""Compute greatest factorial factorization of ``f``. """
raise NotImplementedError('symbolic falling factorial')
@public
def sqf_norm(f, *gens, **args):
"""
Compute square-free norm of ``f``.
Returns ``s``, ``g``, ``r``, such that ``g(x) = f(x - s*a)`` and
``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
where ``a`` generates the algebraic extension of the ground domain.
Examples
========
>>> from sympy import sqf_norm, sqrt
>>> from sympy.abc import x
>>> sqf_norm(x**2 + 1, extension=[sqrt(3)])
(1, x**2 - 2*sqrt(3)*x + 4, x**4 - 4*x**2 + 16)
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('sqf_norm', 1, exc)
s, g, r = F.sqf_norm()
if not opt.polys:
return Integer(s), g.as_expr(), r.as_expr()
else:
return Integer(s), g, r
@public
def sqf_part(f, *gens, **args):
"""
Compute square-free part of ``f``.
Examples
========
>>> from sympy import sqf_part
>>> from sympy.abc import x
>>> sqf_part(x**3 - 3*x - 2)
x**2 - x - 2
"""
options.allowed_flags(args, ['polys'])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('sqf_part', 1, exc)
result = F.sqf_part()
if not opt.polys:
return result.as_expr()
else:
return result
def _sorted_factors(factors, method):
"""Sort a list of ``(expr, exp)`` pairs. """
if method == 'sqf':
def key(obj):
poly, exp = obj
rep = poly.rep.rep
return (exp, len(rep), rep)
else:
def key(obj):
poly, exp = obj
rep = poly.rep.rep
return (len(rep), exp, rep)
return sorted(factors, key=key)
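# Hedged sketch of the two key orderings above: with method='sqf' the
# primary sort key is the exponent, otherwise it is the length of the
# factor's dense representation (roughly, its degree):
#
#     >>> from sympy import Poly
#     >>> from sympy.abc import x
#     >>> fs = [(Poly(x + 2, x), 3), (Poly(x + 1, x), 2)]
#     >>> [(f.as_expr(), k) for f, k in _sorted_factors(fs, 'sqf')]
#     [(x + 1, 2), (x + 2, 3)]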
def _factors_product(factors):
"""Multiply a list of ``(expr, exp)`` pairs. """
return Mul(*[f.as_expr()**k for f, k in factors])
def _symbolic_factor_list(expr, opt, method):
"""Helper function for :func:`_symbolic_factor`. """
coeff, factors = S.One, []
for arg in Mul.make_args(expr):
if arg.is_Number:
coeff *= arg
continue
elif arg.is_Pow:
base, exp = arg.args
if base.is_Number:
factors.append((base, exp))
continue
else:
base, exp = arg, S.One
try:
poly, _ = _poly_from_expr(base, opt)
except PolificationFailed as exc:
factors.append((exc.expr, exp))
else:
func = getattr(poly, method + '_list')
_coeff, _factors = func()
if _coeff is not S.One:
if exp.is_Integer:
coeff *= _coeff**exp
elif _coeff.is_positive:
factors.append((_coeff, exp))
else:
_factors.append((_coeff, S.One))
if exp is S.One:
factors.extend(_factors)
elif exp.is_integer:
factors.extend([(f, k*exp) for f, k in _factors])
else:
other = []
for f, k in _factors:
if f.as_expr().is_positive:
factors.append((f, k*exp))
else:
other.append((f, k))
factors.append((_factors_product(other), exp))
return coeff, factors
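# Hedged sketch (assuming default options): the public wrappers below
# drive this helper, separating the numeric coefficient from the
# (factor, exponent) pairs, e.g.:
#
#     >>> from sympy import factor_list
#     >>> from sympy.abc import x
#     >>> factor_list(2*(x + 1)**2)
#     (2, [(x + 1, 2)])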
def _symbolic_factor(expr, opt, method):
"""Helper function for :func:`_factor`. """
if isinstance(expr, Expr) and not expr.is_Relational:
if hasattr(expr,'_eval_factor'):
return expr._eval_factor()
coeff, factors = _symbolic_factor_list(together(expr), opt, method)
return _keep_coeff(coeff, _factors_product(factors))
elif hasattr(expr, 'args'):
return expr.func(*[_symbolic_factor(arg, opt, method) for arg in expr.args])
elif hasattr(expr, '__iter__'):
return expr.__class__([_symbolic_factor(arg, opt, method) for arg in expr])
else:
return expr
def _generic_factor_list(expr, gens, args, method):
"""Helper function for :func:`sqf_list` and :func:`factor_list`. """
options.allowed_flags(args, ['frac', 'polys'])
opt = options.build_options(gens, args)
expr = sympify(expr)
if isinstance(expr, Expr) and not expr.is_Relational:
numer, denom = together(expr).as_numer_denom()
cp, fp = _symbolic_factor_list(numer, opt, method)
cq, fq = _symbolic_factor_list(denom, opt, method)
if fq and not opt.frac:
raise PolynomialError("a polynomial expected, got %s" % expr)
_opt = opt.clone(dict(expand=True))
for factors in (fp, fq):
for i, (f, k) in enumerate(factors):
if not f.is_Poly:
f, _ = _poly_from_expr(f, _opt)
factors[i] = (f, k)
fp = _sorted_factors(fp, method)
fq = _sorted_factors(fq, method)
if not opt.polys:
fp = [(f.as_expr(), k) for f, k in fp]
fq = [(f.as_expr(), k) for f, k in fq]
coeff = cp/cq
if not opt.frac:
return coeff, fp
else:
return coeff, fp, fq
else:
raise PolynomialError("a polynomial expected, got %s" % expr)
def _generic_factor(expr, gens, args, method):
"""Helper function for :func:`sqf` and :func:`factor`. """
options.allowed_flags(args, [])
opt = options.build_options(gens, args)
return _symbolic_factor(sympify(expr), opt, method)
def to_rational_coeffs(f):
"""
Try to transform a polynomial so that it has rational coefficients.
First try a rescaling ``x = alpha*y``, so that
``f(x) = lc*alpha**n * g(y)`` where ``g`` is a polynomial with
rational coefficients and ``lc`` is the leading coefficient.
If this fails, try a translation ``x = y + beta``, so that
``f(x) = g(y)``.
Returns ``None`` if no such ``g`` is found; otherwise returns
``(lc, alpha, None, g)`` in the case of rescaling, or
``(None, None, beta, g)`` in the case of translation.
Notes
=====
Currently it transforms only polynomials without roots larger than 2.
Examples
========
>>> from sympy import sqrt, Poly, simplify
>>> from sympy.polys.polytools import to_rational_coeffs
>>> from sympy.abc import x
>>> p = Poly(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))}), x, domain='EX')
>>> lc, r, _, g = to_rational_coeffs(p)
>>> lc, r
(7 + 5*sqrt(2), -2*sqrt(2) + 2)
>>> g
Poly(x**3 + x**2 - 1/4*x - 1/4, x, domain='QQ')
>>> r1 = simplify(1/r)
>>> Poly(lc*r**3*(g.as_expr()).subs({x:x*r1}), x, domain='EX') == p
True
"""
from sympy.simplify.simplify import simplify
def _try_rescale(f):
"""
try rescaling ``x -> alpha*x`` to convert f to a polynomial
with rational coefficients.
Returns ``lc, alpha, f`` if the rescaling is successful, where
``lc`` is the leading coefficient, ``alpha`` is the rescaling
factor and ``f`` is the rescaled polynomial; else returns ``None``.
"""
from sympy.core.add import Add
if not len(f.gens) == 1 or not (f.gens[0]).is_Atom:
return None
n = f.degree()
lc = f.LC()
coeffs = f.monic().all_coeffs()[1:]
coeffs = [simplify(coeffx) for coeffx in coeffs]
if coeffs[-2] and not all(coeffx.is_rational for coeffx in coeffs):
rescale1_x = simplify(coeffs[-2]/coeffs[-1])
coeffs1 = []
for i in range(len(coeffs)):
coeffx = simplify(coeffs[i]*rescale1_x**(i + 1))
if not coeffx.is_rational:
break
coeffs1.append(coeffx)
else:
rescale_x = simplify(1/rescale1_x)
x = f.gens[0]
v = [x**n]
for i in range(1, n + 1):
v.append(coeffs1[i - 1]*x**(n - i))
f = Add(*v)
f = Poly(f)
return lc, rescale_x, f
return None
def _try_translate(f):
"""
try translating ``x -> x + alpha`` to convert f to a polynomial
with rational coefficients.
Returns ``alpha, f`` if the translation is successful, where
``alpha`` is the translation amount and ``f`` is the shifted
polynomial; else returns ``None``.
"""
from sympy.core.add import Add
if not len(f.gens) == 1 or not (f.gens[0]).is_Atom:
return None
n = f.degree()
f1 = f.monic()
coeffs = f1.all_coeffs()[1:]
c = simplify(coeffs[0])
if c and not c.is_rational:
func = Add
if c.is_Add:
args = c.args
func = c.func
else:
args = [c]
sifted = sift(args, lambda z: z.is_rational)
c1, c2 = sifted[True], sifted[False]
alpha = -func(*c2)/n
f2 = f1.shift(alpha)
return alpha, f2
return None
def _has_square_roots(p):
"""
Return True if the coefficients of ``p`` involve square roots but no higher-order roots
"""
from sympy.core.exprtools import Factors
coeffs = p.coeffs()
has_sq = False
for y in coeffs:
for x in Add.make_args(y):
f = Factors(x).factors
r = [wx.q for wx in f.values() if wx.is_Rational and wx.q >= 2]
if not r:
continue
if min(r) == 2:
has_sq = True
if max(r) > 2:
return False
return has_sq
if f.get_domain().is_EX and _has_square_roots(f):
r = _try_rescale(f)
if r:
return r[0], r[1], None, r[2]
else:
r = _try_translate(f)
if r:
return None, None, r[0], r[1]
return None
def _torational_factor_list(p, x):
"""
Helper function to factor a polynomial using to_rational_coeffs.
Examples
========
>>> from sympy.polys.polytools import _torational_factor_list
>>> from sympy.abc import x
>>> from sympy import sqrt, expand, Mul
>>> p = expand(((x**2-1)*(x-2)).subs({x:x*(1 + sqrt(2))}))
>>> factors = _torational_factor_list(p, x); factors
(-2, [(-x*(1 + sqrt(2))/2 + 1, 1), (-x*(1 + sqrt(2)) - 1, 1), (-x*(1 + sqrt(2)) + 1, 1)])
>>> expand(factors[0]*Mul(*[z[0] for z in factors[1]])) == p
True
>>> p = expand(((x**2-1)*(x-2)).subs({x:x + sqrt(2)}))
>>> factors = _torational_factor_list(p, x); factors
(1, [(x - 2 + sqrt(2), 1), (x - 1 + sqrt(2), 1), (x + 1 + sqrt(2), 1)])
>>> expand(factors[0]*Mul(*[z[0] for z in factors[1]])) == p
True
"""
from sympy.simplify.simplify import simplify
p1 = Poly(p, x, domain='EX')
n = p1.degree()
res = to_rational_coeffs(p1)
if not res:
return None
lc, r, t, g = res
factors = factor_list(g.as_expr())
if lc:
c = simplify(factors[0]*lc*r**n)
r1 = simplify(1/r)
a = []
for z in factors[1:][0]:
a.append((simplify(z[0].subs({x: x*r1})), z[1]))
else:
c = factors[0]
a = []
for z in factors[1:][0]:
a.append((z[0].subs({x: x - t}), z[1]))
return (c, a)
@public
def sqf_list(f, *gens, **args):
"""
Compute a list of square-free factors of ``f``.
Examples
========
>>> from sympy import sqf_list
>>> from sympy.abc import x
>>> sqf_list(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
(2, [(x + 1, 2), (x + 2, 3)])
"""
return _generic_factor_list(f, gens, args, method='sqf')
@public
def sqf(f, *gens, **args):
"""
Compute square-free factorization of ``f``.
Examples
========
>>> from sympy import sqf
>>> from sympy.abc import x
>>> sqf(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
2*(x + 1)**2*(x + 2)**3
"""
return _generic_factor(f, gens, args, method='sqf')
@public
def factor_list(f, *gens, **args):
"""
Compute a list of irreducible factors of ``f``.
Examples
========
>>> from sympy import factor_list
>>> from sympy.abc import x, y
>>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
(2, [(x + y, 1), (x**2 + 1, 2)])
"""
return _generic_factor_list(f, gens, args, method='factor')
@public
def factor(f, *gens, **args):
"""
Compute the factorization of expression, ``f``, into irreducibles. (To
factor an integer into primes, use ``factorint``.)
There are two modes implemented: symbolic and formal. If ``f`` is not an
instance of :class:`Poly` and generators are not specified, then the
former mode is used. Otherwise, the formal mode is used.
In symbolic mode, :func:`factor` will traverse the expression tree and
factor its components without any prior expansion, unless an instance
of :class:`Add` is encountered (in this case formal factorization is
used). This way :func:`factor` can handle large or symbolic exponents.
By default, the factorization is computed over the rationals. To factor
over another domain, e.g. an algebraic or finite field, use appropriate
options: ``extension``, ``modulus`` or ``domain``.
Examples
========
>>> from sympy import factor, sqrt
>>> from sympy.abc import x, y
>>> factor(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
2*(x + y)*(x**2 + 1)**2
>>> factor(x**2 + 1)
x**2 + 1
>>> factor(x**2 + 1, modulus=2)
(x + 1)**2
>>> factor(x**2 + 1, gaussian=True)
(x - I)*(x + I)
>>> factor(x**2 - 2, extension=sqrt(2))
(x - sqrt(2))*(x + sqrt(2))
>>> factor((x**2 - 1)/(x**2 + 4*x + 4))
(x - 1)*(x + 1)/(x + 2)**2
>>> factor((x**2 + 4*x + 4)**10000000*(x**2 + 1))
(x + 2)**20000000*(x**2 + 1)
By default, factor deals with an expression as a whole:
>>> eq = 2**(x**2 + 2*x + 1)
>>> factor(eq)
2**(x**2 + 2*x + 1)
If the ``deep`` flag is True then subexpressions will
be factored:
>>> factor(eq, deep=True)
2**((x + 1)**2)
See Also
========
sympy.ntheory.factor_.factorint
"""
f = sympify(f)
if args.pop('deep', False):
partials = {}
muladd = f.atoms(Mul, Add)
for p in muladd:
fac = factor(p, *gens, **args)
if (fac.is_Mul or fac.is_Pow) and fac != p:
partials[p] = fac
return f.xreplace(partials)
try:
return _generic_factor(f, gens, args, method='factor')
except PolynomialError as msg:
if not f.is_commutative:
from sympy.core.exprtools import factor_nc
return factor_nc(f)
else:
raise PolynomialError(msg)
@public
def intervals(F, all=False, eps=None, inf=None, sup=None, strict=False, fast=False, sqf=False):
"""
Compute isolating intervals for roots of ``f``.
Examples
========
>>> from sympy import intervals
>>> from sympy.abc import x
>>> intervals(x**2 - 3)
[((-2, -1), 1), ((1, 2), 1)]
>>> intervals(x**2 - 3, eps=1e-2)
[((-26/15, -19/11), 1), ((19/11, 26/15), 1)]
"""
if not hasattr(F, '__iter__'):
try:
F = Poly(F)
except GeneratorsNeeded:
return []
return F.intervals(all=all, eps=eps, inf=inf, sup=sup, fast=fast, sqf=sqf)
else:
polys, opt = parallel_poly_from_expr(F, domain='QQ')
if len(opt.gens) > 1:
raise MultivariatePolynomialError
for i, poly in enumerate(polys):
polys[i] = poly.rep.rep
if eps is not None:
eps = opt.domain.convert(eps)
if eps <= 0:
raise ValueError("'eps' must be a positive rational")
if inf is not None:
inf = opt.domain.convert(inf)
if sup is not None:
sup = opt.domain.convert(sup)
intervals = dup_isolate_real_roots_list(polys, opt.domain,
eps=eps, inf=inf, sup=sup, strict=strict, fast=fast)
result = []
for (s, t), indices in intervals:
s, t = opt.domain.to_sympy(s), opt.domain.to_sympy(t)
result.append(((s, t), indices))
return result
@public
def refine_root(f, s, t, eps=None, steps=None, fast=False, check_sqf=False):
"""
Refine an isolating interval of a root to the given precision.
Examples
========
>>> from sympy import refine_root
>>> from sympy.abc import x
>>> refine_root(x**2 - 3, 1, 2, eps=1e-2)
(19/11, 26/15)
"""
try:
F = Poly(f)
except GeneratorsNeeded:
raise PolynomialError(
"can't refine a root of %s, not a polynomial" % f)
return F.refine_root(s, t, eps=eps, steps=steps, fast=fast, check_sqf=check_sqf)
@public
def count_roots(f, inf=None, sup=None):
"""
Return the number of roots of ``f`` in ``[inf, sup]`` interval.
If one of ``inf`` or ``sup`` is complex, it will return the number of roots
in the complex rectangle with corners at ``inf`` and ``sup``.
Examples
========
>>> from sympy import count_roots, I
>>> from sympy.abc import x
>>> count_roots(x**4 - 4, -3, 3)
2
>>> count_roots(x**4 - 4, 0, 1 + 3*I)
1
"""
try:
F = Poly(f, greedy=False)
except GeneratorsNeeded:
raise PolynomialError("can't count roots of %s, not a polynomial" % f)
return F.count_roots(inf=inf, sup=sup)
@public
def real_roots(f, multiple=True):
"""
Return a list of real roots with multiplicities of ``f``.
Examples
========
>>> from sympy import real_roots
>>> from sympy.abc import x
>>> real_roots(2*x**3 - 7*x**2 + 4*x + 4)
[-1/2, 2, 2]
"""
try:
F = Poly(f, greedy=False)
except GeneratorsNeeded:
raise PolynomialError(
"can't compute real roots of %s, not a polynomial" % f)
return F.real_roots(multiple=multiple)
@public
def nroots(f, n=15, maxsteps=50, cleanup=True):
"""
Compute numerical approximations of roots of ``f``.
Examples
========
>>> from sympy import nroots
>>> from sympy.abc import x
>>> nroots(x**2 - 3, n=15)
[-1.73205080756888, 1.73205080756888]
>>> nroots(x**2 - 3, n=30)
[-1.73205080756887729352744634151, 1.73205080756887729352744634151]
"""
try:
F = Poly(f, greedy=False)
except GeneratorsNeeded:
raise PolynomialError(
"can't compute numerical roots of %s, not a polynomial" % f)
return F.nroots(n=n, maxsteps=maxsteps, cleanup=cleanup)
@public
def ground_roots(f, *gens, **args):
"""
Compute roots of ``f`` by factorization in the ground domain.
Examples
========
>>> from sympy import ground_roots
>>> from sympy.abc import x
>>> ground_roots(x**6 - 4*x**4 + 4*x**3 - x**2)
{0: 2, 1: 2}
"""
options.allowed_flags(args, [])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('ground_roots', 1, exc)
return F.ground_roots()
@public
def nth_power_roots_poly(f, n, *gens, **args):
"""
Construct a polynomial with n-th powers of roots of ``f``.
Examples
========
>>> from sympy import nth_power_roots_poly, factor, roots
>>> from sympy.abc import x
>>> f = x**4 - x**2 + 1
>>> g = factor(nth_power_roots_poly(f, 2))
>>> g
(x**2 - x + 1)**2
>>> R_f = [ (r**2).expand() for r in roots(f) ]
>>> R_g = roots(g).keys()
>>> set(R_f) == set(R_g)
True
"""
options.allowed_flags(args, [])
try:
F, opt = poly_from_expr(f, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('nth_power_roots_poly', 1, exc)
result = F.nth_power_roots_poly(n)
if not opt.polys:
return result.as_expr()
else:
return result
@public
def cancel(f, *gens, **args):
"""
Cancel common factors in a rational function ``f``.
Examples
========
>>> from sympy import cancel, sqrt, Symbol
>>> from sympy.abc import x
>>> A = Symbol('A', commutative=False)
>>> cancel((2*x**2 - 2)/(x**2 - 2*x + 1))
(2*x + 2)/(x - 1)
>>> cancel((sqrt(3) + sqrt(15)*A)/(sqrt(2) + sqrt(10)*A))
sqrt(6)/2
"""
from sympy.core.exprtools import factor_terms
options.allowed_flags(args, ['polys'])
f = sympify(f)
if not isinstance(f, (tuple, Tuple)):
if f.is_Number or isinstance(f, Relational) or not isinstance(f, Expr):
return f
f = factor_terms(f, radical=True)
p, q = f.as_numer_denom()
elif len(f) == 2:
p, q = f
elif isinstance(f, Tuple):
return factor_terms(f)
else:
raise ValueError('unexpected argument: %s' % f)
try:
(F, G), opt = parallel_poly_from_expr((p, q), *gens, **args)
except PolificationFailed:
if not isinstance(f, (tuple, Tuple)):
return f
else:
return S.One, p, q
except PolynomialError as msg:
if f.is_commutative and not f.has(Piecewise):
raise PolynomialError(msg)
# Handling of noncommutative and/or piecewise expressions
if f.is_Add or f.is_Mul:
sifted = sift(f.args, lambda x: x.is_commutative and not x.has(Piecewise))
c, nc = sifted[True], sifted[False]
nc = [cancel(i) for i in nc]
return f.func(cancel(f.func._from_args(c)), *nc)
else:
reps = []
pot = preorder_traversal(f)
next(pot)
for e in pot:
# XXX: This should really skip anything that's not Expr.
if isinstance(e, (tuple, Tuple, BooleanAtom)):
continue
try:
reps.append((e, cancel(e)))
pot.skip() # this was handled successfully
except NotImplementedError:
pass
return f.xreplace(dict(reps))
c, P, Q = F.cancel(G)
if not isinstance(f, (tuple, Tuple)):
return c*(P.as_expr()/Q.as_expr())
else:
if not opt.polys:
return c, P.as_expr(), Q.as_expr()
else:
return c, P, Q
@public
def reduced(f, G, *gens, **args):
"""
Reduces a polynomial ``f`` modulo a set of polynomials ``G``.
Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r``
is a completely reduced polynomial with respect to ``G``.
Examples
========
>>> from sympy import reduced
>>> from sympy.abc import x, y
>>> reduced(2*x**4 + y**2 - x**2 + y**3, [x**3 - x, y**3 - y])
([2*x, 1], x**2 + y**2 + y)
"""
options.allowed_flags(args, ['polys', 'auto'])
try:
polys, opt = parallel_poly_from_expr([f] + list(G), *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('reduced', 0, exc)
domain = opt.domain
retract = False
if opt.auto and domain.has_Ring and not domain.has_Field:
opt = opt.clone(dict(domain=domain.get_field()))
retract = True
from sympy.polys.rings import xring
_ring, _ = xring(opt.gens, opt.domain, opt.order)
for i, poly in enumerate(polys):
poly = poly.set_domain(opt.domain).rep.to_dict()
polys[i] = _ring.from_dict(poly)
Q, r = polys[0].div(polys[1:])
Q = [Poly._from_dict(dict(q), opt) for q in Q]
r = Poly._from_dict(dict(r), opt)
if retract:
try:
_Q, _r = [q.to_ring() for q in Q], r.to_ring()
except CoercionFailed:
pass
else:
Q, r = _Q, _r
if not opt.polys:
return [q.as_expr() for q in Q], r.as_expr()
else:
return Q, r
@public
def groebner(F, *gens, **args):
"""
Computes the reduced Groebner basis for a set of polynomials.
Use the ``order`` argument to set the monomial ordering that will be
used to compute the basis. Allowed orders are ``lex``, ``grlex`` and
``grevlex``. If no order is specified, it defaults to ``lex``.
For more information on Groebner bases, see the references and the docstring
of `solve_poly_system()`.
Examples
========
Example taken from [1].
>>> from sympy import groebner
>>> from sympy.abc import x, y
>>> F = [x*y - 2*y, 2*y**2 - x**2]
>>> groebner(F, x, y, order='lex')
GroebnerBasis([x**2 - 2*y**2, x*y - 2*y, y**3 - 2*y], x, y,
domain='ZZ', order='lex')
>>> groebner(F, x, y, order='grlex')
GroebnerBasis([y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y], x, y,
domain='ZZ', order='grlex')
>>> groebner(F, x, y, order='grevlex')
GroebnerBasis([y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y], x, y,
domain='ZZ', order='grevlex')
By default, an improved implementation of the Buchberger algorithm is
used. Optionally, an implementation of the F5B algorithm can be used.
The algorithm can be set using ``method`` flag or with the :func:`setup`
function from :mod:`sympy.polys.polyconfig`:
>>> F = [x**2 - x - 1, (2*x - 1) * y - (x**10 - (1 - x)**10)]
>>> groebner(F, x, y, method='buchberger')
GroebnerBasis([x**2 - x - 1, y - 55], x, y, domain='ZZ', order='lex')
>>> groebner(F, x, y, method='f5b')
GroebnerBasis([x**2 - x - 1, y - 55], x, y, domain='ZZ', order='lex')
References
==========
1. [Buchberger01]_
2. [Cox97]_
"""
return GroebnerBasis(F, *gens, **args)
@public
def is_zero_dimensional(F, *gens, **args):
"""
Checks if the ideal generated by a Groebner basis is zero-dimensional.
The algorithm checks if the set of monomials not divisible by the
leading monomial of any element of ``F`` is bounded.
References
==========
David A. Cox, John B. Little, Donal O'Shea. Ideals, Varieties and
Algorithms, 3rd edition, p. 230
"""
return GroebnerBasis(F, *gens, **args).is_zero_dimensional
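# Hedged sketch: each generator's leading monomial must be a pure power
# of a single variable for the accumulated exponent vector to have no
# zero entries, e.g.:
#
#     >>> from sympy import groebner
#     >>> from sympy.abc import x, y
#     >>> groebner([x**2 - 1, y**2 - 1]).is_zero_dimensional
#     True
#     >>> groebner([x*y - 1]).is_zero_dimensional
#     False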
@public
class GroebnerBasis(Basic):
"""Represents a reduced Groebner basis. """
def __new__(cls, F, *gens, **args):
"""Compute a reduced Groebner basis for a system of polynomials. """
options.allowed_flags(args, ['polys', 'method'])
try:
polys, opt = parallel_poly_from_expr(F, *gens, **args)
except PolificationFailed as exc:
raise ComputationFailed('groebner', len(F), exc)
from sympy.polys.rings import PolyRing
ring = PolyRing(opt.gens, opt.domain, opt.order)
for i, poly in enumerate(polys):
polys[i] = ring.from_dict(poly.rep.to_dict())
G = _groebner(polys, ring, method=opt.method)
G = [Poly._from_dict(g, opt) for g in G]
return cls._new(G, opt)
@classmethod
def _new(cls, basis, options):
obj = Basic.__new__(cls)
obj._basis = tuple(basis)
obj._options = options
return obj
@property
def args(self):
return (Tuple(*self._basis), Tuple(*self._options.gens))
@property
def exprs(self):
return [poly.as_expr() for poly in self._basis]
@property
def polys(self):
return list(self._basis)
@property
def gens(self):
return self._options.gens
@property
def domain(self):
return self._options.domain
@property
def order(self):
return self._options.order
def __len__(self):
return len(self._basis)
def __iter__(self):
if self._options.polys:
return iter(self.polys)
else:
return iter(self.exprs)
def __getitem__(self, item):
if self._options.polys:
basis = self.polys
else:
basis = self.exprs
return basis[item]
def __hash__(self):
return hash((self._basis, tuple(self._options.items())))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._basis == other._basis and self._options == other._options
elif iterable(other):
return self.polys == list(other) or self.exprs == list(other)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
@property
def is_zero_dimensional(self):
"""
Checks if the ideal generated by a Groebner basis is zero-dimensional.
The algorithm checks if the set of monomials not divisible by the
leading monomial of any element of ``F`` is bounded.
References
==========
David A. Cox, John B. Little, Donal O'Shea. Ideals, Varieties and
Algorithms, 3rd edition, p. 230
"""
def single_var(monomial):
return sum(map(bool, monomial)) == 1
exponents = Monomial([0]*len(self.gens))
order = self._options.order
for poly in self.polys:
monomial = poly.LM(order=order)
if single_var(monomial):
exponents *= monomial
# If any element of the exponents vector is zero, then there's
# a variable for which there's no degree bound and the ideal
# generated by this Groebner basis isn't zero-dimensional.
return all(exponents)
def fglm(self, order):
"""
Convert a Groebner basis from one ordering to another.
The FGLM algorithm converts reduced Groebner bases of zero-dimensional
ideals from one ordering to another. This method is often used when it
is infeasible to compute a Groebner basis with respect to a particular
ordering directly.
Examples
========
>>> from sympy.abc import x, y
>>> from sympy import groebner
>>> F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]
>>> G = groebner(F, x, y, order='grlex')
>>> list(G.fglm('lex'))
[2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7]
>>> list(groebner(F, x, y, order='lex'))
[2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7]
References
==========
J.C. Faugere, P. Gianni, D. Lazard, T. Mora (1994). Efficient
Computation of Zero-dimensional Groebner Bases by Change of
Ordering
"""
opt = self._options
src_order = opt.order
dst_order = monomial_key(order)
if src_order == dst_order:
return self
if not self.is_zero_dimensional:
raise NotImplementedError("can't convert Groebner bases of ideals with positive dimension")
polys = list(self._basis)
domain = opt.domain
opt = opt.clone(dict(
domain=domain.get_field(),
order=dst_order,
))
from sympy.polys.rings import xring
_ring, _ = xring(opt.gens, opt.domain, src_order)
for i, poly in enumerate(polys):
poly = poly.set_domain(opt.domain).rep.to_dict()
polys[i] = _ring.from_dict(poly)
G = matrix_fglm(polys, _ring, dst_order)
G = [Poly._from_dict(dict(g), opt) for g in G]
if not domain.has_Field:
G = [g.clear_denoms(convert=True)[1] for g in G]
opt.domain = domain
return self._new(G, opt)
def reduce(self, expr, auto=True):
"""
Reduces a polynomial modulo a Groebner basis.
Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r``
is a completely reduced polynomial with respect to ``G``.
Examples
========
>>> from sympy import groebner, expand
>>> from sympy.abc import x, y
>>> f = 2*x**4 - x**2 + y**3 + y**2
>>> G = groebner([x**3 - x, y**3 - y])
>>> G.reduce(f)
([2*x, 1], x**2 + y**2 + y)
>>> Q, r = _
>>> expand(sum(q*g for q, g in zip(Q, G)) + r)
2*x**4 - x**2 + y**3 + y**2
>>> _ == f
True
"""
poly = Poly._from_expr(expr, self._options)
polys = [poly] + list(self._basis)
opt = self._options
domain = opt.domain
retract = False
if auto and domain.has_Ring and not domain.has_Field:
opt = opt.clone(dict(domain=domain.get_field()))
retract = True
from sympy.polys.rings import xring
_ring, _ = xring(opt.gens, opt.domain, opt.order)
for i, poly in enumerate(polys):
poly = poly.set_domain(opt.domain).rep.to_dict()
polys[i] = _ring.from_dict(poly)
Q, r = polys[0].div(polys[1:])
Q = [Poly._from_dict(dict(q), opt) for q in Q]
r = Poly._from_dict(dict(r), opt)
if retract:
try:
_Q, _r = [q.to_ring() for q in Q], r.to_ring()
except CoercionFailed:
pass
else:
Q, r = _Q, _r
if not opt.polys:
return [q.as_expr() for q in Q], r.as_expr()
else:
return Q, r
def contains(self, poly):
"""
Check if ``poly`` belongs to the ideal generated by ``self``.
Examples
========
>>> from sympy import groebner
>>> from sympy.abc import x, y
>>> f = 2*x**3 + y**3 + 3*y
>>> G = groebner([x**2 + y**2 - 1, x*y - 2])
>>> G.contains(f)
True
>>> G.contains(f + 1)
False
"""
return self.reduce(poly)[1] == 0
@public
def poly(expr, *gens, **args):
"""
Efficiently transform an expression into a polynomial.
Examples
========
>>> from sympy import poly
>>> from sympy.abc import x
>>> poly(x*(x**2 + x - 1)**2)
Poly(x**5 + 2*x**4 - x**3 - 2*x**2 + x, x, domain='ZZ')
"""
options.allowed_flags(args, [])
def _poly(expr, opt):
terms, poly_terms = [], []
for term in Add.make_args(expr):
factors, poly_factors = [], []
for factor in Mul.make_args(term):
if factor.is_Add:
poly_factors.append(_poly(factor, opt))
elif factor.is_Pow and factor.base.is_Add and factor.exp.is_Integer:
poly_factors.append(
_poly(factor.base, opt).pow(factor.exp))
else:
factors.append(factor)
if not poly_factors:
terms.append(term)
else:
product = poly_factors[0]
for factor in poly_factors[1:]:
product = product.mul(factor)
if factors:
factor = Mul(*factors)
if factor.is_Number:
product = product.mul(factor)
else:
product = product.mul(Poly._from_expr(factor, opt))
poly_terms.append(product)
if not poly_terms:
result = Poly._from_expr(expr, opt)
else:
result = poly_terms[0]
for term in poly_terms[1:]:
result = result.add(term)
if terms:
term = Add(*terms)
if term.is_Number:
result = result.add(term)
else:
result = result.add(Poly._from_expr(term, opt))
return result.reorder(*opt.get('gens', ()), **args)
expr = sympify(expr)
if expr.is_Poly:
return Poly(expr, *gens, **args)
if 'expand' not in args:
args['expand'] = False
opt = options.build_options(gens, args)
return _poly(expr, opt)
from sympy.functions import Piecewise
|
wolfram74/numerical_methods_iserles_notes
|
venv/lib/python2.7/site-packages/sympy/polys/polytools.py
|
Python
|
mit
| 171,109 | 0.000316 |
from .assembler import IndexCardAssembler
|
johnbachman/indra
|
indra/assemblers/index_card/__init__.py
|
Python
|
bsd-2-clause
| 42 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 Violin Memory, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Violin Memory 6000 Series All-Flash Array Common Driver for Openstack Cinder
Uses Violin REST API via XG-Tools to manage a standard V6000 series
flash array to provide network block-storage services.
by Ryan Lucio
Senior Software Engineer
Violin Memory
"""
import re
import time
from oslo.config import cfg
from cinder import context
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.volume.drivers.san import san
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
# support vmos versions V6.3.0.4 or newer
# support vmos versions V6.3.1 or newer
VMOS_SUPPORTED_VERSION_PATTERNS = ['V6.3.0.[4-9]', 'V6.3.[1-9].?[0-9]?']
try:
import vxg
except ImportError:
LOG.exception(
_("The Violin V6000 driver for Cinder requires the presence of "
"the Violin 'XG-Tools', python libraries for facilitating "
"communication between applications and the v6000 XML API. "
"The libraries can be downloaded from the Violin Memory "
"support website at http://www.violin-memory.com/support"))
raise
else:
LOG.info(_("Running with xg-tools version: %s"), vxg.__version__)
violin_opts = [
cfg.StrOpt('gateway_vip',
default='',
help='IP address or hostname of the v6000 master VIP'),
cfg.StrOpt('gateway_mga',
default='',
help='IP address or hostname of mg-a'),
cfg.StrOpt('gateway_mgb',
default='',
help='IP address or hostname of mg-b'),
cfg.StrOpt('gateway_user',
default='admin',
help='User name for connecting to the Memory Gateway'),
cfg.StrOpt('gateway_password',
default='',
help='Password for connecting to the Memory Gateway',
secret=True),
cfg.BoolOpt('use_igroups',
default=False,
help='Use igroups to manage targets and initiators'),
cfg.BoolOpt('use_thin_luns',
default=False,
help='Use thin luns instead of thick luns'), ]
CONF = cfg.CONF
CONF.register_opts(violin_opts)
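# Hedged example of a matching cinder.conf stanza (section and values
# are purely illustrative):
#
#     [DEFAULT]
#     gateway_vip = 192.0.2.10
#     gateway_mga = 192.0.2.11
#     gateway_mgb = 192.0.2.12
#     gateway_user = admin
#     gateway_password = secret
#     use_igroups = False
#     use_thin_luns = False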
class InvalidBackendConfig(exception.CinderException):
message = _("Volume backend config is invalid: %(reason)s")
class RequestRetryTimeout(exception.CinderException):
message = _("Backend service retry timeout hit: %(timeout)s sec")
class ViolinBackendErr(exception.CinderException):
message = _("Backend reports: %(message)s")
class ViolinBackendErrExists(exception.CinderException):
message = _("Backend reports: item already exists")
class ViolinBackendErrNotFound(exception.CinderException):
message = _("Backend reports: item not found")
class V6000CommonDriver(san.SanDriver):
"""Executes commands relating to Violin Memory Arrays."""
def __init__(self, *args, **kwargs):
super(V6000CommonDriver, self).__init__(*args, **kwargs)
self.request_timeout = 300
self.vmem_vip = None
self.vmem_mga = None
self.vmem_mgb = None
self.container = ""
self.stats = {}
self.config = kwargs.get('configuration', None)
self.context = None
self.lun_tracker = LunIdList(self.db)
if self.config:
self.config.append_config_values(violin_opts)
def do_setup(self, context):
"""Any initialization the driver does while starting."""
if not self.config.gateway_vip:
raise exception.InvalidInput(
reason=_('Gateway VIP is not set'))
if not self.config.gateway_mga:
raise exception.InvalidInput(
reason=_('Gateway IP for mg-a is not set'))
if not self.config.gateway_mgb:
raise exception.InvalidInput(
reason=_('Gateway IP for mg-b is not set'))
self.vmem_vip = vxg.open(self.config.gateway_vip,
self.config.gateway_user,
self.config.gateway_password,
keepalive=True)
self.vmem_mga = vxg.open(self.config.gateway_mga,
self.config.gateway_user,
self.config.gateway_password,
keepalive=True)
self.vmem_mgb = vxg.open(self.config.gateway_mgb,
self.config.gateway_user,
self.config.gateway_password,
keepalive=True)
self.context = context
vip = self.vmem_vip.basic
ret_dict = vip.get_node_values("/vshare/state/local/container/*")
if ret_dict:
self.container = ret_dict.items()[0][1]
ret_dict = vip.get_node_values(
"/vshare/state/local/container/%s/lun/*"
% self.container)
if ret_dict:
self.lun_tracker.update_from_volume_ids(ret_dict.values())
ret_dict = vip.get_node_values(
"/vshare/state/snapshot/container/%s/lun/*"
% self.container)
if ret_dict:
for vol_id in ret_dict.values():
snaps = vip.get_node_values(
"/vshare/state/snapshot/container/%s/lun/%s/snap/*"
% (self.container, vol_id))
self.lun_tracker.update_from_snapshot_ids(snaps.values())
def check_for_setup_error(self):
"""Returns an error if prerequisites aren't met."""
vip = self.vmem_vip.basic
if len(self.container) == 0:
raise InvalidBackendConfig(reason=_('container is missing'))
if not self._is_supported_vmos_version(self.vmem_vip.version):
msg = _('VMOS version is not supported')
raise InvalidBackendConfig(reason=msg)
bn1 = ("/vshare/state/local/container/%s/threshold/usedspace"
"/threshold_hard_val" % self.container)
bn2 = ("/vshare/state/local/container/%s/threshold/provision"
"/threshold_hard_val" % self.container)
ret_dict = vip.get_node_values([bn1, bn2])
for node in ret_dict:
# The infrastructure does not support space reclamation so
# ensure it is disabled. When used space exceeds the hard
# limit, snapshot space reclamation begins. Default is 0
# => no space reclamation.
#
if node.endswith('/usedspace/threshold_hard_val'):
if ret_dict[node] != 0:
msg = _('space reclamation threshold is enabled')
raise InvalidBackendConfig(reason=msg)
# The infrastructure does not support overprovisioning so
# ensure it is disabled. When provisioned space exceeds
# the hard limit, further provisioning is stopped.
# Default is 100 => provisioned space equals usable space.
#
elif node.endswith('/provision/threshold_hard_val'):
if ret_dict[node] != 100:
msg = _('provisioned space threshold not equal to '
'usable space')
raise InvalidBackendConfig(reason=msg)
def create_volume(self, volume):
"""Creates a volume."""
self._create_lun(volume)
def delete_volume(self, volume):
"""Deletes a volume."""
self._delete_lun(volume)
def create_snapshot(self, snapshot):
"""Creates a snapshot from an existing volume."""
self._create_lun_snapshot(snapshot)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
self._delete_lun_snapshot(snapshot)
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
snapshot['size'] = snapshot['volume']['size']
self._create_lun(volume)
self.copy_volume_data(self.context, snapshot, volume)
def create_cloned_volume(self, volume, src_vref):
"""Creates a full clone of the specified volume."""
self._create_lun(volume)
self.copy_volume_data(self.context, src_vref, volume)
def extend_volume(self, volume, new_size):
"""Extend an existing volume's size.
The equivalent CLI command is "lun resize container
<container_name> name <lun_name> size <gb>"
Arguments:
volume -- volume object provided by the Manager
new_size -- new (increased) size in GB to be applied
"""
v = self.vmem_vip
LOG.info(_("Extending lun %(id)s, from %(size)s to %(new_size)s GB") %
{'id': volume['id'], 'size': volume['size'],
'new_size': new_size})
try:
self._send_cmd(v.lun.resize_lun, 'Success',
self.container, volume['id'], new_size)
except Exception:
LOG.exception(_("LUN extend failed!"))
raise
@utils.synchronized('vmem-lun')
def _create_lun(self, volume):
"""Creates a new lun.
The equivalent CLI command is "lun create container
<container_name> name <lun_name> size <gb>"
Arguments:
volume -- volume object provided by the Manager
"""
lun_type = '0'
v = self.vmem_vip
LOG.info(_("Creating lun %(name)s, %(size)s GB") % volume)
if self.config.use_thin_luns:
lun_type = '1'
# using the defaults for fields: quantity, nozero,
# readonly, startnum, blksize, naca, alua, preferredport
#
try:
self._send_cmd(v.lun.create_lun,
'LUN create: success!',
self.container, volume['id'],
volume['size'], 1, '0', lun_type, 'w',
1, 512, False, False, None)
except ViolinBackendErrExists:
LOG.info(_("Lun %s already exists, continuing"), volume['id'])
except Exception:
LOG.warn(_("Lun create failed!"))
raise
@utils.synchronized('vmem-lun')
def _delete_lun(self, volume):
"""Deletes a lun.
The equivalent CLI command is "no lun create container
<container_name> name <lun_name>"
Arguments:
volume -- volume object provided by the Manager
"""
v = self.vmem_vip
success_msgs = ['lun deletion started', '']
LOG.info(_("Deleting lun %s"), volume['id'])
try:
self._send_cmd(v.lun.bulk_delete_luns,
success_msgs,
self.container, volume['id'])
except ViolinBackendErrNotFound:
LOG.info(_("Lun %s already deleted, continuing"), volume['id'])
except ViolinBackendErrExists:
LOG.warn(_("Lun %s has dependent snapshots, skipping"),
volume['id'])
raise exception.VolumeIsBusy(volume_name=volume['id'])
except Exception:
LOG.exception(_("Lun delete failed!"))
raise
self.lun_tracker.free_lun_id_for_volume(volume)
@utils.synchronized('vmem-snap')
def _create_lun_snapshot(self, snapshot):
"""Creates a new snapshot for a lun.
The equivalent CLI command is "snapshot create container
<container> lun <volume_name> name <snapshot_name>"
Arguments:
snapshot -- snapshot object provided by the Manager
"""
v = self.vmem_vip
LOG.info(_("Creating snapshot %s"), snapshot['id'])
try:
self._send_cmd(v.snapshot.create_lun_snapshot,
'Snapshot create: success!',
self.container, snapshot['volume_id'],
snapshot['id'])
except ViolinBackendErrExists:
LOG.info(_("Snapshot %s already exists, continuing"),
snapshot['id'])
except Exception:
LOG.exception(_("LUN snapshot create failed!"))
raise
@utils.synchronized('vmem-snap')
def _delete_lun_snapshot(self, snapshot):
"""Deletes an existing snapshot for a lun.
The equivalent CLI command is "no snapshot create container
<container> lun <volume_name> name <snapshot_name>"
Arguments:
snapshot -- snapshot object provided by the Manager
"""
v = self.vmem_vip
LOG.info(_("Deleting snapshot %s"), snapshot['id'])
try:
self._send_cmd(v.snapshot.delete_lun_snapshot,
'Snapshot delete: success!',
self.container, snapshot['volume_id'],
snapshot['id'])
except ViolinBackendErrNotFound:
LOG.info(_("Snapshot %s already deleted, continuing"),
snapshot['id'])
except Exception:
LOG.exception(_("LUN snapshot delete failed!"))
raise
self.lun_tracker.free_lun_id_for_snapshot(snapshot)
def _send_cmd(self, request_func, success_msgs, *args):
"""Run an XG request function, and retry until the request
returns a success message, a failure message, or the global
request timeout is hit.
This wrapper is meant to deal with backend requests that can
fail for any variety of reasons, for instance, when the system
is already busy handling other LUN requests. It is also smart
enough to give up if clustering is down (eg no HA available),
there is no space left, or other "fatal" errors are returned
(see _fatal_error_code() for a list of all known error
conditions).
Arguments:
request_func -- XG api method to call
success_msgs -- Success messages expected from the backend
*args -- argument array to be passed to the request_func
Returns:
The response dict from the last XG call.
"""
resp = {}
start = time.time()
done = False
if isinstance(success_msgs, basestring):
success_msgs = [success_msgs]
while not done:
if time.time() - start >= self.request_timeout:
raise RequestRetryTimeout(timeout=self.request_timeout)
resp = request_func(*args)
if not resp['message']:
# XG requests will return None for a message if no message
# string is passed in the raw response
resp['message'] = ''
for msg in success_msgs:
if not resp['code'] and msg in resp['message']:
done = True
break
self._fatal_error_code(resp)
return resp
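# Hedged usage sketch (argument values mirror the _create_lun call
# below and are illustrative): the request is retried until one of the
# expected messages appears in resp['message'] or the 300-second
# request_timeout elapses:
#
#     v = self.vmem_vip
#     resp = self._send_cmd(v.lun.create_lun, 'LUN create: success!',
#                           self.container, volume['id'], volume['size'],
#                           1, '0', '0', 'w', 1, 512, False, False, None)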
def _send_cmd_and_verify(self, request_func, verify_func,
request_success_msgs='', rargs=[], vargs=[]):
"""Run an XG request function, and verify success using an
additional verify function. If the verification fails, then
retry the request/verify cycle until both functions are
successful, the request function returns a failure message, or
the global request timeout is hit.
This wrapper is meant to deal with backend requests that can
fail for any variety of reasons, for instance, when the system
is already busy handling other LUN requests. It is also smart
enough to give up if clustering is down (eg no HA available),
there is no space left, or other "fatal" errors are returned
(see _fatal_error_code() for a list of all known error
conditions).
Arguments:
request_func -- XG api method to call
verify_func -- function to call to verify request was
completed successfully (eg for export)
request_success_msgs -- Success messages expected from the backend
for the request_func
*rargs -- argument array to be passed to the
request_func
*vargs -- argument array to be passed to the
verify_func
Returns:
The response dict from the last XG call.
"""
resp = {}
start = time.time()
request_needed = True
verify_needed = True
if isinstance(request_success_msgs, basestring):
request_success_msgs = [request_success_msgs]
while request_needed or verify_needed:
if time.time() - start >= self.request_timeout:
raise RequestRetryTimeout(timeout=self.request_timeout)
if request_needed:
resp = request_func(*rargs)
if not resp['message']:
# XG requests will return None for a message if no message
# string is passed in the raw response
resp['message'] = ''
for msg in request_success_msgs:
if not resp['code'] and msg in resp['message']:
# XG request func was completed
request_needed = False
break
self._fatal_error_code(resp)
elif verify_needed:
success = verify_func(*vargs)
if success:
# XG verify func was completed
verify_needed = False
else:
# try sending the request again
request_needed = True
return resp
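    # Usage sketch (XG method, message and argument lists illustrative):
    # pair an export request with a verification poll, re-sending the
    # request whenever the export state fails to appear on the gateways:
    #
    #     v = self.vmem_vip
    #     resp = self._send_cmd_and_verify(v.lun.export_lun,
    #                                      self._wait_for_exportstate,
    #                                      'Export LUN: success!',
    #                                      rargs=[self.container,
    #                                             volume['id'], 'all',
    #                                             igroup, lun_id],
    #                                      vargs=[volume['id'], True])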
def _get_igroup(self, volume, connector):
"""Gets the igroup that should be used when configuring a volume.
Arguments:
volume -- volume object used to determine the igroup name
Returns:
igroup_name -- name of igroup (for configuring targets &
initiators)
"""
v = self.vmem_vip
        # Use the connector's primary hostname as the name of the
        # igroup. The name must follow syntax rules
# required by the array: "must contain only alphanumeric
# characters, dashes, and underscores. The first character
# must be alphanumeric".
#
igroup_name = re.sub(r'[\W]', '_', connector['host'])
# verify that the igroup has been created on the backend, and
# if it doesn't exist, create it!
#
bn = "/vshare/config/igroup/%s" % igroup_name
resp = v.basic.get_node_values(bn)
if not len(resp):
v.igroup.create_igroup(igroup_name)
return igroup_name
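    # Example of the sanitization above: re.sub(r'[\W]', '_', ...) replaces
    # every character outside [a-zA-Z0-9_], so a typical FQDN such as
    #
    #     re.sub(r'[\W]', '_', 'compute-01.example.com')
    #
    # yields 'compute_01_example_com', which satisfies the array's naming
    # rules quoted above.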
def _get_volume_type_extra_spec(self, volume, spec_key):
"""Parse data stored in a volume_type's extra_specs table.
Code adapted from examples in
cinder/volume/drivers/solidfire.py and
cinder/openstack/common/scheduler/filters/capabilities_filter.py.
Arguments:
volume -- volume object containing volume_type to query
spec_key -- the metadata key to search for
Returns:
spec_value -- string value associated with spec_key
"""
spec_value = None
ctxt = context.get_admin_context()
typeid = volume['volume_type_id']
if typeid:
volume_type = volume_types.get_volume_type(ctxt, typeid)
volume_specs = volume_type.get('extra_specs')
for key, val in volume_specs.iteritems():
# Havana release altered extra_specs to require a
# prefix on all non-host-capability related extra
# specs, so that prefix is stripped here before
# checking the key.
#
if ':' in key:
scope = key.split(':')
key = scope[1]
if key == spec_key:
spec_value = val
break
return spec_value
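    # Example of the prefix stripping above (scope name illustrative): with
    # extra_specs of {'violin:dedup': 'True'}, the key is split on ':' and
    # only the second segment is kept, so calling
    # self._get_volume_type_extra_spec(volume, 'dedup') returns 'True'; an
    # unscoped key like 'dedup' is compared as-is.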
def _wait_for_exportstate(self, volume_name, state=False):
"""Polls backend to verify volume's export configuration.
        XG set/query requests that follow a request to create or delete
        a lun export may fail on the backend if vshared is still
        processing the export action (or times out). We can check
        whether it is done by polling the export binding for a lun to
        ensure it has been created or deleted.
This function will try to verify the creation or removal of
export state on both gateway nodes of the array every 5
seconds for up to 30 seconds.
Arguments:
volume_name -- name of volume to be polled
        state -- True to poll for existence, False to poll for absence
Returns:
True if the export state was correctly added or removed
(depending on 'state' param)
"""
status = [False, False]
mg_conns = [self.vmem_mga.basic, self.vmem_mgb.basic]
success = False
bn = "/vshare/config/export/container/%s/lun/%s" \
% (self.container, volume_name)
for i in xrange(6):
for node_id in xrange(2):
if not status[node_id]:
resp = mg_conns[node_id].get_node_values(bn)
if state and len(resp.keys()):
status[node_id] = True
elif (not state) and (not len(resp.keys())):
status[node_id] = True
if status[0] and status[1]:
success = True
break
else:
time.sleep(5)
return success
def _is_supported_vmos_version(self, version_string):
"""Check that the version of VMOS running on the gateways is
valid for use with the OpenStack drivers."""
for pattern in VMOS_SUPPORTED_VERSION_PATTERNS:
if re.match(pattern, version_string):
LOG.debug("Verified VMOS version %s is supported" %
version_string)
return True
return False
def _fatal_error_code(self, response):
"""Check the error code in a XG response for a fatal error,
and returns an appropriate exception. Error codes extracted
from vdmd_mgmt.c.
Arguments:
response -- a response dict result from an XG request
"""
        # known non-fatal response codes; these fall through without
        # raising, so the caller's retry loop tries again
        #
        retry_codes = {1024: 'lun deletion in progress, try again later',
                       14032: 'lc_err_lock_busy'}
if response['code'] == 14000:
# lc_generic_error
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14002:
# lc_err_assertion_failed
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14004:
# lc_err_not_found
raise ViolinBackendErrNotFound()
elif response['code'] == 14005:
# lc_err_exists
raise ViolinBackendErrExists()
elif response['code'] == 14008:
# lc_err_unexpected_arg
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14014:
# lc_err_io_error
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14016:
# lc_err_io_closed
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14017:
# lc_err_io_timeout
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14021:
# lc_err_unexpected_case
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14025:
# lc_err_no_fs_space
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14035:
# lc_err_range
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14036:
# lc_err_invalid_param
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 14121:
# lc_err_cancelled_err
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 512:
# Not enough free space in container (vdmd bug)
raise ViolinBackendErr(message=response['message'])
elif response['code'] == 1 and 'LUN ID conflict' \
in response['message']:
# lun id conflict while attempting to export
raise ViolinBackendErr(message=response['message'])
class LunIdList(object):
"""Tracks available lun_ids for use when exporting a new lun for the
first time. After instantiating a new LunIdList object, it should
    be updated (basically reconciling volume/snapshot lun ID allocation
    between the array and the corresponding OpenStack DB metadata).
After that, the object can be queried to capture the next
'available' lun ID for use with exporting a new volume or
snapshot. Only when the volume/snapshot is deleted entirely, the
lun ID should be freed.
    Lun IDs are monotonically increasing up to a max value of 16k,
after which the selection will loop around to lun ID 1 and will
continue to increment until an available ID is found.
"""
def __init__(self, db, *args, **kwargs):
self.max_lun_id = 16000
self.lun_id_list = [0] * self.max_lun_id
self.lun_id_list[0] = 1
self.prev_index = 1
self.free_index = 1
self.context = context.get_admin_context()
self.db = db
def update_from_volume_ids(self, id_list=[]):
"""Walk a list of volumes collected that the array knows about and
check for any saved lun_id metadata for each of those volumes to
fully sync the list. Note that the metadata keys are stored as
strings.
Arguments:
id_list -- array containing names of volumes that exist on the
backend (volume 'names' are UUIDs if they were made
via the VMEM driver API)
"""
for item in id_list:
try:
metadata = self.db.volume_metadata_get(self.context, item)
except exception.VolumeNotFound:
LOG.warn(_("No db state for lun %s, skipping lun_id update"),
item)
else:
if metadata and 'lun_id' in metadata:
index = int(metadata['lun_id'])
self.lun_id_list[index] = 1
LOG.debug("Set lun_id=%d for volume_id=%s" % (index, item))
self.update_free_index(index)
def update_from_snapshot_ids(self, id_list=[]):
"""Walk a list of snapshots collected that the array knows about and
check for any saved lun_id metadata for each of those snapshots to
fully sync the list. Note that the metadata keys are stored as
strings.
Arguments:
id_list -- array containing names of snapshots that exist on the
backend (snapshot 'names' are UUIDs if they were made
via the VMEM driver API)
"""
for item in id_list:
try:
metadata = self.db.snapshot_metadata_get(self.context, item)
except exception.SnapshotNotFound:
LOG.warn(_("No db state for snap %s, skipping lun_id update"),
item)
else:
if metadata and 'lun_id' in metadata:
index = int(metadata['lun_id'])
self.lun_id_list[index] = 1
LOG.debug("Set lun_id=%d for snapshot_id=%s" %
(index, item))
self.update_free_index(index)
def get_lun_id_for_volume(self, volume):
"""Allocate a free a lun ID to a volume and create a lun_id tag
in the volume's metadata.
Arguments:
volume -- the volume object to allocate a lun_id to
"""
metadata = self.db.volume_metadata_get(self.context, volume['id'])
if not metadata or 'lun_id' not in metadata:
metadata = {}
metadata['lun_id'] = self.get_next_lun_id_str()
self.db.volume_metadata_update(self.context, volume['id'],
metadata, False)
LOG.debug("Assigned lun_id %s to volume %s" %
(metadata['lun_id'], volume['id']))
return metadata['lun_id']
def get_lun_id_for_snapshot(self, snapshot):
"""Allocate a free a lun ID to a snapshot and create a lun_id tag
in the snapshot's metadata.
Arguments:
snapshot -- the snapshot object to allocate a lun_id to
"""
metadata = self.db.snapshot_metadata_get(self.context, snapshot['id'])
if not metadata or 'lun_id' not in metadata:
metadata = {}
metadata['lun_id'] = self.get_next_lun_id_str()
self.db.snapshot_metadata_update(self.context, snapshot['id'],
metadata, False)
LOG.debug("Assigned lun_id %s to volume %s" %
(metadata['lun_id'], snapshot['id']))
return metadata['lun_id']
def free_lun_id_for_volume(self, volume):
"""Remove the lun_id tag saved in the volume's metadata and
free the lun ID in the internal tracking array.
Arguments:
volume -- the volume object with a lun ID to be free'd
"""
metadata = self.db.volume_metadata_get(self.context, volume['id'])
if metadata and 'lun_id' in metadata:
self.free_lun_id_str(metadata['lun_id'])
def free_lun_id_for_snapshot(self, snapshot):
"""Remove the lun_id tag saved in the snapshot's metadata and
free the lun ID in the internal tracking array.
Arguments:
snapshot -- the snapshot object with a lun ID to be free'd
"""
metadata = self.db.snapshot_metadata_get(self.context, snapshot['id'])
if metadata and 'lun_id' in metadata:
self.free_lun_id_str(metadata['lun_id'])
def get_next_lun_id_str(self):
"""Mark the next available lun_id as allocated and return
it to the caller.
Returns:
        next_id -- the lun ID being allocated to the caller
"""
next_id = self.free_index
self.lun_id_list[next_id] = 1
self.update_free_index()
return str(next_id)
def free_lun_id_str(self, value_str):
"""Mark a lun_id as now available, as if the lun was de-allocated.
Arguments:
value_str -- lun ID to free (in string format)
"""
value = int(value_str)
self.lun_id_list[value] = 0
self.update_free_index()
def update_free_index(self, index=None):
"""Update the free index, monotonically increasing, and
looping back to 1 after the max lun ID value is hit.
Arguments:
        index -- assume that all values up to and including this number
                 may already be allocated, so start searching just above
                 it if it is higher than the free_index
"""
i = 0
count = 0
max_size = len(self.lun_id_list)
if index and index > self.free_index:
i = index + 1
else:
i = self.free_index
        # avoid possibility of IndexError
if i >= max_size:
i = 1
while self.lun_id_list[i] == 1 and count < max_size:
count += 1
i += 1
if i >= max_size:
i = 1
self.free_index = i
if count == max_size:
raise exception.Error("Cannot find free lun_id, giving up!")
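# Illustrative allocation sequence for LunIdList (db handle and volume
# object hypothetical):
#
#     tracker = LunIdList(db)
#     tracker.update_from_volume_ids(['<vol-uuid-1>', '<vol-uuid-2>'])
#     lun_id = tracker.get_lun_id_for_volume(volume)   # e.g. '1'
#     ...
#     tracker.free_lun_id_for_volume(volume)           # index reusable again
#
# free_index climbs monotonically toward max_lun_id (16000), wraps back to
# 1, and skips indexes still marked busy in lun_id_list.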
|
rlucio/cinder-violin-driver-icehouse
|
cinder/volume/drivers/violin/v6000_common.py
|
Python
|
apache-2.0
| 32,857 | 0.00003 |
#!/usr/bin/env python
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
PUBKEY_ADDRESS = 41
SCRIPT_ADDRESS = 5
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
'''Check vector v for validity'''
result = b58decode_chk(v)
if result is None:
return False
valid = False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
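# Sketch of a successful template match (vector contents illustrative): a
# mainnet pubkey vector decodes, checksum verified, to a one-byte prefix
# of PUBKEY_ADDRESS followed by a 20-byte payload, so for the template
# ((PUBKEY_ADDRESS,), 20, (), ...):
#
#     result = b58decode_chk(v)                        # prefix + payload
#     result.startswith(str(bytearray((PUBKEY_ADDRESS,))))   # True
#     len(result) - len(prefix) - len(suffix) == 20    # sizes match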
def gen_valid_vectors():
'''Generate valid test vectors'''
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
'''Generate possibly invalid vector'''
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
while True:
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
|
InsuraCoinDev/insuracoin
|
contrib/testgen/gen_base58_test_vectors.py
|
Python
|
mit
| 4,344 | 0.006906 |
sWordInfo = ReadValue('WordInfo')
if not sWordInfo: sWordInfo = ReadValue('DictionaryLookup')
sWordInfo = IniFormDialogInput('Input', 'Word', sWordInfo)
if not sWordInfo: Exit()
WriteValue('WordInfo', sWordInfo)
SayLine('Please wait')
# sAddress = 'http://api.wordnik.com/api/word.json/' + sWordInfo + '/definitions'
sAddress = 'http://api.wordnik.com/api-v2/word.json/' + sWordInfo + '/definitions'
dData = {'api_key' : '582e51dde9940b44120020cf4c80f871af2430e98b76dd698'}
sResponse = ''
try: sResponse = WebRequestGetToString(sAddress, dData)
except: SayLine('No information available!')
d = JsToPyObject(sResponse)
lDefinitions = d['definition']
sText = Pluralize('Definition', len(lDefinitions)) + ' of ' + sWordInfo + ' from wordnik.com\r\n\r\n'
# for dDefinition in lDefinitions: sText += dDefinition['headword'] + (', ' + dDefinition['partOfSpeech'] if dDefinition.has_key('partOfSpeech') else '') + '\r\n' + dDefinition['defTxtSummary'] + ('\r\n' + dDefinition.get('defTxtExtended', '')).rstrip() + '\r\n\r\n'
# for dDefinition in lDefinitions: sText += (dDefinition['partOfSpeech'] + '\r\n' if dDefinition.has_key('partOfSpeech') else '').lstrip() + dDefinition['defTxtSummary'] + ('\r\n' + dDefinition.get('defTxtExtended', '')).rstrip() + '\r\n\r\n'
for dDefinition in lDefinitions:
if dDefinition.has_key('partOfSpeech'): sText += dDefinition['partOfSpeech'] + '\r\n'
if dDefinition.has_key('defTxtSummary'): sText += dDefinition['defTxtSummary'] + '\r\n'
if dDefinition.has_key('defTxtExtended'): sText += dDefinition['defTxtExtended'] + '\r\n'
sText += '\r\n'
sDefinitions = sText.strip()
# sAddress = 'http://api.wordnik.com/api/word.json/' + sWordInfo + '/examples'
sAddress = 'http://api.wordnik.com/api-v2/word.json/' + sWordInfo + '/examples'
dData = {'api_key' : '582e51dde9940b44120020cf4c80f871af2430e98b76dd698'}
sResponse = WebRequestGetToString(sAddress, dData)
d = JsToPyObject(sResponse)
lExamples = d['example']
sText = Pluralize('Example', len(lExamples)) + ' of ' + sWordInfo + ' from wordnik.com\r\n\r\n'
# for dExample in lExamples: print dExample.keys()
for dExample in lExamples:
if dExample.has_key('year'): sText += dExample['year'] + '\r\n'
if dExample.has_key('title'): sText += dExample['title'] + '\r\n'
if dExample.has_key('url'): sText += dExample['url'] + '\r\n'
if dExample.has_key('display'): sText += dExample['display'] + '\r\n'
sText += '\r\n'
sExamples = sText.strip()
sApiKey = '20f06c210d4894da53dcb065925e0158'
# sAddress = 'http://words.bighugelabs.com/api/2/' + sApiKey + '/' + sWordInfo + ' + '/json'
sAddress = 'http://words.bighugelabs.com/api/2/' + sApiKey + '/' + sWordInfo + '/'
sResponse = WebRequestGetToString(sAddress)
sResponse = sResponse.replace('|syn|', '|synonym|')
sResponse = sResponse.replace('|ant|', '|antonym|')
sResponse = sResponse.replace('|rel|', '|related term|')
sResponse = sResponse.replace('|sim|', '|similar term|')
sResponse = sResponse.replace('|usr|', '|user suggestion|')
sResponse = sResponse.strip()
lResults = sResponse.split('\n')
sText = Pluralize('Result', len(lResults)) + ' from BigHugeLabs.com\r\n\r\n'
for sResult in lResults:
lParts = sResult.split('|')
sText += lParts[0] + ', ' + lParts[1] + '\r\n' + lParts[2] + '\r\n\r\n'
sText = (sDefinitions + '\r\n\r\n\r\n' + sExamples + '\r\n\r\n\r\n' + sText).strip()
SayLine('Loading results and opening web page')
sUrl = 'http://www.etymonline.com/index.php?search=' + sWordInfo.replace(' ', '+') + '&searchmode=none'
os.startfile(sUrl)
StringToFile(sText, sOutputFile)
|
jamalmazrui/InPy
|
WebClient_XtraWordInfo.py
|
Python
|
lgpl-3.0
| 3,600 | 0.012778 |
#!/usr/bin/python
# coding=utf-8
##########################################################################
from diamond.collector import Collector
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from mock import Mock
from etcdstat import EtcdCollector
try:
import simplejson as json
except ImportError:
import json
##########################################################################
class TestEtcdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('EtcdCollector', {
'interval': 10
})
self.collector = EtcdCollector(config, None)
def test_import(self):
self.assertTrue(EtcdCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_follower_data(self, publish_mock):
patch1_collector = patch.object(
EtcdCollector,
'get_self_metrics',
Mock(return_value=json.loads(
self.getFixture('follower-self-metrics.json').getvalue())))
patch2_collector = patch.object(
EtcdCollector,
'get_store_metrics',
Mock(return_value=json.loads(
self.getFixture('store-metrics2.json').getvalue())))
patch1_collector.start()
patch2_collector.start()
self.collector.collect()
patch2_collector.stop()
patch1_collector.stop()
metrics = {
'self.is_leader': 0,
'self.sendAppendRequestCnt': 0,
'self.recvAppendRequestCnt': 79367,
'self.recvPkgRate': 6.557436727874493,
'self.recvBandwidthRate': 527.021189819273,
'store.compareAndDeleteFail': 0,
'store.watchers': 0,
'store.setsFail': 12,
'store.createSuccess': 1294,
'store.compareAndSwapFail': 136,
'store.compareAndSwapSuccess': 4839,
'store.deleteSuccess': 6,
'store.updateSuccess': 2,
'store.createFail': 0,
'store.getsSuccess': 396632,
'store.expireCount': 0,
'store.deleteFail': 6,
'store.updateFail': 0,
'store.getsFail': 255837,
'store.compareAndDeleteSuccess': 1239,
'store.setsSuccess': 98571,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_leader_data(self, publish_mock):
patch1_collector = patch.object(
EtcdCollector,
'get_self_metrics',
Mock(return_value=json.loads(
self.getFixture('leader-self-metrics.json').getvalue())))
patch2_collector = patch.object(
EtcdCollector,
'get_store_metrics',
Mock(return_value=json.loads(
self.getFixture('store-metrics.json').getvalue())))
patch1_collector.start()
patch2_collector.start()
self.collector.collect()
patch2_collector.stop()
patch1_collector.stop()
metrics = {
'self.is_leader': 1,
'self.sendAppendRequestCnt': 2097127,
'self.recvAppendRequestCnt': 5870,
'self.sendPkgRate': 11.763588080610418,
'self.sendBandwidthRate': 901.0908469747579,
'store.compareAndDeleteFail': 0,
'store.watchers': 51,
'store.setsFail': 123,
'store.createSuccess': 6468,
'store.compareAndSwapFail': 355,
'store.compareAndSwapSuccess': 9156,
'store.deleteSuccess': 2468,
'store.updateSuccess': 4576,
'store.createFail': 2508,
'store.getsSuccess': 1685131,
'store.expireCount': 0,
'store.deleteFail': 2138,
'store.updateFail': 0,
'store.getsFail': 922428,
'store.compareAndDeleteSuccess': 2047,
'store.setsSuccess': 733,
}
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
##########################################################################
if __name__ == "__main__":
unittest.main()
|
MichaelDoyle/Diamond
|
src/collectors/etcdstat/test/test_etcdstat.py
|
Python
|
mit
| 4,410 | 0.000227 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
try:
import simplejson as json
except ImportError:
import json # NOQA
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.compute.base import NodeImage
from libcloud.compute.drivers.digitalocean import DigitalOceanNodeDriver
from libcloud.test import LibcloudTestCase, MockHttpTestCase
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.secrets import DIGITAL_OCEAN_PARAMS
# class DigitalOceanTests(unittest.TestCase, TestCaseMixin):
class DigitalOceanTests(LibcloudTestCase):
def setUp(self):
DigitalOceanNodeDriver.connectionCls.conn_classes = \
(None, DigitalOceanMockHttp)
DigitalOceanMockHttp.type = None
self.driver = DigitalOceanNodeDriver(*DIGITAL_OCEAN_PARAMS)
def test_authentication(self):
DigitalOceanMockHttp.type = 'UNAUTHORIZED_CLIENT'
self.assertRaises(InvalidCredsError, self.driver.list_nodes)
def test_list_images_success(self):
images = self.driver.list_images()
self.assertTrue(len(images) >= 1)
image = images[0]
self.assertTrue(image.id is not None)
self.assertTrue(image.name is not None)
def test_list_sizes_success(self):
sizes = self.driver.list_sizes()
self.assertTrue(len(sizes) >= 1)
size = sizes[0]
self.assertTrue(size.id is not None)
self.assertEqual(size.name, '512MB')
self.assertEqual(size.ram, 512)
size = sizes[4]
self.assertTrue(size.id is not None)
self.assertEqual(size.name, '8GB')
self.assertEqual(size.ram, 8 * 1024)
def test_list_locations_success(self):
locations = self.driver.list_locations()
self.assertTrue(len(locations) >= 1)
location = locations[0]
self.assertEqual(location.id, '1')
self.assertEqual(location.name, 'New York 1')
def test_list_nodes_success(self):
nodes = self.driver.list_nodes()
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].name, 'test-2')
self.assertEqual(nodes[0].public_ips, [])
def test_create_node_invalid_size(self):
image = NodeImage(id='invalid', name=None, driver=self.driver)
size = self.driver.list_sizes()[0]
location = self.driver.list_locations()[0]
DigitalOceanMockHttp.type = 'INVALID_IMAGE'
expected_msg = r'You specified an invalid image for Droplet creation. \(code: 404\)'
self.assertRaisesRegexp(Exception, expected_msg,
self.driver.create_node,
name='test', size=size, image=image,
location=location)
def test_reboot_node_success(self):
node = self.driver.list_nodes()[0]
result = self.driver.reboot_node(node)
self.assertTrue(result)
def test_destroy_node_success(self):
node = self.driver.list_nodes()[0]
result = self.driver.destroy_node(node)
self.assertTrue(result)
def test_ex_rename_node_success(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_rename_node(node, 'fedora helios')
self.assertTrue(result)
def test_ex_list_ssh_keys(self):
keys = self.driver.ex_list_ssh_keys()
self.assertEqual(len(keys), 1)
self.assertEqual(keys[0].id, 7717)
self.assertEqual(keys[0].name, 'test1')
self.assertEqual(keys[0].pub_key, None)
def test_ex_destroy_ssh_key(self):
key = self.driver.ex_list_ssh_keys()[0]
result = self.driver.ex_destroy_ssh_key(key.id)
self.assertTrue(result)
class DigitalOceanMockHttp(MockHttpTestCase):
fixtures = ComputeFileFixtures('digitalocean')
def _regions(self, method, url, body, headers):
body = self.fixtures.load('list_locations.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _images(self, method, url, body, headers):
body = self.fixtures.load('list_images.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _sizes(self, method, url, body, headers):
body = self.fixtures.load('list_sizes.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _droplets(self, method, url, body, headers):
body = self.fixtures.load('list_nodes.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _droplets_new_INVALID_IMAGE(self, method, url, body, headers):
# reboot_node
body = self.fixtures.load('error_invalid_image.json')
return (httplib.NOT_FOUND, body, {}, httplib.responses[httplib.NOT_FOUND])
def _droplets_119461_reboot(self, method, url, body, headers):
# reboot_node
body = self.fixtures.load('reboot_node.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _droplets_119461_destroy(self, method, url, body, headers):
# destroy_node
self.assertUrlContainsQueryParams(url, {'scrub_data': '1'})
body = self.fixtures.load('destroy_node.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _droplets_119461_rename(self, method, url, body, headers):
# reboot_node
self.assertUrlContainsQueryParams(url, {'name': 'fedora helios'})
body = self.fixtures.load('ex_rename_node.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _ssh_keys(self, method, url, body, headers):
body = self.fixtures.load('ex_list_ssh_keys.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _ssh_keys_7717_destroy(self, method, url, body, headers):
# destroy_ssh_key
body = self.fixtures.load('ex_destroy_ssh_key.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _droplets_UNAUTHORIZED_CLIENT(self, method, url, body, headers):
body = self.fixtures.load('error.txt')
return (httplib.FOUND, body, {}, httplib.responses[httplib.FOUND])
if __name__ == '__main__':
sys.exit(unittest.main())
|
elastacloud/libcloud
|
libcloud/test/compute/test_digitalocean.py
|
Python
|
apache-2.0
| 6,980 | 0.00043 |
# Copyright 2015 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from manila.cmd import api as manila_api
from manila import test
from manila import version
CONF = manila_api.CONF
class ManilaCmdApiTestCase(test.TestCase):
def setUp(self):
super(ManilaCmdApiTestCase, self).setUp()
sys.argv = ['manila-api']
def test_main(self):
self.mock_object(manila_api.log, 'setup')
self.mock_object(manila_api.log, 'register_options')
self.mock_object(manila_api.utils, 'monkey_patch')
self.mock_object(manila_api.service, 'process_launcher')
self.mock_object(manila_api.service, 'WSGIService')
manila_api.main()
process_launcher = manila_api.service.process_launcher
process_launcher.assert_called_once_with()
self.assertTrue(process_launcher.return_value.launch_service.called)
self.assertTrue(process_launcher.return_value.wait.called)
self.assertEqual('manila', CONF.project)
self.assertEqual(version.version_string(), CONF.version)
manila_api.log.setup.assert_called_once_with(CONF, "manila")
manila_api.log.register_options.assert_called_once_with(CONF)
manila_api.utils.monkey_patch.assert_called_once_with()
manila_api.service.WSGIService.assert_called_once_with('osapi_share')
|
bswartz/manila
|
manila/tests/cmd/test_api.py
|
Python
|
apache-2.0
| 1,877 | 0 |
#!/bin/python
#author: tobias mueller 13.6.13
#byteplay test
from sys import version_info
from dis import dis
if version_info.major == 3:
    if version_info.minor < 6:
        from byteplay import *
    else:
        from wbyteplay import *
else:
    from byteplay2 import *
from pprint import pprint
def f(a, b):
res = a + b
return res
def g(a, b):
res = a + b if a < b else b + a
r = 0
for a in range(res):
r += 1
return r or 2
for x in (f, g):
#get byte code for f
c = Code.from_code(x.__code__)
pprint(c.code)
#generate byte code
cnew = c.to_code()
x.__code__ = cnew
dis(x)
print(x(3,5))
|
serprex/byteplay
|
sanitytest.py
|
Python
|
lgpl-2.1
| 640 | 0.023438 |
#!/usr/bin/env python
from __future__ import print_function
import json, sys
from common_paths import *
def assert_non_empty_string(obj, field):
assert field in obj, 'Missing field "%s"' % field
assert isinstance(obj[field], basestring), \
'Field "%s" must be a string' % field
assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field
def assert_non_empty_list(obj, field):
assert isinstance(obj[field], list), \
'%s must be a list' % field
assert len(obj[field]) > 0, \
'%s list must not be empty' % field
def assert_non_empty_dict(obj, field):
assert isinstance(obj[field], dict), \
'%s must be a dict' % field
assert len(obj[field]) > 0, \
'%s dict must not be empty' % field
def assert_contains(obj, field):
assert field in obj, 'Must contain field "%s"' % field
def assert_string_from(obj, field, items):
assert obj[field] in items, \
'Field "%s" must be from: %s' % (field, str(items))
def assert_string_or_list_items_from(obj, field, items):
if isinstance(obj[field], basestring):
assert_string_from(obj, field, items)
return
assert isinstance(obj[field], list), "%s must be a list!" % field
for allowed_value in obj[field]:
assert allowed_value != '*', "Wildcard is not supported for lists!"
assert allowed_value in items, \
'Field "%s" must be from: %s' % (field, str(items))
def assert_contains_only_fields(obj, expected_fields):
for expected_field in expected_fields:
assert_contains(obj, expected_field)
for actual_field in obj:
assert actual_field in expected_fields, \
'Unexpected field "%s".' % actual_field
def assert_value_unique_in(value, used_values):
assert value not in used_values, 'Duplicate value "%s"!' % str(value)
used_values[value] = True
def assert_valid_artifact(exp_pattern, artifact_key, schema):
if isinstance(schema, list):
assert_string_or_list_items_from(exp_pattern, artifact_key,
["*"] + schema)
return
for sub_artifact_key, sub_schema in schema.iteritems():
assert_valid_artifact(exp_pattern[artifact_key], sub_artifact_key,
sub_schema)
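# Example of the recursive walk above (schema values illustrative, not the
# real test_expansion_schema): with
#
#     schema = {'redirection': ['no-redirect', 'keep-scheme-redirect']}
#
# assert_valid_artifact(spec_exp, 'redirection', schema['redirection'])
# takes the list branch and accepts '*', one listed string, or a list
# drawn from the listed strings; a dict-valued schema instead recurses one
# level per sub-key.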
def validate(spec_json, details):
""" Validates the json specification for generating tests. """
details['object'] = spec_json
assert_contains_only_fields(spec_json, ["specification",
"test_expansion_schema",
"excluded_tests"])
assert_non_empty_list(spec_json, "specification")
assert_non_empty_dict(spec_json, "test_expansion_schema")
assert_non_empty_list(spec_json, "excluded_tests")
specification = spec_json['specification']
test_expansion_schema = spec_json['test_expansion_schema']
excluded_tests = spec_json['excluded_tests']
valid_test_expansion_fields = ['name'] + test_expansion_schema.keys()
# Validate each single spec.
for spec in specification:
details['object'] = spec
# Validate required fields for a single spec.
assert_contains_only_fields(spec, ['name',
'title',
'description',
'specification_url',
'test_expansion'])
assert_non_empty_string(spec, 'name')
assert_non_empty_string(spec, 'title')
assert_non_empty_string(spec, 'description')
assert_non_empty_string(spec, 'specification_url')
assert_non_empty_list(spec, 'test_expansion')
# Validate spec's test expansion.
used_spec_names = {}
for spec_exp in spec['test_expansion']:
details['object'] = spec_exp
assert_non_empty_string(spec_exp, 'name')
# The name is unique in same expansion group.
assert_value_unique_in((spec_exp['expansion'], spec_exp['name']),
used_spec_names)
assert_contains_only_fields(spec_exp, valid_test_expansion_fields)
for artifact in test_expansion_schema:
details['test_expansion_field'] = artifact
assert_valid_artifact(spec_exp, artifact,
test_expansion_schema[artifact])
del details['test_expansion_field']
# Validate the test_expansion schema members.
details['object'] = test_expansion_schema
assert_contains_only_fields(test_expansion_schema, ['expansion',
'source_scheme',
'opt_in_method',
'context_nesting',
'redirection',
'subresource',
'origin',
'expectation'])
# Validate excluded tests.
details['object'] = excluded_tests
for excluded_test_expansion in excluded_tests:
assert_contains_only_fields(excluded_test_expansion,
valid_test_expansion_fields)
del details['object']
def assert_valid_spec_json(spec_json):
error_details = {}
try:
validate(spec_json, error_details)
except AssertionError as err:
print('ERROR:', err.message)
print(json.dumps(error_details, indent=4))
sys.exit(1)
def main():
    spec_json = load_spec_json()
assert_valid_spec_json(spec_json)
print("Spec JSON is valid.")
if __name__ == '__main__':
main()
|
Varentsov/servo
|
tests/wpt/web-platform-tests/mixed-content/generic/tools/spec_validator.py
|
Python
|
mpl-2.0
| 5,868 | 0.001363 |
# -*- coding: utf-8 -*-
import logging
import commands
import simplejson
import os
import os.path
import openerp
import time
import random
import subprocess
import simplejson
import werkzeug
import werkzeug.wrappers
_logger = logging.getLogger(__name__)
from openerp import http
from openerp.http import request
# Those are the builtin raspberry pi USB modules, they should
# not appear in the list of connected devices.
BANNED_DEVICES = set([
"0424:9514", # Standard Microsystem Corp. Builtin Ethernet module
"1d6b:0002", # Linux Foundation 2.0 root hub
"0424:ec00", # Standard Microsystem Corp. Other Builtin Ethernet module
])
# drivers modules must add to drivers an object with a get_status() method
# so that 'status' can return the status of all active drivers
drivers = {}
class Proxy(http.Controller):
def get_status(self):
statuses = {}
for driver in drivers:
statuses[driver] = drivers[driver].get_status()
return statuses
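    # Sketch of how a hardware driver plugs in (driver name and class
    # hypothetical): a driver module adds an object exposing get_status()
    # to the module-level 'drivers' dict, after which it appears in
    # /hw_proxy/status and /hw_proxy/status_json:
    #
    #     from openerp.addons.hw_proxy.controllers import main
    #
    #     class ScaleDriver(object):
    #         def get_status(self):
    #             return {'status': 'connected', 'messages': []}
    #
    #     main.drivers['scale'] = ScaleDriver()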
@http.route('/hw_proxy/hello', type='http', auth='none', cors='*')
def hello(self):
return "ping"
@http.route('/hw_proxy/handshake', type='json', auth='none', cors='*')
def handshake(self):
return True
@http.route('/hw_proxy/status', type='http', auth='none', cors='*')
def status_http(self):
resp = """
<!DOCTYPE HTML>
<html>
<head>
<title>Odoo's PosBox</title>
<style>
body {
width: 480px;
margin: 60px auto;
font-family: sans-serif;
text-align: justify;
color: #6B6B6B;
}
.device {
border-bottom: solid 1px rgb(216,216,216);
padding: 9px;
}
.device:nth-child(2n) {
background:rgb(240,240,240);
}
</style>
</head>
<body>
<h1>Hardware Status</h1>
<p>The list of enabled drivers and their status</p>
"""
statuses = self.get_status()
for driver in statuses:
status = statuses[driver]
if status['status'] == 'connecting':
color = 'black'
elif status['status'] == 'connected':
color = 'green'
else:
color = 'red'
resp += "<h3 style='color:"+color+";'>"+driver+' : '+status['status']+"</h3>\n"
resp += "<ul>\n"
for msg in status['messages']:
resp += '<li>'+msg+'</li>\n'
resp += "</ul>\n"
resp += """
<h2>Connected Devices</h2>
<p>The list of connected USB devices as seen by the posbox</p>
"""
devices = commands.getoutput("lsusb").split('\n')
count = 0
resp += "<div class='devices'>\n"
for device in devices:
device_name = device[device.find('ID')+2:]
device_id = device_name.split()[0]
if not (device_id in BANNED_DEVICES):
resp+= "<div class='device' data-device='"+device+"'>"+device_name+"</div>\n"
count += 1
if count == 0:
resp += "<div class='device'>No USB Device Found</div>"
resp += "</div>\n"
resp += """
<h2>Add New Printer</h2>
<p>
Copy and paste your printer's device description in the form below. You can find
your printer's description in the device list above. If you find that your printer works
        well, please send your printer's description to <a href='mailto:support@odoo.com'>
        support@odoo.com</a> so that we can add it to the default list of supported devices.
</p>
<form action='/hw_proxy/escpos/add_supported_device' method='GET'>
<input type='text' style='width:400px' name='device_string' placeholder='123a:b456 Sample Device description' />
<input type='submit' value='submit' />
</form>
<h2>Reset To Defaults</h2>
<p>If the added devices cause problems, you can <a href='/hw_proxy/escpos/reset_supported_devices'>Reset the
device list to factory default.</a> This operation cannot be undone.</p>
"""
resp += "</body>\n</html>\n\n"
return request.make_response(resp,{
'Cache-Control': 'no-cache',
'Content-Type': 'text/html; charset=utf-8',
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
})
@http.route('/hw_proxy/status_json', type='json', auth='none', cors='*')
def status_json(self):
return self.get_status()
@http.route('/hw_proxy/scan_item_success', type='json', auth='none', cors='*')
def scan_item_success(self, ean):
"""
A product has been scanned with success
"""
print 'scan_item_success: ' + str(ean)
@http.route('/hw_proxy/scan_item_error_unrecognized', type='json', auth='none', cors='*')
def scan_item_error_unrecognized(self, ean):
"""
A product has been scanned without success
"""
print 'scan_item_error_unrecognized: ' + str(ean)
@http.route('/hw_proxy/help_needed', type='json', auth='none', cors='*')
def help_needed(self):
"""
The user wants an help (ex: light is on)
"""
print "help_needed"
@http.route('/hw_proxy/help_canceled', type='json', auth='none', cors='*')
def help_canceled(self):
"""
The user stops the help request
"""
print "help_canceled"
@http.route('/hw_proxy/payment_request', type='json', auth='none', cors='*')
def payment_request(self, price):
"""
The PoS will activate the method payment
"""
print "payment_request: price:"+str(price)
return 'ok'
@http.route('/hw_proxy/payment_status', type='json', auth='none', cors='*')
def payment_status(self):
print "payment_status"
return { 'status':'waiting' }
@http.route('/hw_proxy/payment_cancel', type='json', auth='none', cors='*')
def payment_cancel(self):
print "payment_cancel"
@http.route('/hw_proxy/transaction_start', type='json', auth='none', cors='*')
def transaction_start(self):
print 'transaction_start'
@http.route('/hw_proxy/transaction_end', type='json', auth='none', cors='*')
def transaction_end(self):
print 'transaction_end'
@http.route('/hw_proxy/cashier_mode_activated', type='json', auth='none', cors='*')
def cashier_mode_activated(self):
print 'cashier_mode_activated'
@http.route('/hw_proxy/cashier_mode_deactivated', type='json', auth='none', cors='*')
def cashier_mode_deactivated(self):
print 'cashier_mode_deactivated'
@http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*')
def open_cashbox(self):
print 'open_cashbox'
@http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*')
def print_receipt(self, receipt):
print 'print_receipt' + str(receipt)
@http.route('/hw_proxy/is_scanner_connected', type='json', auth='none', cors='*')
def is_scanner_connected(self, receipt):
print 'is_scanner_connected?'
return False
@http.route('/hw_proxy/scanner', type='json', auth='none', cors='*')
def scanner(self, receipt):
print 'scanner'
time.sleep(10)
return ''
@http.route('/hw_proxy/log', type='json', auth='none', cors='*')
def log(self, arguments):
_logger.info(' '.join(str(v) for v in arguments))
@http.route('/hw_proxy/print_pdf_invoice', type='json', auth='none', cors='*')
def print_pdf_invoice(self, pdfinvoice):
print 'print_pdf_invoice' + str(pdfinvoice)
|
exploreodoo/datStruct
|
odoo/addons/hw_proxy/controllers/main.py
|
Python
|
gpl-2.0
| 7,800 | 0.005769 |
from system.core.controller import *
from rauth import OAuth2Service
from flask import redirect, request
import urllib2
import json
from time import strftime
facebook = OAuth2Service(
name='facebook',
base_url='https://graph.facebook.com/',
access_token_url='/oauth/access_token',
authorize_url='https://www.facebook.com/dialog/oauth',
client_id='259154491127882',
client_secret='c5b9a2e1e25bfa25abc75a9cd2af450a',
)
app_id = "259154491127882"
redirect_uri = 'http://localhost:5000/'
params = {
'scope': 'read_stream',
'response_type': 'code',
'redirect_uri': redirect_uri
}
url = facebook.get_authorize_url(**params)
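# The rauth setup above builds an authorize URL for Facebook's OAuth
# dialog; login_process() below redirects the browser there (with the
# /oauth-authorized/ callback), Facebook returns with ?code=..., and
# oauth_authorized() exchanges that code for an access token before
# fetching the profile via fb_session.get('/me?...').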
class Users(Controller):
def __init__(self, action):
super(Users, self).__init__(action)
self.load_model('User')
self.load_model('Event')
self.db = self._app.db
# routes['/'] = "Users#index"
def index(self):
if 'user' in session:
user_rides = self.models['Event'].get_events_by_user(session['user']['user_info']['user_id'])
if user_rides['status']:
for ride in user_rides['events']:
result = self.models['Event'].get_users_by_ride(ride['ride_id'])
ride['users_in_ride'] = result['users']
ride['ride_date'] = ride['ride_date'].strftime('%x')
return self.load_view('index.html', user_rides=user_rides['events'])
return self.load_view('index.html')
# routes['/login'] = "Users#login"
def login(self):
if 'user' in session:
return redirect('/')
return self.load_view('login.html')
# routes['/logout'] = "Users#logout"
def logout(self):
if 'user' in session:
flash('You have successfully logged out', 'success')
session.clear()
return redirect('/')
# routes['/user/<user_id>'] = "Users#show_user"
def show_user(self, user_id):
if 'user' in session:
user = self.models['User'].get_user(user_id)
if user['status']:
return self.load_view('user.html', user=user['user'])
return redirect('/')
return redirect('/')
# routes['/user/inbox'] = "Users#show_inbox"
def show_inbox(self):
if 'user' in session:
return self.load_view('inbox.html')
return redirect('/')
# routes['POST']['/login/process'] = "Users#login_process"
def login_process(self):
if 'user' in session:
return redirect('/')
return redirect("https://www.facebook.com/dialog/oauth?client_id="+app_id+"&redirect_uri=http://localhost:5000/oauth-authorized/")
def oauth_authorized(self):
code = request.args.get('code')
json_str = urllib2.urlopen("https://graph.facebook.com/v2.3/oauth/access_token?client_id=" +
app_id + "&redirect_uri=http://localhost:5000/oauth-authorized/&client_secret"
"=c5b9a2e1e25bfa25abc75a9cd2af450a&code=" + code).read()
token = json.loads(json_str)
token = token['access_token']
fb_session = facebook.get_session(token)
register_data = fb_session.get('/me?fields=id,first_name,last_name,email', params={'format': 'json'}).json()
user_picture = fb_session.get('/me?fields=picture', params={'format': 'json'}).json()
if register_data:
user = self.models['User'].add_user(register_data)
if user['status']:
# just registered
session['new_user'] = True
# already registered
session['user'] = register_data
session['user']['user_info'] = self.models['User'].get_user_by_fbid(session['user']['id'])['user']
session['user']['picture'] = user_picture['picture']['data']['url']
session['rides_in'] = []
return redirect('/')
def register_process(self):
user = self.models['User'].register(request.form, session['user']['id'])
if user['status']:
# they were updated
session.pop('new_user')
session['user']['city'] = user['user']['city']
return redirect('/')
# they weren't updated
flash('There was a problem with your inputs, please try again')
return redirect('/')
|
RydrDojo/Ridr_app
|
app/controllers/Users.py
|
Python
|
mit
| 4,431 | 0.00316 |
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Documents"),
"icon": "icon-star",
"items": [
{
"type": "doctype",
"name": "Employee",
"description": _("Employee records."),
},
{
"type": "doctype",
"name": "Leave Application",
"description": _("Applications for leave."),
},
{
"type": "doctype",
"name": "Expense Claim",
"description": _("Claims for company expense."),
},
{
"type": "doctype",
"name": "Attendance",
"description": _("Attendance record."),
},
{
"type": "doctype",
"name": "Salary Slip",
"description": _("Monthly salary statement."),
},
{
"type": "doctype",
"name": "Appraisal",
"description": _("Performance appraisal."),
},
{
"type": "doctype",
"name": "Job Applicant",
"description": _("Applicant for a Job."),
},
{
"type": "doctype",
"name": "Job Opening",
"description": _("Opening for a Job."),
},
{
"type": "doctype",
"name": "Offer Letter",
"description": _("Offer candidate a Job."),
},
]
},
{
"label": _("Tools"),
"icon": "icon-wrench",
"items": [
{
"type": "doctype",
"name": "Salary Manager",
"label": _("Process Payroll"),
"description":_("Generate Salary Slips"),
"hide_count": True
},
{
"type": "doctype",
"name": "Timesheet Report",
"label": _("Timesheet Report"),
"description":_("Attendance Report"),
"hide_count": True
},
{
"type": "doctype",
"name": "Upload Attendance",
"description":_("Upload attendance from a .csv file"),
"hide_count": True
},
{
"type": "doctype",
"name": "Leave Control Panel",
"label": _("Leave Allocation Tool"),
"description":_("Allocate leaves for the year."),
"hide_count": True
},
]
},
{
"label": _("Setup"),
"icon": "icon-cog",
"items": [
{
"type": "doctype",
"name": "HR Settings",
"description": _("Settings for HR Module")
},
{
"type": "doctype",
"name": "Employment Type",
"description": _("Types of employment (permanent, contract, intern etc.).")
},
{
"type": "doctype",
"name": "Branch",
"description": _("Organization branch master.")
},
{
"type": "doctype",
"name": "Department",
"description": _("Organization unit (department) master.")
},
{
"type": "doctype",
"name": "Designation",
"description": _("Employee designation (e.g. CEO, Director etc.).")
},
{
"type": "doctype",
"name": "Salary Structure",
"description": _("Salary template master.")
},
{
"type": "doctype",
"name": "Earning Type",
"description": _("Salary components.")
},
{
"type": "doctype",
"name": "Deduction Type",
"description": _("Tax and other salary deductions.")
},
{
"type": "doctype",
"name": "Leave Allocation",
"description": _("Allocate leaves for a period.")
},
{
"type": "doctype",
"name":"Leave Type",
"description": _("Type of leaves like casual, sick etc."),
},
{
"type": "doctype",
"name": "Holiday List",
"description": _("Holiday master.")
},
{
"type": "doctype",
"name": "Leave Block List",
"description": _("Block leave applications by department.")
},
{
"type": "doctype",
"name": "Appraisal Template",
"description": _("Template for performance appraisals.")
},
{
"type": "doctype",
"name": "Expense Claim Type",
"description": _("Types of Expense Claim.")
},
{
"type": "doctype",
"name": "Email Account",
"description": _("Setup incoming server for jobs email id. (e.g. jobs@example.com)")
},
]
},
{
"label": _("Standard Reports"),
"icon": "icon-list",
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Employee Leave Balance",
"doctype": "Leave Application"
},
{
"type": "report",
"is_query_report": True,
"name": "Employee Birthday",
"doctype": "Employee"
},
{
"type": "report",
"name": "Employee Information",
"doctype": "Employee"
},
{
"type": "report",
"is_query_report": True,
"name": "Monthly Salary Register",
"doctype": "Salary Slip"
},
{
"type": "report",
"is_query_report": True,
"name": "Monthly Attendance Sheet",
"doctype": "Attendance"
},
]
},
]
|
indictranstech/tele-erpnext
|
erpnext/config/hr.py
|
Python
|
agpl-3.0
| 4,677 | 0.044901 |
#!/usr/bin/python
# $Id:$
import ctypes
import queue
import pyglet
from pyglet.window.win32 import kernel32, user32, constants, types
class MTWin32EventLoop(pyglet.app.win32.Win32EventLoop):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Force immediate creation of an event queue on this thread
msg = types.MSG()
user32.PeekMessageW(ctypes.byref(msg), 0,
constants.WM_USER, constants.WM_USER,
constants.PM_NOREMOVE)
self._event_thread = kernel32.GetCurrentThreadId()
self._post_event_queue = queue.Queue()
def post_event(self, dispatcher, event, *args):
self._post_event_queue.put((dispatcher, event, args))
# Nudge the event loop with a message it will discard
user32.PostThreadMessageW(self._event_thread, constants.WM_USER, 0, 0)
def _dispatch_posted_events(self):
# Dispatch (synchronised) queued events
while True:
try:
dispatcher, event, args = self._post_event_queue.get(False)
except queue.Empty:
break
dispatcher.dispatch_event(event, *args)
def run(self):
self._setup()
self._timer_proc = types.TIMERPROC(self._timer_func)
self._timer = timer = user32.SetTimer(0, 0, 0, self._timer_proc)
self._polling = False
self._allow_polling = False
msg = types.MSG()
self.dispatch_event('on_enter')
self._dispatch_posted_events()
while not self.has_exit:
if self._polling:
while user32.PeekMessageW(ctypes.byref(msg),
0, 0, 0, constants.PM_REMOVE):
user32.TranslateMessage(ctypes.byref(msg))
user32.DispatchMessageW(ctypes.byref(msg))
self._timer_func(0, 0, timer, 0)
else:
user32.GetMessageW(ctypes.byref(msg), 0, 0, 0)
user32.TranslateMessage(ctypes.byref(msg))
user32.DispatchMessageW(ctypes.byref(msg))
# Manual idle event
msg_types = \
user32.GetQueueStatus(constants.QS_ALLINPUT) & 0xffff0000
if (msg.message != constants.WM_TIMER and
not msg_types & ~(constants.QS_TIMER << 16)):
self._timer_func(0, 0, timer, 0)
self._dispatch_posted_events()
self.dispatch_event('on_exit')
pyglet.app.EventLoop = MTWin32EventLoop
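# Usage sketch (window/event names illustrative; needs 'import threading'):
# post_event() may be called from a worker thread, and the
# PostThreadMessageW nudge wakes the blocking GetMessageW call so the
# queued event is dispatched on the event-loop thread:
#
#     def worker():
#         pyglet.app.event_loop.post_event(window, 'on_progress', 0.5)
#
#     threading.Thread(target=worker).start()
#     pyglet.app.run()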
|
bitcraft/pyglet
|
contrib/experimental/mt_media/mt_app_win32.py
|
Python
|
bsd-3-clause
| 2,590 | 0.001158 |
from django.apps import AppConfig
class DesarrollowebConfig(AppConfig):
name = 'desarrolloweb'
|
z1gm4/desarrollo_web_udp
|
desarrolloweb/apps.py
|
Python
|
gpl-3.0
| 101 | 0 |
from __future__ import unicode_literals
from django.core.exceptions import PermissionDenied
from django.core.mail import send_mail
from django.core import validators
from django.db import models
from django.db.models.manager import EmptyManager
from django.utils.crypto import get_random_string, salted_hmac
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib import auth
from django.contrib.auth.hashers import (
check_password, make_password, is_password_usable)
from django.contrib.auth.signals import user_logged_in
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import python_2_unicode_compatible
def update_last_login(sender, user, **kwargs):
"""
A signal receiver which updates the last_login date for
the user logging in.
"""
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
user_logged_in.connect(update_last_login)
class PermissionManager(models.Manager):
def get_by_natural_key(self, codename, app_label, model):
return self.get(
codename=codename,
content_type=ContentType.objects.db_manager(self.db).get_by_natural_key(app_label, model),
)
@python_2_unicode_compatible
class Permission(models.Model):
"""
The permissions system provides a way to assign permissions to specific
users and groups of users.
The permission system is used by the Django admin site, but may also be
useful in your own code. The Django admin site uses permissions as follows:
- The "add" permission limits the user's ability to view the "add" form
and add an object.
- The "change" permission limits a user's ability to view the change
list, view the "change" form and change an object.
- The "delete" permission limits the ability to delete an object.
Permissions are set globally per type of object, not per specific object
instance. It is possible to say "Mary may change news stories," but it's
not currently possible to say "Mary may change news stories, but only the
ones she created herself" or "Mary may only change news stories that have a
certain status or publication date."
Three basic permissions -- add, change and delete -- are automatically
created for each Django model.
"""
name = models.CharField(_('name'), max_length=255)
content_type = models.ForeignKey(ContentType)
codename = models.CharField(_('codename'), max_length=100)
objects = PermissionManager()
class Meta:
verbose_name = _('permission')
verbose_name_plural = _('permissions')
unique_together = (('content_type', 'codename'),)
ordering = ('content_type__app_label', 'content_type__model',
'codename')
def __str__(self):
return "%s | %s | %s" % (
six.text_type(self.content_type.app_label),
six.text_type(self.content_type),
six.text_type(self.name))
def natural_key(self):
return (self.codename,) + self.content_type.natural_key()
natural_key.dependencies = ['contenttypes.contenttype']
class GroupManager(models.Manager):
"""
The manager for the auth's Group model.
"""
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Group(models.Model):
"""
Groups are a generic way of categorizing users to apply permissions, or
some other label, to those users. A user can belong to any number of
groups.
A user in a group automatically has all the permissions granted to that
group. For example, if the group Site editors has the permission
can_edit_home_page, any user in that group will have that permission.
Beyond permissions, groups are a convenient way to categorize users to
apply some label, or extended functionality, to them. For example, you
could create a group 'Special users', and you could write code that would
do special things to those users -- such as giving them access to a
members-only portion of your site, or sending them members-only email
messages.
"""
name = models.CharField(_('name'), max_length=80, unique=True)
permissions = models.ManyToManyField(Permission,
verbose_name=_('permissions'), blank=True)
objects = GroupManager()
class Meta:
verbose_name = _('group')
verbose_name_plural = _('groups')
def __str__(self):
return self.name
def natural_key(self):
return (self.name,)
class BaseUserManager(models.Manager):
@classmethod
def normalize_email(cls, email):
"""
Normalize the address by lowercasing the domain part of the email
address.
"""
email = email or ''
try:
email_name, domain_part = email.strip().rsplit('@', 1)
except ValueError:
pass
else:
email = '@'.join([email_name, domain_part.lower()])
return email
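    # Example of the normalization above: only the domain part is
    # lowercased; the local part is preserved as given:
    #
    #     BaseUserManager.normalize_email('Jane.Doe@EXAMPLE.COM')
    #     # -> 'Jane.Doe@example.com'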
def make_random_password(self, length=10,
allowed_chars='abcdefghjkmnpqrstuvwxyz'
'ABCDEFGHJKLMNPQRSTUVWXYZ'
'23456789'):
"""
Generates a random password with the given length and given
allowed_chars. Note that the default value of allowed_chars does not
have "I" or "O" or letters and digits that look similar -- just to
avoid confusion.
"""
return get_random_string(length, allowed_chars)
def get_by_natural_key(self, username):
return self.get(**{self.model.USERNAME_FIELD: username})
class UserManager(BaseUserManager):
def _create_user(self, username, email, password,
is_staff, is_superuser, **extra_fields):
"""
Creates and saves a User with the given username, email and password.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
user = self.model(username=username, email=email,
is_staff=is_staff, is_active=True,
is_superuser=is_superuser,
date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True, True,
**extra_fields)
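# Illustrative usage sketch (assumes a configured Django project; the
# usernames, addresses and passwords are placeholders):
#
#     user = User.objects.create_user('alice', 'alice@example.com', 'secret')
#     admin = User.objects.create_superuser('root', 'root@example.com', 'secret')
#     admin.is_staff and admin.is_superuser  # -> True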
@python_2_unicode_compatible
class AbstractBaseUser(models.Model):
password = models.CharField(_('password'), max_length=128)
last_login = models.DateTimeField(_('last login'), blank=True, null=True)
is_active = True
REQUIRED_FIELDS = []
class Meta:
abstract = True
def get_username(self):
"Return the identifying username for this User"
return getattr(self, self.USERNAME_FIELD)
def __str__(self):
return self.get_username()
def natural_key(self):
return (self.get_username(),)
def is_anonymous(self):
"""
Always returns False. This is a way of comparing User objects to
anonymous users.
"""
return False
def is_authenticated(self):
"""
Always return True. This is a way to tell if the user has been
authenticated in templates.
"""
return True
def set_password(self, raw_password):
self.password = make_password(raw_password)
def check_password(self, raw_password):
"""
Returns a boolean of whether the raw_password was correct. Handles
hashing formats behind the scenes.
"""
def setter(raw_password):
self.set_password(raw_password)
self.save(update_fields=["password"])
return check_password(raw_password, self.password, setter)
def set_unusable_password(self):
# Sets a value that will never be a valid hash
self.password = make_password(None)
def has_usable_password(self):
return is_password_usable(self.password)
def get_full_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_full_name() method')
def get_short_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_short_name() method.')
def get_session_auth_hash(self):
"""
Returns an HMAC of the password field.
"""
key_salt = "django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash"
return salted_hmac(key_salt, self.password).hexdigest()
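    # Illustrative consequence: because the HMAC is keyed on the password
    # hash, a stored session auth hash stops matching once the password
    # changes (sketch):
    #
    #     old = user.get_session_auth_hash()
    #     user.set_password('new password')
    #     user.get_session_auth_hash() == old  # -> False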
# A few helper functions for common logic between User and AnonymousUser.
def _user_get_all_permissions(user, obj):
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_all_permissions"):
permissions.update(backend.get_all_permissions(user, obj))
return permissions
def _user_has_perm(user, perm, obj):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_perm'):
continue
try:
if backend.has_perm(user, perm, obj):
return True
except PermissionDenied:
return False
return False
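# Sketch of a custom backend relying on the short-circuit behaviour above
# (hypothetical class and BLOCKED_USERNAMES constant, for illustration):
#
#     class BlocklistBackend(object):
#         def has_perm(self, user, perm, obj=None):
#             if user.get_username() in BLOCKED_USERNAMES:
#                 raise PermissionDenied  # denies immediately, no fallback
#             return False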
def _user_has_module_perms(user, app_label):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_module_perms'):
continue
try:
if backend.has_module_perms(user, app_label):
return True
except PermissionDenied:
return False
return False
class PermissionsMixin(models.Model):
"""
A mixin class that adds the fields and methods necessary to support
Django's Group and Permission model using the ModelBackend.
"""
is_superuser = models.BooleanField(_('superuser status'), default=False,
help_text=_('Designates that this user has all permissions without '
'explicitly assigning them.'))
groups = models.ManyToManyField(Group, verbose_name=_('groups'),
blank=True, help_text=_('The groups this user belongs to. A user will '
'get all permissions granted to each of '
'their groups.'),
related_name="user_set", related_query_name="user")
user_permissions = models.ManyToManyField(Permission,
verbose_name=_('user permissions'), blank=True,
help_text=_('Specific permissions for this user.'),
related_name="user_set", related_query_name="user")
class Meta:
abstract = True
def get_group_permissions(self, obj=None):
"""
Returns a list of permission strings that this user has through their
groups. This method queries all available auth backends. If an object
is passed in, only permissions matching this object are returned.
"""
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_group_permissions"):
permissions.update(backend.get_group_permissions(self, obj))
return permissions
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
def has_perms(self, perm_list, obj=None):
"""
Returns True if the user has each of the specified permissions. If
object is passed, it checks if the user has all required perms for this
object.
"""
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, app_label):
"""
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
return _user_has_module_perms(self, app_label)
class AbstractUser(AbstractBaseUser, PermissionsMixin):
"""
An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username, password and email are required. Other fields are optional.
"""
username = models.CharField(_('username'), max_length=30, unique=True,
help_text=_('Required. 30 characters or fewer. Letters, digits and '
'@/./+/-/_ only.'),
validators=[
validators.RegexValidator(r'^[\w.@+-]+$',
_('Enter a valid username. '
'This value may contain only letters, numbers '
'and @/./+/-/_ characters.'), 'invalid'),
],
error_messages={
'unique': _("A user with that username already exists."),
})
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=30, blank=True)
email = models.EmailField(_('email address'), blank=True)
is_staff = models.BooleanField(_('staff status'), default=False,
help_text=_('Designates whether the user can log into this admin '
'site.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.'))
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""
Sends an email to this User.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
class User(AbstractUser):
"""
Users within the Django authentication system are represented by this
model.
Username, password and email are required. Other fields are optional.
"""
class Meta(AbstractUser.Meta):
swappable = 'AUTH_USER_MODEL'
@python_2_unicode_compatible
class AnonymousUser(object):
id = None
pk = None
username = ''
is_staff = False
is_active = False
is_superuser = False
_groups = EmptyManager(Group)
_user_permissions = EmptyManager(Permission)
def __init__(self):
pass
def __str__(self):
return 'AnonymousUser'
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return 1 # instances always return the same hash value
def save(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def delete(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def set_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def check_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def _get_groups(self):
return self._groups
groups = property(_get_groups)
def _get_user_permissions(self):
return self._user_permissions
user_permissions = property(_get_user_permissions)
def get_group_permissions(self, obj=None):
return set()
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj=obj)
def has_perm(self, perm, obj=None):
return _user_has_perm(self, perm, obj=obj)
def has_perms(self, perm_list, obj=None):
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, module):
return _user_has_module_perms(self, module)
def is_anonymous(self):
return True
def is_authenticated(self):
return False
|
pwmarcz/django
|
django/contrib/auth/models.py
|
Python
|
bsd-3-clause
| 17,843 | 0.001121 |
from dbmail.providers.microsoft.base import MPNSBase
class MPNSRaw(MPNSBase):
NOTIFICATION_CLS = 3
TARGET = 'raw'
def payload(self, payload):
return payload
def send(uri, *_, **kwargs):
return MPNSRaw().send(uri, kwargs)
|
LPgenerator/django-db-mailer
|
dbmail/providers/microsoft/raw.py
|
Python
|
gpl-2.0
| 250 | 0 |
#
# Copyright 2010 Free Software Foundation, Inc.
#
# This file was generated by gr_modtool, a tool from the GNU Radio framework
# This file is a part of gr-display
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
"""
Utilities for extracting text from generated classes.
"""
from __future__ import unicode_literals
def is_string(txt):
if isinstance(txt, str):
return True
try:
        # Python 2 fallback: `unicode` does not exist on Python 3, which
        # raises NameError and falls through to the except below.
        if isinstance(txt, unicode):
return True
except NameError:
pass
return False
def description(obj):
if obj is None:
return None
return description_bit(obj).strip()
def description_bit(obj):
if hasattr(obj, 'content'):
contents = [description_bit(item) for item in obj.content]
result = ''.join(contents)
elif hasattr(obj, 'content_'):
contents = [description_bit(item) for item in obj.content_]
result = ''.join(contents)
elif hasattr(obj, 'value'):
result = description_bit(obj.value)
elif is_string(obj):
return obj
else:
raise Exception('Expecting a string or something with content, content_ or value attribute')
    # If this bit is a paragraph then add some line breaks.
if hasattr(obj, 'name') and obj.name == 'para':
result += "\n\n"
return result
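# Illustrative behaviour (hypothetical objects): for a node whose
# ``content`` is the list ['foo', 'bar'], description_bit returns
# 'foobar'; if that node also has name == 'para', the result becomes
# 'foobar\n\n'.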
|
dl1ksv/gr-display
|
docs/doxygen/doxyxml/text.py
|
Python
|
gpl-3.0
| 1,297 | 0.003084 |
from ..utils.api import api_handler
from ..utils.decorators import detect_batch_decorator
@detect_batch_decorator
def sentiment(text, cloud=None, batch=False, api_key=None, version=None, **kwargs):
"""
Given input text, returns a scalar estimate of the sentiment of that text.
Values are roughly in the range 0 to 1 with 0.5 indicating neutral sentiment.
For reference, 0 suggests very negative sentiment and 1 suggests very positive sentiment.
Example usage:
.. code-block:: python
>>> from indicoio import sentiment
>>> text = 'Thanks everyone for the birthday wishes!! It was a crazy few days ><'
>>> sentiment = sentiment(text)
>>> sentiment
0.6946439339979863
:param text: The text to be analyzed.
:type text: str or unicode
:rtype: Float
"""
url_params = {"batch": batch, "api_key": api_key, "version": version}
return api_handler(
text, cloud=cloud, api="sentiment", url_params=url_params, **kwargs
)
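# Illustrative batch usage (requires a valid indico API key; the texts and
# the returned scores below are placeholders):
#
#     scores = sentiment(['Great service!', 'Terrible food.'], batch=True)
#     # -> e.g. [0.93, 0.08], one float per input text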
|
IndicoDataSolutions/IndicoIo-python
|
indicoio/text/sentiment.py
|
Python
|
mit
| 1,009 | 0.003964 |
"""hicks URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
urlpatterns = [
# Toolbar static pages
url( # /
regex=r'^$',
view=TemplateView.as_view(template_name='pages/home.html'),
name='home'),
url( # /about/
regex=r'^about/$',
view=TemplateView.as_view(template_name='pages/about.html'),
name='about'),
# Django Admin
url( # /admin/
regex=r'^admin/',
view=include(admin.site.urls)),
# django-allauth
url( # /accounts/
regex=r'^accounts/',
view=include('allauth.urls')
),
# hicks_language
url( # /lang/
regex=r'^lang/',
view=include('hicks.hicks_language.urls', namespace='hicks_language'),
),
# hicks_glossary
url( # /project/
regex=r'^project/',
view=include('hicks.hicks_glossary.urls', namespace='hicks_glossary'),
),
]
if settings.DEBUG:
urlpatterns += [
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
]
|
sgarrad/hicks
|
config/urls.py
|
Python
|
gpl-3.0
| 1,959 | 0.001021 |
# -*- coding: utf-8 -*-
"""
tests.regression
~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests regressions.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import gc
import sys
import threading
import pytest
from werkzeug.exceptions import NotFound
import flask
_gc_lock = threading.Lock()
class assert_no_leak(object):
def __enter__(self):
gc.disable()
_gc_lock.acquire()
loc = flask._request_ctx_stack._local
# Force Python to track this dictionary at all times.
# This is necessary since Python only starts tracking
# dicts if they contain mutable objects. It's a horrible,
# horrible hack but makes this kinda testable.
loc.__storage__['FOOO'] = [1, 2, 3]
gc.collect()
self.old_objects = len(gc.get_objects())
def __exit__(self, exc_type, exc_value, tb):
gc.collect()
new_objects = len(gc.get_objects())
if new_objects > self.old_objects:
pytest.fail('Example code leaked')
_gc_lock.release()
gc.enable()
def test_memory_consumption():
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('simple_template.html', whiskey=42)
def fire():
with app.test_client() as c:
rv = c.get('/')
assert rv.status_code == 200
assert rv.data == b'<h1>42</h1>'
# Trigger caches
fire()
# This test only works on CPython 2.7.
if sys.version_info >= (2, 7) and \
not hasattr(sys, 'pypy_translation_info'):
with assert_no_leak():
for x in range(10):
fire()
def test_safe_join_toplevel_pardir():
from flask.helpers import safe_join
with pytest.raises(NotFound):
safe_join('/foo', '..')
def test_aborting():
class Foo(Exception):
whatever = 42
app = flask.Flask(__name__)
app.testing = True
@app.errorhandler(Foo)
def handle_foo(e):
return str(e.whatever)
@app.route('/')
def index():
raise flask.abort(flask.redirect(flask.url_for('test')))
@app.route('/test')
def test():
raise Foo()
with app.test_client() as c:
rv = c.get('/')
assert rv.headers['Location'] == 'http://localhost/test'
rv = c.get('/test')
assert rv.data == b'42'
|
dawran6/flask
|
tests/test_regression.py
|
Python
|
bsd-3-clause
| 2,409 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Search Architecture:
- Have a list of accounts
- Create an "overseer" thread
- Search Overseer:
- Tracks incoming new location values
- Tracks "paused state"
- During pause or new location will clears current search queue
- Starts search_worker threads
- Search Worker Threads each:
- Have a unique API login
- Listens to the same Queue for areas to scan
- Can re-login as needed
- Pushes finds to db queue and webhook queue
'''
import logging
import math
import os
import sys
import traceback
import random
import time
import copy
import requests
import schedulers
import terminalsize
import timeit
from datetime import datetime
from threading import Thread, Lock
from Queue import Queue, Empty  # Python 2 module name; this file uses Py2 syntax.
from sets import Set
from collections import deque
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from distutils.version import StrictVersion
from pgoapi.utilities import f2i
from pgoapi import utilities as util
from pgoapi.hash_server import (HashServer, BadHashRequestException,
HashingOfflineException)
from .models import (parse_map, GymDetails, parse_gyms, MainWorker,
WorkerStatus, HashKeys)
from .utils import now, clear_dict_response
from .transform import get_new_coords, jitter_location
from .account import (setup_api, check_login, get_tutorial_state,
complete_tutorial, AccountSet, parse_new_timestamp_ms)
from .captcha import captcha_overseer_thread, handle_captcha
from .proxy import get_new_proxy
log = logging.getLogger(__name__)
loginDelayLock = Lock()
# Thread to handle user input.
def switch_status_printer(display_type, current_page, mainlog,
loglevel, logmode):
# Disable logging of the first handler - the stream handler, and disable
    # its output.
if (logmode != 'logs'):
mainlog.handlers[0].setLevel(logging.CRITICAL)
while True:
# Wait for the user to press a key.
command = raw_input()
if command == '':
# Switch between logging and display.
if display_type[0] != 'logs':
# Disable display, enable on screen logging.
mainlog.handlers[0].setLevel(loglevel)
display_type[0] = 'logs'
# If logs are going slowly, sometimes it's hard to tell you
# switched. Make it clear.
print 'Showing logs...'
elif display_type[0] == 'logs':
# Enable display, disable on screen logging (except for
# critical messages).
mainlog.handlers[0].setLevel(logging.CRITICAL)
display_type[0] = 'workers'
elif command.isdigit():
current_page[0] = int(command)
mainlog.handlers[0].setLevel(logging.CRITICAL)
display_type[0] = 'workers'
elif command.lower() == 'f':
mainlog.handlers[0].setLevel(logging.CRITICAL)
display_type[0] = 'failedaccounts'
elif command.lower() == 'h':
mainlog.handlers[0].setLevel(logging.CRITICAL)
display_type[0] = 'hashstatus'
# Thread to print out the status of each worker.
def status_printer(threadStatus, search_items_queue_array, db_updates_queue,
wh_queue, account_queue, account_failures, account_captchas,
logmode, hash_key, key_scheduler):
if (logmode == 'logs'):
display_type = ['logs']
else:
display_type = ['workers']
current_page = [1]
# Grab current log / level.
mainlog = logging.getLogger()
loglevel = mainlog.getEffectiveLevel()
# Start another thread to get user input.
t = Thread(target=switch_status_printer,
name='switch_status_printer',
args=(display_type, current_page, mainlog, loglevel, logmode))
t.daemon = True
t.start()
while True:
time.sleep(1)
if display_type[0] == 'logs':
# In log display mode, we don't want to show anything.
continue
# Create a list to hold all the status lines, so they can be printed
# all at once to reduce flicker.
status_text = []
if display_type[0] == 'workers':
# Get the terminal size.
width, height = terminalsize.get_terminal_size()
# Queue and overseer take 2 lines. Switch message takes up 2
# lines. Remove an extra 2 for things like screen status lines.
usable_height = height - 6
# Prevent people running terminals only 6 lines high from getting a
# divide by zero.
if usable_height < 1:
usable_height = 1
# Print the queue length.
search_items_queue_size = 0
for i in range(0, len(search_items_queue_array)):
search_items_queue_size += search_items_queue_array[i].qsize()
skip_total = threadStatus['Overseer']['skip_total']
status_text.append((
'Queues: {} search items, {} db updates, {} webhook. ' +
'Total skipped items: {}. Spare accounts available: {}. ' +
'Accounts on hold: {}. Accounts with captcha: {}').format(
search_items_queue_size, db_updates_queue.qsize(),
wh_queue.qsize(), skip_total, account_queue.qsize(),
len(account_failures), len(account_captchas)))
# Print status of overseer.
status_text.append('{} Overseer: {}'.format(
threadStatus['Overseer']['scheduler'],
threadStatus['Overseer']['message']))
# Calculate the total number of pages. Subtracting for the
# overseer.
overseer_line_count = (
threadStatus['Overseer']['message'].count('\n'))
total_pages = math.ceil(
(len(threadStatus) - 1 - overseer_line_count) /
float(usable_height))
# Prevent moving outside the valid range of pages.
if current_page[0] > total_pages:
current_page[0] = total_pages
if current_page[0] < 1:
current_page[0] = 1
# Calculate which lines to print.
start_line = usable_height * (current_page[0] - 1)
end_line = start_line + usable_height
current_line = 1
# Find the longest username and proxy.
userlen = 4
proxylen = 5
for item in threadStatus:
if threadStatus[item]['type'] == 'Worker':
userlen = max(userlen, len(threadStatus[item]['username']))
if 'proxy_display' in threadStatus[item]:
proxylen = max(proxylen, len(
str(threadStatus[item]['proxy_display'])))
# How pretty.
status = '{:10} | {:5} | {:' + str(userlen) + '} | {:' + str(
proxylen) + '} | {:7} | {:6} | {:5} | {:7} | {:8} | {:10}'
# Print the worker status.
status_text.append(status.format('Worker ID', 'Start', 'User',
'Proxy', 'Success', 'Failed',
'Empty', 'Skipped', 'Captchas',
'Message'))
for item in sorted(threadStatus):
if(threadStatus[item]['type'] == 'Worker'):
current_line += 1
# Skip over items that don't belong on this page.
if current_line < start_line:
continue
if current_line > end_line:
break
status_text.append(status.format(
item,
time.strftime('%H:%M',
time.localtime(
threadStatus[item]['starttime'])),
threadStatus[item]['username'],
threadStatus[item]['proxy_display'],
threadStatus[item]['success'],
threadStatus[item]['fail'],
threadStatus[item]['noitems'],
threadStatus[item]['skip'],
threadStatus[item]['captcha'],
threadStatus[item]['message']))
elif display_type[0] == 'failedaccounts':
status_text.append('-----------------------------------------')
status_text.append('Accounts on hold:')
status_text.append('-----------------------------------------')
# Find the longest account name.
userlen = 4
for account in account_failures:
userlen = max(userlen, len(account['account']['username']))
status = '{:' + str(userlen) + '} | {:10} | {:20}'
status_text.append(status.format('User', 'Hold Time', 'Reason'))
for account in account_failures:
status_text.append(status.format(
account['account']['username'],
time.strftime('%H:%M:%S',
time.localtime(account['last_fail_time'])),
account['reason']))
elif display_type[0] == 'hashstatus':
status_text.append(
'----------------------------------------------------------')
status_text.append('Hash key status:')
status_text.append(
'----------------------------------------------------------')
status = '{:21} | {:9} | {:9} | {:9}'
status_text.append(status.format('Key', 'Remaining', 'Maximum',
'Peak'))
if hash_key is not None:
for key in hash_key:
key_instance = key_scheduler.keys[key]
key_text = key
if key_scheduler.current() == key:
key_text += '*'
status_text.append(status.format(
key_text,
key_instance['remaining'],
key_instance['maximum'],
key_instance['peak']))
# Print the status_text for the current screen.
status_text.append((
'Page {}/{}. Page number to switch pages. F to show on hold ' +
'accounts. H to show hash status. <ENTER> alone to switch ' +
'between status and log view').format(current_page[0],
total_pages))
# Clear the screen.
os.system('cls' if os.name == 'nt' else 'clear')
# Print status.
print '\n'.join(status_text)
# The account recycler monitors failed accounts and places them back in the
# account queue 2 hours after they failed.
# This allows accounts that were soft banned to be retried after giving
# them a chance to cool down.
def account_recycler(args, accounts_queue, account_failures):
while True:
# Run once a minute.
time.sleep(60)
log.info('Account recycler running. Checking status of %d accounts.',
len(account_failures))
# Create a new copy of the failure list to search through, so we can
# iterate through it without it changing.
failed_temp = list(account_failures)
# Search through the list for any item that last failed before
# -ari/--account-rest-interval seconds.
ok_time = now() - args.account_rest_interval
for a in failed_temp:
if a['last_fail_time'] <= ok_time:
# Remove the account from the real list, and add to the account
# queue.
log.info('Account {} returning to active duty.'.format(
a['account']['username']))
account_failures.remove(a)
accounts_queue.put(a['account'])
else:
if 'notified' not in a:
log.info((
'Account {} needs to cool off for {} minutes due ' +
'to {}.').format(
a['account']['username'],
round((a['last_fail_time'] - ok_time) / 60, 0),
a['reason']))
a['notified'] = True
def worker_status_db_thread(threads_status, name, db_updates_queue):
while True:
workers = {}
overseer = None
for status in threads_status.values():
if status['type'] == 'Overseer':
overseer = {
'worker_name': name,
'message': status['message'],
'method': status['scheduler'],
'last_modified': datetime.utcnow(),
'accounts_working': status['active_accounts'],
'accounts_captcha': status['accounts_captcha'],
'accounts_failed': status['accounts_failed']
}
elif status['type'] == 'Worker':
workers[status['username']] = WorkerStatus.db_format(
status, name)
if overseer is not None:
db_updates_queue.put((MainWorker, {0: overseer}))
db_updates_queue.put((WorkerStatus, workers))
time.sleep(3)
# The main search loop that keeps an eye on the over all process.
def search_overseer_thread(args, new_location_queue, pause_bit, heartb,
db_updates_queue, wh_queue):
log.info('Search overseer starting...')
search_items_queue_array = []
scheduler_array = []
account_queue = Queue()
account_sets = AccountSet(args.hlvl_kph)
threadStatus = {}
key_scheduler = None
api_check_time = 0
hashkeys_last_upsert = timeit.default_timer()
hashkeys_upsert_min_delay = 5.0
'''
Create a queue of accounts for workers to pull from. When a worker has
failed too many times, it can get a new account from the queue and
reinitialize the API. Workers should return accounts to the queue so
    they can be tried again later, but must wait a bit before doing so
to prevent accounts from being cycled through too quickly.
'''
for i, account in enumerate(args.accounts):
account_queue.put(account)
'''
Create sets of special case accounts.
Currently limited to L30+ IV/CP scanning.
'''
account_sets.create_set('30', args.accounts_L30)
# Debug.
log.info('Added %s accounts to the L30 pool.', len(args.accounts_L30))
# Create a list for failed accounts.
account_failures = []
# Create a double-ended queue for captcha'd accounts
account_captchas = deque()
threadStatus['Overseer'] = {
'message': 'Initializing',
'type': 'Overseer',
'starttime': now(),
'accounts_captcha': 0,
'accounts_failed': 0,
'active_accounts': 0,
'skip_total': 0,
'captcha_total': 0,
'success_total': 0,
'fail_total': 0,
'empty_total': 0,
'scheduler': args.scheduler,
'scheduler_status': {'tth_found': 0}
}
# Create the key scheduler.
if args.hash_key:
log.info('Enabling hashing key scheduler...')
key_scheduler = schedulers.KeyScheduler(args.hash_key,
db_updates_queue)
if(args.print_status):
log.info('Starting status printer thread...')
t = Thread(target=status_printer,
name='status_printer',
args=(threadStatus, search_items_queue_array,
db_updates_queue, wh_queue, account_queue,
account_failures, account_captchas,
args.print_status, args.hash_key,
key_scheduler))
t.daemon = True
t.start()
# Create account recycler thread.
log.info('Starting account recycler thread...')
t = Thread(target=account_recycler, name='account-recycler',
args=(args, account_queue, account_failures))
t.daemon = True
t.start()
# Create captcha overseer thread.
if args.captcha_solving:
log.info('Starting captcha overseer thread...')
t = Thread(target=captcha_overseer_thread, name='captcha-overseer',
args=(args, account_queue, account_captchas, key_scheduler,
wh_queue))
t.daemon = True
t.start()
if args.status_name is not None:
log.info('Starting status database thread...')
t = Thread(target=worker_status_db_thread,
name='status_worker_db',
args=(threadStatus, args.status_name, db_updates_queue))
t.daemon = True
t.start()
# Create specified number of search_worker_thread.
log.info('Starting search worker threads...')
log.info('Configured scheduler is %s.', args.scheduler)
for i in range(0, args.workers):
log.debug('Starting search worker thread %d...', i)
if i == 0 or (args.beehive and i % args.workers_per_hive == 0):
search_items_queue = Queue()
# Create the appropriate type of scheduler to handle the search
# queue.
scheduler = schedulers.SchedulerFactory.get_scheduler(
args.scheduler, [search_items_queue], threadStatus, args)
scheduler_array.append(scheduler)
search_items_queue_array.append(search_items_queue)
# Set proxy for each worker, using round robin.
proxy_display = 'No'
proxy_url = False # Will be assigned inside a search thread.
workerId = 'Worker {:03}'.format(i)
threadStatus[workerId] = {
'type': 'Worker',
'message': 'Creating thread...',
'success': 0,
'fail': 0,
'noitems': 0,
'skip': 0,
'captcha': 0,
'username': '',
'proxy_display': proxy_display,
'proxy_url': proxy_url,
}
t = Thread(target=search_worker_thread,
name='search-worker-{}'.format(i),
args=(args, account_queue, account_sets,
account_failures, account_captchas,
search_items_queue, pause_bit,
threadStatus[workerId], db_updates_queue,
wh_queue, scheduler, key_scheduler))
t.daemon = True
t.start()
if not args.no_version_check:
log.info('Enabling new API force Watchdog.')
# A place to track the current location.
current_location = False
# Keep track of the last status for accounts so we can calculate
    # what has changed since the last check.
last_account_status = {}
stats_timer = 0
# The real work starts here but will halt on pause_bit.set().
while True:
if (args.hash_key is not None and
(hashkeys_last_upsert + hashkeys_upsert_min_delay)
<= timeit.default_timer()):
upsertKeys(args.hash_key, key_scheduler, db_updates_queue)
hashkeys_last_upsert = timeit.default_timer()
odt_triggered = (args.on_demand_timeout > 0 and
(now() - args.on_demand_timeout) > heartb[0])
if odt_triggered:
pause_bit.set()
log.info('Searching paused due to inactivity...')
# Wait here while scanning is paused.
while pause_bit.is_set():
for i in range(0, len(scheduler_array)):
scheduler_array[i].scanning_paused()
# API Watchdog - Continue to check API version.
if not args.no_version_check and not odt_triggered:
api_check_time = check_forced_version(
args, api_check_time, pause_bit)
time.sleep(1)
# If a new location has been passed to us, get the most recent one.
if not new_location_queue.empty():
log.info('New location caught, moving search grid.')
try:
while True:
current_location = new_location_queue.get_nowait()
except Empty:
pass
step_distance = 0.45 if args.no_pokemon else 0.07
locations = generate_hive_locations(
current_location, step_distance,
args.step_limit, len(scheduler_array))
for i in range(0, len(scheduler_array)):
scheduler_array[i].location_changed(locations[i],
db_updates_queue)
# If there are no search_items_queue either the loop has finished or
# it's been cleared above. Either way, time to fill it back up.
for i in range(0, len(scheduler_array)):
if scheduler_array[i].time_to_refresh_queue():
threadStatus['Overseer']['message'] = (
'Search queue {} empty, scheduling ' +
'more items to scan.').format(i)
log.debug(
'Search queue %d empty, scheduling more items to scan.', i)
try: # Can't have the scheduler die because of a DB deadlock.
scheduler_array[i].schedule()
except Exception as e:
log.error(
'Schedule creation had an Exception: {}.'.format(
repr(e)))
traceback.print_exc(file=sys.stdout)
time.sleep(10)
else:
threadStatus['Overseer']['message'] = scheduler_array[
i].get_overseer_message()
        # Update the total stats and add that info to the message.
        # Wrapped in an exception handler because dict items may change
        # while we iterate.
try:
update_total_stats(threadStatus, last_account_status)
except Exception as e:
log.error(
'Update total stats had an Exception: {}.'.format(
repr(e)))
traceback.print_exc(file=sys.stdout)
time.sleep(10)
threadStatus['Overseer']['message'] += '\n' + get_stats_message(
threadStatus)
        # If enabled, write statistics to the log on a periodic basis.
if args.stats_log_timer:
stats_timer += 1
if stats_timer == args.stats_log_timer:
log.info(get_stats_message(threadStatus))
stats_timer = 0
# Update Overseer statistics
threadStatus['Overseer']['accounts_failed'] = len(account_failures)
threadStatus['Overseer']['accounts_captcha'] = len(account_captchas)
# Send webhook updates when scheduler status changes.
if args.webhook_scheduler_updates:
wh_status_update(args, threadStatus['Overseer'], wh_queue,
scheduler_array[0])
# API Watchdog - Check if Niantic forces a new API.
if not args.no_version_check and not odt_triggered:
api_check_time = check_forced_version(
args, api_check_time, pause_bit)
# Now we just give a little pause here.
time.sleep(1)
def get_scheduler_tth_found_pct(scheduler):
tth_found_pct = getattr(scheduler, 'tth_found', 0)
if tth_found_pct > 0:
# Avoid division by zero. Keep 0.0 default for consistency.
active_sp = max(getattr(scheduler, 'active_sp', 0.0), 1.0)
tth_found_pct = tth_found_pct * 100.0 / float(active_sp)
return tth_found_pct
def wh_status_update(args, status, wh_queue, scheduler):
scheduler_name = status['scheduler']
if args.speed_scan:
tth_found = get_scheduler_tth_found_pct(scheduler)
spawns_found = getattr(scheduler, 'spawns_found', 0)
if (tth_found - status['scheduler_status']['tth_found']) > 0.01:
log.debug('Scheduler update is due, sending webhook message.')
wh_queue.put(('scheduler', {'name': scheduler_name,
'instance': args.status_name,
'tth_found': tth_found,
'spawns_found': spawns_found}))
status['scheduler_status']['tth_found'] = tth_found
def get_stats_message(threadStatus):
overseer = threadStatus['Overseer']
starttime = overseer['starttime']
elapsed = now() - starttime
    # To prevent division-by-zero errors, set elapsed to
    # 1 second when needed (elapsed is measured in seconds).
if elapsed == 0:
elapsed = 1
sph = overseer['success_total'] * 3600.0 / elapsed
fph = overseer['fail_total'] * 3600.0 / elapsed
eph = overseer['empty_total'] * 3600.0 / elapsed
skph = overseer['skip_total'] * 3600.0 / elapsed
cph = overseer['captcha_total'] * 3600.0 / elapsed
ccost = cph * 0.00299
cmonth = ccost * 730
message = ('Total active: {} | Success: {} ({:.1f}/hr) | ' +
'Fails: {} ({:.1f}/hr) | Empties: {} ({:.1f}/hr) | ' +
'Skips {} ({:.1f}/hr) | ' +
'Captchas: {} ({:.1f}/hr)|${:.5f}/hr|${:.3f}/mo').format(
overseer['active_accounts'],
overseer['success_total'], sph,
overseer['fail_total'], fph,
overseer['empty_total'], eph,
overseer['skip_total'], skph,
overseer['captcha_total'], cph,
ccost, cmonth)
return message
def update_total_stats(threadStatus, last_account_status):
overseer = threadStatus['Overseer']
# Calculate totals.
usercount = 0
current_accounts = Set()
for tstatus in threadStatus.itervalues():
if tstatus.get('type', '') == 'Worker':
usercount += 1
username = tstatus.get('username', '')
current_accounts.add(username)
last_status = last_account_status.get(username, {})
overseer['skip_total'] += stat_delta(tstatus, last_status, 'skip')
overseer[
'captcha_total'] += stat_delta(tstatus, last_status, 'captcha')
overseer[
'empty_total'] += stat_delta(tstatus, last_status, 'noitems')
overseer['fail_total'] += stat_delta(tstatus, last_status, 'fail')
overseer[
'success_total'] += stat_delta(tstatus, last_status, 'success')
last_account_status[username] = copy.deepcopy(tstatus)
overseer['active_accounts'] = usercount
# Remove last status for accounts that workers
# are not using anymore
for username in last_account_status.keys():
if username not in current_accounts:
del last_account_status[username]
# Generates the list of locations to scan.
def generate_hive_locations(current_location, step_distance,
step_limit, hive_count):
NORTH = 0
EAST = 90
SOUTH = 180
WEST = 270
xdist = math.sqrt(3) * step_distance # Distance between column centers.
ydist = 3 * (step_distance / 2) # Distance between row centers.
results = []
results.append((current_location[0], current_location[1], 0))
loc = current_location
ring = 1
while len(results) < hive_count:
loc = get_new_coords(loc, ydist * (step_limit - 1), NORTH)
loc = get_new_coords(loc, xdist * (1.5 * step_limit - 0.5), EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist * step_limit, NORTH)
loc = get_new_coords(loc, xdist * (1.5 * step_limit - 1), WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist * (step_limit - 1), SOUTH)
loc = get_new_coords(loc, xdist * (1.5 * step_limit - 0.5), WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist * (2 * step_limit - 1), SOUTH)
loc = get_new_coords(loc, xdist * 0.5, WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist * (step_limit), SOUTH)
loc = get_new_coords(loc, xdist * (1.5 * step_limit - 1), EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist * (step_limit - 1), NORTH)
loc = get_new_coords(loc, xdist * (1.5 * step_limit - 0.5), EAST)
results.append((loc[0], loc[1], 0))
# Back to start.
for i in range(ring - 1):
loc = get_new_coords(loc, ydist * (2 * step_limit - 1), NORTH)
loc = get_new_coords(loc, xdist * 0.5, EAST)
results.append((loc[0], loc[1], 0))
loc = get_new_coords(loc, ydist * (2 * step_limit - 1), NORTH)
loc = get_new_coords(loc, xdist * 0.5, EAST)
ring += 1
return results
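# Illustrative call (made-up coordinates): hives centered on
# (40.758, -73.985) with the default pokemon step distance of 0.07 km:
#
#     generate_hive_locations((40.758, -73.985), 0.07, 5, 3)
#
# The first tuple is always the unchanged center; complete hexagonal rings
# of 6, 12, 18, ... hives are then added until at least hive_count
# locations exist, so a request for 3 hives actually returns the center
# plus a full first ring of 6.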
def search_worker_thread(args, account_queue, account_sets,
account_failures, account_captchas,
search_items_queue, pause_bit, status, dbq, whq,
scheduler, key_scheduler):
log.debug('Search worker thread starting...')
# The outer forever loop restarts only when the inner one is
# intentionally exited - which should only be done when the worker
# is failing too often, and probably banned.
# This reinitializes the API and grabs a new account from the queue.
while True:
try:
# Force storing of previous worker info to keep consistency.
if 'starttime' in status:
dbq.put((WorkerStatus, {0: WorkerStatus.db_format(status)}))
status['starttime'] = now()
            # Track whether this is the account's first login this loop.
first_login = True
            # Wait until the scheduler has valid locations ready.
while not scheduler.ready:
time.sleep(1)
status['message'] = ('Waiting to get new account from the'
+ ' queue...')
log.info(status['message'])
# Get an account.
account = account_queue.get()
status.update(WorkerStatus.get_worker(
account['username'], scheduler.scan_location))
status['message'] = 'Switching to account {}.'.format(
account['username'])
log.info(status['message'])
# New lease of life right here.
status['fail'] = 0
status['success'] = 0
status['noitems'] = 0
status['skip'] = 0
status['captcha'] = 0
stagger_thread(args)
# Sleep when consecutive_fails reaches max_failures, overall fails
# for stat purposes.
consecutive_fails = 0
# Sleep when consecutive_noitems reaches max_empty, overall noitems
# for stat purposes.
consecutive_noitems = 0
api = setup_api(args, status, account)
# The forever loop for the searches.
while True:
while pause_bit.is_set():
status['message'] = 'Scanning paused.'
time.sleep(2)
# If this account has been messing up too hard, let it rest.
if ((args.max_failures > 0) and
(consecutive_fails >= args.max_failures)):
status['message'] = (
'Account {} failed more than {} scans; possibly bad ' +
'account. Switching accounts...').format(
account['username'],
args.max_failures)
log.warning(status['message'])
account_failures.append({'account': account,
'last_fail_time': now(),
'reason': 'failures'})
# Exit this loop to get a new account and have the API
# recreated.
break
# If this account has not found anything for too long, let it
# rest.
if ((args.max_empty > 0) and
(consecutive_noitems >= args.max_empty)):
status['message'] = (
'Account {} returned empty scan for more than {} ' +
                        'scans; possibly the IP is banned. Switching ' +
'accounts...').format(account['username'],
args.max_empty)
log.warning(status['message'])
account_failures.append({'account': account,
'last_fail_time': now(),
'reason': 'empty scans'})
# Exit this loop to get a new account and have the API
# recreated.
break
# If used proxy disappears from "live list" after background
# checking - switch account but do not freeze it (it's not an
# account failure).
if args.proxy and status['proxy_url'] not in args.proxy:
status['message'] = (
                        'Account {} proxy {} is no longer in the live ' +
                        'list. Switching accounts...').format(
account['username'], status['proxy_url'])
log.warning(status['message'])
# Experimental, nobody did this before.
account_queue.put(account)
# Exit this loop to get a new account and have the API
# recreated.
break
# If this account has been running too long, let it rest.
if (args.account_search_interval is not None):
if (status['starttime'] <=
(now() - args.account_search_interval)):
status['message'] = (
'Account {} is being rotated out to rest.'.format(
account['username']))
log.info(status['message'])
account_failures.append({'account': account,
'last_fail_time': now(),
'reason': 'rest interval'})
break
# Grab the next thing to search (when available).
step, step_location, appears, leaves, messages, wait = (
scheduler.next_item(status))
status['message'] = messages['wait']
# The next_item will return the value telling us how long
                # to sleep, so the status can be updated in the meantime.
time.sleep(wait)
# Using step as a flag for no valid next location returned.
if step == -1:
time.sleep(scheduler.delay(status['last_scan_date']))
continue
# Too soon?
# Adding a 10 second grace period.
if appears and now() < appears + 10:
first_loop = True
paused = False
while now() < appears + 10:
if pause_bit.is_set():
paused = True
break # Why can't python just have `break 2`...
status['message'] = messages['early']
if first_loop:
log.info(status['message'])
first_loop = False
time.sleep(1)
if paused:
scheduler.task_done(status)
continue
# Too late?
if leaves and now() > (leaves - args.min_seconds_left):
scheduler.task_done(status)
status['skip'] += 1
status['message'] = messages['late']
log.info(status['message'])
# No sleep here; we've not done anything worth sleeping
# for. Plus we clearly need to catch up!
continue
status['message'] = messages['search']
log.debug(status['message'])
# Let the api know where we intend to be for this loop.
# Doing this before check_login so it does not also have
# to be done when the auth token is refreshed.
api.set_position(*step_location)
if args.hash_key:
key = key_scheduler.next()
log.debug('Using key {} for this scan.'.format(key))
api.activate_hash_server(key)
# Ok, let's get started -- check our login status.
status['message'] = 'Logging in...'
check_login(args, account, api, step_location,
status['proxy_url'])
# Only run this when it's the account's first login, after
# check_login().
if first_login:
first_login = False
# Check tutorial completion.
if args.complete_tutorial:
tutorial_state = get_tutorial_state(args, api, account)
if not all(x in tutorial_state
for x in (0, 1, 3, 4, 7)):
log.info('Completing tutorial steps for %s.',
account['username'])
complete_tutorial(args, api, account,
tutorial_state)
else:
log.info('Account %s already completed tutorial.',
account['username'])
# Putting this message after the check_login so the messages
# aren't out of order.
status['message'] = messages['search']
log.info(status['message'])
# Make the actual request.
scan_date = datetime.utcnow()
response_dict = map_request(api, account, step_location,
args.no_jitter)
status['last_scan_date'] = datetime.utcnow()
# Record the time and the place that the worker made the
# request.
status['latitude'] = step_location[0]
status['longitude'] = step_location[1]
dbq.put((WorkerStatus, {0: WorkerStatus.db_format(status)}))
# Nothing back. Mark it up, sleep, carry on.
if not response_dict:
status['fail'] += 1
consecutive_fails += 1
status['message'] = messages['invalid']
log.error(status['message'])
time.sleep(scheduler.delay(status['last_scan_date']))
continue
# Got the response, check for captcha, parse it out, then send
# todo's to db/wh queues.
try:
captcha = handle_captcha(args, status, api, account,
account_failures,
account_captchas, whq,
response_dict, step_location)
if captcha is not None and captcha:
# Make another request for the same location
# since the previous one was captcha'd.
scan_date = datetime.utcnow()
response_dict = map_request(api, account,
step_location,
args.no_jitter)
elif captcha is not None:
account_queue.task_done()
time.sleep(3)
break
parsed = parse_map(args, response_dict, step_location,
dbq, whq, key_scheduler, api, status,
scan_date, account, account_sets)
del response_dict
scheduler.task_done(status, parsed)
if parsed['count'] > 0:
status['success'] += 1
consecutive_noitems = 0
else:
status['noitems'] += 1
consecutive_noitems += 1
consecutive_fails = 0
status['message'] = ('Search at {:6f},{:6f} completed ' +
'with {} finds.').format(
step_location[0], step_location[1],
parsed['count'])
log.debug(status['message'])
except Exception as e:
parsed = False
status['fail'] += 1
consecutive_fails += 1
                    # Deliberately leave the consecutive_noitems counter
                    # untouched when an error occurs.
status['message'] = ('Map parse failed at {:6f},{:6f}, ' +
'abandoning location. {} may be ' +
'banned.').format(step_location[0],
step_location[1],
account['username'])
log.exception('{}. Exception message: {}'.format(
status['message'], repr(e)))
if response_dict is not None:
del response_dict
# Get detailed information about gyms.
if args.gym_info and parsed:
# Build a list of gyms to update.
gyms_to_update = {}
for gym in parsed['gyms'].values():
# Can only get gym details within 1km of our position.
distance = calc_distance(
step_location, [gym['latitude'], gym['longitude']])
if distance < 1.0:
# Check if we already have details on this gym.
# Get them if not.
try:
record = GymDetails.get(gym_id=gym['gym_id'])
except GymDetails.DoesNotExist as e:
gyms_to_update[gym['gym_id']] = gym
continue
# If we have a record of this gym already, check if
# the gym has been updated since our last update.
if record.last_scanned < gym['last_modified']:
gyms_to_update[gym['gym_id']] = gym
continue
else:
log.debug(
('Skipping update of gym @ %f/%f, ' +
'up to date.'),
gym['latitude'], gym['longitude'])
continue
else:
log.debug(
('Skipping update of gym @ %f/%f, too far ' +
'away from our location at %f/%f (%fkm).'),
gym['latitude'], gym['longitude'],
step_location[0], step_location[1], distance)
if len(gyms_to_update):
gym_responses = {}
current_gym = 1
status['message'] = (
'Updating {} gyms for location {},{}...').format(
len(gyms_to_update), step_location[0],
step_location[1])
log.debug(status['message'])
for gym in gyms_to_update.values():
status['message'] = (
'Getting details for gym {} of {} for ' +
'location {:6f},{:6f}...').format(
current_gym, len(gyms_to_update),
step_location[0], step_location[1])
time.sleep(random.random() + 2)
response = gym_request(api, account, step_location,
gym, args.api_version)
# Make sure the gym was in range. (Sometimes the
# API gets cranky about gyms that are ALMOST 1km
# away.)
if response:
if response['responses'][
'GYM_GET_INFO']['result'] == 2:
log.warning(
('Gym @ %f/%f is out of range (%dkm),'
+ ' skipping.'),
gym['latitude'], gym['longitude'],
distance)
else:
gym_responses[gym['gym_id']] = response[
'responses']['GYM_GET_INFO']
del response
# Increment which gym we're on for status messages.
current_gym += 1
status['message'] = (
'Processing details of {} gyms for location ' +
'{:6f},{:6f}...').format(len(gyms_to_update),
step_location[0],
step_location[1])
log.debug(status['message'])
if gym_responses:
parse_gyms(args, gym_responses,
whq, dbq)
del gym_responses
# Update hashing key stats in the database based on the values
# reported back by the hashing server.
if args.hash_key:
key = HashServer.status.get('token', None)
key_instance = key_scheduler.keys[key]
key_instance['remaining'] = HashServer.status.get(
'remaining', 0)
key_instance['maximum'] = (
HashServer.status.get('maximum', 0))
usage = (
key_instance['maximum'] -
key_instance['remaining'])
if key_instance['peak'] < usage:
key_instance['peak'] = usage
if key_instance['expires'] is None:
expires = HashServer.status.get(
'expiration', None)
if expires is not None:
expires = datetime.utcfromtimestamp(expires)
key_instance['expires'] = expires
key_instance['last_updated'] = datetime.utcnow()
log.debug('Hash key %s has %s/%s RPM left.', key,
key_instance['remaining'],
key_instance['maximum'])
# Delay the desired amount after "scan" completion.
delay = scheduler.delay(status['last_scan_date'])
status['message'] += ' Sleeping {}s until {}.'.format(
delay,
time.strftime(
'%H:%M:%S',
time.localtime(time.time() + args.scan_delay)))
log.info(status['message'])
time.sleep(delay)
# Catch any process exceptions, log them, and continue the thread.
except Exception as e:
log.error((
'Exception in search_worker under account {} Exception ' +
'message: {}.').format(account['username'], repr(e)))
status['message'] = (
'Exception in search_worker using account {}. Restarting ' +
'with fresh account. See logs for details.').format(
account['username'])
traceback.print_exc(file=sys.stdout)
account_failures.append({'account': account,
'last_fail_time': now(),
'reason': 'exception'})
time.sleep(args.scan_delay)
def upsertKeys(keys, key_scheduler, db_updates_queue):
    # Prepare hashing keys to be sent to the db, but only
    # send the latest 'peak' value per key.
hashkeys = {}
for key in keys:
key_instance = key_scheduler.keys[key]
hashkeys[key] = key_instance
hashkeys[key]['key'] = key
hashkeys[key]['peak'] = max(key_instance['peak'],
HashKeys.getStoredPeak(key))
db_updates_queue.put((HashKeys, hashkeys))
def map_request(api, account, position, no_jitter=False):
# Create scan_location to send to the api based off of position, because
# tuples aren't mutable.
if no_jitter:
# Just use the original coordinates.
scan_location = position
else:
# Jitter it, just a little bit.
scan_location = jitter_location(position)
log.debug('Jittered to: %f/%f/%f',
scan_location[0], scan_location[1], scan_location[2])
try:
cell_ids = util.get_cell_ids(scan_location[0], scan_location[1])
timestamps = [0, ] * len(cell_ids)
req = api.create_request()
req.get_map_objects(latitude=f2i(scan_location[0]),
longitude=f2i(scan_location[1]),
since_timestamp_ms=timestamps,
cell_id=cell_ids)
req.check_challenge()
req.get_hatched_eggs()
req.get_inventory(last_timestamp_ms=account['last_timestamp_ms'])
req.check_awarded_badges()
req.get_buddy_walked()
req.get_inbox(is_history=True)
response = req.call()
response = clear_dict_response(response, True)
parse_new_timestamp_ms(account, response)
return response
except HashingOfflineException as e:
log.error('Hashing server is unreachable, it might be offline.')
except BadHashRequestException as e:
log.error('Invalid or expired hashing key: %s.',
api._hash_server_token)
except Exception as e:
log.exception('Exception while downloading map: %s', repr(e))
return False
def gym_request(api, account, position, gym, api_version):
try:
log.info('Getting details for gym @ %f/%f (%fkm away)',
gym['latitude'], gym['longitude'],
calc_distance(position, [gym['latitude'], gym['longitude']]))
req = api.create_request()
req.gym_get_info(
gym_id=gym['gym_id'],
player_lat_degrees=f2i(position[0]),
player_lng_degrees=f2i(position[1]),
gym_lat_degrees=gym['latitude'],
gym_lng_degrees=gym['longitude'])
req.check_challenge()
req.get_hatched_eggs()
req.get_inventory(last_timestamp_ms=account['last_timestamp_ms'])
req.check_awarded_badges()
req.get_buddy_walked()
req.get_inbox(is_history=True)
response = req.call()
parse_new_timestamp_ms(account, response)
response = clear_dict_response(response)
return response
except Exception as e:
log.exception('Exception while downloading gym details: %s.', repr(e))
return False
def calc_distance(pos1, pos2):
R = 6378.1 # KM radius of the earth.
dLat = math.radians(pos1[0] - pos2[0])
dLon = math.radians(pos1[1] - pos2[1])
a = math.sin(dLat / 2) * math.sin(dLat / 2) + \
math.cos(math.radians(pos1[0])) * math.cos(math.radians(pos2[0])) * \
math.sin(dLon / 2) * math.sin(dLon / 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
d = R * c
return d
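# Quick sanity check (approximate): the haversine distance from central
# London to central Paris is roughly 344 km:
#
#     calc_distance((51.5074, -0.1278), (48.8566, 2.3522))  # ~344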
# Delay each thread's start time so that logins are staggered.
def stagger_thread(args):
loginDelayLock.acquire()
delay = args.login_delay + ((random.random() - .5) / 2)
log.debug('Delaying thread startup for %.2f seconds', delay)
time.sleep(delay)
loginDelayLock.release()
# The delta from last stat to current stat
def stat_delta(current_status, last_status, stat_name):
return current_status.get(stat_name, 0) - last_status.get(stat_name, 0)
def check_forced_version(args, api_check_time, pause_bit):
if int(time.time()) > api_check_time:
log.debug("Checking forced API version.")
api_check_time = int(time.time()) + args.version_check_interval
forced_api = get_api_version(args)
if not forced_api:
# Couldn't retrieve API version. Pause scanning.
pause_bit.set()
log.warning('Forced API check got no or invalid response. ' +
'Possible bad proxy.')
log.warning('Scanner paused due to failed API check.')
return api_check_time
# Got a response let's compare version numbers.
try:
if StrictVersion(args.api_version) < StrictVersion(forced_api):
# Installed API version is lower. Pause scanning.
pause_bit.set()
log.warning('Started with API: %s, ' +
'Niantic forced to API: %s',
args.api_version,
forced_api)
log.warning('Scanner paused due to forced Niantic API update.')
else:
# API check was successful and
            # the installed API version is newer than or equal to the forced API.
# Continue scanning.
log.debug("API check was successful. Continue scanning.")
pause_bit.clear()
except ValueError as e:
# Unknown version format. Pause scanning as well.
pause_bit.set()
log.warning('Niantic forced unknown API version format: %s.',
forced_api)
log.warning('Scanner paused due to unknown API version format.')
except Exception as e:
# Something else happened. Pause scanning as well.
pause_bit.set()
log.warning('Unknown error on API version comparison: %s.',
repr(e))
log.warning('Scanner paused due to unknown API check error.')
return api_check_time
def get_api_version(args):
"""Retrieve forced API version by Niantic
Args:
args: Command line arguments
Returns:
API version string. False if request failed.
"""
proxies = {}
if args.proxy:
num, proxy = get_new_proxy(args)
proxies = {
'http': proxy,
'https': proxy
}
try:
s = requests.Session()
s.mount('https://',
HTTPAdapter(max_retries=Retry(total=3,
backoff_factor=0.5,
status_forcelist=[500, 502,
503, 504])))
r = s.get(
'https://pgorelease.nianticlabs.com/plfe/version',
proxies=proxies,
verify=False,
timeout=5)
return r.text[2:] if r.status_code == requests.codes.ok else False
except Exception as e:
log.warning('error on API check: %s', repr(e))
return False
|
mpw1337/RocketMap
|
pogom/search.py
|
Python
|
agpl-3.0
| 56,889 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2012-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import operator
import os
import sys
OUT_CPP = "qt/bitcoinstrings.cpp"
EMPTY = ['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
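# Example of the 'po' shape handled above (illustrative input):
#
#     msgid "Hello"
#     msgstr ""
#
# parses to [(['"Hello"'], ['""'])] -- the surrounding quotes are kept,
# which is why EMPTY above is defined as ['""'].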
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT = os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
print(
'Cannot extract strings: xgettext utility is not installed or not configured.',
file=sys.stderr)
print('Please install package "gettext" and re-run \'./configure\'.',
file=sys.stderr)
sys.exit(1)
child = Popen([XGETTEXT, '--output=-', '-n',
'--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w', encoding="utf8")
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(
os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", {}),\n'.format('\n'.join(msgid)))
f.write('};\n')
f.close()
|
ftrader-bitcoinabc/bitcoin-abc
|
share/qt/extract_strings_qt.py
|
Python
|
mit
| 2,747 | 0.001456 |
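What parse_po returns is easiest to see on a tiny hand-written snippet in
xgettext's po format (the strings below are invented):

sample = '\n'.join([
    'msgid "Hello"',
    'msgstr "Bonjour"',
    'msgid "multi "',
    '"line"',
    'msgstr ""',
])
# Each entry keeps the raw quoted fragments, continuation lines included.
assert parse_po(sample) == [
    (['"Hello"'], ['"Bonjour"']),
    (['"multi "', '"line"'], ['""']),
]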
#!/usr/bin/env python
# coding: utf-8
# Binary with Spots
# ============================
#
# Setup
# -----------------------------
# Let's first make sure we have the latest version of PHOEBE 2.2 installed. (You can comment out this line if you don't use pip for your installation or don't want to update to the latest release).
# In[ ]:
get_ipython().system('pip install -I "phoebe>=2.2,<2.3"')
# As always, let's do imports and initialize a logger and a new bundle. See [Building a System](../tutorials/building_a_system.ipynb) for more details.
# In[1]:
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
import phoebe
from phoebe import u # units
import numpy as np
import matplotlib.pyplot as plt
logger = phoebe.logger()
b = phoebe.default_binary()
# Model without Spots
# --------------------------
# In[3]:
b.add_dataset('lc', times=phoebe.linspace(0,1,101))
# In[4]:
b.run_compute(irrad_method='none', model='no_spot')
# Adding Spots
# ---------------------
# Let's add a spot to the primary component in our binary.
#
# The 'colat' parameter defines the colatitude on the star measured from its North (spin) Pole. The 'long' parameter measures the longitude of the spot - with longitude = 0 being defined as pointing towards the other star at t0. See the [spots tutorial](../tutorials/spots.ipynb) for more details.
# In[5]:
b.add_feature('spot', component='primary', feature='spot01', relteff=0.9, radius=30, colat=45, long=90)
# In[6]:
b.run_compute(irrad_method='none', model='with_spot')
# Comparing Light Curves
# ------------------------------
# In[7]:
afig, mplfig = b.plot(show=True, legend=True)
# In[ ]:
|
phoebe-project/phoebe2-docs
|
2.2/examples/binary_spots.py
|
Python
|
gpl-3.0
| 1,686 | 0.008304 |
#! /usr/bin/env python
# coding=utf-8
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import h5py
import numpy as np
from ludwig.constants import PREPROCESSING
from ludwig.data.batcher.random_access import RandomAccessBatcher
from ludwig.data.dataset.base import Dataset
from ludwig.data.sampler import DistributedSampler
from ludwig.utils.data_utils import to_numpy_dataset
class PandasDataset(Dataset):
def __init__(self, dataset, features, data_hdf5_fp):
self.features = features
self.data_hdf5_fp = data_hdf5_fp
self.size = len(dataset)
self.dataset = to_numpy_dataset(dataset)
def get(self, proc_column, idx=None):
if idx is None:
idx = range(self.size)
if (self.data_hdf5_fp is None or
PREPROCESSING not in self.features[proc_column] or
'in_memory' not in self.features[proc_column][
'preprocessing']):
return self.dataset[proc_column][idx]
if self.features[proc_column][PREPROCESSING]['in_memory']:
return self.dataset[proc_column][idx]
sub_batch = self.dataset[proc_column][idx]
indices = np.empty((3, len(sub_batch)), dtype=np.int64)
indices[0, :] = sub_batch
indices[1, :] = np.arange(len(sub_batch))
indices = indices[:, np.argsort(indices[0])]
with h5py.File(self.data_hdf5_fp, 'r') as h5_file:
im_data = h5_file[proc_column + '_data'][indices[0, :], :, :]
indices[2, :] = np.arange(len(sub_batch))
indices = indices[:, np.argsort(indices[1])]
return im_data[indices[2, :]]
def get_dataset(self):
return self.dataset
def __len__(self):
return self.size
def initialize_batcher(self, batch_size=128,
should_shuffle=True,
seed=0,
ignore_last=False,
horovod=None):
sampler = DistributedSampler(len(self),
shuffle=should_shuffle,
seed=seed,
horovod=horovod)
batcher = RandomAccessBatcher(self,
sampler,
batch_size=batch_size,
ignore_last=ignore_last)
return batcher
|
uber/ludwig
|
ludwig/data/dataset/pandas.py
|
Python
|
apache-2.0
| 3,028 | 0 |
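The index juggling in PandasDataset.get exists because h5py fancy indexing
requires monotonically increasing indices: the requested row ids are read in
sorted order and then permuted back. The same trick in isolation, with a NumPy
array standing in for the HDF5 dataset:

import numpy as np

requested = np.array([7, 2, 9, 4])     # row ids in the order the batch wants them
order = np.argsort(requested)          # ascending read order that HDF5 accepts
data = np.arange(100).reshape(10, 10)  # stand-in for h5_file[proc_column + '_data']
read = data[requested[order]]          # rows arrive sorted: 2, 4, 7, 9
restore = np.argsort(order)            # permutation that undoes the sort
assert (read[restore] == data[requested]).all()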
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('comercial', '0046_auto_20151004_1747'),
]
operations = [
migrations.AddField(
model_name='itemgrupodocumento',
name='titulo_centralizado',
field=models.BooleanField(default=False),
),
]
|
dudanogueira/microerp
|
microerp/comercial/migrations/0047_itemgrupodocumento_titulo_centralizado.py
|
Python
|
lgpl-3.0
| 430 | 0 |
import file
ciphertext = str(file.openAsAscii("cipher.txt"),"utf-8").replace(" ","")
matches = 0
diffs = set()
N=5
print(len(ciphertext)/N)
for wordlen in range(N*5,N*8,N):
print("Wordlen: {0}".format(wordlen))
for wordIndex in range(len(ciphertext)-wordlen):
for word2Index in range(wordIndex+wordlen,len(ciphertext) - wordlen,N):
word,word2 = ciphertext[wordIndex:wordIndex+wordlen:N],ciphertext[word2Index:word2Index+wordlen:N]
if word == word2:
print(word,word2,wordIndex,word2Index)
matches += 1
diffs.add(word2Index - wordIndex)
print(diffs)
|
tim-clifford/py-cipher
|
src/test1.py
|
Python
|
mit
| 569 | 0.038664 |
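The diffs set collected above is the raw material for a Kasiski examination:
in a periodic (Vigenere-style) cipher, identical plaintext fragments that line
up with the key the same way repeat at multiples of the key length, so the key
length divides the gcd of the spacings. A small sketch with made-up spacings:

from functools import reduce
from math import gcd

spacings = {70, 105, 175}           # hypothetical distances between repeats
assert reduce(gcd, spacings) == 35  # the key length divides 35, i.e. 5, 7 or 35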
#!/usr/bin/env python2.7
import sys, getopt
if len(sys.argv) < 3:
print """
Usage: compare_blast.py <input1.b6> <input2.b6>
"""
blast1= sys.argv[1]
blast2= sys.argv[2]
dict1= {}
for line in open(blast1):
line= line.strip().split("\t")
query= line[0].split()[0]
dict1[query]= line
dict2= {}
for line in open(blast2):
line= line.strip().split("\t")
query= line[0].split()[0]
dict2[query]= line
blast1better= blast1.split(".b6")[0] + "_good.b6"
blast1worse= blast1.split(".b6")[0] + "_bad.b6"
blast2better= blast2.split(".b6")[0] + "_good.b6"
blast2worse= blast2.split(".b6")[0] + "_bad.b6"
b1b_handle= open(blast1better, 'w')
b1w_handle= open(blast1worse, 'w')
b2b_handle= open(blast2better, 'w')
b2w_handle= open(blast2worse, 'w')
for query in dict1:
line1= dict1[query]
if query not in dict2:
b1b_handle.write("\t".join(line1)+"\n")
else:
line2= dict2[query]
if float(line1[-1]) > float(line2[-1]):
b1b_handle.write("\t".join(line1)+"\n")
else:
b1w_handle.write("\t".join(line1)+"\n")
for query in dict2:
line2= dict2[query]
if query not in dict1:
b2b_handle.write("\t".join(line2)+"\n")
else:
line1= dict1[query]
if float(line2[-1]) > float(line1[-1]):
b2b_handle.write("\t".join(line2)+"\n")
else:
b2w_handle.write("\t".join(line2)+"\n")
b1b_handle.close()
b1w_handle.close()
b2b_handle.close()
b2w_handle.close()
|
alexherns/biotite-scripts
|
compare_blasts.py
|
Python
|
mit
| 1,490 | 0.015436 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import date, timedelta
from decimal import Decimal, ROUND_DOWN
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import Sum
from django.db.models.signals import post_save
from django.dispatch import receiver
from .utils import duration_string, duration_decimal
from conf.utils import current_site_id
from conf.managers import CurrentSiteManager
@receiver(post_save)
def add_current_site(sender, instance, **kwargs):
"""
    Add the current site to a model's sites property after a save. This must
    happen in post_save because ManyToManyField relations require the instance
    to already have a primary key.
TODO: Don't run this on *every* post_save.
"""
if hasattr(instance, 'sites'):
if not instance.sites.all():
instance.sites = Site.objects.filter(id=current_site_id())
instance.save()
class Client(models.Model):
name = models.CharField(max_length=255)
archive = models.BooleanField(default=False)
payment_id = models.CharField(max_length=255, blank=True, null=True)
invoice_email = models.EmailField(max_length=255, blank=True, null=True)
sites = models.ManyToManyField(Site)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta:
default_permissions = ('view', 'add', 'change', 'delete')
ordering = ['-id']
def __str__(self):
return 'Client: ' + self.name
def get_total_projects(self):
return self.projects.count()
def get_total_duration(self):
return duration_string(self.projects.aggregate(
Sum('entries__duration')
)['entries__duration__sum'])
class Project(models.Model):
client = models.ForeignKey('Client', related_name='projects')
name = models.CharField(max_length=255)
archive = models.BooleanField(default=False)
estimate = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
hourly_rate = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
class Meta:
default_permissions = ('view', 'add', 'change', 'delete')
ordering = ['client', '-id']
def __str__(self):
return 'Project: ' + self.name
def get_total_entries(self):
return self.entries.count()
def get_total_cost(self):
total_cost = Decimal()
for entry in self.entries.iterator():
try:
if entry.task.hourly_rate:
total_cost += (
duration_decimal(entry.duration)
* entry.task.hourly_rate
)
            except AttributeError:  # entry.task may be None
continue
return total_cost.quantize(Decimal('.01'), rounding=ROUND_DOWN)
def get_total_duration(self):
return duration_string(self.entries.aggregate(
Sum('duration')
)['duration__sum'])
def get_percent_done(self):
if self.estimate is not None:
total_cost = Decimal(self.get_total_cost())
total_estimate = Decimal(self.estimate)
if total_cost != 0 and total_estimate != 0:
return int(100 * (total_cost/total_estimate))
return None
class Task(models.Model):
name = models.CharField(max_length=255)
hourly_rate = models.DecimalField(max_digits=10, decimal_places=2,
blank=True, null=True)
sites = models.ManyToManyField(Site)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta:
default_permissions = ('view', 'add', 'change', 'delete')
ordering = ['-id']
def __str__(self):
return 'Task: ' + self.name
class EntryManager(models.Manager):
def invoiced(self):
return super(EntryManager, self).get_queryset().filter(
invoices__isnull=False)
def uninvoiced(self):
return super(EntryManager, self).get_queryset().filter(
invoices__isnull=True)
class Entry(models.Model):
project = models.ForeignKey('Project', related_name='entries')
task = models.ForeignKey('core.Task', related_name='entries',
blank=True, null=True)
user = models.ForeignKey('auth.User', related_name='entries')
date = models.DateField(blank=True)
duration = models.DurationField(blank=True)
note = models.TextField(blank=True, null=True)
    site = models.ForeignKey(Site, default=current_site_id,
                             on_delete=models.CASCADE)
objects = EntryManager()
on_site = CurrentSiteManager()
class Meta:
default_permissions = ('view', 'add', 'change', 'delete')
ordering = ['-date', '-id']
verbose_name_plural = 'Entries'
def save(self, *args, **kwargs):
if not self.date:
self.date = date.today()
if not self.site:
self.site = Site.objects.get(id=current_site_id())
super(Entry, self).save(*args, **kwargs)
def __str__(self):
return 'Entry for ' + self.project.name + ' by ' + self.user.username
def is_invoiced(self):
if self.invoices.count() > 0:
return True
return False
class Invoice(models.Model):
client = models.ForeignKey('Client') # Redundant with entries?
note = models.CharField(max_length=255, blank=True, null=True)
entries = models.ManyToManyField('Entry', related_name='invoices')
created = models.DateTimeField(auto_now_add=True)
paid = models.DateTimeField(blank=True, null=True)
transaction_id = models.CharField(max_length=255, blank=True, null=True)
    site = models.ForeignKey(Site, default=current_site_id,
                             on_delete=models.CASCADE)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta:
default_permissions = ('view', 'add', 'change', 'delete')
def save(self, *args, **kwargs):
self.site = Site.objects.get(id=current_site_id())
super(Invoice, self).save(*args, **kwargs)
def __str__(self):
return 'Invoice: ' + self.client.name
    def total_duration(self):
        total = timedelta()
        for entry in self.entries.all():
            total += entry.duration
        return total
    def total_billed(self):
        total = Decimal()
        for entry in self.entries.all():
            if entry.task and entry.task.hourly_rate:
                total += duration_decimal(entry.duration) * entry.task.hourly_rate
        return total
|
muhleder/timestrap
|
core/models.py
|
Python
|
bsd-2-clause
| 6,476 | 0.000463 |
"""Development settings and globals."""
from os.path import join, normpath
from base import *
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
########## END EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
########## END CACHE CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('127.0.0.1',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
########## END TOOLBAR CONFIGURATION
|
assertnotnull/servicebusmtl
|
servicebusmtl/servicebusmtl/settings/local.py
|
Python
|
mit
| 1,780 | 0.007865 |
"""
Module for processing Sitemaps.
Note: The main purpose of this module is to provide support for the
SitemapSpider, its API is subject to change without notice.
"""
from urllib.parse import urljoin
import lxml.etree
class Sitemap:
"""Class to parse Sitemap (type=urlset) and Sitemap Index
(type=sitemapindex) files"""
def __init__(self, xmltext):
xmlp = lxml.etree.XMLParser(recover=True, remove_comments=True, resolve_entities=False)
self._root = lxml.etree.fromstring(xmltext, parser=xmlp)
rt = self._root.tag
self.type = self._root.tag.split('}', 1)[1] if '}' in rt else rt
def __iter__(self):
for elem in self._root.getchildren():
d = {}
for el in elem.getchildren():
tag = el.tag
name = tag.split('}', 1)[1] if '}' in tag else tag
if name == 'link':
if 'href' in el.attrib:
d.setdefault('alternate', []).append(el.get('href'))
else:
d[name] = el.text.strip() if el.text else ''
if 'loc' in d:
yield d
def sitemap_urls_from_robots(robots_text, base_url=None):
"""Return an iterator over all sitemap urls contained in the given
robots.txt file
"""
for line in robots_text.splitlines():
if line.lstrip().lower().startswith('sitemap:'):
url = line.split(':', 1)[1].strip()
yield urljoin(base_url, url)
|
starrify/scrapy
|
scrapy/utils/sitemap.py
|
Python
|
bsd-3-clause
| 1,501 | 0.000666 |
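A self-contained run of the Sitemap class on a minimal urlset document (the
URL is invented):

xml = (b'<?xml version="1.0" encoding="UTF-8"?>'
       b'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
       b'<url><loc>https://example.com/</loc><lastmod>2020-01-01</lastmod></url>'
       b'</urlset>')
sm = Sitemap(xml)
assert sm.type == 'urlset'
assert list(sm) == [{'loc': 'https://example.com/', 'lastmod': '2020-01-01'}]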
# -*- coding: utf-8 -*-
from flask import current_app as app
from sqlalchemy_utils import TSVectorType
db = app.db
class Author(db.Model):
id = db.Column(db.Integer(), primary_key=True, nullable=False)
name = db.Column(db.Unicode(1024), nullable=False, unique=True)
name_vector = db.Column(TSVectorType('name'))
|
philiptzou/taxonwiki
|
taxonwiki/models/author.py
|
Python
|
mit
| 331 | 0 |
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import logging
import os
import sys
try:
from magic import from_file as magic_from_file
except ImportError:
magic_from_file = None
from six.moves import SimpleHTTPServer as srvmod
from six.moves import socketserver
class ComplexHTTPRequestHandler(srvmod.SimpleHTTPRequestHandler):
SUFFIXES = ['', '.html', '/index.html']
def do_GET(self):
# Try to detect file by applying various suffixes
for suffix in self.SUFFIXES:
if not hasattr(self, 'original_path'):
self.original_path = self.path
self.path = self.original_path + suffix
path = self.translate_path(self.path)
if os.path.exists(path):
srvmod.SimpleHTTPRequestHandler.do_GET(self)
logging.info("Found `%s`." % self.path)
break
logging.info("Tried to find `%s`, but it doesn't exist.",
self.path)
else:
# Fallback if there were no matches
logging.warning("Unable to find `%s` or variations.",
self.original_path)
def guess_type(self, path):
"""Guess at the mime type for the specified file.
"""
mimetype = srvmod.SimpleHTTPRequestHandler.guess_type(self, path)
# If the default guess is too generic, try the python-magic library
if mimetype == 'application/octet-stream' and magic_from_file:
mimetype = magic_from_file(path, mime=True)
return mimetype
if __name__ == '__main__':
PORT = len(sys.argv) in (2, 3) and int(sys.argv[1]) or 8000
SERVER = len(sys.argv) == 3 and sys.argv[2] or ""
socketserver.TCPServer.allow_reuse_address = True
try:
httpd = socketserver.TCPServer(
(SERVER, PORT), ComplexHTTPRequestHandler)
except OSError as e:
logging.error("Could not listen on port %s, server %s.", PORT, SERVER)
sys.exit(getattr(e, 'exitcode', 1))
logging.info("Serving at port %s, server %s.", PORT, SERVER)
try:
httpd.serve_forever()
except KeyboardInterrupt as e:
logging.info("Shutting down server.")
httpd.socket.close()
|
jimperio/pelican
|
pelican/server.py
|
Python
|
agpl-3.0
| 2,272 | 0 |
#
# Copyright (c) 2013-2014, Scott J Maddox
#
# This file is part of SimplePL.
#
# SimplePL is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# SimplePL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with SimplePL. If not, see
# <http://www.gnu.org/licenses/>.
#
#######################################################################
# std lib imports
import logging
log = logging.getLogger(__name__)
import time
import string
# third party imports
#import serial
# local imports
SLEEP_TIME = 0.01
#TODO: make sure the asked for nm is available on the given grating?
class FW102C(object):
'''
Device driver for ThorLabs FW102C Motorized Filter Wheel.
'''
def __init__(self, port, timeout=5.):
# self._inst = serial.Serial(port,
# baudrate=115200,
# bytesize=serial.EIGHTBITS,
# parity=serial.PARITY_NONE,
# stopbits=serial.STOPBITS_ONE,
# timeout=timeout)
self.current_filter = 1
time.sleep(SLEEP_TIME * 100)
def __read(self):
r = self._inst.readline()
log.debug("__read: return %s", r)
return r
def _read(self):
r = self.__read()
r = string.join(r.split()[1:-1]) # strip command echo and "ok"
return r
def __write(self, s):
log.debug("__write: _inst.write(%s)", s)
self._inst.write(s+"\r")
def _write(self, s):
self.__write(s)
while True:
if self.__read()[-1:] == "\r\n>":
break
def _ask(self, s):
self.__write(s)
return self._read()
#TODO: check how it terminates this, and compensate
def get_id(self):
return self._ask('*idn?')
#TODO: check how it confirms pos=1, if at all, and compensate
def set_filter(self, i):
'''
Sets the filter wheel position to the given index
'''
if not isinstance(i, int) or i < 1 or i > 6:
raise ValueError('i must be an integer in the range [1, 6]')
#self._write('pos=%d'%i)
log.debug("set_filter: %d", i)
self.current_filter = i
time.sleep(3)
def get_filter(self):
#return int(self._ask('pos?'))
log.debug("get_filter: %d", self.current_filter)
return self.current_filter
if __name__ == "__main__":
# enable DEBUG output
logging.basicConfig(level=logging.DEBUG)
# Test
fw = FW102C(port=3)
print fw.get_id()
print fw.get_filter()
fw.set_filter(1)
print fw.get_filter()
fw.set_filter(2)
print fw.get_filter()
|
scott-maddox/simplepl
|
src/simplepl/instruments/drivers/thorlabs_fw102c_sim.py
|
Python
|
agpl-3.0
| 3,169 | 0.005364 |
"""switch to server-side creation timestamps
Revision ID: 1dfc65e583bf
Revises: 1b0b4e6fdf96
Create Date: 2018-02-08 23:06:09.384416
"""
# revision identifiers, used by Alembic.
revision = '1dfc65e583bf'
down_revision = '1b0b4e6fdf96'
from alembic import op
from sqlalchemy.sql import text
# SELECT table_name FROM information_schema.columns WHERE table_schema='inbox' AND column_name='created_at'
TABLES = ['account', 'accounttransaction', 'actionlog', 'block', 'calendar', 'category', 'contact', 'contactsearchindexcursor', 'dataprocessingcache', 'event', 'folder', 'gmailauthcredentials', 'imapfolderinfo', 'imapfoldersyncstatus', 'imapuid', 'label', 'labelitem', 'message', 'messagecategory', 'messagecontactassociation', 'metadata', 'namespace', 'part', 'phonenumber', 'secret', 'thread', 'transaction']
def upgrade():
conn = op.get_bind()
for table in TABLES:
conn.execute(text('ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP'.format(table)))
def downgrade():
conn = op.get_bind()
for table in TABLES:
conn.execute(text('ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL'.format(table)))
|
closeio/nylas
|
migrations/versions/239_server_default_created_at.py
|
Python
|
agpl-3.0
| 1,186 | 0.006745 |
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import os
def version():
"""Read program version from file."""
    with open(os.path.join(__path__[0], 'VERSION')) as version_file:
        return version_file.read().strip()
|
dparks1134/DrawM
|
drawm/__init__.py
|
Python
|
gpl-3.0
| 1,491 | 0.000671 |
import sys
sys.path.insert(0,'../src/')
# Begin From obstacle_avoidance
import rospy
import math
from math import sin, cos
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
from collections import namedtuple
Obstacle = namedtuple('Obstacle', ['r', 'theta'])
# End From obstacle_avoidance
from obstacle_avoidance import ObstacleAvoidance
import unittest
class TestCurvatureCalculations(unittest.TestCase):
def test_left(self):
# Obstacle = namedtuple('Obstacle', ['r', 'theta'])
oa = ObstacleAvoidance()
v = 2
omega = .1
originalCurvature = omega/v
pathWidth = 1
filteredListOfRThetaPairs = []
filteredListOfRThetaPairs.append(Obstacle(r=1.6328, theta=-0.4421))
filteredListOfRThetaPairs.append(Obstacle(r=1.4904, theta=-0.2019))
filteredListOfRThetaPairs.append(Obstacle(r=1.0792, theta=-0.3143))
filteredListOfRThetaPairs.append(Obstacle(r=1.4444, theta=-0.3247))
filteredListOfRThetaPairs.append(Obstacle(r=1.1740, theta=-0.2601))
filteredListOfRThetaPairs.append(Obstacle(r=1.2565, theta=-0.2686))
filteredListOfRThetaPairs.append(Obstacle(r=1.5160, theta=-0.5730))
filteredListOfRThetaPairs.append(Obstacle(r=1.7103, theta=-0.5350))
filteredListOfRThetaPairs.append(Obstacle(r=1.2089, theta=-0.0008))
filteredListOfRThetaPairs.append(Obstacle(r=1.7064, theta=-0.5072))
curvatureToPassObstaclesOnLeft = oa.calculateCurvatureToPassObstaclesOnLeft(originalCurvature, pathWidth, filteredListOfRThetaPairs)
print(str(curvatureToPassObstaclesOnLeft))
self.assertTrue(abs(curvatureToPassObstaclesOnLeft-0.8240)<0.001)
def test_right(self):
# Obstacle = namedtuple('Obstacle', ['r', 'theta'])
oa = ObstacleAvoidance()
v = 2
omega = .1
originalCurvature = omega/v
pathWidth = 1
filteredListOfRThetaPairs = []
filteredListOfRThetaPairs.append(Obstacle(r=1.6328, theta=-0.4421))
filteredListOfRThetaPairs.append(Obstacle(r=1.4904, theta=-0.2019))
filteredListOfRThetaPairs.append(Obstacle(r=1.0792, theta=-0.3143))
filteredListOfRThetaPairs.append(Obstacle(r=1.4444, theta=-0.3247))
filteredListOfRThetaPairs.append(Obstacle(r=1.1740, theta=-0.2601))
filteredListOfRThetaPairs.append(Obstacle(r=1.2565, theta=-0.2686))
filteredListOfRThetaPairs.append(Obstacle(r=1.5160, theta=-0.5730))
filteredListOfRThetaPairs.append(Obstacle(r=1.7103, theta=-0.5350))
filteredListOfRThetaPairs.append(Obstacle(r=1.2089, theta=-0.0008))
filteredListOfRThetaPairs.append(Obstacle(r=1.7064, theta=-0.5072))
curvatureToPassObstaclesOnRight = oa.calculateCurvatureToPassObstaclesOnRight(originalCurvature, pathWidth, filteredListOfRThetaPairs)
print(str(curvatureToPassObstaclesOnRight))
self.assertTrue(abs(curvatureToPassObstaclesOnRight-(-1.8228))<0.001)
if __name__ == '__main__':
unittest.main()
|
cwrucutter/snowmower_obstacles
|
test/test_obstacle_avoidance.py
|
Python
|
mit
| 3,038 | 0.004937 |
from os import curdir, sep
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from urlparse import urlparse, parse_qs
from subprocess import call
import SocketServer
import osc
import random
import json
import socket
import sys
class Drummers(object):
def __init__(self):
self.drummer_map = {}
self.num_pads = 16
self.drum_width = 400
self.drum_height = 200
self.drum_layout = (8, 2) #8 wide, two high
def add_touches(self, ip_address, coords):
self.drummer_map[ip_address] = coords
def get_total_hits(self):
hits = [0] * self.num_pads
for coords in self.drummer_map.values():
for c in coords:
p = self.coord_to_pad(c)
if (p >= 0 and p < (self.drum_layout[0] * self.drum_layout[1])):
hits[self.coord_to_pad(c)] += 1
return hits
def coord_to_pad(self, c):
pix_per_pad_w = self.drum_width / self.drum_layout[0]
pix_per_pad_h = self.drum_height / self.drum_layout[1]
column = c[0] // pix_per_pad_w
row = c[1] // pix_per_pad_h
pad = row * self.drum_layout[0] + column
return pad
drummers = Drummers()
def handle_input(input, ipAddress):
for evt in input:
if evt['elementId'] == "touchmove":
if evt['action'] == "MultitouchTouchList":
osc.sendMsg("/movestart", [ipAddress, evt['coords'][0][0], evt['coords'][0][1]] , "127.0.0.1", 9002)
if evt['action'] == "MultitouchTouchListEnd":
osc.sendMsg("/moveend", [ipAddress, evt['coords'][0][0], evt['coords'][0][1]] , "127.0.0.1", 9002)
if evt['elementId'] == "touchshoot":
if evt['action'] == "MultitouchTouchList":
osc.sendMsg("/shootstart", [ipAddress, evt['coords'][0][0], evt['coords'][0][1]] , "127.0.0.1", 9002)
if evt['action'] == "MultitouchTouchListEnd":
osc.sendMsg("/shootend", [ipAddress, evt['coords'][0][0], evt['coords'][0][1]] , "127.0.0.1", 9002)
if evt['elementId'] == "drumpad":
if evt['action'] == "MultitouchTouchList":
drummers.add_touches(ipAddress, evt['coords'])
hits = drummers.get_total_hits()
print "hits =", hits, len(hits)
osc.sendMsg("/drumhitlist", [int(h) for h in hits], "127.0.0.1", 6767)
osc.sendMsg("/drumhitlist", [int(h) for h in hits], "127.0.0.1", 6768)
if evt['action'] == "MultitouchTouchNone":
print "got none!"
drummers.add_touches(ipAddress, [])
hits = drummers.get_total_hits()
print "hits =", hits, len(hits)
osc.sendMsg("/drumhitlist", [int(h) for h in hits], "127.0.0.1", 6767)
osc.sendMsg("/drumhitlist", [int(h) for h in hits], "127.0.0.1", 6768)
if evt['elementId'] == "drumpadc":
print "touish:", evt
class ThreadedHTTPServer(SocketServer.ThreadingMixIn, HTTPServer):
daemon_threads = True
pass
move_id = 0
PORT = 8080
if len(sys.argv) == 2:
PORT = int(sys.argv[1])
class MyHandler(BaseHTTPRequestHandler):
def do_POST(self):
        try:
            clen = int(self.headers.getheader('content-length'))
            incoming = self.rfile.read(clen)
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write("ok")
            incoming_parsed = json.loads(incoming)
            handle_input(incoming_parsed, self.address_string())
        except IOError:
            self.send_error(404, 'File Not Found: %s' % self.path)
def do_GET(self):
# print "got a request from", self.address_string()
if (self.path in ["/demo-game.html", "/demo-drum.html", "/touchlib.js", "/"]):
try:
if (self.path == "/"):
self.path = "/demo-game.html"
f = open(curdir + sep + self.path) #self.path has /test.html
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(f.read())
f.close()
except IOError:
self.send_error(404,'File Not Found: %s' % self.path)
else:
self.send_error(404,'File Not Found: %s' % self.path)
def main():
try:
# server = HTTPServer(('', PORT), MyHandler)
server = ThreadedHTTPServer(('', PORT), MyHandler)
print 'Welcome to the machine...(%s:%d)'%(socket.gethostbyname(socket.gethostname()), PORT)
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
osc.init()
main()
|
aturley/spaceshooter
|
server-demogame.py
|
Python
|
gpl-3.0
| 4,932 | 0.006894 |
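A quick sanity check of Drummers.coord_to_pad: the 400x200 drum surface split
into an 8x2 grid gives 50x100-pixel pads, numbered row-major from the top left.

d = Drummers()
assert d.coord_to_pad((0, 0)) == 0       # top-left pad
assert d.coord_to_pad((399, 0)) == 7     # top-right pad
assert d.coord_to_pad((0, 199)) == 8     # bottom-left pad
assert d.coord_to_pad((399, 199)) == 15  # bottom-right pad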
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of restkit released under the MIT license.
# See the NOTICE for more information.
from StringIO import StringIO
import urlparse
import urllib
try:
from webob import Request as BaseRequest
except ImportError:
raise ImportError('WebOb (http://pypi.python.org/pypi/WebOb) is required')
from .wsgi_proxy import Proxy
__doc__ = '''Subclasses of webob.Request who use restkit to get a
webob.Response via restkit.ext.wsgi_proxy.Proxy.
Example::
>>> req = Request.blank('http://pypi.python.org/pypi/restkit')
>>> resp = req.get_response()
>>> print resp #doctest: +ELLIPSIS
200 OK
Date: ...
Transfer-Encoding: chunked
Content-Type: text/html; charset=utf-8
Server: Apache/2...
<BLANKLINE>
<?xml version="1.0" encoding="UTF-8"?>
...
'''
PROXY = Proxy(allowed_methods=['GET', 'POST', 'HEAD', 'DELETE', 'PUT', 'PURGE'])
class Method(property):
def __init__(self, name):
self.name = name
def __get__(self, instance, klass):
if not instance:
return self
instance.method = self.name.upper()
def req(*args, **kwargs):
return instance.get_response(*args, **kwargs)
return req
class Request(BaseRequest):
get = Method('get')
post = Method('post')
put = Method('put')
head = Method('head')
delete = Method('delete')
def get_response(self):
if self.content_length < 0:
self.content_length = 0
if self.method in ('DELETE', 'GET'):
self.body = ''
elif self.method == 'POST' and self.POST:
body = urllib.urlencode(self.POST.copy())
stream = StringIO(body)
stream.seek(0)
self.body_file = stream
self.content_length = stream.len
if 'form' not in self.content_type:
self.content_type = 'application/x-www-form-urlencoded'
self.server_name = self.host
return BaseRequest.get_response(self, PROXY)
__call__ = get_response
def set_url(self, url_or_path):
path = url_or_path.lstrip('/')
if '?' in path:
path, self.query_string = path.split('?', 1)
if path.startswith('http'):
url = path
else:
self.path_info = '/'+path
url = self.url
self.scheme, self.host, self.path_info = urlparse.urlparse(url)[0:3]
|
arnaudsj/restkit
|
restkit/contrib/webob_api.py
|
Python
|
mit
| 2,457 | 0.003256 |
# -*- test-case-name: twisted.test.test_threadpool -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
twisted.python.threadpool: a pool of threads to which we dispatch tasks.
In most cases you can just use C{reactor.callInThread} and friends
instead of creating a thread pool directly.
"""
from __future__ import division, absolute_import
try:
from Queue import Queue
except ImportError:
from queue import Queue
import contextlib
import threading
import copy
from twisted.python import log, context, failure
WorkerStop = object()
class ThreadPool:
"""
This class (hopefully) generalizes the functionality of a pool of
threads to which work can be dispatched.
L{callInThread} and L{stop} should only be called from
a single thread, unless you make a subclass where L{stop} and
L{_startSomeWorkers} are synchronized.
@ivar started: Whether or not the thread pool is currently running.
@type started: L{bool}
@ivar threads: List of workers currently running in this thread pool.
@type threads: L{list}
"""
min = 5
max = 20
joined = False
started = False
workers = 0
name = None
threadFactory = threading.Thread
currentThread = staticmethod(threading.currentThread)
def __init__(self, minthreads=5, maxthreads=20, name=None):
"""
Create a new threadpool.
@param minthreads: minimum number of threads in the pool
@param maxthreads: maximum number of threads in the pool
"""
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.q = Queue(0)
self.min = minthreads
self.max = maxthreads
self.name = name
self.waiters = []
self.threads = []
self.working = []
def start(self):
"""
Start the threadpool.
"""
self.joined = False
self.started = True
# Start some threads.
self.adjustPoolsize()
def startAWorker(self):
self.workers += 1
name = "PoolThread-%s-%s" % (self.name or id(self), self.workers)
newThread = self.threadFactory(target=self._worker, name=name)
self.threads.append(newThread)
newThread.start()
def stopAWorker(self):
self.q.put(WorkerStop)
self.workers -= 1
def __setstate__(self, state):
self.__dict__ = state
ThreadPool.__init__(self, self.min, self.max)
def __getstate__(self):
state = {}
state['min'] = self.min
state['max'] = self.max
return state
def _startSomeWorkers(self):
neededSize = self.q.qsize() + len(self.working)
# Create enough, but not too many
while self.workers < min(self.max, neededSize):
self.startAWorker()
def callInThread(self, func, *args, **kw):
"""
Call a callable object in a separate thread.
@param func: callable object to be called in separate thread
@param *args: positional arguments to be passed to C{func}
@param **kw: keyword args to be passed to C{func}
"""
self.callInThreadWithCallback(None, func, *args, **kw)
def callInThreadWithCallback(self, onResult, func, *args, **kw):
"""
Call a callable object in a separate thread and call C{onResult}
with the return value, or a L{twisted.python.failure.Failure}
if the callable raises an exception.
The callable is allowed to block, but the C{onResult} function
must not block and should perform as little work as possible.
A typical action for C{onResult} for a threadpool used with a
Twisted reactor would be to schedule a
L{twisted.internet.defer.Deferred} to fire in the main
reactor thread using C{.callFromThread}. Note that C{onResult}
is called inside the separate thread, not inside the reactor thread.
@param onResult: a callable with the signature C{(success, result)}.
If the callable returns normally, C{onResult} is called with
C{(True, result)} where C{result} is the return value of the
callable. If the callable throws an exception, C{onResult} is
called with C{(False, failure)}.
Optionally, C{onResult} may be C{None}, in which case it is not
called at all.
@param func: callable object to be called in separate thread
@param *args: positional arguments to be passed to C{func}
@param **kwargs: keyword arguments to be passed to C{func}
"""
if self.joined:
return
ctx = context.theContextTracker.currentContext().contexts[-1]
o = (ctx, func, args, kw, onResult)
self.q.put(o)
if self.started:
self._startSomeWorkers()
@contextlib.contextmanager
def _workerState(self, stateList, workerThread):
"""
Manages adding and removing this worker from a list of workers
in a particular state.
@param stateList: the list managing workers in this state
@param workerThread: the thread the worker is running in, used to
represent the worker in stateList
"""
stateList.append(workerThread)
try:
yield
finally:
stateList.remove(workerThread)
def _worker(self):
"""
Method used as target of the created threads: retrieve a task to run
from the threadpool, run it, and proceed to the next task until
threadpool is stopped.
"""
ct = self.currentThread()
o = self.q.get()
while o is not WorkerStop:
with self._workerState(self.working, ct):
ctx, function, args, kwargs, onResult = o
del o
try:
result = context.call(ctx, function, *args, **kwargs)
success = True
except:
success = False
if onResult is None:
context.call(ctx, log.err)
result = None
else:
result = failure.Failure()
del function, args, kwargs
if onResult is not None:
try:
context.call(ctx, onResult, success, result)
except:
context.call(ctx, log.err)
del ctx, onResult, result
with self._workerState(self.waiters, ct):
o = self.q.get()
self.threads.remove(ct)
def stop(self):
"""
Shutdown the threads in the threadpool.
"""
self.joined = True
self.started = False
threads = copy.copy(self.threads)
while self.workers:
self.q.put(WorkerStop)
self.workers -= 1
# and let's just make sure
# FIXME: threads that have died before calling stop() are not joined.
for thread in threads:
thread.join()
def adjustPoolsize(self, minthreads=None, maxthreads=None):
if minthreads is None:
minthreads = self.min
if maxthreads is None:
maxthreads = self.max
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.min = minthreads
self.max = maxthreads
if not self.started:
return
# Kill of some threads if we have too many.
while self.workers > self.max:
self.stopAWorker()
# Start some threads if we have too few.
while self.workers < self.min:
self.startAWorker()
# Start some threads if there is a need.
self._startSomeWorkers()
def dumpStats(self):
log.msg('queue: %s' % self.q.queue)
log.msg('waiters: %s' % self.waiters)
log.msg('workers: %s' % self.working)
log.msg('total: %s' % self.threads)
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/python/threadpool.py
|
Python
|
bsd-3-clause
| 8,392 | 0.002026 |
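A minimal usage sketch of the pool's callback API (not part of the module
itself): run a blocking callable on a worker thread and receive
(success, result) in onResult, which fires on the worker thread.

from twisted.python.threadpool import ThreadPool

pool = ThreadPool(minthreads=1, maxthreads=2, name='demo')
pool.start()

def report(success, result):
    # Called on the pool thread; keep this cheap.
    print('success=%s result=%s' % (success, result))

pool.callInThreadWithCallback(report, pow, 2, 10)  # -> success=True result=1024
pool.stop()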
"""
=====================
SVM: Weighted samples
=====================
Plot decision function of a weighted dataset, where the size of points
is proportional to its weight.
"""
print __doc__
import numpy as np
import pylab as pl
from sklearn import svm
# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
Y = [1] * 10 + [-1] * 10
sample_weight = 100 * np.abs(np.random.randn(20))
# and assign a bigger weight to the last 10 samples
sample_weight[:10] *= 10
# fit the model
clf = svm.SVC()
clf.fit(X, Y, sample_weight=sample_weight)
# plot the decision function
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# plot the line, the points, and the nearest vectors to the plane
pl.contourf(xx, yy, Z, alpha=0.75, cmap=pl.cm.bone)
pl.scatter(X[:, 0], X[:, 1], c=Y, s=sample_weight, alpha=0.9, cmap=pl.cm.bone)
pl.axis('off')
pl.show()
|
seckcoder/lang-learn
|
python/sklearn/examples/svm/plot_weighted_samples.py
|
Python
|
unlicense
| 999 | 0.003003 |
#! /usr/bin/env python3
from __future__ import print_function
import argparse
import os
import sys
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('locale_dir')
return parser.parse_args()
args = parse_args()
basedir = os.path.abspath(os.path.join(os.path.abspath(__file__), '..', '..'))
sys.path.insert(0, basedir)
import setup
po_dir = os.path.join(basedir, 'po')
locale_dir = os.path.abspath(args.locale_dir)
print('Building translations')
print(po_dir, '-->', locale_dir)
setup.build_translation_files(po_dir, locale_dir)
|
jendrikseipp/rednotebook-elementary
|
win/build-translations.py
|
Python
|
gpl-2.0
| 569 | 0.005272 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def _insert(node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
def libtree(lib):
"""Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id)
return root
|
clinton-hall/nzbToMedia
|
libs/common/beets/vfs.py
|
Python
|
gpl-3.0
| 1,839 | 0 |
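The tree that _insert builds is just nested Node tuples; a tiny standalone
check (the path components are invented):

root = Node({}, {})
_insert(root, ('Artist', 'Album', '01 Track.mp3'), 42)
assert root.dirs['Artist'].dirs['Album'].files['01 Track.mp3'] == 42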
import uuid
from django.db import models
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import BaseUserManager
from django.utils import timezone
from accelerator_abstract.models import BaseUserRole
from accelerator_abstract.models.base_base_profile import EXPERT_USER_TYPE
MAX_USERNAME_LENGTH = 30
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, email, password,
is_staff, is_superuser, **extra_fields):
"""
Creates and saves an User with the given email and password.
"""
now = timezone.now()
if not email:
raise ValueError('An email address must be provided.')
email = self.normalize_email(email)
if "is_active" not in extra_fields:
extra_fields["is_active"] = True
if "username" not in extra_fields:
# For now we need to have a unique id that is at
# most 30 characters long. Using uuid and truncating.
# Ideally username goes away entirely at some point
# since we're really using email. If we have to keep
# username for some reason then we could switch over
# to a string version of the pk which is guaranteed
# be unique.
extra_fields["username"] = str(uuid.uuid4())[:MAX_USERNAME_LENGTH]
user = self.model(email=email,
is_staff=is_staff,
is_superuser=is_superuser,
last_login=None,
date_joined=now,
**extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email=None, password=None, **extra_fields):
return self._create_user(email, password, False, False,
**extra_fields)
def create_superuser(self, email, password, **extra_fields):
return self._create_user(email, password, True, True,
**extra_fields)
class User(AbstractUser):
# Override the parent email field to add uniqueness constraint
email = models.EmailField(blank=True, unique=True)
objects = UserManager()
class Meta:
db_table = 'auth_user'
managed = settings.ACCELERATOR_MODELS_ARE_MANAGED
def __init__(self, *args, **kwargs):
super(User, self).__init__(*args, **kwargs)
self.startup = None
self.team_member = None
self.profile = None
self.user_finalist_roles = None
class AuthenticationException(Exception):
pass
def __str__(self):
return self.email
def full_name(self):
fn = self.first_name
ln = self.last_name
if fn and ln:
name = u"%s %s" % (fn, ln)
else:
name = str(self.email)
return name
def user_phone(self):
return self._get_profile().phone
def image_url(self):
return self._get_profile().image_url()
def team_member_id(self):
return self.team_member.id if self._get_member() else ''
def user_title(self):
return self._get_title_and_company()['title']
def user_twitter_handle(self):
return self._get_profile().twitter_handle
def user_linked_in_url(self):
return self._get_profile().linked_in_url
def user_facebook_url(self):
return self._get_profile().facebook_url
def user_personal_website_url(self):
return self._get_profile().personal_website_url
def type(self):
return self._get_profile().user_type
def startup_name(self):
return self._get_title_and_company()['company']
def _get_title_and_company(self):
if self._is_expert() and self._has_expert_details():
profile = self._get_profile()
title = profile.title
company = profile.company
return {
"title": title,
"company": company
}
self._get_member()
title = self.team_member.title if self.team_member else ""
company = self.startup.name if self._get_startup() else None
return {
"title": title,
"company": company
}
def _has_expert_details(self):
if self._is_expert():
profile = self._get_profile()
return True if profile.title or profile.company else False
def startup_industry(self):
return self.startup.primary_industry if self._get_startup() else None
def top_level_startup_industry(self):
industry = (
self.startup.primary_industry if self._get_startup() else None)
return industry.parent if industry and industry.parent else industry
def startup_status_names(self):
if self._get_startup():
return [startup_status.program_startup_status.startup_status
for startup_status in self.startup.startupstatus_set.all()]
def finalist_user_roles(self):
if not self.user_finalist_roles:
finalist_roles = BaseUserRole.FINALIST_USER_ROLES
self.user_finalist_roles = self.programrolegrant_set.filter(
program_role__user_role__name__in=finalist_roles
).values_list('program_role__name', flat=True).distinct()
return list(self.user_finalist_roles)
def program(self):
return self.startup.current_program() if self._get_startup() else None
def location(self):
program = self.program()
return program.program_family.name if program else None
def year(self):
program = self.program()
return program.start_date.year if program else None
def is_team_member(self):
return True if self._get_member() else False
def _get_startup(self):
if not self.startup:
self._get_member()
if self.team_member:
self.startup = self.team_member.startup
return self.startup
def _get_member(self):
if not self.team_member:
self.team_member = self.startupteammember_set.last()
return self.team_member
def _get_profile(self):
if self.profile:
return self.profile
self.profile = self.get_profile()
return self.profile
def has_a_finalist_role(self):
return len(self.finalist_user_roles()) > 0
def _is_expert(self):
profile = self._get_profile()
return profile.user_type == EXPERT_USER_TYPE.lower()
|
masschallenge/django-accelerator
|
simpleuser/models.py
|
Python
|
mit
| 6,632 | 0 |
#!/usr/bin/env python
from distutils.core import setup
setup(
name='sewpy',
version='1.0dev',
packages=['sewpy'],
description='Source Extractor Wrapper for Python',
long_description=open('README.rst').read(),
author='The MegaLUT developers',
license='GPLv3',
url='http://github.com/megalut/sewpy'
)
|
megalut/sewpy
|
setup.py
|
Python
|
gpl-3.0
| 310 | 0.029032 |
# -*- coding: utf-8 -*-
###############################################################################
#
# Search
# Returns current committees, subcommittees, and their membership.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class Search(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the Search Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(Search, self).__init__(temboo_session, '/Library/SunlightLabs/Congress/Legislator/Search')
def new_input_set(self):
return SearchInputSet()
def _make_result_set(self, result, path):
return SearchResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return SearchChoreographyExecution(session, exec_id, path)
class SearchInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the Search
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Sunlight Labs.)
"""
super(SearchInputSet, self)._set_input('APIKey', value)
def set_AllLegislators(self, value):
"""
Set the value of the AllLegislators input for this Choreo. ((optional, boolean) A boolean flag indicating to search for all legislators even when they are no longer in office.)
"""
super(SearchInputSet, self)._set_input('AllLegislators', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) A comma-separated list of fields to include in the response.)
"""
super(SearchInputSet, self)._set_input('Fields', value)
def set_Filters(self, value):
"""
Set the value of the Filters input for this Choreo. ((optional, string) A JSON object containing key/value pairs to be used as filters.)
"""
super(SearchInputSet, self)._set_input('Filters', value)
def set_Name(self, value):
"""
Set the value of the Name input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
"""
super(SearchInputSet, self)._set_input('Name', value)
def set_Order(self, value):
"""
Set the value of the Order input for this Choreo. ((optional, string) Used to order the results by field name (e.g. field__asc).)
"""
super(SearchInputSet, self)._set_input('Order', value)
def set_Page(self, value):
"""
Set the value of the Page input for this Choreo. ((optional, integer) The page offset.)
"""
super(SearchInputSet, self)._set_input('Page', value)
def set_PerPage(self, value):
"""
Set the value of the PerPage input for this Choreo. ((optional, integer) The number of results to return per page.)
"""
super(SearchInputSet, self)._set_input('PerPage', value)
def set_Query(self, value):
"""
Set the value of the Query input for this Choreo. ((conditional, string) A search term.)
"""
super(SearchInputSet, self)._set_input('Query', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
super(SearchInputSet, self)._set_input('ResponseFormat', value)
class SearchResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the Search Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from the Sunlight Congress API.)
"""
return self._output.get('Response', None)
class SearchChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return SearchResultSet(response, path)
|
jordanemedlock/psychtruths
|
temboo/core/Library/SunlightLabs/Congress/Legislator/Search.py
|
Python
|
apache-2.0
| 5,201 | 0.005768 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/painting/shared_painting_leia_wanted.iff"
result.attribute_template_id = -1
result.stfName("frn_n","leia_wanted")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
obi-two/Rebelion
|
data/scripts/templates/object/tangible/painting/shared_painting_leia_wanted.py
|
Python
|
mit
| 450 | 0.046667 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
from pyxb.exceptions_ import *
import unittest
import pyxb.binding.datatypes as xsd
class Test_QName (unittest.TestCase):
def testValid (self):
valid = [ 'schema', 'xs:something', 'with.dots' ]
for f in valid:
self.assertEqual(f, xsd.QName(f))
def testInvalid (self):
invalid = [ '-NonName', '-also:-not', 'and:-not', 'too:many:colons', ' whitespace ' ]
for f in invalid:
try:
xsd.QName(f)
print('Unexpected pass with %s' % (f,))
except:
pass
self.assertRaises(SimpleTypeValueError, xsd.QName, f)
if __name__ == '__main__':
unittest.main()
|
jonfoster/pyxb-upstream-mirror
|
tests/datatypes/test-QName.py
|
Python
|
apache-2.0
| 848 | 0.01533 |
# coding=utf-8
from WindowHelper import WindowHelper
from constants import *
class ListDialog:
def __init__(self):
self.win = WindowHelper.Instance()
self.win.new_color((5, 51, 90), 'dark_blue')
self.win.new_color((176, 194, 238), 'light_blue')
self.win.new_font('Arial', 40, 'title')
self.win.new_font('Arial', 20, 'sub_title')
self.win.new_font('Arial', 25, 'options')
def get_answer(self, choices, question=None, sub_text=None):
page_label = self.win.go_to(self.win.new_page(question, WIN_WIDTH, WIN_HEIGHT, bg='white'))
self.win.import_template('menu')
if question is not None:
self.win.edit_text('title_list_dialog', question)
else:
            self.win.edit_text('title_list_dialog', " ") # TODO: Allow removing elements from templates
if sub_text is not None:
self.win.edit_text('sub_title_list_dialog', sub_text)
else:
self.win.edit_text('sub_title_list_dialog', " ")
self.win.new_menu(choices, label='menu_list_dialog')
opt = {
"font": "options",
"color": "dark_blue",
"border": None,
"color_active": "dark_blue",
"border_active": "light_blue",
"font_active": "options",
"margin": WIN_MARGIN
}
self.win.add_menu('menu_list_dialog', 'centered', 180, opt=opt, page=page_label)
self.win.refresh()
return self.win.get_menu_result('menu_list_dialog')
|
totorigolo/WiiQuizz
|
ListDialog.py
|
Python
|
gpl-2.0
| 1,552 | 0.001935 |
class Register:
@property
def value(self):
raise NotImplementedError
@value.setter
def value(self, value):
raise NotImplementedError
|
Hexadorsimal/pynes
|
nes/processors/registers/register.py
|
Python
|
mit
| 166 | 0 |
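The abstract Register above only pins down the interface; a concrete register
supplies the storage and any width masking. A hypothetical 8-bit accumulator
as an illustration:

class Accumulator(Register):
    def __init__(self):
        self._value = 0

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value & 0xFF  # 8-bit register: keep only the low byte

a = Accumulator()
a.value = 0x1FF
assert a.value == 0xFF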
# Example agent for the 'lost Wumpus' task. The agent sweeps the board in a serpentine pattern.
import random
from action import Action
# do not change the class name
class Agent:
    # do not change the constructor signature; here the agent receives all the information about the environment
def __init__(self, p, pj, pn, height, width, areaMap):
self.times_moved = 0
self.direction = Action.LEFT
        # object attributes can be stored this way
self.p = p
self.pj = pj
self.pn = pn
self.height = height
self.width = width
self.map = areaMap
        # in this example the histogram is filled so that a gradient appears on the board
self.hist = []
for y in range(self.height):
self.hist.append([])
for x in range(self.width):
self.hist[y].append(float(y + x) / (self.width + self.height - 2))
        # add the rest of the agent initialization here
return
    # do not change the method signature; here the agent observes the world
    # sensor takes the value True when the agent has the feeling of standing in a pit
def sense(self, sensor):
pass
    # do not change the method signature; here the agent decides which way to move,
    # the function MUST return one of the values [Action.UP, Action.DOWN, Action.LEFT, Action.RIGHT]
def move(self):
if self.times_moved < self.width - 1:
self.times_moved += 1
return self.direction
else:
self.times_moved = 0
self.direction = Action.RIGHT if self.direction == Action.LEFT else Action.LEFT
return Action.DOWN
    # do not change the method signature; here the agent exposes its histogram (the one from the
    # histogram filter), it must be an array (list of lists, tuple of tuples...) with the same
    # dimensions as the board; reading agent.histogram()[y][x] returns the probability of
    # standing on the field in row y and column x
def histogram(self):
return self.hist
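# Illustrative driver sketch (hypothetical probabilities and map; the real
# environment constructs the agent and alternates sense()/move() itself):
#   agent = Agent(p=0.8, pj=0.6, pn=0.1, height=4, width=5,
#                 areaMap=[['.'] * 5 for _ in range(4)])
#   agent.sense(False)
#   direction = agent.move()  # one of Action.UP/DOWN/LEFT/RIGHT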
|
uHappyLogic/lost-wumpus
|
agents/snake_agent.py
|
Python
|
mit
| 1,993 | 0.005018 |
from datetime import datetime
from sqlalchemy import (
    Column,
    DateTime,
    Float,
    ForeignKey,
    Integer,
    Text,
    Unicode,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(Unicode(255), nullable=False)
password = Column(Unicode(255), nullable=False)
email = Column(Unicode(255))
group_id = Column(Integer, ForeignKey('groups.id'), nullable=False)
group = relationship('Group', backref='users')
class Group(Base):
__tablename__ = 'groups'
id = Column(Integer, primary_key=True)
name = Column(Unicode(255), nullable=False)
class Permission(Base):
__tablename__ = 'permissions'
id = Column(Integer, primary_key=True)
name = Column(Unicode(255), nullable=False)
class Item(Base):
__tablename__ = 'items'
id = Column(Integer, primary_key=True)
name = Column(Unicode(255), nullable=False)
description = Column(Text)
price = Column(Float, nullable=False, default=0.0)
category_id = Column(Integer, ForeignKey('categories.id'), nullable=False)
category = relationship('Category', backref='items')
class Category(Base):
__tablename__ = 'categories'
id = Column(Integer, primary_key=True)
name = Column(Unicode(255), nullable=False)
parent_id = Column(Integer, ForeignKey('categories.id'), nullable=True)
parent = relationship('Category', backref='children')
class ItemImage(Base):
__tablename__ = 'images'
id = Column(Integer, primary_key=True)
path = Column(Unicode(255), nullable=False)
item_id = Column(Integer, ForeignKey('items.id'), nullable=False)
item = relationship('Item', backref='images')
class Comment(Base):
__tablename__ = 'comments'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
user = relationship('User', backref='comments')
    item_id = Column(Integer, ForeignKey('items.id'), nullable=False)
item = relationship('Item', backref='comments')
    rank = Column(Integer, nullable=False, default=3)
content = Column(Text)
class Cart(Base):
__tablename__ = 'carts'
id = Column(Integer, primary_key=True)
# TODO: Many-to-many
items = relationship('Item')
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
user = relationship('User', backref='cart')
class Order(Base):
__tablename__ = 'orders'
    id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
    user = relationship('User', backref='orders')
# TODO: Many-to-many
items = relationship('Item')
    add_time = Column(DateTime, nullable=False, default=datetime.now)
address = Column(Unicode(255), nullable=False)
telephone = Column(Unicode(25), nullable=False)
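# Minimal usage sketch (hypothetical engine URL, not part of the original
# module):
#   from sqlalchemy import create_engine
#   engine = create_engine('sqlite:///myshop.db')
#   Base.metadata.create_all(engine)
#   DBSession.configure(bind=engine)
#   group = Group(name=u'customers')
#   DBSession.add(User(name=u'alice', password=u'secret', group=group))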
|
DataMonster/Python
|
web/MyShop/myshop/models.py
|
Python
|
unlicense
| 2,927 | 0.022549 |
"""blah."""
from pyiem.util import get_dbconn
pgconn = get_dbconn("idep")
cursor = pgconn.cursor()
cursor.execute(
"""
SELECT r.hs_id, r.huc_12, p.fpath, extract(year from valid) as yr,
sum(runoff) as sum_runoff,
sum(loss) as sum_loss, sum(delivery) as sum_delivery from
results r JOIN flowpaths p on (r.hs_id = p.fid)
WHERE r.scenario = 5
GROUP by r.hs_id, r.huc_12, fpath, yr
"""
)
print("CATCHMENT,HUC12,FPATH,YEAR,RUNOFF,LOSS,DELIVERY")
for row in cursor:
fpath = row[0]
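    # hs_id encodes catchment * 100 + flowpath (e.g. 1234 -> catchment 12);
    # ids below 100 belong to catchment 0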
if fpath < 100:
catchment = 0
else:
catchment = int(str(fpath)[:-2])
print(str(catchment) + ",%s,%s,%s,%.4f,%.4f,%.4f" % row[1:])
|
akrherz/idep
|
scripts/convergence/dump_results.py
|
Python
|
mit
| 656 | 0 |
# -*- mode:python -*-
# Copyright (c) 2007 MIPS Technologies, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Jaidev Patwardhan
from m5.defines import buildEnv
from m5.params import *
from m5.proxy import *
from System import System
class Lily2System(System):
type = 'Lily2System'
cxx_header = 'arch/lily2/system.hh'
console = Param.String("file that contains the console code")
bare_iron = Param.Bool(False, "Using Bare Iron Mode?")
hex_file_name = Param.String("test.hex","hex file that contains [address,data] pairs")
system_type = Param.UInt64("Type of system we are emulating")
system_rev = Param.UInt64("Revision of system we are emulating")
load_addr_mask = 0xffffffffff
class LinuxLily2System(Lily2System):
type = 'LinuxLily2System'
cxx_header = 'arch/lily2/linux/system.hh'
system_type = 34
system_rev = 1 << 10
boot_cpu_frequency = Param.Frequency(Self.cpu[0].clk_domain.clock.frequency,
"boot processor frequency")
class BareIronLily2System(Lily2System):
type = 'BareIronLily2System'
cxx_header = 'arch/lily2/bare_iron/system.hh'
bare_iron = True
system_type = 34
system_rev = 1 << 10
hex_file_name = Param.String('test.hex',"hex file that contains [address,data] pairs")
|
lixt/lily2-gem5
|
src/arch/lily2/Lily2System.py
|
Python
|
bsd-3-clause
| 2,762 | 0.003259 |
import os, sys
from setuptools import setup, find_packages
version = '0.4'
install_requires = ['setuptools', 'PyYAML']
if sys.version_info < (2, 7):
install_requires.append('simplejson')
install_requires.append('argparse')
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='varstack',
version=version,
description='A tool to create stacked configuration structures',
url = 'https://github.com/conversis/varstack',
license = 'MIT',
author='Dennis Jacobfeuerborn',
author_email='d.jacobfeuerborn@conversis.de',
packages=['varstack'],
scripts=['bin/varstack'],
install_requires=install_requires,
)
|
conversis/varstack
|
setup.py
|
Python
|
mit
| 697 | 0.010043 |
# Copyright 2014-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Extension to scipy.linalg module developed for PBC branch.
'''
import numpy as np
import scipy.linalg
def davidson_nosymm(matvec,size,nroots,Adiag=None):
'''Davidson diagonalization method to solve A c = E c
when A is not Hermitian.
'''
# We don't pass args
def matvec_args(vec, args):
return matvec(vec)
nroots = min(nroots,size)
#if Adiag == None:
# Adiag = matvec(numpy.ones(size))
# Currently not used:
x = np.ones((size,1))
P = np.ones((size,1))
arnold = Arnoldi(matvec_args, x, P, nroots=nroots)
return arnold.solve()
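# Illustrative usage sketch (hypothetical random test matrix, not part of the
# original module):
#   import numpy
#   a = numpy.random.rand(100, 100)
#   eigs, evecs = davidson_nosymm(lambda v: a.dot(v), 100, nroots=3)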
VERBOSE = False
class Arnoldi:
def __init__(self,matr_multiply,xStart,inPreCon,nroots=1,tol=1e-6):
self.matrMultiply = matr_multiply
self.size = xStart.shape[0]
self.nEigen = min(nroots, self.size)
self.maxM = min(30, self.size)
self.maxOuterLoop = 10
self.tol = tol
#
# Creating initial guess and preconditioner
#
self.x0 = xStart.real.copy()
self.iteration = 0
self.totalIter = 0
self.converged = False
self.preCon = inPreCon.copy()
#
# Allocating other vectors
#
self.allocateVecs()
def solve(self):
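        # Outer loop: grow the Krylov subspace by one vector per iteration up
        # to maxM, then checkDeflate() restarts it from the best guesses.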
while self.converged == 0:
if self.totalIter == 0:
self.guessInitial()
for i in range(self.maxM):
if self.deflated == 1:
self.currentSize = self.nEigen
if self.deflated == 0 and self.totalIter > 0:
self.hMult()
self.push_Av()
self.constructSubspace()
self.solveSubspace()
self.constructSol()
self.computeResidual()
self.checkConvergence()
self.deflated = 0
if self.converged:
break
self.updateVecs()
self.checkDeflate()
self.constructDeflatedSub()
self.totalIter += 1
self.currentSize += 1
print("")
print("Converged in %3d cycles" % self.totalIter)
self.constructAllSolV()
return self.outeigs, self.outevecs
def allocateVecs(self):
self.subH = np.zeros( shape=(self.maxM,self.maxM), dtype=complex )
self.sol = np.zeros( shape=(self.maxM), dtype=complex )
self.dgks = np.zeros( shape=(self.maxM), dtype=complex )
self.nConv = np.zeros( shape=(self.maxM), dtype=int )
self.eigs = np.zeros( shape=(self.maxM), dtype=complex )
self.evecs = np.zeros( shape=(self.maxM,self.maxM), dtype=complex )
self.oldeigs = np.zeros( shape=(self.maxM), dtype=complex )
self.deigs = np.zeros( shape=(self.maxM), dtype=complex )
self.outeigs = np.zeros( shape=(self.nEigen), dtype=complex )
self.outevecs = np.zeros( shape=(self.size,self.nEigen), dtype=complex)
self.currentSize = 0
self.Ax = np.zeros( shape=(self.size), dtype=complex )
self.res = np.zeros( shape=(self.size), dtype=complex )
self.vlist = np.zeros( shape=(self.maxM,self.size), dtype=complex )
self.cv = np.zeros( shape = (self.size), dtype = complex )
self.cAv = np.zeros( shape = (self.size), dtype = complex )
self.Avlist = np.zeros( shape=(self.maxM,self.size), dtype=complex )
self.dres = 999.9
self.resnorm = 999.9
self.cvEig = 0.1
self.ciEig = 0
self.deflated = 0
def guessInitial(self):
nrm = np.linalg.norm(self.x0)
self.x0 *= 1./nrm
self.currentSize = self.nEigen
for i in range(self.currentSize):
self.vlist[i] *= 0.0
self.vlist[i,i] = 1.0 + 0.0*1j
self.vlist[i] /= np.linalg.norm(self.vlist[i])
for i in range(self.currentSize):
self.cv = self.vlist[i].copy()
self.hMult()
self.Avlist[i] = self.cAv.copy()
self.constructSubspace()
def hMult(self):
args = 0
self.cAv = self.matrMultiply(self.cv.reshape(self.size),args)
def push_Av(self):
self.Avlist[self.currentSize-1] = self.cAv.reshape(self.size)
def constructSubspace(self):
if self.totalIter == 0 or self.deflated == 1: # construct the full block of v^*Av
for i in range(self.currentSize):
for j in range(self.currentSize):
val = np.vdot(self.vlist[i],self.Avlist[j])
self.subH[i,j] = val
else:
for j in range(self.currentSize):
if j <= (self.currentSize-1):
val = np.vdot(self.vlist[j],self.Avlist[self.currentSize-1])
self.subH[j,self.currentSize-1] = val
if j < (self.currentSize-1):
val = np.vdot(self.vlist[self.currentSize-1],self.Avlist[j])
self.subH[self.currentSize-1,j] = val
def solveSubspace(self):
w, v = scipy.linalg.eig(self.subH[:self.currentSize,:self.currentSize])
idx = w.real.argsort()
#imag_norm = np.linalg.norm(w.imag)
#if imag_norm > 1e-12:
# print " *************************************************** "
# print " WARNING IMAGINARY EIGENVALUE OF NORM %.15g " % (imag_norm)
# print " *************************************************** "
#print "Imaginary norm eigenvectors = ", np.linalg.norm(v.imag)
#print "Imaginary norm eigenvalue = ", np.linalg.norm(w.imag)
v = v[:,idx]
w = w[idx].real
self.sol[:self.currentSize] = v[:,self.ciEig]
self.evecs[:self.currentSize,:self.currentSize] = v
self.eigs[:self.currentSize] = w[:self.currentSize]
self.outeigs[:self.nEigen] = w[:self.nEigen]
self.cvEig = self.eigs[self.ciEig]
def constructAllSolV(self):
for i in range(self.nEigen):
self.sol[:] = self.evecs[:,i]
self.cv = np.dot(self.vlist[:self.currentSize].transpose(),self.sol[:self.currentSize])
self.outevecs[:,i] = self.cv
def constructSol(self):
self.constructSolV()
self.constructSolAv()
def constructSolV(self):
self.cv = np.dot(self.vlist[:self.currentSize].transpose(),self.sol[:self.currentSize])
def constructSolAv(self):
self.cAv = np.dot(self.Avlist[:self.currentSize].transpose(),self.sol[:self.currentSize])
def computeResidual(self):
self.res = self.cAv - self.cvEig * self.cv
self.dres = np.vdot(self.res,self.res)**0.5
#
# gram-schmidt for residual vector
#
for i in range(self.currentSize):
self.dgks[i] = np.vdot( self.vlist[i], self.res )
self.res -= self.dgks[i]*self.vlist[i]
#
# second gram-schmidt to make them really orthogonal
#
for i in range(self.currentSize):
self.dgks[i] = np.vdot( self.vlist[i], self.res )
self.res -= self.dgks[i]*self.vlist[i]
self.resnorm = np.linalg.norm(self.res)
self.res /= self.resnorm
orthog = 0.0
for i in range(self.currentSize):
orthog += np.vdot(self.res,self.vlist[i])**2.0
orthog = orthog ** 0.5
if not self.deflated:
if VERBOSE:
print("%3d %20.14f %20.14f %10.4g" % (self.ciEig, self.cvEig.real, self.resnorm.real, orthog.real))
#else:
# print "%3d %20.14f %20.14f %20.14f (deflated)" % (self.ciEig, self.cvEig,
# self.resnorm, orthog)
self.iteration += 1
def updateVecs(self):
self.vlist[self.currentSize] = self.res.copy()
self.cv = self.vlist[self.currentSize]
def checkConvergence(self):
if self.resnorm < self.tol:
if VERBOSE:
print("Eigenvalue %3d converged! (res = %.15g)" % (self.ciEig, self.resnorm))
self.ciEig += 1
if self.ciEig == self.nEigen:
self.converged = True
if self.resnorm < self.tol and not self.converged:
if VERBOSE:
print("")
print("")
print("%-3s %-20s %-20s %-8s" % ("#", " Eigenvalue", " Res. Norm.", " Ortho. (should be ~0)"))
def gramSchmidtCurrentVec(self,northo):
for i in range(northo):
self.dgks[i] = np.vdot( self.vlist[i], self.cv )
self.cv -= self.dgks[i]*self.vlist[i] #/ np.vdot(self.vlist[i],self.vlist[i])
self.cv /= np.linalg.norm(self.cv)
def checkDeflate(self):
if self.currentSize == self.maxM-1:
self.deflated = 1
#print "deflating..."
for i in range(self.nEigen):
self.sol[:self.currentSize] = self.evecs[:self.currentSize,i]
# Finds the "best" eigenvector for this eigenvalue
self.constructSolV()
# Puts this guess in self.Avlist rather than self.vlist for now...
# since this would mess up self.constructSolV()'s solution
self.Avlist[i] = self.cv.copy()
for i in range(self.nEigen):
# This is actually the "best" eigenvector v, not A*v (see above)
self.cv = self.Avlist[i].copy()
self.gramSchmidtCurrentVec(i)
self.vlist[i] = self.cv.copy()
for i in range(self.nEigen):
# This is actually the "best" eigenvector v, not A*v (see above)
self.cv = self.vlist[i].copy()
# Use current vector cv to create cAv
self.hMult()
self.Avlist[i] = self.cAv.copy()
def constructDeflatedSub(self):
if self.deflated == 1:
self.currentSize = self.nEigen
self.constructSubspace()
|
sunqm/pyscf
|
pyscf/pbc/lib/arnoldi.py
|
Python
|
apache-2.0
| 10,549 | 0.010522 |
import unittest
from datetime import datetime
from flask import url_for
from app import current_app as app
from app.helpers.data import save_to_db
from app.models.call_for_papers import CallForPaper
from tests.unittests.object_mother import ObjectMother
from tests.unittests.utils import OpenEventTestCase
class TestGuestEventPage(OpenEventTestCase):
def test_published_event_view(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_detail_home', identifier=event.identifier),
follow_redirects=True)
self.assertTrue("Open Event" in rv.data, msg=rv.data)
def test_published_event_view_coc(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
event.code_of_conduct = 'Test Code of Conduct'
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_coc', identifier=event.identifier),
follow_redirects=True)
self.assertTrue("Code of Conduct" in rv.data, msg=rv.data)
def test_unpublished_event_view_coc(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_coc', identifier=event.identifier),
follow_redirects=True)
self.assertEqual(rv.status_code, 404)
def test_unpublished_event_view_attempt(self):
with app.test_request_context():
event = ObjectMother.get_event()
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_detail_home', identifier=event.identifier),
follow_redirects=True)
self.assertEqual(rv.status_code, 404)
def test_soft_deleted_event_view_attempt(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
event.in_trash = True
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_detail_home', identifier=event.identifier),
follow_redirects=True)
self.assertEqual(rv.status_code, 404)
def test_published_event_sessions_view(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
track = ObjectMother.get_track()
track.event_id = event.id
save_to_db(track, "Track Saved")
speaker = ObjectMother.get_speaker()
speaker.event_id = event.id
save_to_db(speaker, "Speaker Saved")
session = ObjectMother.get_session()
session.event_id = event.id
session.speakers = [speaker]
session.state = 'accepted'
save_to_db(session, "Session Saved")
rv = self.app.get(url_for('event_detail.display_event_sessions', identifier=event.identifier),
follow_redirects=True)
self.assertTrue("Sessions" in rv.data, msg=rv.data)
def test_published_event_schedule_view(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
event.schedule_published_on = datetime.now()
save_to_db(event, "Event Saved")
track = ObjectMother.get_track()
track.event_id = event.id
save_to_db(track, "Track Saved")
speaker = ObjectMother.get_speaker()
speaker.event_id = event.id
save_to_db(speaker, "Speaker Saved")
microlocation = ObjectMother.get_microlocation()
save_to_db(microlocation, "Microlocation Saved")
session = ObjectMother.get_session()
session.event_id = event.id
session.state = 'accepted'
session.microlocation_id = microlocation.id
session.speakers = [speaker]
save_to_db(session, "Session Saved")
rv = self.app.get(url_for('event_detail.display_event_schedule', identifier=event.identifier),
follow_redirects=True)
self.assertTrue("Schedule" in rv.data, msg=rv.data)
def test_published_event_unpublished_schedule_view_attempt(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
track = ObjectMother.get_track()
track.event_id = event.id
save_to_db(track, "Track Saved")
speaker = ObjectMother.get_speaker()
speaker.event_id = event.id
save_to_db(speaker, "Speaker Saved")
microlocation = ObjectMother.get_microlocation()
save_to_db(microlocation, "Microlocation Saved")
session = ObjectMother.get_session()
session.event_id = event.id
session.microlocation_id = microlocation.id
session.speakers = [speaker]
session.state = 'accepted'
save_to_db(session, "Session Saved")
rv = self.app.get(url_for('event_detail.display_event_schedule', identifier=event.identifier),
follow_redirects=True)
self.assertEqual(rv.status_code, 404)
def test_published_event_cfs_view(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
custom_form = ObjectMother.get_custom_form()
custom_form.event_id = event.id
save_to_db(custom_form, "Custom form saved")
call_for_papers = CallForPaper(announcement="Announce",
start_date=datetime(2003, 8, 4, 12, 30, 45),
end_date=datetime(2004, 8, 4, 12, 30, 45),
event_id=event.id)
save_to_db(call_for_papers, "Call for papers saved")
rv = self.app.get(url_for('event_detail.display_event_cfs',
identifier=event.identifier), follow_redirects=True)
self.assertTrue("Closed" in rv.data, msg=rv.data)
def test_published_event_cfs_view_attempt(self):
with app.test_request_context():
event = ObjectMother.get_event()
event.state = 'Published'
save_to_db(event, "Event Saved")
rv = self.app.get(url_for('event_detail.display_event_cfs',
identifier=event.identifier), follow_redirects=True)
self.assertEqual(rv.status_code, 404)
if __name__ == '__main__':
unittest.main()
|
Achint08/open-event-orga-server
|
tests/unittests/views/guest/test_guest_event_page.py
|
Python
|
gpl-3.0
| 7,240 | 0.001657 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test replace by fee code
#
from test_framework.test_framework import ElectrumTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
MAX_REPLACEMENT_LIMIT = 100
def txToHex(tx):
return bytes_to_hex_str(tx.serialize())
def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
"""Create a txout with a given amount and scriptPubKey
Mines coins as needed.
confirmed - txouts created will be confirmed in the blockchain;
unconfirmed otherwise.
"""
fee = 1*COIN
while node.getbalance() < satoshi_round((amount + fee)/COIN):
node.generate(100)
#print (node.getbalance(), amount, fee)
new_addr = node.getnewaddress()
#print new_addr
txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN))
tx1 = node.getrawtransaction(txid, 1)
txid = int(txid, 16)
i = None
for i, txout in enumerate(tx1['vout']):
#print i, txout['scriptPubKey']['addresses']
if txout['scriptPubKey']['addresses'] == [new_addr]:
#print i
break
assert i is not None
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(txid, i))]
tx2.vout = [CTxOut(amount, scriptPubKey)]
tx2.rehash()
signed_tx = node.signrawtransaction(txToHex(tx2))
txid = node.sendrawtransaction(signed_tx['hex'], True)
# If requested, ensure txouts are confirmed.
if confirmed:
mempool_size = len(node.getrawmempool())
while mempool_size > 0:
node.generate(1)
new_size = len(node.getrawmempool())
# Error out if we have something stuck in the mempool, as this
# would likely be a bug.
assert(new_size < mempool_size)
mempool_size = new_size
return COutPoint(int(txid, 16), 0)
class ReplaceByFeeTest(ElectrumTestFramework):
def setup_network(self):
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000", "-debug",
"-relaypriority=0", "-whitelist=127.0.0.1",
"-limitancestorcount=50",
"-limitancestorsize=101",
"-limitdescendantcount=200",
"-limitdescendantsize=101"
]))
self.is_network_split = False
def run_test(self):
make_utxo(self.nodes[0], 1*COIN)
print("Running test simple doublespend...")
self.test_simple_doublespend()
print("Running test doublespend chain...")
self.test_doublespend_chain()
print("Running test doublespend tree...")
self.test_doublespend_tree()
print("Running test replacement feeperkb...")
self.test_replacement_feeperkb()
print("Running test spends of conflicting outputs...")
self.test_spends_of_conflicting_outputs()
print("Running test new unconfirmed inputs...")
self.test_new_unconfirmed_inputs()
print("Running test too many replacements...")
self.test_too_many_replacements()
print("Running test opt-in...")
self.test_opt_in()
print("Running test prioritised transactions...")
self.test_prioritised_transactions()
print("Passed\n")
def test_simple_doublespend(self):
"""Simple doublespend"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
# Should fail because we haven't changed the fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(1*COIN, CScript([b'b']))]
tx1b_hex = txToHex(tx1b)
try:
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
else:
assert(False)
# Extra 0.1 ELC fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
tx1b_hex = txToHex(tx1b)
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
mempool = self.nodes[0].getrawmempool()
assert (tx1a_txid not in mempool)
assert (tx1b_txid in mempool)
assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
def test_doublespend_chain(self):
"""Doublespend of a long chain"""
initial_nValue = 50*COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
prevout = tx0_outpoint
remaining_value = initial_nValue
chain_txids = []
while remaining_value > 10*COIN:
remaining_value -= 1*COIN
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = [CTxOut(remaining_value, CScript([1]))]
tx_hex = txToHex(tx)
txid = self.nodes[0].sendrawtransaction(tx_hex, True)
chain_txids.append(txid)
prevout = COutPoint(int(txid, 16), 0)
# Whether the double-spend is allowed is evaluated by including all
# child fees - 40 ELC - so this attempt is rejected.
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 30*COIN, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
try:
self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
else:
assert(False) # transaction mistakenly accepted!
# Accepted with sufficient fee
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(1*COIN, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
mempool = self.nodes[0].getrawmempool()
for doublespent_txid in chain_txids:
assert(doublespent_txid not in mempool)
def test_doublespend_tree(self):
"""Doublespend of a big tree of transactions"""
initial_nValue = 50*COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001*COIN, _total_txs=None):
if _total_txs is None:
_total_txs = [0]
if _total_txs[0] >= max_txs:
return
txout_value = (initial_value - fee) // tree_width
if txout_value < fee:
return
vout = [CTxOut(txout_value, CScript([i+1]))
for i in range(tree_width)]
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = vout
tx_hex = txToHex(tx)
assert(len(tx.serialize()) < 100000)
txid = self.nodes[0].sendrawtransaction(tx_hex, True)
yield tx
_total_txs[0] += 1
txid = int(txid, 16)
for i, txout in enumerate(tx.vout):
for x in branch(COutPoint(txid, i), txout_value,
max_txs,
tree_width=tree_width, fee=fee,
_total_txs=_total_txs):
yield x
fee = int(0.0001*COIN)
n = MAX_REPLACEMENT_LIMIT
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
# Attempt double-spend, will fail because too little fee paid
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee*n, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
try:
self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
else:
assert(False)
# 1 ELC fee is enough
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee*n - 1*COIN, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
mempool = self.nodes[0].getrawmempool()
for tx in tree_txs:
tx.rehash()
assert (tx.hash not in mempool)
# Try again, but with more total transactions than the "max txs
# double-spent at once" anti-DoS limit.
for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2):
fee = int(0.0001*COIN)
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 2*fee*n, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
try:
self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
assert_equal("too many potential replacements" in exp.error['message'], True)
else:
assert(False)
for tx in tree_txs:
tx.rehash()
self.nodes[0].getrawtransaction(tx.hash)
def test_replacement_feeperkb(self):
"""Replacement requires fee-per-KB to be higher"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
# Higher fee, but the fee per KB is much lower, so the replacement is
# rejected.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*999000]))]
tx1b_hex = txToHex(tx1b)
try:
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
else:
assert(False)
def test_spends_of_conflicting_outputs(self):
"""Replacements that spend conflicting tx outputs are rejected"""
utxo1 = make_utxo(self.nodes[0], int(1.2*COIN))
utxo2 = make_utxo(self.nodes[0], 3*COIN)
tx1a = CTransaction()
tx1a.vin = [CTxIn(utxo1, nSequence=0)]
tx1a.vout = [CTxOut(int(1.1*COIN), CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
tx1a_txid = int(tx1a_txid, 16)
# Direct spend an output of the transaction we're replacing.
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
try:
tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
# Spend tx1a's output to test the indirect case.
tx1b = CTransaction()
tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx1b.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1b_hex = txToHex(tx1b)
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
tx1b_txid = int(tx1b_txid, 16)
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0),
CTxIn(COutPoint(tx1b_txid, 0))]
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
try:
tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
def test_new_unconfirmed_inputs(self):
"""Replacements that add new unconfirmed inputs are rejected"""
confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN))
unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False)
tx1 = CTransaction()
tx1.vin = [CTxIn(confirmed_utxo)]
tx1.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1_hex = txToHex(tx1)
tx1_txid = self.nodes[0].sendrawtransaction(tx1_hex, True)
tx2 = CTransaction()
tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
tx2.vout = tx1.vout
tx2_hex = txToHex(tx2)
try:
tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
def test_too_many_replacements(self):
"""Replacements that evict too many transactions are rejected"""
# Try directly replacing more than MAX_REPLACEMENT_LIMIT
# transactions
# Start by creating a single transaction with many outputs
initial_nValue = 10*COIN
utxo = make_utxo(self.nodes[0], initial_nValue)
fee = int(0.0001*COIN)
split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1))
actual_fee = initial_nValue - split_value*(MAX_REPLACEMENT_LIMIT+1)
outputs = []
for i in range(MAX_REPLACEMENT_LIMIT+1):
outputs.append(CTxOut(split_value, CScript([1])))
splitting_tx = CTransaction()
splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
splitting_tx.vout = outputs
splitting_tx_hex = txToHex(splitting_tx)
txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, True)
txid = int(txid, 16)
# Now spend each of those outputs individually
for i in range(MAX_REPLACEMENT_LIMIT+1):
tx_i = CTransaction()
tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
tx_i.vout = [CTxOut(split_value-fee, CScript([b'a']))]
tx_i_hex = txToHex(tx_i)
self.nodes[0].sendrawtransaction(tx_i_hex, True)
# Now create doublespend of the whole lot; should fail.
# Need a big enough fee to cover all spending transactions and have
# a higher fee rate
double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1)
inputs = []
for i in range(MAX_REPLACEMENT_LIMIT+1):
inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
double_tx = CTransaction()
double_tx.vin = inputs
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
try:
self.nodes[0].sendrawtransaction(double_tx_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
assert_equal("too many potential replacements" in exp.error['message'], True)
else:
assert(False)
# If we remove an input, it should pass
double_tx = CTransaction()
double_tx.vin = inputs[0:-1]
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
self.nodes[0].sendrawtransaction(double_tx_hex, True)
def test_opt_in(self):
""" Replacing should only work if orig tx opted in """
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
# Create a non-opting in transaction
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
# Shouldn't be able to double-spend
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
tx1b_hex = txToHex(tx1b)
try:
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
print(tx1b_txid)
assert(False)
tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
# Create a different non-opting in transaction
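        # (per BIP125, an input signals replaceability only when its
        # nSequence is below 0xfffffffe, so 0xfffffffe does not opt in)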
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx2a_hex = txToHex(tx2a)
tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
# Still shouldn't be able to double-spend
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
tx2b_hex = txToHex(tx2b)
try:
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
# Now create a new transaction that spends from tx1a and tx2a
# opt-in on one of the inputs
# Transaction should be replaceable on either input
tx1a_txid = int(tx1a_txid, 16)
tx2a_txid = int(tx2a_txid, 16)
tx3a = CTransaction()
tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
tx3a.vout = [CTxOut(int(0.9*COIN), CScript([b'c'])), CTxOut(int(0.9*COIN), CScript([b'd']))]
tx3a_hex = txToHex(tx3a)
self.nodes[0].sendrawtransaction(tx3a_hex, True)
tx3b = CTransaction()
tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx3b.vout = [CTxOut(int(0.5*COIN), CScript([b'e']))]
tx3b_hex = txToHex(tx3b)
tx3c = CTransaction()
tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
tx3c.vout = [CTxOut(int(0.5*COIN), CScript([b'f']))]
tx3c_hex = txToHex(tx3c)
self.nodes[0].sendrawtransaction(tx3b_hex, True)
# If tx3b was accepted, tx3c won't look like a replacement,
# but make sure it is accepted anyway
self.nodes[0].sendrawtransaction(tx3c_hex, True)
def test_prioritised_transactions(self):
# Ensure that fee deltas used via prioritisetransaction are
# correctly used by replacement logic
# 1. Check that feeperkb uses modified fees
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
# Higher fee, but the actual fee per KB is much lower.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))]
tx1b_hex = txToHex(tx1b)
# Verify tx1b cannot replace tx1a.
try:
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
# Use prioritisetransaction to set tx1a's fee to 0.
self.nodes[0].prioritisetransaction(tx1a_txid, 0, int(-0.1*COIN))
# Now tx1b should be able to replace tx1a
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
assert(tx1b_txid in self.nodes[0].getrawmempool())
# 2. Check that absolute fee checks use modified fee.
tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx2a_hex = txToHex(tx2a)
tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
# Lower fee, but we'll prioritise it
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(1.01*COIN), CScript([b'a']))]
tx2b.rehash()
tx2b_hex = txToHex(tx2b)
# Verify tx2b cannot replace tx2a.
try:
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26)
else:
assert(False)
# Now prioritise tx2b to have a higher modified fee
self.nodes[0].prioritisetransaction(tx2b.hash, 0, int(0.1*COIN))
# tx2b should now be accepted
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
assert(tx2b_txid in self.nodes[0].getrawmempool())
if __name__ == '__main__':
ReplaceByFeeTest().main()
|
sinraf96/electrum
|
qa/rpc-tests/replace-by-fee.py
|
Python
|
mit
| 21,883 | 0.001188 |
"""
Stateful module base class and interface description.
All stateful Python modules
- Get Skype4Py Skype instance on init - have full control over Skype and
thus are not limited to !command handlers
- Reside in the same modules/ folder as UNIX script modules
- Have .py extension and be valid Python 2.7 modules
- Have #!/sevabot magic string at the head of the file
- Export a Python attribute *sevabot_handler* which is an instance of the class
as described below
Please note that in the future we might have different chat backends (GTalk)
and thus have same-same-but-different stateful handlers.
"""
class StatefulSkypeHandler:
"""
Base class for stateful handlers.
All exceptions slip through are caught and logged.
"""
def init(self, sevabot):
"""
Set-up our state. This is called every time module is (re)loaded.
You can get Skype4Py instance via ``sevabot.getSkype()``.
:param sevabot: Handle to Sevabot instance
"""
def handle_message(self, msg, status):
"""Override this method to have a customized handler for each Skype message.
:param msg: ChatMessage instance https://github.com/awahlig/skype4py/blob/master/Skype4Py/chat.py#L409
:param status: -
:return: True if the message was handled and should not be further processed
"""
    def shutdown(self):
""" Called when the module is reloaded.
In ``shutdown()`` you must
* Stop all created threads
* Unregister all event handlers
        .. note ::
            We are *not* guaranteed to be called when the Sevabot process
            shuts down, as the process may terminate with SIGKILL.
"""
def register_callback(self, skype, event, callback):
"""
Register any callable as a callback for a skype event.
Thin wrapper for RegisterEventHandler https://github.com/awahlig/skype4py/blob/master/Skype4Py/utils.py
:param skype: Skype4Py instance
:param event: Same as Event
:param callback: Same as Target
:return: Same as RegisterEventHandler
"""
return skype.RegisterEventHandler(event, callback)
def unregister_callback(self, skype, event, callback):
"""
Unregister a callback previously registered with register_callback.
Thin wrapper for UnregisterEventHandler https://github.com/awahlig/skype4py/blob/master/Skype4Py/utils.py
:param skype: Skype4Py instance
:param event: Same as Event
:param callback: Same as Target
:return: Same as UnregisterEventHandler
"""
return skype.UnregisterEventHandler(event, callback)
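# Illustrative module skeleton (hypothetical; shows how a handler module would
# export the ``sevabot_handler`` attribute described in the module docstring):
#
#   #!/sevabot
#   class PingHandler(StatefulSkypeHandler):
#       def init(self, sevabot):
#           self.skype = sevabot.getSkype()
#       def handle_message(self, msg, status):
#           if msg.Body == "!ping":
#               msg.Chat.SendMessage("pong")
#               return True
#   sevabot_handler = PingHandler()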
|
mikemike/SkypeBot
|
unused-modules/stateful.py
|
Python
|
gpl-2.0
| 2,755 | 0.004356 |
# -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, exceptions, api, _
import openerp.addons.decimal_precision as dp
class PurchaseCostDistribution(models.Model):
_name = "purchase.cost.distribution"
_description = "Purchase landed costs distribution"
_order = 'name desc'
@api.one
@api.depends('total_expense', 'total_purchase')
def _compute_amount_total(self):
self.amount_total = self.total_purchase + self.total_expense
@api.one
@api.depends('cost_lines', 'cost_lines.total_amount')
def _compute_total_purchase(self):
self.total_purchase = sum([x.total_amount for x in self.cost_lines])
@api.one
@api.depends('cost_lines', 'cost_lines.product_price_unit')
def _compute_total_price_unit(self):
self.total_price_unit = sum([x.product_price_unit for x in
self.cost_lines])
@api.one
@api.depends('cost_lines', 'cost_lines.product_qty')
def _compute_total_uom_qty(self):
self.total_uom_qty = sum([x.product_qty for x in self.cost_lines])
@api.one
@api.depends('cost_lines', 'cost_lines.total_weight')
def _compute_total_weight(self):
self.total_weight = sum([x.total_weight for x in self.cost_lines])
@api.one
@api.depends('cost_lines', 'cost_lines.total_weight_net')
def _compute_total_weight_net(self):
self.total_weight_net = sum([x.total_weight_net for x in
self.cost_lines])
@api.one
@api.depends('cost_lines', 'cost_lines.total_volume')
def _compute_total_volume(self):
self.total_volume = sum([x.total_volume for x in self.cost_lines])
@api.one
@api.depends('expense_lines', 'expense_lines.expense_amount')
def _compute_total_expense(self):
self.total_expense = sum([x.expense_amount for x in
self.expense_lines])
def _expense_lines_default(self):
expenses = self.env['purchase.expense.type'].search(
[('default_expense', '=', True)])
return [{'type': x, 'expense_amount': x.default_amount}
for x in expenses]
name = fields.Char(string='Distribution number', required=True,
select=True, default='/')
company_id = fields.Many2one(
comodel_name='res.company', string='Company', required=True,
default=(lambda self: self.env['res.company']._company_default_get(
'purchase.cost.distribution')))
currency_id = fields.Many2one(
comodel_name='res.currency', string='Currency',
related="company_id.currency_id")
state = fields.Selection(
[('draft', 'Draft'),
('calculated', 'Calculated'),
('done', 'Done'),
('error', 'Error'),
('cancel', 'Cancel')], string='Status', readonly=True,
default='draft')
cost_update_type = fields.Selection(
[('direct', 'Direct Update')], string='Cost Update Type',
default='direct', required=True)
date = fields.Date(
string='Date', required=True, readonly=True, select=True,
states={'draft': [('readonly', False)]},
default=fields.Date.context_today)
total_uom_qty = fields.Float(
compute=_compute_total_uom_qty, readonly=True,
digits_compute=dp.get_precision('Product UoS'),
string='Total quantity')
total_weight = fields.Float(
compute=_compute_total_weight, string='Total gross weight',
readonly=True,
digits_compute=dp.get_precision('Stock Weight'))
total_weight_net = fields.Float(
compute=_compute_total_weight_net,
digits_compute=dp.get_precision('Stock Weight'),
string='Total net weight', readonly=True)
total_volume = fields.Float(
compute=_compute_total_volume, string='Total volume', readonly=True)
total_purchase = fields.Float(
compute=_compute_total_purchase,
digits_compute=dp.get_precision('Account'), string='Total purchase')
total_price_unit = fields.Float(
compute=_compute_total_price_unit, string='Total price unit',
digits_compute=dp.get_precision('Product Price'))
amount_total = fields.Float(
compute=_compute_amount_total,
digits_compute=dp.get_precision('Account'), string='Total')
total_expense = fields.Float(
compute=_compute_total_expense,
digits_compute=dp.get_precision('Account'), string='Total expenses')
note = fields.Text(string='Documentation for this order')
cost_lines = fields.One2many(
comodel_name='purchase.cost.distribution.line', ondelete="cascade",
inverse_name='distribution', string='Distribution lines')
expense_lines = fields.One2many(
comodel_name='purchase.cost.distribution.expense', ondelete="cascade",
inverse_name='distribution', string='Expenses',
default=_expense_lines_default)
@api.multi
def unlink(self):
for c in self:
if c.state not in ('draft', 'calculated'):
raise exceptions.Warning(
_("You can't delete a confirmed cost distribution"))
return super(PurchaseCostDistribution, self).unlink()
@api.model
def create(self, vals):
if vals.get('name', '/') == '/':
vals['name'] = self.env['ir.sequence'].next_by_code(
'purchase.cost.distribution')
return super(PurchaseCostDistribution, self).create(vals)
@api.multi
def action_calculate(self):
for distribution in self:
# Check expense lines for amount 0
if any([not x.expense_amount for x in distribution.expense_lines]):
raise exceptions.Warning(
_('Please enter an amount for all the expenses'))
            # Check that there are lines in the distribution
if not distribution.cost_lines:
raise exceptions.Warning(
_('There is no picking lines in the distribution'))
# Calculating expense line
for line in distribution.cost_lines:
line.expense_lines.unlink()
for expense in distribution.expense_lines:
if (expense.affected_lines and
line.id not in expense.affected_lines.ids):
continue
if expense.type.calculation_method == 'amount':
multiplier = line.total_amount
if expense.affected_lines:
divisor = sum([x.total_amount for x in
expense.affected_lines])
else:
divisor = distribution.total_purchase
elif expense.type.calculation_method == 'price':
multiplier = line.product_price_unit
if expense.affected_lines:
divisor = sum([x.product_price_unit for x in
expense.affected_lines])
else:
divisor = distribution.total_price_unit
elif expense.type.calculation_method == 'qty':
multiplier = line.product_qty
if expense.affected_lines:
divisor = sum([x.product_qty for x in
expense.affected_lines])
else:
divisor = distribution.total_uom_qty
elif expense.type.calculation_method == 'weight':
multiplier = line.total_weight
if expense.affected_lines:
divisor = sum([x.total_weight for x in
expense.affected_lines])
else:
divisor = distribution.total_weight
elif expense.type.calculation_method == 'weight_net':
multiplier = line.total_weight_net
if expense.affected_lines:
divisor = sum([x.total_weight_net for x in
expense.affected_lines])
else:
divisor = distribution.total_weight_net
elif expense.type.calculation_method == 'volume':
multiplier = line.total_volume
if expense.affected_lines:
divisor = sum([x.total_volume for x in
expense.affected_lines])
else:
divisor = distribution.total_volume
elif expense.type.calculation_method == 'equal':
multiplier = 1
divisor = (len(expense.affected_lines) or
len(distribution.cost_lines))
else:
raise exceptions.Warning(
_('No valid distribution type.'))
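                    # e.g. with method 'qty': a 100.0 expense over a line with
                    # 40 of 200 total units gets 100.0 * 40 / 200 = 20.0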
expense_amount = (expense.expense_amount * multiplier /
divisor)
expense_line = {
'distribution_expense': expense.id,
'expense_amount': expense_amount,
'cost_ratio': expense_amount / line.product_qty,
}
line.expense_lines = [(0, 0, expense_line)]
distribution.state = 'calculated'
return True
def _product_price_update(self, move, new_price):
"""Method that mimicks stock.move's product_price_update_before_done
method behaviour, but taking into account that calculations are made
on an already done move, and prices sources are given as parameters.
"""
        if (move.location_id.usage == 'supplier' and
                move.product_id.cost_method == 'average'):
product = move.product_id
qty_available = product.product_tmpl_id.qty_available
product_avail = qty_available - move.product_qty
if product_avail <= 0:
new_std_price = move.price_unit
else:
domain_quant = [
('product_id', 'in',
product.product_tmpl_id.product_variant_ids.ids),
('id', 'not in', move.quant_ids.ids)]
quants = self.env['stock.quant'].read_group(
domain_quant, ['product_id', 'qty', 'cost'], [])[0]
# Get the standard price
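                # e.g. 10 units on hand at cost 2.0 plus this move's 5 units
                # at price 3.0 gives (10 * 2.0 + 5 * 3.0) / 15 = 2.33...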
new_std_price = ((quants['cost'] * quants['qty'] +
new_price * move.product_qty) /
qty_available)
# Write the standard price, as SUPERUSER_ID, because a
# warehouse manager may not have the right to write on products
product.sudo().write({'standard_price': new_std_price})
@api.one
def action_done(self):
for line in self.cost_lines:
if self.cost_update_type == 'direct':
line.move_id.quant_ids._price_update(line.standard_price_new)
self._product_price_update(line.move_id, line.standard_price_new)
line.move_id.product_price_update_after_done()
self.state = 'done'
@api.multi
def action_draft(self):
self.write({'state': 'draft'})
return True
@api.one
def action_cancel(self):
for line in self.cost_lines:
if self.cost_update_type == 'direct':
if self.currency_id.compare_amounts(
line.move_id.quant_ids[0].cost,
line.standard_price_new) != 0:
raise exceptions.Warning(
_('Cost update cannot be undone because there has '
'been a later update. Restore correct price and try '
'again.'))
line.move_id.quant_ids._price_update(line.standard_price_old)
self._product_price_update(
line.move_id, line.standard_price_old)
line.move_id.product_price_update_after_done()
self.state = 'draft'
class PurchaseCostDistributionLine(models.Model):
_name = "purchase.cost.distribution.line"
_description = "Purchase cost distribution Line"
_rec_name = 'picking_id'
@api.one
@api.depends('product_price_unit', 'product_qty')
def _compute_total_amount(self):
self.total_amount = self.product_price_unit * self.product_qty
@api.one
@api.depends('product_id', 'product_qty')
def _compute_total_weight(self):
self.total_weight = self.product_weight * self.product_qty
@api.one
@api.depends('product_id', 'product_qty')
def _compute_total_weight_net(self):
self.total_weight_net = self.product_weight_net * self.product_qty
@api.one
@api.depends('product_id', 'product_qty')
def _compute_total_volume(self):
self.total_volume = self.product_volume * self.product_qty
@api.one
@api.depends('expense_lines', 'expense_lines.cost_ratio')
def _compute_cost_ratio(self):
self.cost_ratio = sum([x.cost_ratio for x in self.expense_lines])
@api.one
@api.depends('expense_lines', 'expense_lines.expense_amount')
def _compute_expense_amount(self):
self.expense_amount = sum([x.expense_amount for x in
self.expense_lines])
@api.one
@api.depends('standard_price_old', 'cost_ratio')
def _compute_standard_price_new(self):
self.standard_price_new = self.standard_price_old + self.cost_ratio
@api.one
@api.depends('move_id', 'move_id.picking_id', 'move_id.product_id',
'move_id.product_qty')
def _compute_display_name(self):
self.name = '%s / %s / %s' % (
self.move_id.picking_id.name, self.move_id.product_id.display_name,
self.move_id.product_qty)
@api.one
@api.depends('move_id', 'move_id.product_id')
def _get_product_id(self):
# Cannot be done via related field due to strange bug in update chain
self.product_id = self.move_id.product_id.id
@api.one
@api.depends('move_id', 'move_id.product_qty')
def _get_product_qty(self):
# Cannot be done via related field due to strange bug in update chain
self.product_qty = self.move_id.product_qty
@api.one
@api.depends('move_id')
def _get_standard_price_old(self):
self.standard_price_old = (
self.move_id and self.move_id.get_price_unit(self.move_id) or 0.0)
name = fields.Char(
string='Name', compute='_compute_display_name')
distribution = fields.Many2one(
comodel_name='purchase.cost.distribution', string='Cost distribution',
ondelete='cascade')
move_id = fields.Many2one(
comodel_name='stock.move', string='Picking line', ondelete="restrict")
purchase_line_id = fields.Many2one(
comodel_name='purchase.order.line', string='Purchase order line',
related='move_id.purchase_line_id')
purchase_id = fields.Many2one(
comodel_name='purchase.order', string='Purchase order', readonly=True,
related='move_id.purchase_line_id.order_id', store=True)
partner = fields.Many2one(
comodel_name='res.partner', string='Supplier', readonly=True,
related='move_id.purchase_line_id.order_id.partner_id')
picking_id = fields.Many2one(
'stock.picking', string='Picking', related='move_id.picking_id',
store=True)
product_id = fields.Many2one(
comodel_name='product.product', string='Product', store=True,
compute='_get_product_id')
product_qty = fields.Float(
string='Quantity', compute='_get_product_qty', store=True)
product_uom = fields.Many2one(
comodel_name='product.uom', string='Unit of measure',
related='move_id.product_uom')
product_uos_qty = fields.Float(
string='Quantity (UoS)', related='move_id.product_uos_qty')
product_uos = fields.Many2one(
comodel_name='product.uom', string='Product UoS',
related='move_id.product_uos')
product_price_unit = fields.Float(
string='Unit price', related='move_id.price_unit')
expense_lines = fields.One2many(
comodel_name='purchase.cost.distribution.line.expense',
inverse_name='distribution_line', string='Expenses distribution lines',
ondelete='cascade')
product_volume = fields.Float(
string='Volume', help="The volume in m3.",
related='product_id.product_tmpl_id.volume')
product_weight = fields.Float(
string='Gross weight', related='product_id.product_tmpl_id.weight',
help="The gross weight in Kg.")
product_weight_net = fields.Float(
string='Net weight', related='product_id.product_tmpl_id.weight_net',
help="The net weight in Kg.")
standard_price_old = fields.Float(
string='Previous cost', compute="_get_standard_price_old", store=True,
digits_compute=dp.get_precision('Product Price'))
expense_amount = fields.Float(
string='Cost amount', digits_compute=dp.get_precision('Account'),
compute='_compute_expense_amount')
cost_ratio = fields.Float(
string='Unit cost', digits_compute=dp.get_precision('Account'),
compute='_compute_cost_ratio')
standard_price_new = fields.Float(
string='New cost', digits_compute=dp.get_precision('Product Price'),
compute='_compute_standard_price_new')
total_amount = fields.Float(
compute=_compute_total_amount, string='Amount line',
digits_compute=dp.get_precision('Account'))
total_weight = fields.Float(
compute=_compute_total_weight, string="Line weight", store=True,
digits_compute=dp.get_precision('Stock Weight'),
help="The line gross weight in Kg.")
total_weight_net = fields.Float(
compute=_compute_total_weight_net, string='Line net weight',
digits_compute=dp.get_precision('Stock Weight'), store=True,
help="The line net weight in Kg.")
total_volume = fields.Float(
        compute='_compute_total_volume', string='Line volume', store=True,
help="The line volume in m3.")
class PurchaseCostDistributionLineExpense(models.Model):
_name = "purchase.cost.distribution.line.expense"
_description = "Purchase cost distribution line expense"
distribution_line = fields.Many2one(
comodel_name='purchase.cost.distribution.line',
string='Cost distribution line', ondelete="cascade")
distribution_expense = fields.Many2one(
comodel_name='purchase.cost.distribution.expense',
string='Distribution expense', ondelete="cascade")
type = fields.Many2one(
'purchase.expense.type', string='Expense type',
related='distribution_expense.type')
expense_amount = fields.Float(
string='Expense amount', default=0.0,
digits_compute=dp.get_precision('Account'))
cost_ratio = fields.Float(
'Unit cost', default=0.0,
digits_compute=dp.get_precision('Account'))
class PurchaseCostDistributionExpense(models.Model):
_name = "purchase.cost.distribution.expense"
_description = "Purchase cost distribution expense"
_rec_name = "type"
@api.one
@api.depends('distribution', 'distribution.cost_lines')
def _get_imported_lines(self):
self.imported_lines = self.env['purchase.cost.distribution.line']
self.imported_lines |= self.distribution.cost_lines
distribution = fields.Many2one(
comodel_name='purchase.cost.distribution', string='Cost distribution',
select=True, ondelete="cascade", required=True)
ref = fields.Char(string="Reference")
type = fields.Many2one(
comodel_name='purchase.expense.type', string='Expense type',
select=True, ondelete="restrict")
calculation_method = fields.Selection(
string='Calculation method', related='type.calculation_method',
readonly=True)
imported_lines = fields.Many2many(
comodel_name='purchase.cost.distribution.line',
string='Imported lines', compute='_get_imported_lines')
affected_lines = fields.Many2many(
comodel_name='purchase.cost.distribution.line', column1="expense_id",
relation="distribution_expense_aff_rel", column2="line_id",
string='Affected lines',
help="Put here specific lines that this expense is going to be "
"distributed across. Leave it blank to use all imported lines.",
domain="[('id', 'in', imported_lines[0][2])]")
expense_amount = fields.Float(
string='Expense amount', digits_compute=dp.get_precision('Account'),
required=True)
invoice_line = fields.Many2one(
comodel_name='account.invoice.line', string="Supplier invoice line",
domain="[('invoice_id.type', '=', 'in_invoice'),"
"('invoice_id.state', 'in', ('open', 'paid'))]")
@api.onchange('type')
def onchange_type(self):
if self.type and self.type.default_amount:
self.expense_amount = self.type.default_amount
|
MarcosCommunity/odoo
|
comunity_modules/purchase_landed_cost/models/purchase_cost_distribution.py
|
Python
|
agpl-3.0
| 22,280 | 0.00009 |
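
For orientation, a rough standalone sketch (plain Python, not Odoo) of the proportional split these distribution lines support; the helper name and the by-amount method are illustrative assumptions, not this module's API:

def distribute_expense(expense_amount, line_amounts):
    # Split an expense across lines in proportion to each line's amount.
    total = sum(line_amounts)
    if not total:
        return [0.0] * len(line_amounts)
    return [expense_amount * amount / total for amount in line_amounts]

# e.g. a 100.0 freight expense over lines worth 300.0 and 100.0
print(distribute_expense(100.0, [300.0, 100.0]))  # -> [75.0, 25.0]
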
import argparse
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.by import By
import time
import socket
import os
import os.path
from pathlib import Path
def clean(file):
try:
os.remove(file)
except OSError:
pass
def chrome(url, video_name, wait_timeout):
capabilities = {
"browserName": "chrome",
"version": "84.0",
"enableVNC": True,
"enableVideo": True,
"videoName": video_name
}
chrome = webdriver.Remote(command_executor='http://selenoid:4444/wd/hub',
desired_capabilities=capabilities)
chrome.set_window_rect(-4, -125, 1928, 1208)
    # needed because Chrome has no 'docs' host entry in its loaded /etc/hosts
fixed_url = url.replace('docs', socket.gethostbyname('docs'))
print('Opening Chrome on '+fixed_url)
chrome.get(fixed_url)
#chrome.find_element_by_css_selector('body').send_keys('f')
WebDriverWait(chrome, wait_timeout).until(ec.visibility_of_element_located((By.CSS_SELECTOR, '.the-end')))
chrome.quit()
def wait_until_video_is_exported(file, timeout):
time_counter = 0
while not os.path.exists(file):
time.sleep(1)
time_counter += 1
        if time_counter > timeout:
            break
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run chrome browser on provided url.')
parser.add_argument('url')
parser.add_argument('--slide-end-timeout', default=300)
parser.add_argument('--recorded-video', default='/tmp/showcase.mp4')
parser.add_argument('--recorded-video-timeout', default=120)
args = parser.parse_args()
clean(args.recorded_video)
chrome(args.url, Path(args.recorded_video).name, args.slide_end_timeout)
# wait for video to be ready and renamed
wait_until_video_is_exported(args.recorded_video, args.recorded_video_timeout)
|
groupe-sii/ogham
|
.tools/showcase-recorder/showcase-launcher/play_showcase.py
|
Python
|
apache-2.0
| 2,008 | 0.005976 |
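
A hypothetical invocation of the launcher above; the URL and paths are placeholders, and the script assumes a Selenoid hub reachable at http://selenoid:4444 plus a resolvable docs host:

python play_showcase.py http://docs/showcase.html --recorded-video /tmp/showcase.mp4 --slide-end-timeout 300
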
# -*- coding: utf-8 -*-
import datetime
from django.shortcuts import render
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
def home(request):
today = datetime.date.today()
return render(request, "taskbuster/index.html",
{'today': today,
'now': now()})
def home_files(request, filename):
return render(request, filename, {}, content_type="text/plain")
|
caithess/taskbuster
|
taskbuster/views.py
|
Python
|
mit
| 451 | 0 |
# coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Miscellaneous utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pprint
import re
# Camel case to snake case utils
_first_cap_re = re.compile("(.)([A-Z][a-z0-9]+)")
_all_cap_re = re.compile("([a-z0-9])([A-Z])")
def camelcase_to_snakecase(name):
s1 = _first_cap_re.sub(r"\1_\2", name)
return _all_cap_re.sub(r"\1_\2", s1).lower()
def snakecase_to_camelcase(name):
return "".join([w[0].upper() + w[1:] for w in name.split("_")])
def pprint_hparams(hparams):
"""Represents hparams using its dictionary and calls pprint.pformat on it."""
return "\n{}".format(pprint.pformat(hparams.values(), width=1))
|
tensorflow/tensor2tensor
|
tensor2tensor/utils/misc_utils.py
|
Python
|
apache-2.0
| 1,305 | 0.003831 |
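
A quick usage sketch of the case-conversion helpers above, assuming the module is importable at its repository path:

from tensor2tensor.utils.misc_utils import (
    camelcase_to_snakecase, snakecase_to_camelcase)

assert camelcase_to_snakecase("TransformerModel") == "transformer_model"
assert snakecase_to_camelcase("transformer_model") == "TransformerModel"
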
import sys
import copy
DEBUG = False
ROOM_SIZE = 10
LIGHT_DISTRIBUTION = 20
class Point(object):
def __init__(self, row, col):
self.row = row
self.col = col
def __eq__(self, other):
return self.row == other.row and self.col == other.col
def __str__(self):
return "(%i,%i)" % (self.row, self.col)
class Element(object):
WALL = '#'
COLUMN = 'o'
PRISM = '*'
EMPTY = ' '
LIGHT_X = 'X'
LIGHT_BACK = '\\'
LIGHT_FORWARD = '/'
class Trajectory(object):
UP_L = 0
UP_R = 1
DN_L = 2
DN_R = 3
ALL = [0, 1, 2, 3]
@staticmethod
def reverse(trajectory):
return {
Trajectory.UP_L: Trajectory.DN_R,
Trajectory.UP_R: Trajectory.DN_L,
Trajectory.DN_L: Trajectory.UP_R,
Trajectory.DN_R: Trajectory.UP_L
} [trajectory]
@staticmethod
def reflect_vertical(trajectory):
return {
Trajectory.UP_L: Trajectory.UP_R,
Trajectory.UP_R: Trajectory.UP_L,
Trajectory.DN_L: Trajectory.DN_R,
Trajectory.DN_R: Trajectory.DN_L
} [trajectory]
@staticmethod
def reflect_horizontal(trajectory):
return {
Trajectory.UP_L: Trajectory.DN_L,
Trajectory.UP_R: Trajectory.DN_R,
Trajectory.DN_L: Trajectory.UP_L,
Trajectory.DN_R: Trajectory.UP_R
} [trajectory]
class Ray(object):
def __init__(self, position, trajectory, intensity):
self.position = position
self.trajectory = trajectory
self.intensity = intensity
def __str__(self):
return "Ray: {%s, %s, %i}" % (self.position, self.trajectory, self.intensity)
class Room(object):
def __init__(self, schematic):
self.schematic = schematic
def __str__(self):
delimiter = '\n' if DEBUG else ''
return delimiter.join(''.join(x) for x in self.schematic)
def _get_element(self, point):
return self.schematic[point.row][point.col]
def _set_element(self, point, trajectory):
element = self._get_element(point)
if trajectory == Trajectory.UP_R or trajectory == Trajectory.DN_L:
# Forward slash
self.schematic[point.row][point.col] = {
' ': '/',
'/': '/',
'\\': 'X',
'X': 'X'
} [element]
else:
# Back slash
self.schematic[point.row][point.col] = {
' ': '\\',
'/': 'X',
'\\': '\\',
'X': 'X'
} [element]
@property
def _hole(self):
# Top and bottom rows.
for c in range(0, ROOM_SIZE):
if self._get_element(Point(0, c)) != Element.WALL: return Point(0, c)
if self._get_element(Point(ROOM_SIZE-1, c)) != Element.WALL: return Point(ROOM_SIZE-1, c)
# Sides
for r in range(1, ROOM_SIZE-1):
if self._get_element(Point(r, 0)) != Element.WALL: return Point(r, 0)
if self._get_element(Point(r, ROOM_SIZE-1)) != Element.WALL: return Point(r, ROOM_SIZE-1)
# Should never reach here given valid input.
return None
@property
def _initial_trajectory(self):
hole = self._hole
hole_element = self._get_element(hole)
# Top.
if hole.row == 0:
return Trajectory.DN_L if hole_element == Element.LIGHT_FORWARD else Trajectory.DN_R
# Bottom.
if hole.row == ROOM_SIZE-1:
return Trajectory.UP_R if hole_element == Element.LIGHT_FORWARD else Trajectory.UP_L
# Left.
if hole.col == 0:
return Trajectory.UP_R if hole_element == Element.LIGHT_FORWARD else Trajectory.DN_R
# Right.
if hole.col == ROOM_SIZE-1:
return Trajectory.DN_L if hole_element == Element.LIGHT_FORWARD else Trajectory.UP_L
# Should never reach this point.
return None
@staticmethod
def _next_position(current_position, trajectory):
return {
Trajectory.UP_L: Point(current_position.row-1, current_position.col-1),
Trajectory.UP_R: Point(current_position.row-1, current_position.col+1),
Trajectory.DN_L: Point(current_position.row+1, current_position.col-1),
Trajectory.DN_R: Point(current_position.row+1, current_position.col+1)
} [trajectory]
@staticmethod
def _is_corner(point):
corners = [Point(0, 0), Point(0, ROOM_SIZE-1), Point(ROOM_SIZE-1, 0), Point(ROOM_SIZE-1, ROOM_SIZE-1)]
return point in corners
@staticmethod
def _is_in_room(point):
return 0 <= point.col < ROOM_SIZE and 0 <= point.row < ROOM_SIZE
@staticmethod
def _is_on_wall(point):
return point.row == 0 or point.row == ROOM_SIZE-1 or point.col == 0 or point.col == ROOM_SIZE-1
def _propagate_light(self, rays):
result = []
for ray in rays:
new_rays = []
next_position = Room._next_position(ray.position, ray.trajectory)
if not self._is_in_room(next_position): continue
next_element = self._get_element(next_position)
if next_element == Element.WALL:
if not Room._is_corner(next_position):
# Left wall.
if next_position.col == 0:
row = next_position.row
col = next_position.col + 1
trajectory = Trajectory.reflect_vertical(ray.trajectory)
# Right wall.
elif next_position.col == ROOM_SIZE-1:
row = next_position.row
col = next_position.col - 1
trajectory = Trajectory.reflect_vertical(ray.trajectory)
# Top wall.
elif next_position.row == 0:
row = next_position.row + 1
col = next_position.col
trajectory = Trajectory.reflect_horizontal(ray.trajectory)
# Bottom wall.
else:
                    row = next_position.row - 1
col = next_position.col
trajectory = Trajectory.reflect_horizontal(ray.trajectory)
next_position = Point(row, col)
self._set_element(next_position, trajectory)
new_rays.append(Ray(next_position, trajectory, ray.intensity-1))
elif next_element == Element.COLUMN:
                # The ray hit a column; kill the ray off.
pass
elif next_element == Element.PRISM:
# The ray hit a prism; create three new rays.
trajectories = copy.deepcopy(Trajectory.ALL)
trajectories.remove(Trajectory.reverse(ray.trajectory))
for trajectory in trajectories:
new_rays.append(Ray(next_position, trajectory, ray.intensity))
elif next_element == Element.EMPTY or \
next_element == Element.LIGHT_X or \
next_element == Element.LIGHT_BACK or \
next_element == Element.LIGHT_FORWARD:
new_rays.append(Ray(next_position, ray.trajectory, ray.intensity-1))
self._set_element(next_position, ray.trajectory)
# Eliminate ray if it has lost its intensity.
for new_ray in new_rays:
if new_ray.intensity > 0: result.append(new_ray)
return result
def propagate_light(self):
rays = [Ray(self._hole, self._initial_trajectory, LIGHT_DISTRIBUTION-1)]
while len(rays) > 0:
rays = self._propagate_light(rays)
def main():
test_cases = open(sys.argv[1], 'r')
for test in test_cases:
test = test.strip()
if len(test) == 0: continue
room = Room([list(t) for t in zip(*[iter(test)]*ROOM_SIZE)])
if DEBUG: print('---\n' + str(room))
room.propagate_light()
print(room)
test_cases.close()
if __name__ == '__main__':
main()
|
mpillar/codeeval
|
2-hard/ray-of-light/main.py
|
Python
|
unlicense
| 8,179 | 0.005502 |
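
A hypothetical round trip with the classes above: a 10x10 room whose only wall gap is a '\' beam entering on the top row.

rows = ['##\\#######'] + ['#        #'] * 8 + ['#' * 10]
room = Room([list(r) for r in rows])
room.propagate_light()
print(room)  # with DEBUG False, the room prints as one 100-character string
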
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('applications', '0003_auto_20150115_1330'),
]
operations = [
migrations.RenameField(
model_name='applicationtask',
old_name='title',
new_name='name',
),
]
|
HackBulgaria/Odin
|
applications/migrations/0004_auto_20150115_1340.py
|
Python
|
agpl-3.0
| 398 | 0 |
from fastapi import FastAPI
from fastapi.responses import RedirectResponse
app = FastAPI()
@app.get("/typer")
async def redirect_typer():
return RedirectResponse("https://typer.tiangolo.com")
|
tiangolo/fastapi
|
docs_src/custom_response/tutorial006.py
|
Python
|
mit
| 199 | 0 |
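
A hedged test sketch for the route above, assuming the file is importable as tutorial006; newer httpx-based test clients take follow_redirects, while older requests-based ones used allow_redirects:

from fastapi.testclient import TestClient

from tutorial006 import app  # hypothetical module name

client = TestClient(app)
response = client.get("/typer", follow_redirects=False)
assert response.status_code == 307  # RedirectResponse defaults to 307
assert response.headers["location"] == "https://typer.tiangolo.com"
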
"""
None of the functions/objects in this module need be passed `db`.
Naming convention: a `pub` is either a pubkey or a pubkeyhash
"""
import hashlib
import bitcoin as bitcoinlib
import binascii
from bitcoin.core.key import CPubKey
from counterpartylib.lib import util
from counterpartylib.lib import config
from counterpartylib.lib import exceptions
b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
class InputError (Exception):
pass
class AddressError(Exception):
pass
class MultiSigAddressError(AddressError):
pass
class VersionByteError (AddressError):
pass
class Base58Error (AddressError):
pass
class Base58ChecksumError (Base58Error):
pass
def validate(address):
"""Make sure the address is valid.
May throw `AddressError`.
"""
# Get array of pubkeyhashes to check.
if is_multisig(address):
pubkeyhashes = pubkeyhash_array(address)
else:
pubkeyhashes = [address]
# Check validity by attempting to decode.
for pubkeyhash in pubkeyhashes:
base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
def base58_encode(binary):
"""Encode the address in base58."""
# Convert big‐endian bytes to integer
n = int('0x0' + util.hexlify(binary), 16)
# Divide that integer into base58
res = []
while n > 0:
n, r = divmod(n, 58)
res.append(b58_digits[r])
res = ''.join(res[::-1])
return res
def base58_check_encode(original, version):
"""Check if base58 encoding is valid."""
b = binascii.unhexlify(bytes(original, 'utf-8'))
d = version + b
binary = d + util.dhash(d)[:4]
res = base58_encode(binary)
# Encode leading zeros as base58 zeros
czero = 0
pad = 0
for c in d:
if c == czero:
pad += 1
else:
break
address = b58_digits[0] * pad + res
if original != util.hexlify(base58_check_decode(address, version)):
raise AddressError('encoded address does not decode properly')
return address
def base58_check_decode(s, version):
"""Decode from base58."""
# Convert the string to an integer
n = 0
for c in s:
n *= 58
if c not in b58_digits:
raise Base58Error('Not a valid Base58 character: ‘{}’'.format(c))
digit = b58_digits.index(c)
n += digit
# Convert the integer to bytes
h = '%x' % n
if len(h) % 2:
h = '0' + h
res = binascii.unhexlify(h.encode('utf8'))
# Add padding back.
pad = 0
for c in s[:-1]:
if c == b58_digits[0]:
pad += 1
else:
break
k = version * pad + res
addrbyte, data, chk0 = k[0:1], k[1:-4], k[-4:]
if addrbyte != version:
raise VersionByteError('incorrect version byte')
chk1 = util.dhash(addrbyte + data)[:4]
if chk0 != chk1:
raise Base58ChecksumError('Checksum mismatch: 0x{} ≠ 0x{}'.format(util.hexlify(chk0), util.hexlify(chk1)))
return data
def is_multisig(address):
"""Check if the address is multi‐signature."""
array = address.split('_')
return len(array) > 1
def is_fully_valid(pubkey_bin):
"""Check if the public key is valid."""
cpubkey = CPubKey(pubkey_bin)
return cpubkey.is_fullyvalid
def make_canonical(address):
"""Return canonical version of the address."""
if is_multisig(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
try:
[base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]
except Base58Error:
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
return address
def test_array(signatures_required, pubs, signatures_possible):
"""Check if multi‐signature data is valid."""
try:
signatures_required, signatures_possible = int(signatures_required), int(signatures_possible)
except (ValueError, TypeError):
raise MultiSigAddressError('Signature values not integers.')
if signatures_required < 1 or signatures_required > 3:
raise MultiSigAddressError('Invalid signatures_required.')
if signatures_possible < 2 or signatures_possible > 3:
raise MultiSigAddressError('Invalid signatures_possible.')
for pubkey in pubs:
if '_' in pubkey:
raise MultiSigAddressError('Invalid characters in pubkeys/pubkeyhashes.')
if signatures_possible != len(pubs):
raise InputError('Incorrect number of pubkeys/pubkeyhashes in multi‐signature address.')
def construct_array(signatures_required, pubs, signatures_possible):
"""Create a multi‐signature address."""
test_array(signatures_required, pubs, signatures_possible)
address = '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])
return address
def extract_array(address):
"""Extract data from multi‐signature address."""
assert is_multisig(address)
array = address.split('_')
signatures_required, pubs, signatures_possible = array[0], sorted(array[1:-1]), array[-1]
test_array(signatures_required, pubs, signatures_possible)
return int(signatures_required), pubs, int(signatures_possible)
def pubkeyhash_array(address):
"""Return PubKeyHashes from an address."""
signatures_required, pubs, signatures_possible = extract_array(address)
if not all([is_pubkeyhash(pub) for pub in pubs]):
raise MultiSigAddressError('Invalid PubKeyHashes. Multi‐signature address must use PubKeyHashes, not public keys.')
pubkeyhashes = pubs
return pubkeyhashes
def hash160(x):
x = hashlib.sha256(x).digest()
m = hashlib.new('ripemd160')
m.update(x)
return m.digest()
def pubkey_to_pubkeyhash(pubkey):
"""Convert public key to PubKeyHash."""
pubkeyhash = hash160(pubkey)
pubkey = base58_check_encode(binascii.hexlify(pubkeyhash).decode('utf-8'), config.ADDRESSVERSION)
return pubkey
def get_asm(scriptpubkey):
# TODO: When is an exception thrown here? Can this `try` block be tighter? Can it be replaced by a conditional?
try:
asm = []
# TODO: This should be `for element in scriptpubkey`.
for op in scriptpubkey:
if type(op) == bitcoinlib.core.script.CScriptOp:
# TODO: `op = element`
asm.append(str(op))
else:
# TODO: `data = element` (?)
asm.append(op)
except bitcoinlib.core.script.CScriptTruncatedPushDataError:
raise exceptions.DecodeError('invalid pushdata due to truncation')
if not asm:
raise exceptions.DecodeError('empty output')
return asm
def get_checksig(asm):
if len(asm) == 5 and asm[0] == 'OP_DUP' and asm[1] == 'OP_HASH160' and asm[3] == 'OP_EQUALVERIFY' and asm[4] == 'OP_CHECKSIG':
pubkeyhash = asm[2]
if type(pubkeyhash) == bytes:
return pubkeyhash
raise exceptions.DecodeError('invalid OP_CHECKSIG')
def get_checkmultisig(asm):
# N‐of‐2
if len(asm) == 5 and asm[3] == 2 and asm[4] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = asm[1:3], asm[0]
if all([type(pubkey) == bytes for pubkey in pubkeys]):
return pubkeys, signatures_required
# N‐of‐3
if len(asm) == 6 and asm[4] == 3 and asm[5] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = asm[1:4], asm[0]
if all([type(pubkey) == bytes for pubkey in pubkeys]):
return pubkeys, signatures_required
raise exceptions.DecodeError('invalid OP_CHECKMULTISIG')
def scriptpubkey_to_address(scriptpubkey):
asm = get_asm(scriptpubkey)
if asm[-1] == 'OP_CHECKSIG':
try:
checksig = get_checksig(asm)
except exceptions.DecodeError: # coinbase
return None
return base58_check_encode(binascii.hexlify(checksig).decode('utf-8'), config.ADDRESSVERSION)
elif asm[-1] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = get_checkmultisig(asm)
pubkeyhashes = [pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys]
return construct_array(signatures_required, pubkeyhashes, len(pubkeyhashes))
return None
# TODO: Use `python-bitcointools` instead. (Get rid of `pycoin` dependency.)
from pycoin.encoding import wif_to_tuple_of_secret_exponent_compressed, public_pair_to_sec, EncodingError
from pycoin.ecdsa import generator_secp256k1, public_pair_for_secret_exponent
class AltcoinSupportError (Exception): pass
def private_key_to_public_key(private_key_wif):
"""Convert private key to public key."""
if config.TESTNET:
allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_TESTNET]
else:
allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_MAINNET]
try:
secret_exponent, compressed = wif_to_tuple_of_secret_exponent_compressed(
private_key_wif, allowable_wif_prefixes=allowable_wif_prefixes)
except EncodingError:
raise AltcoinSupportError('pycoin: unsupported WIF prefix')
public_pair = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent)
public_key = public_pair_to_sec(public_pair, compressed=compressed)
public_key_hex = binascii.hexlify(public_key).decode('utf-8')
return public_key_hex
def is_pubkeyhash(monosig_address):
"""Check if PubKeyHash is valid. """
assert not is_multisig(monosig_address)
try:
base58_check_decode(monosig_address, config.ADDRESSVERSION)
return True
except (Base58Error, VersionByteError):
return False
def make_pubkeyhash(address):
"""Create a new PubKeyHash."""
if is_multisig(address):
signatures_required, pubs, signatures_possible = extract_array(address)
pubkeyhashes = []
for pub in pubs:
if is_pubkeyhash(pub):
pubkeyhash = pub
else:
pubkeyhash = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(pub, 'utf-8')))
pubkeyhashes.append(pubkeyhash)
pubkeyhash_address = construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
if is_pubkeyhash(address):
pubkeyhash_address = address
else:
pubkeyhash_address = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(address, 'utf-8')))
return pubkeyhash_address
def extract_pubkeys(pub):
"""Assume pubkey if not pubkeyhash. (Check validity later.)"""
pubkeys = []
if is_multisig(pub):
_, pubs, _ = extract_array(pub)
for pub in pubs:
if not is_pubkeyhash(pub):
pubkeys.append(pub)
else:
if not is_pubkeyhash(pub):
pubkeys.append(pub)
return pubkeys
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
tokenly/counterparty-lib
|
counterpartylib/lib/script.py
|
Python
|
mit
| 10,947 | 0.00513 |
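
A self-contained sketch of the Base58Check scheme the encode/decode pair above implements; it avoids importing counterpartylib, and the version byte 0x00 and sample payload are illustrative:

import hashlib

B58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def b58check_encode(payload, version=b'\x00'):
    data = version + payload
    chk = hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
    n = int.from_bytes(data + chk, 'big')
    out = ''
    while n > 0:
        n, r = divmod(n, 58)
        out = B58[r] + out
    pad = len(data + chk) - len((data + chk).lstrip(b'\x00'))
    return B58[0] * pad + out

# 20 zero bytes under version 0x00 give the well-known Bitcoin burn address.
print(b58check_encode(bytes(20)))  # 1111111111111111111114oLvT2
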
"""Image class.
Represents an image in the frontend using a widget.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import base64
from .domwidget import DOMWidget
from .widget import register
from traitlets import Unicode, CUnicode, Bytes, observe
@register('Jupyter.Image')
class Image(DOMWidget):
"""Displays an image as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw image data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "png").
"""
_view_name = Unicode('ImageView').tag(sync=True)
_model_name = Unicode('ImageModel').tag(sync=True)
_model_module = Unicode('jupyter-js-widgets').tag(sync=True)
_view_module = Unicode('jupyter-js-widgets').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('png').tag(sync=True)
width = CUnicode().tag(sync=True)
height = CUnicode().tag(sync=True)
_b64value = Unicode().tag(sync=True)
value = Bytes()
@observe('value')
def _value_changed(self, change):
self._b64value = base64.b64encode(change['new'])
|
lancezlin/ml_template_py
|
lib/python2.7/site-packages/ipywidgets/widgets/widget_image.py
|
Python
|
mit
| 1,267 | 0 |
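
A hypothetical notebook usage of the widget above ('logo.png' is a placeholder file):

from ipywidgets import Image

with open('logo.png', 'rb') as f:
    img = Image(value=f.read(), format='png', width=300)
img  # the last expression in a notebook cell renders the widget
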
# coding=utf-8
import unittest
"""826. Most Profit Assigning Work
https://leetcode.com/problems/most-profit-assigning-work/description/
We have jobs: `difficulty[i]` is the difficulty of the `i`th job, and
`profit[i]` is the profit of the `i`th job.
Now we have some workers. `worker[i]` is the ability of the `i`th worker,
which means that this worker can only complete a job with difficulty at most
`worker[i]`.
Every worker can be assigned at most one job, but one job can be completed
multiple times.
For example, if 3 people attempt the same job that pays $1, then the total
profit will be $3. If a worker cannot complete any job, his profit is $0.
What is the most profit we can make?
**Example 1:**
**Input:** difficulty = [2,4,6,8,10], profit = [10,20,30,40,50], worker = [4,5,6,7]
**Output:** 100
    **Explanation:** Workers are assigned jobs of difficulty [4,4,6,6] and they get profit of [20,20,30,30] separately.
**Notes:**
* `1 <= difficulty.length = profit.length <= 10000`
* `1 <= worker.length <= 10000`
* `difficulty[i], profit[i], worker[i]` are in range `[1, 10^5]`
Similar Questions:
"""
class Solution(object):
def maxProfitAssignment(self, difficulty, profit, worker):
"""
:type difficulty: List[int]
:type profit: List[int]
:type worker: List[int]
:rtype: int
"""
    def test(self):
        # Example from the problem statement above.
        assert Solution().maxProfitAssignment(
            [2, 4, 6, 8, 10], [10, 20, 30, 40, 50], [4, 5, 6, 7]) == 100
if __name__ == "__main__":
unittest.main()
|
openqt/algorithms
|
leetcode/python/lc826-most-profit-assigning-work.py
|
Python
|
gpl-3.0
| 1,478 | 0.004736 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
import os
def getRandomTime(sleepTime):
def wrapper():
return sleepTime * random.random()
return wrapper
class MisUtils(object):
"""
    Class that manages the configuration
"""
    refreshSleep = getRandomTime(10)  # interval between refreshes
    wechatPushSleep = getRandomTime(1)  # interval between two consecutive WeChat messages
maxAttempt = 100
    # wechatGroup_ = ['研究生的咸♂鱼生活']  # WeChat group names for lecture pushes
    # wechatUser_ = ['邱大帅全宇宙粉丝后援会']  # user names for lecture pushes
attempt = maxAttempt
logPath = 'robber.log'
confFile = 'robber.conf'
blackList = 'blackList'
sender = 'class_robber@cycoe.win'
emailPassword = 'class_robber'
host = 'smtp.ym.163.com'
wechatURI = 'wxp://f2f0PYx27X0CWU1yiBhSKeHHgYzfA27iOicM'
alipayURI = 'HTTPS://QR.ALIPAY.COM/FKX01669SBV7NA4ALTVPE8'
confDict = {
'userName': '',
'password': '',
'receiver': '',
}
@staticmethod
def checkConfFile():
return os.path.exists(MisUtils.confFile)
@staticmethod
def loadConfFile():
with open(MisUtils.confFile) as fr:
content_ = fr.readlines()
for content in content_:
pair_ = content.split(':')
pair_ = [pair.strip() for pair in pair_]
if len(pair_) == 2:
MisUtils.confDict[pair_[0]] = pair_[1]
elif len(pair_) == 1:
MisUtils.confDict[pair_[0]] = ''
@staticmethod
def dumpConfFile():
with open(MisUtils.confFile, 'w') as fr:
for key in MisUtils.confDict.keys():
fr.write(str(key))
fr.write(': ')
fr.write(str(MisUtils.confDict[key]))
fr.write('\n')
@staticmethod
def setEmailInfo():
if MisUtils.checkConfFile():
MisUtils.loadConfFile()
MisUtils.confDict['receiver'] = input('> receiver email: ')
@staticmethod
def initAttempt():
MisUtils.attempt = MisUtils.maxAttempt
@staticmethod
def descAttempt():
MisUtils.attempt -= 1
if MisUtils.attempt > 0:
return True
else:
# if Mail.connectedToMail:
# threading.Thread(target=Mail.send_mail, args=('Class robber halted', Logger.log(
# 'Class robber halted because of up to max attempts',
# ['Check your login status', 'Check the response of server']
# ),)).start()
return False
@staticmethod
def getSelected():
if os.path.exists(MisUtils.blackList):
with open(MisUtils.blackList) as fr:
return [selected.strip() for selected in fr.readlines()]
else:
return []
@staticmethod
def mergeSelected(newSelected_):
if os.path.exists(MisUtils.blackList):
with open(MisUtils.blackList) as fr:
oriSelected_ = [selected.strip() for selected in fr.readlines()]
else:
oriSelected_ = []
oriSelected_.extend(newSelected_)
oriSelected_ = set(oriSelected_)
with open(MisUtils.blackList, 'w') as fr:
for oriSelected in oriSelected_:
fr.write(oriSelected + '\n')
|
cycoe/class_robber
|
modules/MisUtils.py
|
Python
|
mit
| 3,375 | 0.000924 |
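
A short sketch of the conf-file round trip implemented above; it writes robber.conf in the working directory, and the user name is a placeholder:

MisUtils.confDict['userName'] = 'alice'
MisUtils.dumpConfFile()  # writes lines of the form "userName: alice"
MisUtils.loadConfFile()  # parses them back into MisUtils.confDict
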
from bamboo_boy.materials import Clump
import random
import factory
from comments.models import DocumentComment, DocumentNode, NodeSnapshot
from rtd_tests.factories.general_factories import UserFactory
from rtd_tests.factories.projects_factories import ProjectFactory
class SnapshotFactory(factory.DjangoModelFactory):
FACTORY_FOR = NodeSnapshot
hash = random.getrandbits(128)
node = factory.SubFactory(
'rtd_tests.test.comments_factories.DocumentNodeFactory')
class DocumentNodeFactory(factory.DjangoModelFactory):
FACTORY_FOR = DocumentNode
project = factory.SubFactory(ProjectFactory)
version = factory.LazyAttribute(lambda a: a.project.versions.all()[0])
page = "page-about-nothing"
@classmethod
def _create(self, *args, **kwargs):
if not kwargs.get('hash'):
kwargs['hash'] = random.getrandbits(128)
if not kwargs.get('commit'):
kwargs['commit'] = random.getrandbits(128)
return super(DocumentNodeFactory, self)._create(*args, **kwargs)
class DocumentCommentFactory(factory.DjangoModelFactory):
FACTORY_FOR = DocumentComment
user = factory.SubFactory(UserFactory)
text = "This is a comment."
node = factory.SubFactory(DocumentNodeFactory)
class ProjectsWithComments(Clump):
def build_canopy(self):
self.moderated_project = self.include_factory(ProjectFactory,
1,
comment_moderation=True
)[0]
self.moderated_node = self.include_factory(
DocumentNodeFactory, 1, project=self.moderated_project)[0]
self.first_moderated_comment, self.second_moderated_comment = self.include_factory(
DocumentCommentFactory, 2, node=self.moderated_node)
self.unmoderated_project = self.include_factory(ProjectFactory,
1,
comment_moderation=False
)[0]
self.unmoderated_node = self.include_factory(
DocumentNodeFactory, 1, project=self.unmoderated_project)[0]
self.first_unmoderated_comment, self.second_unmoderated_comment = self.include_factory(
DocumentCommentFactory, 2, node=self.unmoderated_node)
self.owner = self.include_factory(
UserFactory, 1, username="owner", password="test")[0]
self.moderated_project.users.add(self.owner)
self.unmoderated_project.users.add(self.owner)
|
takluyver/readthedocs.org
|
readthedocs/rtd_tests/factories/comments_factories.py
|
Python
|
mit
| 2,656 | 0.00113 |
#!/usr/bin/env python
from random import randint, choice
from copy import deepcopy
from math import log
import sys
import cProfile
def fuse(m, x, adj_list):
#print 'Fusing ({}) and ({})'.format(self.x, x.x)
#for i in self.edges:
# print '({})'.format(i.x),
#print ''
new_edge_list = []
for i in adj_list[m]:
if not x == i:
new_edge_list.append(i)
for x_neigh in adj_list[x]:
if not m == x_neigh:
new_edge_list.append(x_neigh)
for i in range(len(adj_list[x_neigh])):
if adj_list[x_neigh][i] == x:
adj_list[x_neigh][i] = m
adj_list[m] = new_edge_list
#for i in m.edges:
# print '({})'.format(i.x),
#print ''
#print 'Done'
'''
def __str__(self):
str_tmp = []
for i in self.edges:
str_tmp.append('({})'.format(i.x))
fused_tmp = []
for i in self.fused_nodes:
fused_tmp.append('({})'.format(i.x))
return '[ ({}) : {str_tmp} : Fused Nodes: {fused_tmp} ]'.format(self.x, str_tmp = str_tmp, fused_tmp = fused_tmp)
'''
def print_adj_list(adj_list):
for node in adj_list:
print adj_list[node]
def random_contraction(adj_list):
if len(adj_list) <= 2:
return
pivot = choice(adj_list.keys())
x = choice(adj_list[pivot])
#print '--------'
#print 'Contracting {} and {}'.format(adj_list[pivot], adj_list[x.x])
fuse(pivot, x, adj_list)
del adj_list[x]
#print 'After Contraction'
#print_adj_list(adj_list)
#print '--------'
if len(adj_list) > 2:
random_contraction(adj_list)
def main_1():
adj_list = {}
fd = open(sys.argv[1])
for line in fd.readlines():
line = line.strip('\n')
line = line.strip()
arr = [ int(x.strip()) for x in line.split('\t') ]
a = arr.pop(0)
if a not in adj_list:
adj_list[a] = []
for i in arr:
if i not in adj_list:
adj_list[i] = []
adj_list[a].append(i)
original = adj_list
#print_adj_list(adj_list)
n = len(adj_list)
#number_of_trials = int((n ** 2) * log(n)) + 1
number_of_trials = int((n ** 2))
number_of_trials = 5 # FIXME
print 'Total number of tries: {}'.format(number_of_trials)
minimum_cuts = False
for i in range(number_of_trials):
adj_list = deepcopy(original)
#print_adj_list(adj_list)
random_contraction(adj_list)
#print_adj_list(adj_list)
for k in adj_list:
cuts = len(adj_list[k])
if not minimum_cuts or cuts < minimum_cuts:
minimum_cuts = cuts
print 'Cuts in {}th run is {}. Min cuts {}'.format(i + 1, cuts, minimum_cuts)
print 'Minimum cut : {}'.format(minimum_cuts)
return None
if __name__ == '__main__':
#profile.run('main_1()')
cProfile.runctx('main_1()', None, locals())
#main_1()
|
Ramyak/CodingPractice
|
algo_practice/coursera-algo-1/min_cuts/minimum_cuts_1_pre.py
|
Python
|
gpl-2.0
| 2,942 | 0.008838 |
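
main_1 expects one tab-separated adjacency row per line: a vertex id followed by its neighbours. A tiny hypothetical input file for a triangle graph:

1	2	3
2	1	3
3	1	2
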
##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for installing the Intel Advisor XE, implemented as an easyblock
@author: Lumir Jasiok (IT4Innovations)
"""
from easybuild.easyblocks.generic.intelbase import IntelBase
class EB_Advisor(IntelBase):
"""
Support for installing Intel Advisor XE
"""
def sanity_check_step(self):
"""Custom sanity check paths for Advisor"""
custom_paths = {
'files': [],
'dirs': ['advisor_xe/bin64', 'advisor_xe/lib64']
}
super(EB_Advisor, self).sanity_check_step(custom_paths=custom_paths)
def make_module_req_guess(self):
"""
A dictionary of possible directories to look for
"""
guesses = super(EB_Advisor, self).make_module_req_guess()
lib_path = 'advisor_xe/lib64'
include_path = 'advisor_xe/include'
guesses.update({
'CPATH': [include_path],
'INCLUDE': [include_path],
'LD_LIBRARY_PATH': [lib_path],
'LIBRARY_PATH': [lib_path],
'PATH': ['advisor_xe/bin64'],
})
return guesses
|
valtandor/easybuild-easyblocks
|
easybuild/easyblocks/a/advisor.py
|
Python
|
gpl-2.0
| 2,158 | 0.002317 |
#!/usr/bin/env python
import sys
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import gammaincinv
import pint.models, pint.toa
import astropy.units as u
from pint.residuals import Residuals
import traceback
#np.seterr(all='raise')
def sigmaz(t,y,err,nseg,diagplot=False):
"""Compute sigma_z from lists of measurement times and values.
Input:
------
t: array of floats
The measurement times (days).
y: array of floats
The measurement values (seconds).
err: array of floats (1D)
Error bars of the measurements (seconds).
nseg : array of ints
In each iteration, the total time span of the measurements will be split into Nseg segments. This array contains all the values of Nseg we want to use.
diagplot: bool
Make a diagnostic plot of the polynomial fit to the full set of measurements.
Output:
-------
sz_corr : array of floats
Values of bias-corrected sigma-z for different segment length tau.
szerr_lower : array of floats
Lower error bars for the sigma-z values.
szerr_upper : array of floats
Upper error bars for the sigma-z values.
tz : array of floats
The values of the segment lengths, tau (days), for which sigma-z was calculated.
nsegments : array of ints
How many segments of each recorded length passed all criteria for being used when calculating tau and sigma-z statistics.
"""
# The length of the output arrays depends on how many segments meet our criteria of more than 6 points, and longer than T/sqrt(2)
sz = []
tz = []
ngood = [] # How many good segments went into each tz,sz point
toas = t
toaerr = err
toares = y
# Total span of the TOAs
durationday = (toas[-1]-toas[0]) # days
durationsec = durationday*86400.0 # seconds
#The amount of wiggle room for the TOAs to fall on the other side of the segment range and still be included. It's really only supposed to account for roundoff error. We have no reason to expect TOAs to fall on the border except for the first and last TOAs of the whole batch, so I don't believe we're in danger of double counting any TOAs.
wiggle = 1e-5
# Polynomial order to fit (a cubic may fail to produce a good fit for a long TOA span for pulsars with a lot of red noise; it fails for the NANOGrav data set on B1937+21).
polyorder = 3
for iseg in nseg:
# For each duration of length durationday/iseg compute sz.
dur_oneseg = durationday/iseg # how long is one segment
ngoodsegs = 0 # Reset the counter for good segments
C3sqr = 0 # This will accumulate values of C3sqr
C3un_sum = 0 # This will accumulate the sum of 1/C3_sigma^2 to normalize the C3^2 weights at the end
n_sing_matrix = 0 # how many segments make polyfit fail with a singular matrix error
n_few_points = 0 # how many segments have too few points
n_short_dataspan = 0 # in how many segments the points are clustered within too small a portion of the selected time span
n_C3_neg_var = 0 # for how many segments the C3 coefficient has a negative variance in the covariance matrix
for jseg in range (0, iseg): # Now loop through each segment of this length
# for iseq > 1 there are multiple segments we need to analyze
segrange=(toas[0]+dur_oneseg*jseg, toas[0]+dur_oneseg*(jseg+1))
centertime = (segrange[1]+segrange[0])/2.0 # Midpoint of observation interval
# Fit the polynomial using only the toas in the interval
desind = np.where((toas>(segrange[0]-wiggle)) & (toas<(segrange[1]+wiggle)))
if (np.size(desind))>polyorder+3: # if cov. matrix needed for error estimates on fitted params
#if (np.size(desind))>polyorder: # if cov. matrix not needed
dataspan = np.max(toas[desind]) - np.min(toas[desind])
else:
n_few_points = n_few_points+1
continue
# Matsakis recommends segment be longer than dur_oneseg/sqrt(2)
if (dataspan<=(dur_oneseg/np.sqrt(2))): #xAL added this criterion
n_short_dataspan = n_short_dataspan+1
continue
else:
res = toares[desind]
toaerrs = toaerr[desind]
try:
#NOTE: polyfit needs 1/sigma, not 1/sigma^2 weights. Times and residuals need to be in the same units, here are in seconds
p,pcov = np.polyfit((toas[desind]-centertime)*86400.0,
res.astype(np.float), polyorder, cov=True,
full=False, w=np.abs(1./toaerrs) )
#p = np.polyfit((toas[desind]-centertime)*86400.0,
# res.astype(np.float),polyorder, cov=False, full=False, w = np.abs(1./toaerrs) )
except:
#print('Polyfit failed!')
#traceback.print_exc()
n_sing_matrix = n_sing_matrix+1
continue
# Get C3 coefficient uncertainty from the covariance matrix
C3variance = np.diag(pcov)[-4]
if C3variance < 0:
n_C3_neg_var = n_C3_neg_var+1
#print('C3variance = %e' % C3variance)
continue
C3un = np.sqrt(C3variance)
C3un_sum = C3un_sum + 1.0/C3un**2 # for normalizing weights at the end
C3sqr=C3sqr + p[-4]**2/C3un**2
#C3sqr=C3sqr+p[0]**2 # Accumulate to eventually find avg C3^2
ngoodsegs += 1 # the number of good segments (with at least 6 TOAs in them)
# Plot data and fit for case where the full set of resids is treated as one segment
if (iseg==1 and diagplot):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
toas_secs = (toas[desind]-centertime)*86400.0
ax.plot(toas[desind], res.astype(np.float)*1.e6, 'ko')
ax.errorbar(toas[desind],res.astype(np.float)*1.e6,yerr=toaerr[desind]*1.e6,fmt='none',color='k',capsize=2.0)
ax.plot(toas[desind], np.polyval(p,toas_secs)*1.e6,'r')
ax.set_xlabel('MJD')
ax.set_ylabel('Res (us)')
plt.title('Order-%d polynomial fit to full TOA set' % polyorder)
#plt.savefig("sigmaz-diagnostic.png", dpi=300, format='png', bbox_inches='tight')
print("Divided data into %d segments of length %.1f days. Number of good segments: %d" % (iseg, dur_oneseg, ngoodsegs))
if n_few_points > 0:
print('--->Segments with too few TOAs: %d' % n_few_points)
if n_short_dataspan > 0:
print('--->Segments with too short TOA span: %d' % n_short_dataspan)
if n_sing_matrix > 0:
print('--->Segments causing singular matrix error in polyfit: %d' % n_sing_matrix)
if n_C3_neg_var > 0:
print('--->Segments with C3 variance <0: %d' % n_C3_neg_var)
if ngoodsegs != 0:
#C3sqr=C3sqr/ngoodsegs # unweighted average
C3sqr=C3sqr/C3un_sum # average weighted by the uncertainties in fitted C3 values
sz.append((dur_oneseg*86400)**2*np.sqrt(C3sqr)/(2.0*np.sqrt(5.0))) # sigma_z formula
tz.append(dur_oneseg) #days
ngood.append(ngoodsegs)
# Sigma-z bias correction and error bars
nsegments = np.array(ngood)
x16 = np.divide(gammaincinv(0.5*nsegments, 0.16), 0.5*nsegments)
x50 = np.divide(gammaincinv(0.5*nsegments, 0.50), 0.5*nsegments)
x84 = np.divide(gammaincinv(0.5*nsegments, 0.84), 0.5*nsegments)
sz_corr = np.divide(sz,np.sqrt(x50))
szerr_upper = np.multiply(sz_corr, np.sqrt(np.divide(x50,x16))-1.0)
szerr_lower = np.multiply(sz_corr, 1.0-np.sqrt(np.divide(x50,x84)))
return sz_corr, szerr_lower, szerr_upper, tz, nsegments
def psr_sigmaz(parfile,timfile,nseg,diagplot=False):
"""Compute sigma-z from a pulsar par and tim file.
Input:
------
timfile : string
The file containing TOAs, in Tempo/Tempo2 format.
parfile : string
The file containing the timing model.
nseg : array of ints
In each iteration, the total time span of the TOAs will be split into Nseg segments. This array contains all the values of Nseg we want to use.
diagplot: bool
Make a diagnostic plot of the polynomial fit to the full set of TOAs.
Output:
-------
sz_corr : array of floats
Values of bias-corrected sigma-z for different segment length tau.
szerr_lower : array of floats
Lower error bars for the sigma-z values.
szerr_upper : array of floats
Upper error bars for the sigma-z values.
tz : array of floats
The values of the segment lengths, tau (days), for which sigma-z was calculated.
nsegments : array of ints
How many segments of each recorded length passed all criteria for being used when calculating tau and sigma-z statistics.
"""
# Read in the TOAs and timing model and compute the residuals
m = pint.models.get_model(parfile)
t = pint.toa.get_TOAs(timfile,ephem='DE430')
t.compute_pulse_numbers(m)
toas = t.get_mjds().value #MJD
toaerr = t.get_errors().to(u.s).value #s
toares = Residuals(t,m).time_resids.to(u.s).value #s
return sigmaz(toas,toares,toaerr,nseg,diagplot)
if __name__ == "__main__":
if len(sys.argv) != 3:
print('Usage: sigmaz.py [parfile] [timfile]')
sys.exit()
par = sys.argv[1]
tim = sys.argv[2]
seggrid = np.logspace(0.1,3,num=50)
seggrid = seggrid.astype(int)
seggrid = np.unique(seggrid)
#seggrid = 2**np.arange(0,10)
sz,errlo,errhi,tz,ns = psr_sigmaz(par,tim,seggrid,diagplot=True)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot(tz, sz, 'ko',label="Sigma-z")
ax.errorbar(tz,sz,yerr=[errlo,errhi],fmt='none',color='k',capsize=2.0)
ax.grid(which='both')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('Time (days)')
ax.set_ylabel('Sigma-z')
plt.title('%s\n%s' % (par,tim))
plt.show()
|
paulray/NICERsoft
|
nicer/sigmaz.py
|
Python
|
mit
| 10,393 | 0.014529 |
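
A synthetic sanity check of sigmaz() above; importing the module still pulls in PINT and matplotlib, and its np.float usage assumes an older NumPy. For purely white residuals, sigma-z should fall steeply with tau (roughly tau**-1.5):

import numpy as np

from sigmaz import sigmaz  # hypothetical: the script on the path as sigmaz.py

rng = np.random.default_rng(0)
t = np.sort(rng.uniform(0.0, 1000.0, 500))  # TOAs in days
y = rng.normal(0.0, 1e-6, 500)              # white residuals, seconds
err = np.full(500, 1e-6)                    # 1 us error bars, seconds
sz, lo, hi, tau, n = sigmaz(t, y, err, np.array([1, 2, 4, 8]))
print(list(zip(tau, sz)))
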
# coding=utf8
"""
Parser for todo format string.
from todo.parser import parser
parser.parse(string) # return an Todo instance
"""
from models import Task
from models import Todo
from ply import lex
from ply import yacc
class TodoLexer(object):
"""
Lexer for Todo format string.
Tokens
ID e.g. '1.'
DONE e.g. '(x)'
TASK e.g. 'This is a task'
"""
tokens = (
"ID",
"DONE",
"TASK",
)
t_ignore = "\x20\x09" # ignore spaces and tabs
def t_ID(self, t):
r'\d+\.([uU]|[lL]|[uU][lL]|[lL][uU])?'
t.value = int(t.value[:-1])
return t
def t_DONE(self, t):
r'(\(x\))'
return t
def t_TASK(self, t):
r'((?!\(x\))).+'
return t
def t_newline(self, t):
r'\n+'
t.lexer.lineno += len(t.value)
def t_error(self, t):
raise SyntaxError(
"Illegal character: '%s' at Line %d" % (t.value[0], t.lineno)
)
def __init__(self):
self.lexer = lex.lex(module=self)
class TodoParser(object):
"""
Parser for Todo format string, works with a todo lexer.
Parse string to Python list
todo_str = "1. (x) Write email to tom"
TodoParser().parse(todo_str)
"""
tokens = TodoLexer.tokens
def p_error(self, p):
if p:
raise SyntaxError(
"Character '%s' at line %d" % (p.value[0], p.lineno)
)
else:
raise SyntaxError("SyntaxError at EOF")
def p_start(self, p):
"start : translation_unit"
p[0] = self.todo
def p_translation_unit(self, p):
"""
translation_unit : translate_task
| translation_unit translate_task
|
"""
pass
def p_translation_task(self, p):
"""
translate_task : ID DONE TASK
| ID TASK
"""
if len(p) == 4:
done = True
content = p[3]
elif len(p) == 3:
done = False
content = p[2]
task = Task(p[1], content, done)
self.todo.append(task)
def __init__(self):
self.parser = yacc.yacc(module=self, debug=0, write_tables=0)
def parse(self, data):
# reset list
self.todo = Todo()
return self.parser.parse(data)
lexer = TodoLexer() # build lexer
parser = TodoParser() # build parser
|
guori12321/todo
|
todo/parser.py
|
Python
|
mit
| 2,473 | 0 |
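
A usage sketch matching the module docstring, assuming the package's models module (Task/Todo) is importable as the top of the file requires:

from todo.parser import parser

todo = parser.parse("1. (x) Write email to tom\n2. Buy milk\n")
for task in todo:  # Todo appears to be list-like, given append() above
    print(task)
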
"""Package entry point."""
import importlib
import os
import sys
from gitman.cli import main
# Declare itself as package if needed for better debugging support
# pylint: disable=multiple-imports,wrong-import-position,redefined-builtin,used-before-assignment
if __name__ == '__main__' and __package__ is None: # pragma: no cover
parent_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(parent_dir))
__package__ = os.path.basename(parent_dir)
importlib.import_module(__package__)
if __name__ == '__main__': # pragma: no cover
main()
|
jacebrowning/gdm
|
gitman/__main__.py
|
Python
|
mit
| 590 | 0.001695 |
# Copyright 2012-2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A helper class for proxy objects to remote APIs.
For more information about rpc API version numbers, see:
rpc/dispatcher.py
"""
import six
from ceilometer.openstack.common import rpc
from ceilometer.openstack.common.rpc import common as rpc_common
from ceilometer.openstack.common.rpc import serializer as rpc_serializer
class RpcProxy(object):
"""A helper class for rpc clients.
This class is a wrapper around the RPC client API. It allows you to
specify the topic and API version in a single place. This is intended to
be used as a base class for a class that implements the client side of an
rpc API.
"""
# The default namespace, which can be overridden in a subclass.
RPC_API_NAMESPACE = None
def __init__(self, topic, default_version, version_cap=None,
serializer=None):
"""Initialize an RpcProxy.
:param topic: The topic to use for all messages.
:param default_version: The default API version to request in all
outgoing messages. This can be overridden on a per-message
basis.
:param version_cap: Optionally cap the maximum version used for sent
messages.
        :param serializer: Optionally (de-)serialize entities with a
provided helper.
"""
self.topic = topic
self.default_version = default_version
self.version_cap = version_cap
if serializer is None:
serializer = rpc_serializer.NoOpSerializer()
self.serializer = serializer
super(RpcProxy, self).__init__()
def _set_version(self, msg, vers):
"""Helper method to set the version in a message.
:param msg: The message having a version added to it.
:param vers: The version number to add to the message.
"""
v = vers if vers else self.default_version
if (self.version_cap and not
rpc_common.version_is_compatible(self.version_cap, v)):
raise rpc_common.RpcVersionCapError(version_cap=self.version_cap)
msg['version'] = v
def _get_topic(self, topic):
"""Return the topic to use for a message."""
return topic if topic else self.topic
def can_send_version(self, version):
"""Check to see if a version is compatible with the version cap."""
return (not self.version_cap or
rpc_common.version_is_compatible(self.version_cap, version))
@staticmethod
def make_namespaced_msg(method, namespace, **kwargs):
return {'method': method, 'namespace': namespace, 'args': kwargs}
def make_msg(self, method, **kwargs):
return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE,
**kwargs)
def _serialize_msg_args(self, context, kwargs):
"""Helper method called to serialize message arguments.
This calls our serializer on each argument, returning a new
set of args that have been serialized.
:param context: The request context
:param kwargs: The arguments to serialize
:returns: A new set of serialized arguments
"""
new_kwargs = dict()
for argname, arg in six.iteritems(kwargs):
new_kwargs[argname] = self.serializer.serialize_entity(context,
arg)
return new_kwargs
def call(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.call() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: The return value from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.call(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def multicall(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.multicall() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: An iterator that lets you process each of the returned values
from the remote method as they arrive.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.multicall(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def cast(self, context, msg, topic=None, version=None):
"""rpc.cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast() does not wait on any return value from the
remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast(context, self._get_topic(topic), msg)
def fanout_cast(self, context, msg, topic=None, version=None):
"""rpc.fanout_cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast() does not wait on any return value
from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast(context, self._get_topic(topic), msg)
def cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast_to_server(context, server_params, self._get_topic(topic), msg)
def fanout_cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.fanout_cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast_to_server(context, server_params,
self._get_topic(topic), msg)
|
NeCTAR-RC/ceilometer
|
ceilometer/openstack/common/rpc/proxy.py
|
Python
|
apache-2.0
| 9,459 | 0 |
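
A minimal client sketch in the style the class docstring describes; the topic, namespace, and method name are hypothetical:

class MeteringAPI(RpcProxy):

    RPC_API_NAMESPACE = 'metering'

    def __init__(self):
        super(MeteringAPI, self).__init__(topic='metering',
                                          default_version='1.0')

    def record_metering_data(self, context, data):
        return self.cast(context,
                         self.make_msg('record_metering_data', data=data))
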
# -*- coding: utf-8 -*-
"""
rdd.exceptions
~~~~~~~~~~~~~~
This module contains the exceptions raised by rdd.
"""
from requests.exceptions import *
class ReadabilityException(RuntimeError):
"""Base class for Readability exceptions."""
class ShortenerError(ReadabilityException):
"""Failed to shorten URL."""
class MetadataError(ReadabilityException):
"""Failed to retrieve metadata."""
|
mlafeldt/rdd.py
|
rdd/exceptions.py
|
Python
|
mit
| 407 | 0 |