| text | repo_name | path | language | license | size | score |
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
---|---|---|---|---|---|---
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class CreateFormTestTemplate(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "http://kf.kbtdev.org/"
self.verificationErrors = []
self.accept_next_alert = True
def test_create_form_test_template(self):
driver = self.driver
driver.get(self.base_url + "")
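# Selenium-IDE-style wait loop: poll once per second, up to 60 times, for the
# element to appear; the for/else clause calls fail() only if no break occurred.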
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms-header__title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
self.assertFalse(self.is_element_present(By.CSS_SELECTOR, ".forms__card"))
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".forms-empty__button"))
driver.find_element_by_css_selector(".forms-empty__button").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms__addform__start"): break
except: pass
time.sleep(1)
else: self.fail("time out")
# Click the form creation button using JavaScript to avoid element not visible errors.
# WARNING: The 'runScript' command doesn't export to python, so a manual edit is necessary.
# ERROR: Caught exception [ERROR: Unsupported command [runScript | $(".forms__addform__start").click(); | ]]
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".form-title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".form-title").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".survey-header__title input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".survey-header__title input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".survey-header__title input").send_keys("Selenium test form title.", Keys.ENTER)
self.assertEqual("Selenium test form title.", driver.find_element_by_css_selector(".form-title").text)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".survey-editor .fa-plus"))
driver.find_element_by_css_selector(".survey-editor .fa-plus").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".row__questiontypes__form > input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".row__questiontypes__form > input").send_keys("Selenium test question label.", Keys.TAB)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".row__questiontypes__form > button"))
driver.find_element_by_css_selector(".row__questiontypes__form > button").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".questiontypelist__item[data-menu-item=\"select_one\"]"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".questiontypelist__item[data-menu-item=\"select_one\"]").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(1) span"): break
except: pass
time.sleep(1)
else: self.fail("time out")
self.assertEqual("Selenium test question label.", driver.find_element_by_css_selector(".card__header-title").text)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) .editable-wrapper span:first-child").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(1) input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) input").send_keys("Selenium test question choice 1.", Keys.ENTER)
self.assertEqual("Selenium test question choice 1.", driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) span").text)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(2) span"))
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) span").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(2) input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) input").send_keys("Selenium test question choice 2.", Keys.ENTER)
self.assertEqual("Selenium test question choice 2.", driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) span").text)
self.assertTrue(self.is_element_present(By.ID, "save"))
driver.find_element_by_id("save").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms__card__title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
self.assertEqual("Selenium test form title.", driver.find_element_by_css_selector(".forms__card__title").text)
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
| kobotoolbox/kobo_selenium_tests | kobo_selenium_tests/selenium_ide_exported/create_form_test_template.py | Python | gpl-3.0 | 7,253 | 0.011995 |
# *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
import pandas as pd
from numba import njit
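# @njit compiles the function below with Numba; the HPAT/SDC extensions are
# what supply the typed pandas overloads (Series, rolling, median) used inside.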
@njit
def series_rolling_median():
series = pd.Series([4, 3, 5, 2, 6]) # Series of 4, 3, 5, 2, 6
out_series = series.rolling(3).median()
return out_series # Expect series of NaN, NaN, 4.0, 3.0, 5.0
print(series_rolling_median())
| IntelLabs/hpat | examples/series/rolling/series_rolling_median.py | Python | bsd-2-clause | 1,804 | 0 |
from m5.params import *
from m5.SimObject import SimObject
from Controller import RubyController
class DMA_Controller(RubyController):
type = 'DMA_Controller'
dma_sequencer = Param.DMASequencer("")
request_latency = Param.Int(6, "")
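# gem5 convention: the trailing string in each Param is its description (left
# empty here); Param.Int(6, "...") additionally sets a default value of 6.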
| silkyar/570_Big_Little | build/ARM/mem/protocol/DMA_Controller.py | Python | bsd-3-clause | 246 | 0.004065 |
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
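# extend_path lets this tests package span multiple directories on sys.path;
# the star imports below aggregate every test module into one namespace for
# test discovery.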
from .access_memberships import *
from .fields import *
from .migrations import *
from .subscription import *
from .forms import *
from .widgets import *
| armstrong/armstrong.core.arm_access | armstrong/core/arm_access/tests/__init__.py | Python | apache-2.0 | 230 | 0 |
# Copyright 2015 Cloudwatt
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import template
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard.contrib.trove import api
from openstack_dashboard.contrib.trove.content.database_datastores \
import tables
class OverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
def get_context_data(self, request):
return {"datastore": self.tab_group.kwargs['datastore']}
def get_template_name(self, request):
template_dir = 'project/database_datastores/%s'
datastore = self.tab_group.kwargs['datastore']
template_file = '_detail_overview_%s.html' % datastore.name
try:
template.loader.get_template(template_file)
except template.TemplateDoesNotExist:
# This datastore type does not have a template file
# Just use the base template file
template_file = '_detail_overview.html'
return template_dir % template_file
class VersionsTab(tabs.TableTab):
table_classes = [tables.VersionsTable]
name = _("Versions")
slug = "versions_tab"
template_name = "horizon/common/_detail_table.html"
def get_versions_data(self):
datastore = self.tab_group.kwargs['datastore']
try:
versions = api.trove.datastore_version_list(self.request,
datastore.id)
except Exception:
msg = _('Unable to get versions list.')
exceptions.handle(self.request, msg)
versions = []
return versions
class DatastoreDetailTabs(tabs.TabGroup):
slug = "datastore_detail"
tabs = (OverviewTab, VersionsTab)
sticky = True
| Tesora/tesora-horizon | openstack_dashboard/contrib/trove/content/database_datastores/tabs.py | Python | apache-2.0 | 2,343 | 0 |
import re
from collections import OrderedDict
import compiler.lang as lang
doc_next = None
doc_prev_component = None
doc_root_component = None
class CustomParser(object):
def match(self, next):
raise Exception("Expression should implement match method")
escape_re = re.compile(r"[\0\n\r\v\t\b\f]")
escape_map = {
'\0': '\\0',
'\n': '\\n',
'\r': '\\r',
'\v': '\\v',
'\t': '\\t',
'\b': '\\b',
'\f': '\\f'
}
def escape(str):
return escape_re.sub(lambda m: escape_map[m.group(0)], str)
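# Example: escape("a\nb") returns "a\\nb", so control characters survive being
# embedded in generated source as string literals.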
class StringParser(CustomParser):
def match(self, next):
n = len(next)
if n < 2:
return
quote = next[0]
if quote != "'" and quote != "\"":
return
pos = 1
while next[pos] != quote:
if next[pos] == "\\":
pos += 2
else:
pos += 1
if pos >= n:
raise Exception("Unexpected EOF while parsing string")
return next[:pos + 1]
skip_re = re.compile(r'(?:\s+|/\*.*?\*/|//[^\n]*(?:$|\n))', re.DOTALL)
COMPONENT_NAME = r'(?:[a-z][a-zA-Z0-9._]*\.)?[A-Z][A-Za-z0-9]*'
component_name = re.compile(COMPONENT_NAME)
component_name_lookahead = re.compile(COMPONENT_NAME + r'\s*{')
identifier_re = re.compile(r'[a-z_][A-Za-z0-9_]*')
property_type_re = re.compile(r'[a-z][a-z0-9]*', re.IGNORECASE)
nested_identifier_re = re.compile(r'[a-z_][A-Za-z0-9_\.]*')
function_name_re = re.compile(r'[a-z_][a-z0-9_\.]*', re.IGNORECASE)
string_re = StringParser()
kw_re = re.compile(r'(?:true|false|null)')
NUMBER_RE = r"(?:\d+\.\d+(e[+-]?\d+)?|(?:0x)?[0-9]+)"
number_re = re.compile(NUMBER_RE, re.IGNORECASE)
percent_number_re = re.compile(NUMBER_RE + r'%', re.IGNORECASE)
scale_number_re = re.compile(NUMBER_RE + r's', re.IGNORECASE)
rest_of_the_line_re = re.compile(r".*$", re.MULTILINE)
json_object_value_delimiter_re = re.compile(r"[,;]")
dep_var = re.compile(r"\${(.*?)}")
class Expression(object):
__slots__ = ('op', 'args')
def __init__(self, op, *args):
self.op, self.args = op, args
def __repr__(self):
return "Expression %s { %s }" %(self.op, ", ".join(map(repr, self.args)))
def __str__(self):
args = self.args
n = len(args)
if n == 1:
return "(%s %s)" %(self.op, args[0])
elif n == 2:
return "(%s %s %s)" %(args[0], self.op, args[1])
elif n == 3:
op = self.op
return "(%s %s %s %s %s)" %(args[0], op[0], args[1], op[1], args[2])
else:
raise Exception("invalid argument counter")
class Call(object):
__slots__ = ('func', 'args')
def __init__(self, func, args):
self.func = func
self.args = args
def __repr__(self):
return "Call %s { %s }" %(self.func, self.args)
def __str__(self):
if isinstance(self.func, Literal):
name = self.func.term
if name[0].islower():
if '.' in name:
name = '${%s}' %name
else:
name = '$this._context.%s' %name
else:
name = str(self.func)
#if lhs is not a literal, then we can't process deps, so remove ${xxx}
name = dep_var.sub(lambda m: m.group(1), name)
return "%s(%s)" %(name, ",".join(map(str, self.args)))
class Dereference(object):
__slots__ = ('array', 'index')
def __init__(self, array, index):
self.array = array
self.index = index
def __str__(self):
return "(%s[%s])" %(self.array, self.index)
class Literal(object):
__slots__ = ('lbp', 'term', 'identifier')
def __init__(self, term, string = False, identifier = False):
self.term = escape(term) if string else term
self.lbp = 0
self.identifier = identifier
def nud(self, state):
return self
def __repr__(self):
return "Literal { %s }" %self.term
def __str__(self):
return "${%s}" %self.term if self.identifier and self.term[0].islower() else self.term
class PrattParserState(object):
def __init__(self, parent, parser, token):
self.parent, self.parser, self.token = parent, parser, token
class PrattParser(object):
def __init__(self, ops):
symbols = [(x.term, x) for x in ops]
symbols.sort(key=lambda x: len(x[0]), reverse=True)
self.symbols = symbols
def next(self, parser):
parser._skip()
next = parser.next
next_n = len(next)
for term, sym in self.symbols:
n = len(term)
if n > next_n:
continue
keyword = term[-1].isalnum()
if next.startswith(term):
if keyword and n < next_n and next[n].isalnum():
continue
parser.advance(len(term))
return sym
next = parser.maybe(kw_re)
if next:
return Literal(next)
next = parser.maybe(percent_number_re)
if next:
next = next[:-1]
return Literal("((%s) / 100 * ${parent.<property-name>})" %next) if next != 100 else "(${parent.<property-name>})"
next = parser.maybe(scale_number_re)
if next:
next = next[:-1]
return Literal("((%s) * ${context.<scale-property-name>})" %next)
next = parser.maybe(number_re)
if next:
return Literal(next)
next = parser.maybe(function_name_re)
if next:
return Literal(next, identifier=True)
next = parser.maybe(string_re)
if next:
return Literal(next, string=True)
return None
def advance(self, state, expect = None):
if expect is not None:
state.parser.read(expect, "Expected %s in expression" %expect)
state.token = self.next(state.parser)
def expression(self, state, rbp = 0):
parser = state.parser
t = state.token
state.token = self.next(parser)
if state.token is None:
return t
left = t.nud(state)
while state.token is not None and rbp < state.token.lbp:
t = state.token
self.advance(state)
left = t.led(state, left)
return left
def parse(self, parser):
token = self.next(parser)
if token is None:
parser.error("Unexpected expression")
state = PrattParserState(self, parser, token)
return self.expression(state)
class UnsupportedOperator(object):
__slots__ = ('term', 'lbp', 'rbp')
def __init__(self, term, lbp = 0, rbp = 0):
self.term, self.lbp, self.rbp = term, lbp, rbp
def nud(self, state):
state.parser.error("Unsupported prefix operator %s" %self.term)
def led(self, state, left):
state.parser.error("Unsupported postfix operator %s" %self.term)
def __repr__(self):
return "UnsupportedOperator { %s %s }" %(self.term, self.lbp)
class Operator(object):
__slots__ = ('term', 'lbp', 'rbp')
def __init__(self, term, lbp = 0, rbp = None):
self.term, self.lbp, self.rbp = term, lbp, rbp
def nud(self, state):
if self.rbp is not None:
return Expression(self.term, state.parent.expression(state, self.rbp))
state.parser.error("Unexpected token in infix expression: '%s'" %self.term)
def led(self, state, left):
if self.lbp is not None:
return Expression(self.term, left, state.parent.expression(state, self.lbp))
else:
state.parser.error("No left-associative operator defined")
def __repr__(self):
return "Operator { %s %s %s }" %(self.term, self.lbp, self.rbp)
class Conditional(object):
__slots__ = ('term', 'lbp')
def __init__(self, lbp):
self.term = '?'
self.lbp = lbp
def nud(self, state):
state.parser.error("Conditional operator can't be used as unary")
def led(self, state, left):
true = state.parent.expression(state)
state.parent.advance(state, ':')
false = state.parent.expression(state)
return Expression(('?', ':'), left, true, false)
def __repr__(self):
return "Conditional { }"
class LeftParenthesis(object):
__slots__ = ('term', 'lbp')
def __init__(self, lbp):
self.term = '('
self.lbp = lbp
def nud(self, state):
expr = state.parent.expression(state)
state.parent.advance(state, ')')
return expr
def led(self, state, left):
args = []
next = state.token
if next.term != ')':
while True:
args.append(state.parent.expression(state))
if state.token is not None:
state.parser.error("Unexpected token %s" %state.token)
if not state.parser.maybe(','):
break
state.parent.advance(state)
state.parent.advance(state, ')')
return Call(left, args)
def __repr__(self):
return "LeftParenthesis { %d }" %self.lbp
class LeftSquareBracket(object):
__slots__ = ('term', 'lbp')
def __init__(self, lbp):
self.term = '['
self.lbp = lbp
def nud(self, state):
state.parser.error("Invalid [] expression")
def led(self, state, left):
arg = state.parent.expression(state)
if state.token is not None:
state.parser.error("Unexpected token %s" %state.token)
state.parent.advance(state, ']')
return Dereference(left, arg)
def __repr__(self):
return "LeftSquareBracket { %d }" %self.lbp
infix_parser = PrattParser([
Operator('.', 19),
LeftParenthesis(19),
LeftSquareBracket(19),
UnsupportedOperator('++', 17, 16),
UnsupportedOperator('--', 17, 16),
UnsupportedOperator('void', None, 16),
UnsupportedOperator('delete', None, 16),
UnsupportedOperator('await', None, 16),
Operator('typeof', None, 16),
Operator('!', None, 16),
Operator('~', None, 16),
Operator('+', 13, 16),
Operator('-', 13, 16),
Operator('typeof', None, 16),
Operator('**', 15),
Operator('*', 14),
Operator('/', 14),
Operator('%', 14),
Operator('<<', 12),
Operator('>>', 12),
Operator('>>>', 12),
Operator('<', 11),
Operator('<=', 11),
Operator('>', 11),
Operator('>=', 11),
Operator('in', 11),
Operator('instanceof', 11),
Operator('==', 10),
Operator('!=', 10),
Operator('===', 10),
Operator('!==', 10),
Operator('&', 9),
Operator('^', 8),
Operator('|', 7),
Operator('&&', 6),
Operator('||', 5),
Conditional(4),
])
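# Minimal illustration of the Pratt parser above: binding powers drive
# precedence, so '*' (lbp 14) binds tighter than '+' (lbp 13). Assuming a
# Parser instance (defined below) over raw expression text:
#   str(infix_parser.parse(Parser("a + b * c")))  # -> "(${a} + (${b} * ${c}))"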
class Parser(object):
def __init__(self, text):
self.__text = text
self.__pos = 0
self.__lineno = 1
self.__colno = 1
self.__last_object = None
self.__next_doc = None
@property
def at_end(self):
return self.__pos >= len(self.__text)
@property
def next(self):
return self.__text[self.__pos:]
@property
def current_line(self):
text = self.__text
pos = self.__pos
begin = text.rfind('\n', 0, pos)
end = text.find('\n', pos)
if begin < 0:
begin = 0
else:
begin += 1
if end < 0:
end = len(text)
return text[begin:end]
def advance(self, n):
text = self.__text
pos = self.__pos
for i in range(n):
if text[pos] == '\n':
self.__lineno += 1
self.__colno = 1
else:
self.__colno += 1
pos += 1
self.__pos = pos
def __docstring(self, text, prev):
if prev:
if self.__last_object:
if self.__last_object.doc is not None:
self.__last_object.doc = lang.DocumentationString(self.__last_object.doc.text + " " + text)
else:
self.__last_object.doc = lang.DocumentationString(text)
else:
self.error("Found docstring without previous object")
else:
if self.__next_doc is not None:
self.__next_doc += " " + text
else:
self.__next_doc = text
def __get_next_doc(self):
if self.__next_doc is None:
return
doc = lang.DocumentationString(self.__next_doc)
self.__next_doc = None
return doc
def __return(self, object, doc):
if doc:
if object.doc:
object.doc = lang.DocumentationString(object.doc.text + " " + doc.text)
else:
object.doc = doc
self.__last_object = object
return object
def _skip(self):
while True:
m = skip_re.match(self.next)
if m is not None:
text = m.group(0).strip()
if text.startswith('///<'):
self.__docstring(text[4:], True)
elif text.startswith('///'):
self.__docstring(text[3:], False)
elif text.startswith('/**'):
end = text.rfind('*/')
self.__docstring(text[3:end], False)
self.advance(m.end())
else:
break
def error(self, msg):
lineno, col, line = self.__lineno, self.__colno, self.current_line
pointer = re.sub(r'\S', ' ', line)[:col - 1] + '^-- ' + msg
raise Exception("at line %d:%d:\n%s\n%s" %(lineno, col, self.current_line, pointer))
def lookahead(self, exp):
if self.at_end:
return
self._skip()
next = self.next
if isinstance(exp, str):
keyword = exp[-1].isalnum()
n, next_n = len(exp), len(next)
if n > next_n:
return
if next.startswith(exp):
#check that exp ends on word boundary
if keyword and n < next_n and next[n].isalnum():
return
else:
return exp
elif isinstance(exp, CustomParser):
return exp.match(next)
else:
m = exp.match(next)
if m:
return m.group(0)
def maybe(self, exp):
value = self.lookahead(exp)
if value is not None:
self.advance(len(value))
return value
def read(self, exp, error):
value = self.maybe(exp)
if value is None:
self.error(error)
return value
def __read_statement_end(self):
self.read(';', "Expected ; at the end of the statement")
def __read_list(self, exp, delimiter, error):
result = []
result.append(self.read(exp, error))
while self.maybe(delimiter):
result.append(self.read(exp, error))
return result
def __read_nested(self, begin, end, error):
begin_off = self.__pos
self.read(begin, error)
counter = 1
while not self.at_end:
if self.maybe(begin):
counter += 1
elif self.maybe(end):
counter -= 1
if counter == 0:
end_off = self.__pos
value = self.__text[begin_off: end_off]
return value
else:
if not self.maybe(string_re):
self.advance(1)
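# Example: for the input "{ a { b } }", __read_nested('{', '}', ...) consumes
# the whole balanced block and returns it verbatim, outer braces included.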
def __read_code(self):
return self.__read_nested('{', '}', "Expected code block")
def __read_expression(self, terminate = True):
if self.maybe('['):
values = []
while not self.maybe(']'):
values.append(self.__read_expression(terminate = False))
if self.maybe(']'):
break
self.read(',', "Expected ',' as an array delimiter")
if terminate:
self.__read_statement_end()
return "[%s]" % (",".join(map(str, values)))
else:
value = infix_parser.parse(self)
if terminate:
self.__read_statement_end()
return str(value)
def __read_property(self):
if self.lookahead(':'):
return self.__read_rules_with_id(["property"])
doc = self.__get_next_doc()
type = self.read(property_type_re, "Expected type after property keyword")
if type == 'enum':
type = self.read(identifier_re, "Expected type after enum keyword")
self.read('{', "Expected { after property enum")
values = self.__read_list(component_name, ',', "Expected capitalised enum element")
self.read('}', "Expected } after enum element declaration")
if self.maybe(':'):
def_value = self.read(component_name, "Expected capitalised default enum value")
else:
def_value = None
self.__read_statement_end()
return self.__return(lang.EnumProperty(type, values, def_value), doc)
if type == 'const':
name = self.read(identifier_re, "Expected const property name")
self.read(':', "Expected : before const property code")
code = self.__read_code()
return self.__return(lang.Property("const", [(name, code)]), doc)
if type == 'alias':
name = self.read(identifier_re, "Expected alias property name")
self.read(':', "Expected : before alias target")
target = self.read(nested_identifier_re, "Expected identifier as an alias target")
self.__read_statement_end()
return self.__return(lang.AliasProperty(name, target), doc)
names = self.__read_list(identifier_re, ',', "Expected identifier in property list")
if len(names) == 1:
#Allow initialisation for the single property
def_value = None
if self.maybe(':'):
if self.lookahead(component_name_lookahead):
def_value = self.__read_comp()
else:
def_value = self.__read_expression()
else:
self.__read_statement_end()
name = names[0]
return self.__return(lang.Property(type, [(name, def_value)]), doc)
else:
self.read(';', 'Expected ; at the end of property declaration')
return self.__return(lang.Property(type, map(lambda name: (name, None), names)), doc)
def __read_rules_with_id(self, identifiers):
args = []
doc = self.__get_next_doc()
if self.maybe('('):
if not self.maybe(')'):
args = self.__read_list(identifier_re, ',', "Expected argument list")
self.read(')', "Expected () as an argument list")
if self.maybe(':'):
if self.lookahead('{'):
code = self.__read_code()
return self.__return(lang.Method(identifiers, args, code, True, False), doc)
if len(identifiers) > 1:
self.error("Multiple identifiers are not allowed in assignment")
if self.lookahead(component_name_lookahead):
return self.__return(lang.Assignment(identifiers[0], self.__read_comp()), doc)
value = self.__read_expression()
return self.__return(lang.Assignment(identifiers[0], value), doc)
elif self.maybe('{'):
if len(identifiers) > 1:
self.error("Multiple identifiers are not allowed in assignment scope")
values = []
while not self.maybe('}'):
name = self.read(nested_identifier_re, "Expected identifier in assignment scope")
self.read(':', "Expected : after identifier in assignment scope")
value = self.__read_expression()
values.append(lang.Assignment(name, value))
return self.__return(lang.AssignmentScope(identifiers[0], values), doc)
else:
self.error("Unexpected identifier(s): %s" %",".join(identifiers))
def __read_function(self, async_f = False):
doc = self.__get_next_doc()
name = self.read(identifier_re, "Expected identifier")
args = []
self.read('(', "Expected (argument-list) in function declaration")
if not self.maybe(')'):
args = self.__read_list(identifier_re, ',', "Expected argument list")
self.read(')', "Expected ) at the end of argument list")
code = self.__read_code()
return self.__return(lang.Method([name], args, code, False, async_f), doc)
def __read_json_value(self):
value = self.maybe(kw_re)
if value is not None:
return value
value = self.maybe(number_re)
if value is not None:
return value
value = self.maybe(string_re)
if value is not None:
return lang.unescape_string(value[1:-1])
if self.lookahead('{'):
return self.__read_json_object()
if self.lookahead('['):
return self.__read_json_list()
def __read_json_list(self):
self.read('[', "Expect JSON list starts with [")
result = []
while not self.maybe(']'):
result.append(self.__read_json_value())
if self.maybe(']'):
break
self.read(',', "Expected , as a JSON list delimiter")
return result
def __read_json_object(self):
self.read('{', "Expected JSON object starts with {")
object = OrderedDict()
while not self.maybe('}'):
name = self.maybe(identifier_re)
if not name:
name = self.read(string_re, "Expected string or identifier as property name")
self.read(':', "Expected : after property name")
value = self.__read_json_value()
object[name] = value
self.maybe(json_object_value_delimiter_re)
return object
def __read_scope_decl(self):
if self.maybe('ListElement'):
doc = self.__get_next_doc()
return self.__return(lang.ListElement(self.__read_json_object()), doc)
elif self.maybe('Behavior'):
self.read("on", "Expected on keyword after Behavior declaration")
doc = self.__get_next_doc()
targets = self.__read_list(nested_identifier_re, ",", "Expected identifier list after on keyword")
self.read("{", "Expected { after identifier list in behavior declaration")
comp = self.__read_comp()
self.read("}", "Expected } after behavior animation declaration")
return self.__return(lang.Behavior(targets, comp), doc)
elif self.maybe('signal'):
doc = self.__get_next_doc()
name = self.read(identifier_re, "Expected identifier in signal declaration")
self.__read_statement_end()
return self.__return(lang.Signal(name), doc)
elif self.maybe('property'):
return self.__read_property()
elif self.maybe('id'):
doc = self.__get_next_doc()
self.read(':', "Expected : after id keyword")
name = self.read(identifier_re, "Expected identifier in id assignment")
self.__read_statement_end()
return self.__return(lang.IdAssignment(name), doc)
elif self.maybe('const'):
doc = self.__get_next_doc()
type = self.read(property_type_re, "Expected type after const keyword")
name = self.read(component_name, "Expected Capitalised const name")
self.read(':', "Expected : after const identifier")
value = self.__read_json_value()
self.__read_statement_end()
return self.__return(lang.Const(type, name, value), doc)
elif self.maybe('function'):
return self.__read_function()
elif self.maybe('async'):
self.read('function', "Expected function after async")
return self.__read_function(async_f = True)
elif self.lookahead(component_name_lookahead):
return self.__read_comp()
else:
identifiers = self.__read_list(nested_identifier_re, ",", "Expected identifier (or identifier list)")
return self.__read_rules_with_id(identifiers)
def __read_comp(self):
doc = self.__get_next_doc()
comp_name = self.read(component_name, "Expected component name")
self.read(r'{', "Expected {")
children = []
while not self.maybe('}'):
children.append(self.__read_scope_decl())
return self.__return(lang.Component(comp_name, children), doc)
def parse(self, parse_all = True):
while self.maybe('import'):
self.read(rest_of_the_line_re, "Skip to the end of the line failed")
r = [self.__read_comp()]
self._skip()
if parse_all:
if self.__pos < len(self.__text):
self.error("Extra text after component declaration")
return r
def parse(data):
global doc_root_component
doc_root_component = None
parser = Parser(data)
return parser.parse()
| pureqml/qmlcore | compiler/grammar2.py | Python | mit | 21,146 | 0.03263 |
# -*- coding: utf-8 -*-
import itertools
"""
Languages | ShortCode | Wordnet
Albanian | sq | als
Arabic | ar | arb
Bulgarian | bg | bul
Catalan | ca | cat
Chinese | zh | cmn
Chinese (Taiwan) | qn | qcn
Greek | el | ell
Basque | eu | eus
Persian | fa | fas
Finish | fi | fin
French | fr | fra
Galician | gl | glg
Hebrew | he | heb
Croatian | hr | hrv
Indonesian | id | ind
Italian | it | ita
Japanese | ja | jpn
Norwegian NyNorsk | nn | nno
Norwegian Bokmål | nb/no | nob
Polish | pl | pol
Portuguese | pt | por
Slovenian | sl | slv
Spanish | es | spa
Swedish | sv | swe
Thai | tt | tha
Malay | ms | zsm
"""
"""
Language short codes => Wordnet Code
"""
AVAILABLE_LANGUAGES = dict([('sq','als'), ('ar', 'arb'), ('bg', 'bul'), ('ca', 'cat'), ('da', 'dan'), ('zh', 'cmn'),
('el','ell'), ('eu', 'eus'), ('fa', 'fas'), ('fi', 'fin'), ('fr', 'fra'),
('gl','glg'), ('he', 'heb'), ('hr', 'hrv'), ('id', 'ind'), ('it', 'ita'),
('ja','jpn'),
('nn', 'nno'), ('nb', 'nob'),
('no', 'nob'), ('pl', 'pol'),
('pt', 'por'),
('qn','qcn'), ('sl', 'slv'), ('es', 'spa'), ('sv', 'swe'), ('tt', 'tha'),
('ms', 'zsm'),
('en', 'eng')])
"""
Language names => Short Code
"""
AVAILABLE_LANGUAGES_NAMES = dict([
('albanian', 'sq'), ('arabic', 'ar'), ('bulgarian', 'bg'), ('catalan', 'ca'), ('danish', 'da'),
('chinese', 'zh'), ('basque', 'eu'), ('persian', 'fa'), ('finnish', 'fi'), ('france', 'fr'),
('galician', 'gl'), ('hebrew', 'he'), ('croatian', 'hr'), ('indonesian', 'id'), ('italian', 'it'),
('japanese', 'ja'), ('norwegian_nynorsk', 'nn'), ('norwegian', 'no'), ('norwegian_bokmal', 'nb'),
('polish', 'pl'), ('portuguese', 'pt'), ('slovenian', 'sl'), ('spanish', 'es'),
('swedish', 'sv'), ('thai', 'tt'), ('malay', 'ms'), ('english', 'en')
])
class WordnetManager(object):
def __init__(self, language="en"):
"""
Constructor for the wordnet manager.
It takes a main language.
"""
self.__language = language
def __isLanguageAvailable(self, code=None, language_name=None):
"""
Check if a language is available
"""
if code is None and language_name is None:
raise Exception("Error evaluating the correct language")
if code is not None and code.lower() in AVAILABLE_LANGUAGES:
return True
if language_name is not None and language_name.lower() in AVAILABLE_LANGUAGES_NAMES:
return True
return False
def __nameToWordnetCode(self, name):
"""
It returns the wordnet code for a given language name
"""
if not self.__isLanguageAvailable(language_name=name):
raise Exception("Wordnet code not found for the language name %s " % name)
name = name.lower()
languageShortCode = AVAILABLE_LANGUAGES_NAMES[name]
wordnetCode = self.__shortCodeToWordnetCode(languageShortCode)
return wordnetCode
def __shortCodeToWordnetCode(self, shortCode):
"""
It returns the wordnet code from a given language short code
"""
if not self.__isLanguageAvailable(code=shortCode):
raise Exception("Wordnet code not found for the language short code %s " % shortCode)
code = shortCode.lower()
wordnetCode = AVAILABLE_LANGUAGES[code]
return wordnetCode
def __getSynsets(self, word, wordNetCode):
"""
It returns the synsets given both word and language code
"""
from nltk.corpus import wordnet as wn
synsets = wn.synsets(word, lang=wordNetCode)
return synsets
def getLemmas(self, word, languageCode="en"):
"""
Get the lemmas for a given word
:word: The word
:languageCode: The language for a given lemma
"""
wnCode = self.__shortCodeToWordnetCode(shortCode=languageCode)
synsets = self.__getSynsets(word, wnCode) #wn.synsets(word, lang=wnCode)
lemmas = dict([('en', [])])
for synset in synsets:
enLemmas = synset.lemma_names()
lemmas['en'].extend(enLemmas)
if languageCode != "en" and self.__isLanguageAvailable(code=languageCode):
langLemmas = list(sorted(set(synset.lemma_names(lang=wnCode))))
lemmas[languageCode] = langLemmas
lemmas['en'] = list(sorted(set(lemmas.get('en', []))))
return lemmas
def getSynonyms(self, words=[], language_code="en"):
"""
Get the synonyms from a list of words.
:words: A list of words
:language_code: the language for the synonyms.
"""
if words is None or not isinstance(words, list) or len(words) <= 0:
return []
if not self.__isLanguageAvailable(code=language_code):
return []
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
result[word] = dict([('lemmas', self.getLemmas(word,languageCode=language_code))])
return result
def getHyponyms(self, words, language_code="en"):
"""
Get specific synsets from a given synset
"""
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
synonyms = self.__getSynsets(word, wnCode)
hyponyms = [hyp for synset in synonyms for hyp in synset.hyponyms()]
engLemmas = [hyp.lemma_names() for hyp in hyponyms]
lemmas = dict([('en', list(sorted(set(itertools.chain.from_iterable(engLemmas)), key=lambda s: s.lower())))])
if language_code != "en":
languageLemmas = [hyp.lemma_names(lang=wnCode) for hyp in hyponyms]
languageLemmas = list(sorted(set(itertools.chain.from_iterable(languageLemmas)), key=lambda s: s.lower()))
lemmas[language_code] = languageLemmas
result[word] = dict([ ('lemmas', lemmas), ('language', language_code)])
return result
def getHypernyms(self, words, language_code="en"):
"""
Get general synsets from a given synset
"""
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
synonyms = self.__getSynsets(word, wnCode)
hypernyms = [hyp for synset in synonyms for hyp in synset.hypernyms()]
engLemmas = [hyp.lemma_names() for hyp in hypernyms]
lemmas = dict([('en', list(sorted(set(itertools.chain.from_iterable(engLemmas)), key=lambda s: s.lower())))])
if language_code != "en":
languageLemmas = [hyp.lemma_names(lang=wnCode) for hyp in hypernyms]
languageLemmas = list(sorted(set(itertools.chain.from_iterable(languageLemmas)), key=lambda s: s.lower()))
lemmas[language_code] = languageLemmas
result[word] = dict([ ('lemmas', lemmas), ('language', language_code)])
return result
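# Illustrative usage (hypothetical word/codes; requires the NLTK wordnet and
# Open Multilingual Wordnet corpora to be downloaded):
#   manager = WordnetManager()
#   manager.getLemmas("dog", languageCode="es")  # {'en': [...], 'es': [...]}
#   manager.getSynonyms(["dog"], language_code="es")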
| domenicosolazzo/jroc | jroc/nlp/wordnet/WordnetManager.py | Python | gpl-3.0 | 8,043 | 0.005098 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os, sys
from platformcode import config, logger
from core import scrapertools
from core.item import Item
from core import servertools
from core import httptools
host = 'https://www.porndish.com'
def mainlist(item):
logger.info()
itemlist = []
itemlist.append( Item(channel=item.channel, title="Nuevos" , action="lista", url=host))
itemlist.append( Item(channel=item.channel, title="Canal" , action="categorias", url=host))
itemlist.append( Item(channel=item.channel, title="Buscar", action="search"))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
item.url = host + "/?s=%s" % texto
try:
return lista(item)
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
patron = '<li id="menu-item-\d+".*?'
patron += '<a href="([^"]+)">([^<]+)<'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedtitle in matches:
scrapedplot = ""
scrapedurl = urlparse.urljoin(item.url,scrapedurl)
scrapedthumbnail = ""
itemlist.append( Item(channel=item.channel, action="lista", title=scrapedtitle, url=scrapedurl,
fanart=scrapedthumbnail, thumbnail=scrapedthumbnail , plot=scrapedplot) )
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>|<br/>", "", data)
data = scrapertools.find_single_match(data, 'archive-body">(.*?)<div class="g1-row g1-row-layout-page g1-prefooter">')
patron = '<article class=.*?'
patron += 'src="([^"]+)".*?'
patron += 'title="([^"]+)".*?'
patron += '<a href="([^"]+)" rel="bookmark">'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedthumbnail,scrapedtitle,scrapedurl in matches:
thumbnail = scrapedthumbnail
plot = ""
itemlist.append( Item(channel=item.channel, action="play", title=scrapedtitle, url=scrapedurl,
fanart=thumbnail, thumbnail=thumbnail, plot=plot, contentTitle = scrapedtitle))
next_page = scrapertools.find_single_match(data, '<a class="g1-delta g1-delta-1st next" href="([^"]+)">Next</a>')
if next_page:
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append( Item(channel=item.channel, action="lista", title="Página Siguiente >>", text_color="blue",
url=next_page) )
return itemlist
def play(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<iframe src="([^"]+)"'
matches = scrapertools.find_multiple_matches(data, patron)
for url in matches:
itemlist.append(item.clone(action="play", title= "%s" , contentTitle=item.title, url=url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
a = len (itemlist)
for i in itemlist:
if a < 1:
return []
res = servertools.check_video_link(i.url, i.server, timeout=5)
a -= 1
if 'green' in res:
return [i]
else:
continue
| alfa-jor/addon | plugin.video.alfa/channels/porndish.py | Python | gpl-3.0 | 3,595 | 0.013634 |
# Note: Even though this has Sphinx format, this is not meant to be part of the public docs
"""
************
File Caching
************
Implements a cache on local disk for Synapse file entities and other objects
with a `FileHandle <https://rest.synapse.org/org/sagebionetworks/repo/model/file/FileHandle.html>`_.
This is part of the internal implementation of the client and should not be
accessed directly by users of the client.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
import collections
import datetime
import json
import operator
import os
import re
import shutil
import six
from math import floor
import synapseclient.utils as utils
from synapseclient.lock import Lock
from synapseclient.exceptions import *
CACHE_ROOT_DIR = os.path.join('~', '.synapseCache')
def epoch_time_to_iso(epoch_time):
"""
Convert seconds since unix epoch to a string in ISO format
"""
return None if epoch_time is None else utils.datetime_to_iso(utils.from_unix_epoch_time_secs(epoch_time))
def iso_time_to_epoch(iso_time):
"""
Convert an ISO formatted time into seconds since unix epoch
"""
return None if iso_time is None else utils.to_unix_epoch_time_secs(utils.iso_to_datetime(iso_time))
def compare_timestamps(modified_time, cached_time):
"""
Compare two ISO formatted timestamps, with a special case when cached_time
ends in .000Z.
For backward compatibility, we always write .000 for milliseconds into the cache.
We then match a cached time ending in .000Z, meaning zero milliseconds
with a modified time with any number of milliseconds.
:param modified_time: float representing seconds since unix epoch
:param cached_time: string holding a ISO formatted time
"""
if cached_time is None or modified_time is None:
return False
if cached_time.endswith(".000Z"):
return cached_time == epoch_time_to_iso(floor(modified_time))
else:
return cached_time == epoch_time_to_iso(modified_time)
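# Example: a cached time of "2020-01-01T00:00:00.000Z" (zero milliseconds)
# matches any modified time within that second, e.g. the epoch float
# 1577836800.25, because only floor(modified_time) is compared.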
def _get_modified_time(path):
if os.path.exists(path):
return os.path.getmtime(path)
return None
class Cache():
"""
Represent a cache in which files are accessed by file handle ID.
"""
def __init__(self, cache_root_dir=CACHE_ROOT_DIR, fanout=1000):
## set the root dir of the cache; metadata will be stored here, and files
## are stored here by default, but other locations can be specified
cache_root_dir = os.path.expanduser(cache_root_dir)
if not os.path.exists(cache_root_dir):
os.makedirs(cache_root_dir)
self.cache_root_dir = cache_root_dir
self.fanout = fanout
self.cache_map_file_name = ".cacheMap"
def get_cache_dir(self, file_handle_id):
if isinstance(file_handle_id, collections.Mapping):
if 'dataFileHandleId' in file_handle_id:
file_handle_id = file_handle_id['dataFileHandleId']
elif 'concreteType' in file_handle_id and 'id' in file_handle_id and file_handle_id['concreteType'].startswith('org.sagebionetworks.repo.model.file'):
file_handle_id = file_handle_id['id']
return os.path.join(self.cache_root_dir, str(int(file_handle_id) % self.fanout), str(file_handle_id))
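# Example: file handle 59949 with the default fanout of 1000 maps to
# [cache_root_dir]/949/59949, since 59949 % 1000 == 949.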
def _read_cache_map(self, cache_dir):
cache_map_file = os.path.join(cache_dir, self.cache_map_file_name)
if not os.path.exists(cache_map_file):
return {}
with open(cache_map_file, 'r') as f:
cache_map = json.load(f)
return cache_map
def _write_cache_map(self, cache_dir, cache_map):
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
cache_map_file = os.path.join(cache_dir, self.cache_map_file_name)
with open(cache_map_file, 'w') as f:
json.dump(cache_map, f)
f.write('\n') # For compatibility with R's JSON parser
def contains(self, file_handle_id, path):
"""
Given a file and file_handle_id, return True if an unmodified cached
copy of the file exists at the exact path given or False otherwise.
:param file_handle_id:
:param path: file path at which to look for a cached copy
"""
cache_dir = self.get_cache_dir(file_handle_id)
if not os.path.exists(cache_dir):
return False
with Lock(self.cache_map_file_name, dir=cache_dir):
cache_map = self._read_cache_map(cache_dir)
path = utils.normalize_path(path)
cached_time = cache_map.get(path, None)
if cached_time:
return True if compare_timestamps(_get_modified_time(path), cached_time) else False
def get(self, file_handle_id, path=None):
"""
Retrieve a file with the given file handle from the cache.
:param file_handle_id:
:param path: If the given path is None, look for a cached copy of the
file in the cache directory. If the path is a directory,
look there for a cached copy. If a full file-path is
given, only check whether that exact file exists and is
unmodified since it was cached.
:returns: Either a file path, if an unmodified cached copy of the file
exists in the specified location or None if it does not
"""
cache_dir = self.get_cache_dir(file_handle_id)
if not os.path.exists(cache_dir):
return None
with Lock(self.cache_map_file_name, dir=cache_dir):
cache_map = self._read_cache_map(cache_dir)
path = utils.normalize_path(path)
## If the caller specifies a path and that path exists in the cache
## but has been modified, we need to indicate no match by returning
## None. The logic for updating a synapse entity depends on this to
## determine the need to upload a new file.
if path is not None:
## If we're given a path to a directory, look for a cached file in that directory
if os.path.isdir(path):
for cached_file_path, cached_time in six.iteritems(cache_map):
if path == os.path.dirname(cached_file_path):
return cached_file_path if compare_timestamps(_get_modified_time(cached_file_path), cached_time) else None
## if we're given a full file path, look up a matching file in the cache
else:
cached_time = cache_map.get(path, None)
if cached_time:
return path if compare_timestamps(_get_modified_time(path), cached_time) else None
## return most recently cached and unmodified file OR
## None if there are no unmodified files
for cached_file_path, cached_time in sorted(cache_map.items(), key=operator.itemgetter(1), reverse=True):
if compare_timestamps(_get_modified_time(cached_file_path), cached_time):
return cached_file_path
return None
def add(self, file_handle_id, path):
"""
Add a file to the cache
"""
if not path or not os.path.exists(path):
raise ValueError("Can't find file \"%s\"" % path)
cache_dir = self.get_cache_dir(file_handle_id)
with Lock(self.cache_map_file_name, dir=cache_dir):
cache_map = self._read_cache_map(cache_dir)
path = utils.normalize_path(path)
## write .000 milliseconds for backward compatibility
cache_map[path] = epoch_time_to_iso(floor(_get_modified_time(path)))
self._write_cache_map(cache_dir, cache_map)
return cache_map
def remove(self, file_handle_id, path=None, delete=None):
"""
Remove a file from the cache.
:param file_handle_id: Will also extract file handle id from either a File or file handle
:param path: If the given path is None, remove (and potentially delete)
all cached copies. If the path is that of a file in the
.cacheMap file, remove it.
:returns: A list of files removed
"""
removed = []
cache_dir = self.get_cache_dir(file_handle_id)
## if we've passed an entity and not a path, get path from entity
if path is None and isinstance(file_handle_id, collections.Mapping) and 'path' in file_handle_id:
path = file_handle_id['path']
with Lock(self.cache_map_file_name, dir=cache_dir):
cache_map = self._read_cache_map(cache_dir)
if path is None:
for path in cache_map:
if delete is True and os.path.exists(path):
os.remove(path)
removed.append(path)
cache_map = {}
else:
path = utils.normalize_path(path)
if path in cache_map:
if delete is True and os.path.exists(path):
os.remove(path)
del cache_map[path]
removed.append(path)
self._write_cache_map(cache_dir, cache_map)
return removed
def _cache_dirs(self):
"""
Generate a list of all cache dirs, directories of the form:
[cache.cache_root_dir]/949/59949
"""
for item1 in os.listdir(self.cache_root_dir):
path1 = os.path.join(self.cache_root_dir, item1)
if os.path.isdir(path1) and re.match('\d+', item1):
for item2 in os.listdir(path1):
path2 = os.path.join(path1, item2)
if os.path.isdir(path2) and re.match('\d+', item2):
yield path2
def purge(self, before_date, dry_run=False):
"""
Purge the cache. Use with caution. Delete files whose cache maps were last updated prior to the given date.
Deletes .cacheMap files and files stored in the cache.cache_root_dir, but does not delete
files stored outside the cache.
"""
if isinstance(before_date, datetime.datetime):
before_date = utils.to_unix_epoch_time_secs(before_date)
count = 0
for cache_dir in self._cache_dirs():
## _get_modified_time returns None if the cache map file doesn't
## exist and n > None evaluates to True (wtf?). I'm guessing it's
## OK to purge directories in the cache that have no .cacheMap file
if before_date > _get_modified_time(os.path.join(cache_dir, self.cache_map_file_name)):
if dry_run:
print(cache_dir)
else:
shutil.rmtree(cache_dir)
count += 1
return count
| kkdang/synapsePythonClient | synapseclient/cache.py | Python | apache-2.0 | 11,025 | 0.004444 |
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
from expedient.common.rpc4django.utils import rpc_url
from openflow.common.utils.OptinThemeManager import OptinThemeManager
OptinThemeManager.initialize()
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', 'openflow.optin_manager.users.views.index'),
url(r'^dashboard$', 'openflow.optin_manager.users.views.dashboard', name="dashboard"),
url(r'^change_profile$', 'openflow.optin_manager.users.views.change_profile', name="change_profile"),
(r'^controls/', include('openflow.optin_manager.controls.urls')),
(r'^opts/', include('openflow.optin_manager.opts.urls')),
(r'^admin_manager/', include('openflow.optin_manager.admin_manager.urls')),
(r'^xmlrpc/', include('openflow.optin_manager.xmlrpc_server.urls')),
# For testing
(r'^dummyfv/', include('openflow.optin_manager.dummyfv.urls')),
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
# sfa
rpc_url(r'^xmlrpc/sfa/?$', name='optin_sfa'),
rpc_url(r'^xmlrpc/geni/3/?$', name='gapi3')
)
#static_file_tuple = (r'^%s/(?P<path>.*)$' % settings.MEDIA_URL[1:],
# 'django.views.static.serve',
# {'document_root': "%s" % settings.MEDIA_ROOT})
#static_js_tuple = (r'^%s/(?P<path>.*)$' % str(settings.MEDIA_URL[1:]+"/js/"),
# 'django.views.static.serve',
# {'document_root': "%s" % settings.MEDIA_ROOT})
#urlpatterns += patterns('',
# TODO: Serve static content, should be removed in production deployment
# serve from another domain to speed up connections (no cookies needed)
# url(*static_file_tuple, name="img_media"),
# url(*static_file_tuple, name="css_media"),
# url(*static_js_tuple, name="js_media"),)
def get_static_url(name, path=""):
static_file_tuple = (
r'^%s%s/(?P<path>.*)$' % (settings.MEDIA_URL[1:], path),
'django.views.static.serve',
{'document_root': "%s%s" % (settings.MEDIA_ROOT, path)})
return url(*static_file_tuple, name=name)
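# Example: get_static_url("img_media", "/default") serves files under
# MEDIA_ROOT/default at URLs beginning MEDIA_URL/default/ via
# django.views.static.serve.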
'''
Static content
'''
urlpatterns += patterns('',
get_static_url("img_media", "/default"),
get_static_url("css_media", "/default"),
get_static_url("js_media", "/default/js"),
)
'''
Static theme content
'''
img_theme_tuple = OptinThemeManager.getStaticThemeTuple("img_media")
css_theme_tuple = OptinThemeManager.getStaticThemeTuple("css_media")
js_theme_tuple = OptinThemeManager.getStaticThemeTuple("js_media")
urlpatterns += patterns('',
get_static_url(img_theme_tuple[0],img_theme_tuple[1]),
get_static_url(css_theme_tuple[0],css_theme_tuple[1]),
get_static_url(js_theme_tuple[0],js_theme_tuple[1]),
)
| dana-i2cat/felix | optin_manager/src/python/openflow/optin_manager/urls.py | Python | apache-2.0 | 2,782 | 0.006111 |
"""
Common utility methods for Mobile APIs.
"""
API_V05 = 'v0.5'
API_V1 = 'v1'
def parsed_version(version):
""" Converts string X.X.X.Y to int tuple (X, X, X) """
return tuple(map(int, (version.split(".")[:3])))
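# Example: parsed_version("2.7.1.dev0") == (2, 7, 1)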
| eduNEXT/edx-platform | lms/djangoapps/mobile_api/utils.py | Python | agpl-3.0 | 223 | 0 |
examples = dict(
disable_training_metrics="""
>>> from h2o.estimators import H2OSupportVectorMachineEstimator
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
fact_threshold="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(disable_training_metrics=False,
... fact_threshold=1e-7)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
feasible_threshold="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(disable_training_metrics=False,
... fact_threshold=1e-7)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
gamma="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
hyper_param="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... hyper_param=0.01,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
ignore_const_cols="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... ignore_const_cols=False,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
kernel_type="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... rank_ratio=0.1,
... hyper_param=0.01,
... kernel_type="gaussian",
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
max_iterations="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... rank_ratio=0.1,
... hyper_param=0.01,
... max_iterations=20,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
mu_factor="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... mu_factor=100.5,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
negative_weight="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... rank_ratio=0.1,
... negative_weight=10,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
positive_weight="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... rank_ratio=0.1,
... positive_weight=0.1,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
rank_ratio="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
seed="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.1,
... rank_ratio=0.1,
... seed=1234,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.model_performance
""",
surrogate_gap_threshold="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... surrogate_gap_threshold=0.1,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
sv_threshold="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> svm = H2OSupportVectorMachineEstimator(gamma=0.01,
... rank_ratio=0.1,
... sv_threshold=0.01,
... disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=splice)
>>> svm.mse()
""",
training_frame="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> train, valid = splice.split_frame(ratios=[0.8])
>>> svm = H2OSupportVectorMachineEstimator(disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=train)
>>> svm.mse()
""",
validation_frame="""
>>> splice = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/splice/splice.svm")
>>> train, valid = splice.split_frame(ratios=[0.8])
>>> svm = H2OSupportVectorMachineEstimator(disable_training_metrics=False)
>>> svm.train(y="C1", training_frame=train, validation_frame=valid)
>>> svm.mse()
"""
)
| h2oai/h2o-3 | h2o-bindings/bin/custom/python/gen_psvm.py | Python | apache-2.0 | 7,069 | 0.003537 |
try:
from unittest import mock
except ImportError:
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def create_instance_of_described_class(self):
if self.example_data:
instance = self.described_class(self.example_data,
self.session)
else:
instance = self.described_class()
instance._session = self.session
return instance
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.create_instance_of_described_class()
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
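# Illustrative subclass (a hedged sketch, not part of the original suite): a
# concrete test binds the class under test and a sample payload, then asserts
# against the mocked session that setUp() wires in. The names below are
# assumptions for demonstration only.
#
#     class TestRepository(UnitHelper):
#         described_class = github3.repos.Repository
#         example_data = {'id': 1, 'full_name': 'octocat/Hello-World'}
#
#         def test_example(self):
#             assert self.instance is not None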
| adrianmoisey/github3.py | tests/unit/helper.py | Python | bsd-3-clause | 1,888 | 0 |
__source__ = 'https://leetcode.com/problems/balanced-binary-tree/#/description'
# https://github.com/kamyu104/LeetCode/blob/master/Python/balanced-binary-tree.py
# Time: O(n)
# Space: O(h), h is height of binary tree
# divide and conquer
#
# Description: Leetcode # 110. Balanced Binary Tree
#
# Given a binary tree, determine if it is height-balanced.
#
# For this problem, a height-balanced binary tree is defined as a binary tree
# in which the depth of the two subtrees of every node never differ by more than 1.
#
# Companies
# Bloomberg
# Related Topics
# Tree Depth-first Search
# Similar Questions
# Maximum Depth of Binary Tree
#
import unittest
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param root, a tree node
# @return a boolean
def isBalanced(self, root):
return (self.getHeight(root) >= 0)
def getHeight(self, root):
if root is None:
return 0
left_height = self.getHeight(root.left)
right_height = self.getHeight(root.right)
if left_height < 0 or right_height < 0 or abs(left_height - right_height) > 1:
return -1
return max(left_height, right_height) + 1
#http://www.programcreek.com/2013/02/leetcode-balanced-binary-tree-java/
class javaSolution:
# @param root, a tree node
# @return a boolean
def isBalanced(self, root):
if not root:
return None
if self.getHeight(root) == -1:
return False
return True
def getHeight(self, root):
if not root:
return 0
left = self.getHeight(root.left)
right = self.getHeight(root.right)
if left == -1 or right == -1:
return -1
if abs(left - right) > 1:
return -1
return max(left, right) + 1
class SolutionOther:
# @param root, a tree node
# @return a boolean
# http://www.cnblogs.com/zuoyuan/p/3720169.html
def isBalanced(self, root):
if root == None:
return True
if abs(self.Height(root.left) - self.Height(root.right)) <= 1:
return self.isBalanced(root.left) and self.isBalanced(root.right)
else:
return False
    def Height(self, root):
if root == None:
return 0
return max(self.Height(root.left), self.Height(root.right)) +1
#############test
#creating BST tree ####
root0=TreeNode(0)
tree1=TreeNode(1)
tree2=TreeNode(2)
tree3=TreeNode(3)
tree4=TreeNode(4)
tree5=TreeNode(5)
tree6=TreeNode(6)
root0.left=tree1
#root0.right=tree2
tree1.left=tree3
tree1.right=tree4
tree2.left=tree5
#tree2.right=tree6
#end of creating BST tree ####
#test
test = SolutionOther()
print test.isBalanced(root0)
#print test.isBalanced3(root0)
#print test.isBalanced2(root0)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
root = TreeNode(0)
root.left = TreeNode(1)
result = Solution().isBalanced(root)
print result
root.left.left = TreeNode(2)
result = javaSolution().isBalanced(root)
print result
if __name__ == '__main__':
unittest.main()
Java = '''
#Thought: https://leetcode.com/problems/contains-duplicate/solution/
Thought: This problem has two standard solutions:
the top-down approach and the bottom-up approach.
DFS 1) The first method checks whether the tree is balanced strictly according to the definition
of a balanced binary tree: the difference between the heights of the two subtrees is not bigger than 1,
and both the left subtree and the right subtree are also balanced. With the helper function depth(),
the code is easy to write.
For the current node root, calling depth() on its left and right children has to visit all of their
descendants, which is O(N) in the worst case. We repeat this for every node in the tree,
so the overall complexity of isBalanced is O(N^2). This is the top-down approach.
DFS 2) The second method is a bottom-up DFS. Instead of calling depth() explicitly for each child node,
we return the height of the current node from the DFS recursion.
When the subtree rooted at the current node (inclusive) is balanced, the function dfsHeight()
returns its height as a non-negative value.
Otherwise -1 is returned. From the leftHeight and rightHeight of the two children,
the parent node can check whether its subtree is balanced and decide its own return value,
so each node is visited once and the total cost is O(N).
# DFS
# 87.89% 1ms
class Solution {
public boolean isBalanced(TreeNode root) {
return dfsHeight(root) != -1;
}
public int dfsHeight(TreeNode root) {
if (root == null) return 0;
int left = dfsHeight(root.left);
int right = dfsHeight(root.right);
if (left == -1 || right == -1 || Math.abs(left - right) > 1) return -1;
return Math.max(left, right) + 1;
}
}
# DFS
# 87.89% 1ms
class Solution {
public boolean isBalanced(TreeNode root) {
if (root == null) return true;
        int left = getDepth(root.left);
        int right = getDepth(root.right);
        return Math.abs(left - right) <= 1 && isBalanced(root.left) && isBalanced(root.right);
    }
    public int getDepth(TreeNode root) {
        if (root == null) return 0;
        return Math.max(getDepth(root.left), getDepth(root.right)) + 1;
}
}
'''
| JulyKikuAkita/PythonPrac | cs15211/BalancedBinaryTree.py | Python | apache-2.0 | 5,434 | 0.009201 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# test_crafting.py
import os
import sys
import unittest
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + "..")
pth = root_folder #+ os.sep + 'worldbuild'
sys.path.append(pth)
from worldbuild.crafting import craft as mod_craft
class TestTemplate(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
def tearDown(self):
unittest.TestCase.tearDown(self)
def test_01_recipe(self):
res = mod_craft.Recipe('1', 'new recipe','20','mix')
#print(res)
self.assertEqual(str(res),'new recipe')
def test_02_dataset_recipe(self):
recipes = mod_craft.DataSet(mod_craft.Recipe, mod_craft.get_fullname('recipes.csv'))
self.assertTrue(len(recipes.object_list) > 18)
tot_time_to_build = 0
for recipe in recipes.object_list:
#print(recipe)
tot_time_to_build += int(recipe.base_time_to_build)
#print('total time to build all recipes = ' + str(tot_time_to_build))
self.assertEqual(str(recipes.object_list[0]), 'Torch')
self.assertEqual(str(recipes.object_list[1]), 'Wooden Plank')
self.assertTrue(tot_time_to_build > 10)
if __name__ == '__main__':
unittest.main() | acutesoftware/worldbuild | tests/test_crafting.py | Python | gpl-2.0 | 1,300 | 0.013077 |
'''
@author: Quarkonics
'''
import os
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.host_operations as host_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.vm_operations as vm_ops
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
test_util.test_dsc('Test VM online change instance offering')
image_name = os.environ.get('imageName_net')
image_uuid = test_lib.lib_get_image_by_name(image_name).uuid
l3_name = os.environ.get('l3VlanNetworkName1')
l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid
l3_net_list = [l3_net_uuid]
cpuNum = 2
memorySize = 666 * 1024 * 1024
new_offering = test_lib.lib_create_instance_offering(cpuNum = cpuNum,\
memorySize = memorySize)
test_obj_dict.add_instance_offering(new_offering)
new_offering_uuid = new_offering.uuid
vm = test_stub.create_vm(l3_net_list, image_uuid, 'online_chg_offering_vm', instance_offering_uuid=new_offering_uuid, system_tags=['instanceOfferingOnlinechange::true'])
test_obj_dict.add_vm(vm)
cpuNum = 1
memorySize = 222 * 1024 * 1024
new_offering2 = test_lib.lib_create_instance_offering(cpuNum = cpuNum,\
memorySize = memorySize)
test_obj_dict.add_instance_offering(new_offering2)
new_offering_uuid2 = new_offering2.uuid
vm.change_instance_offering(new_offering_uuid2)
cpuNum = 1
memorySize = 444 * 1024 * 1024
new_offering3 = test_lib.lib_create_instance_offering(cpuNum = cpuNum,\
memorySize = memorySize)
test_obj_dict.add_instance_offering(new_offering3)
new_offering_uuid3 = new_offering3.uuid
vm.change_instance_offering(new_offering_uuid3)
test_lib.lib_robot_cleanup(test_obj_dict)
test_util.test_pass('VM online change instance offering Test Pass')
#Will be called only if exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_obj_dict)
| zstackorg/zstack-woodpecker | integrationtest/vm/virtualrouter/vlan/test_chg_instance_offering_online2.py | Python | apache-2.0 | 2,179 | 0.010096 |
__file__ = 'IRI_v1'
__date__ = '5/15/2014'
__author__ = 'ABREZNIC'
import arcpy, os, datetime, csv, tpp
now = datetime.datetime.now()
curMonth = now.strftime("%m")
curDay = now.strftime("%d")
curYear = now.strftime("%Y")
today = curYear + "_" + curMonth + "_" + curDay
input = arcpy.GetParameterAsText(0)
calRhino = arcpy.GetParameterAsText(1)
output = arcpy.GetParameterAsText(2)
# theMXD = "C:\\TxDOT\\Projects\\IRI_dan\\working\\Untitled.mxd"
inputlist = [input]
inputcntr = 1
lengthinput = len(inputlist)
issuesReport = [["DISTRICT_FILE", "ROUTE_ID", "BEGIN_POINT", "END_POINT", "SECTION_LENGTH", "IRI", "RUTTING", "DATE", "ERROR_DESCRIPTION"]]
statsReport = [["DISTRICT_FILE", "LG Record Count", "KG Record Count", "Total Records Count", "Input Record Count", "Lost Records Count", "LG Records Length", "KG Records Length", "Total Routed Length"]]
arcpy.CreateFileGDB_management(output, "RhinoLines.gdb")
rhinospace = output + os.sep + "RhinoLines.gdb"
rhino_lines = rhinospace + os.sep + "rhinolines"
arcpy.Copy_management(calRhino, rhino_lines)
# arcpy.AddField_management(rhino_lines, "FRM_DFO", "DOUBLE")
# arcpy.AddField_management(rhino_lines, "TO_DFO", "DOUBLE")
cursor = arcpy.da.UpdateCursor(rhino_lines, ["FRM_DFO", "TO_DFO", 'SHAPE@'])
for row in cursor:
bp = row[2].firstPoint.M
ep = row[2].lastPoint.M
bpNew = float(format(float(bp), '.3f'))
epNew = float(format(float(ep), '.3f'))
row[0] = bpNew
row[1] = epNew
cursor.updateRow(row)
del cursor
del row
arcpy.AddMessage("Calibrated RHINO copied local.")
arcpy.AddField_management(rhino_lines, "RTE_ORDER", "SHORT")
arcpy.AddField_management(rhino_lines, "FLAG", "TEXT", "", "", 30)
arcpy.AddMessage("Applying RTE_ORDER.")
cursor = arcpy.da.UpdateCursor(rhino_lines, ["RTE_ID", "FRM_DFO", "RTE_ORDER", "FLAG", "RU", "F_SYSTEM", "SEC_NHS", "HPMS"], "", "", "", (None, "ORDER BY RTE_ID ASC, FRM_DFO ASC"))
counter = 0
order = 1
previous = ""
for row in cursor:
current = row[0]
if counter == 0:
row[2] = order
elif counter != 0 and previous == current:
order += 1
row[2] = order
else:
order = 1
row[2] = order
previous = current
counter += 1
ru = int(row[4])
fs = int(row[5])
nhs = int(row[6])
row[3] = current + "-" + str(order) + "-" + str(ru) + "-" + str(fs) + "-" + str(nhs) + "-" + str(row[7])
cursor.updateRow(row)
del cursor
arcpy.AddMessage("RTE_ORDER applied.")
dictionary = {}
cursor = arcpy.da.SearchCursor(rhino_lines, ["FLAG", "FRM_DFO", "TO_DFO"])
for row in cursor:
flag = row[0]
odr = flag.split("-")[0] + "-" + flag.split("-")[1] + "-" + flag.split("-")[2]
fDFO = row[1]
tDFO = row[2]
dictionary[odr] = [fDFO, tDFO]
del cursor
for excel in inputlist:
distName = str(excel).split("\\")[-1]
if distName[-1] == "$":
distName = distName[:-1]
arcpy.AddMessage("Beginning " + str(inputcntr) + " of " + str(lengthinput) + ": " + distName)
arcpy.CreateFileGDB_management(output, "Wrkg" + str(inputcntr) + ".gdb")
workspace = output + os.sep + "Wrkg" + str(inputcntr) + ".gdb"
arcpy.AddMessage("Working database created.")
data = []
fields = ["ROUTE_ID", "BEGIN_POINT", "END_POINT", "SECTION_LENGTH", "IRI", "RUTTING", "DATE", "RU", "F_SYSTEM", "SEC_NHS", "HPMS"]
data.append(fields)
# spref = "Coordinate Systems\\Geographic Coordinate Systems\\World\\GCS_WGS_1984.prj"
# spref = "Coordinate Systems\\Geographic Coordinate Systems\\World\\WGS 1984.prj"
# arcpy.MakeXYEventLayer_management(excel, "Long", "Lat", "pointEvents" + str(inputcntr), spref)
# arcpy.AddMessage("Event Layer created.")
pntfeature = workspace + os.sep + "allPoints"
arcpy.CopyFeatures_management(excel, pntfeature)
arcpy.AddMessage("Point feature class created.")
arcpy.AddField_management(pntfeature, "RTE_ID_Orig", "TEXT", "", "", 30)
initial = 0
ids = []
cursor = arcpy.da.UpdateCursor(pntfeature, ["ROUTE_ID", "ROUTE_ID_Good", "RTE_ID_Orig"])
for row in cursor:
id = row[0]
id2 = row[1]
initial += 1
if id2 not in ids:
ids.append(id2)
row[0] = id2
row[2] = id
cursor.updateRow(row)
del cursor
del row
arcpy.AddMessage("RTE_IDs compiled.")
roadslayer = ""
pointslayer = ""
# mxd = arcpy.mapping.MapDocument(theMXD)
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
for lyr in arcpy.mapping.ListLayers(mxd):
if lyr.name == "rhinolines":
arcpy.mapping.RemoveLayer(df, lyr)
if lyr.name == "allPoints":
arcpy.mapping.RemoveLayer(df, lyr)
newlayerpnt = arcpy.mapping.Layer(pntfeature)
arcpy.mapping.AddLayer(df, newlayerpnt)
newlayerline = arcpy.mapping.Layer(rhino_lines)
arcpy.mapping.AddLayer(df, newlayerline)
for lyr in arcpy.mapping.ListLayers(mxd):
if lyr.name == "rhinolines":
roadslayer = lyr
if lyr.name == "allPoints":
pointslayer = lyr
arcpy.AddMessage("Layers acquired.")
counter = 1
total = len(ids)
arcpy.AddMessage("Finding measures for: ")
for id in ids:
roadslayer.definitionQuery = " RTE_ID = '" + id + "' "
pointslayer.definitionQuery = " ROUTE_ID = '" + id + "' "
arcpy.RefreshActiveView()
arcpy.AddMessage(str(counter) + "/" + str(total) + " " + id)
label = id.replace("-", "")
arcpy.LocateFeaturesAlongRoutes_lr(pointslayer, roadslayer, "FLAG", "230 Feet", workspace + os.sep + label, "FLAG POINT END_POINT")
counter += 1
arcpy.AddMessage("Tables created.")
# alltables = []
arcpy.env.workspace = workspace
tables = arcpy.ListTables()
for table in tables:
arcpy.AddMessage(table)
arcpy.AddField_management(table, "ODR_FLAG", "TEXT", "", "", 20)
arcpy.AddMessage("Order Flag field created.")
numbDict = {}
cursor = arcpy.da.UpdateCursor(table, ["FLAG", "ODR_FLAG"])
for row in cursor:
flag = row[0]
odr = flag.split("-")[0] + "-" + flag.split("-")[1] + "-" + flag.split("-")[2]
if odr not in numbDict.keys():
numbDict[odr] = 1
else:
curNumb = numbDict[odr]
curNumb += 1
numbDict[odr] = curNumb
row[1] = odr
cursor.updateRow(row)
del cursor
counter = 1
previous = ""
last = ""
cursor = arcpy.da.UpdateCursor(table, ["ODR_FLAG", "BEGIN_POINT", "END_POINT", "SECTION_LENGTH"], None, None, False, (None, "ORDER BY ODR_FLAG ASC, END_POINT ASC"))
for row in cursor:
current = row[0]
total = numbDict[current]
if counter == 1 and counter != total:
values = dictionary[current]
beginner = float(format(float(values[0]), '.3f'))
segEnd = float(format(float(row[2]), '.3f'))
if abs(segEnd - beginner) > 1:
segSrt = segEnd - .1
row[1] = float(format(float(segSrt), '.3f'))
row[2] = segEnd
row[3] = row[2] - row[1]
else:
row[1] = beginner
row[2] = segEnd
row[3] = row[2] - row[1]
elif counter == 1 and counter == total:
values = dictionary[current]
row[1] = float(format(float(values[0]), '.3f'))
row[2] = float(format(float(values[1]), '.3f'))
row[3] = row[2] - row[1]
counter = 0
elif previous == current and counter != total:
row[1] = last
row[2] = float(format(float(row[2]), '.3f'))
row[3] = row[2] - last
elif previous == current and counter == total:
values = dictionary[current]
ender = float(format(float(values[1]), '.3f'))
if abs(ender - last) > 1:
row[1] = last
row[2] = float(format(float(row[2]), '.3f'))
row[3] = row[2] - last
else:
row[1] = last
row[2] = float(format(float(values[1]), '.3f'))
row[3] = row[2] - last
counter = 0
else:
arcpy.AddMessage("problem with " + current)
last = row[2]
cursor.updateRow(row)
previous = current
counter += 1
del cursor
arcpy.AddMessage("Measure difference fields populated.")
arcpy.Merge_management(tables, workspace + os.sep + "merged")
arcpy.AddMessage("All tables merged successfully.")
arcpy.AddField_management(workspace + os.sep + "merged", "RU", "TEXT", "", "", 5)
arcpy.AddMessage("RU field created.")
arcpy.AddField_management(workspace + os.sep + "merged", "F_SYSTEM", "TEXT", "", "", 5)
arcpy.AddMessage("Functional System field created.")
arcpy.AddField_management(workspace + os.sep + "merged", "SEC_NHS", "TEXT", "", "", 5)
arcpy.AddMessage("NHS field created.")
arcpy.AddField_management(workspace + os.sep + "merged", "HPMS", "TEXT", "", "", 5)
arcpy.AddMessage("HPMS Keeper field created.")
# arcpy.AddMessage("Fields created.")
cursor = arcpy.da.UpdateCursor(workspace + os.sep + "merged", ["FLAG", "RU", "F_SYSTEM", "SEC_NHS", "HPMS"])
for row in cursor:
flag = row[0]
row[1] = flag.split("-")[3]
row[2] = flag.split("-")[4]
row[3] = flag.split("-")[5]
row[4] = flag.split("-")[6]
cursor.updateRow(row)
del cursor
LGcounter = 0
KGcounter = 0
LGlength = 0
KGlength = 0
cursor = arcpy.da.SearchCursor(workspace + os.sep + "merged", fields)
for row in cursor:
id = row[0]
if id[-2:] == "LG":
data.append(row)
LGcounter += 1
LGlength += float(row[3])
else:
data.append(row)
KGcounter += 1
KGlength += float(row[3])
if float(row[3]) > 1:
problem = [distName, row[0], row[1], row[2], row[3], row[4], row[5], row[6], "Abnormally large SECTION_LENGTH"]
issuesReport.append(problem)
if float(row[3]) == 0:
problem = [distName, row[0], row[1], row[2], row[3], row[4], row[5], row[6], "Zero length SECTION_LENGTH"]
issuesReport.append(problem)
del cursor
arcpy.AddMessage("Data compiled.")
arcpy.AddMessage("Creating CSV report.")
final = open(output + os.sep + distName + "_Plotted_" + str(inputcntr) + ".csv", 'wb')
writer = csv.writer(final)
writer.writerows(data)
final.close()
arcpy.AddMessage("CSV written.")
TOTALcounter = LGcounter + KGcounter
TOTALlength = LGlength + KGlength
DIFFcounter = initial - TOTALcounter
statsReport.append([distName, LGcounter, KGcounter, TOTALcounter, initial, DIFFcounter, LGlength, KGlength, TOTALlength])
inputcntr += 1
if len(issuesReport) > 1:
arcpy.AddMessage("Creating errors report...")
errors = open(output + os.sep + "00ISSUES_Investigate.csv", 'wb')
writer = csv.writer(errors)
writer.writerows(issuesReport)
errors.close()
arcpy.AddMessage("Creating stats report...")
stats = open(output + os.sep + "00Statistics.csv", 'wb')
writer = csv.writer(stats)
writer.writerows(statsReport)
stats.close()
arcpy.AddMessage("that's all folks!")
arcpy.AddMessage("started: " + str(now))
now2 = datetime.datetime.now()
arcpy.AddMessage("ended: " + str(now2))
print "that's all folks!" | adambreznicky/python | DanMan/IRI_v5_fixer.py | Python | mit | 11,750 | 0.002128 |
"""Tests for parallel client.py
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
from __future__ import division
import time
from datetime import datetime
from tempfile import mktemp
import zmq
from IPython import parallel
from IPython.parallel.client import client as clientmod
from IPython.parallel import error
from IPython.parallel import AsyncResult, AsyncHubResult
from IPython.parallel import LoadBalancedView, DirectView
from clienttest import ClusterTestCase, segfault, wait, add_engines
def setup():
add_engines(4, total=True)
class TestClient(ClusterTestCase):
def test_ids(self):
n = len(self.client.ids)
self.add_engines(2)
self.assertEquals(len(self.client.ids), n+2)
def test_view_indexing(self):
"""test index access for views"""
self.minimum_engines(4)
targets = self.client._build_targets('all')[-1]
v = self.client[:]
self.assertEquals(v.targets, targets)
t = self.client.ids[2]
v = self.client[t]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, t)
t = self.client.ids[2:4]
v = self.client[t]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, t)
v = self.client[::2]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, targets[::2])
v = self.client[1::3]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, targets[1::3])
v = self.client[:-3]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, targets[:-3])
v = self.client[-1]
self.assert_(isinstance(v, DirectView))
self.assertEquals(v.targets, targets[-1])
self.assertRaises(TypeError, lambda : self.client[None])
def test_lbview_targets(self):
"""test load_balanced_view targets"""
v = self.client.load_balanced_view()
self.assertEquals(v.targets, None)
v = self.client.load_balanced_view(-1)
self.assertEquals(v.targets, [self.client.ids[-1]])
v = self.client.load_balanced_view('all')
self.assertEquals(v.targets, None)
def test_dview_targets(self):
"""test direct_view targets"""
v = self.client.direct_view()
self.assertEquals(v.targets, 'all')
v = self.client.direct_view('all')
self.assertEquals(v.targets, 'all')
v = self.client.direct_view(-1)
self.assertEquals(v.targets, self.client.ids[-1])
def test_lazy_all_targets(self):
"""test lazy evaluation of rc.direct_view('all')"""
v = self.client.direct_view()
self.assertEquals(v.targets, 'all')
def double(x):
return x*2
seq = range(100)
ref = [ double(x) for x in seq ]
# add some engines, which should be used
self.add_engines(1)
n1 = len(self.client.ids)
# simple apply
r = v.apply_sync(lambda : 1)
self.assertEquals(r, [1] * n1)
# map goes through remotefunction
r = v.map_sync(double, seq)
self.assertEquals(r, ref)
# add a couple more engines, and try again
self.add_engines(2)
n2 = len(self.client.ids)
self.assertNotEquals(n2, n1)
# apply
r = v.apply_sync(lambda : 1)
self.assertEquals(r, [1] * n2)
# map
r = v.map_sync(double, seq)
self.assertEquals(r, ref)
def test_targets(self):
"""test various valid targets arguments"""
build = self.client._build_targets
ids = self.client.ids
idents,targets = build(None)
self.assertEquals(ids, targets)
def test_clear(self):
"""test clear behavior"""
self.minimum_engines(2)
v = self.client[:]
v.block=True
v.push(dict(a=5))
v.pull('a')
id0 = self.client.ids[-1]
self.client.clear(targets=id0, block=True)
a = self.client[:-1].get('a')
self.assertRaisesRemote(NameError, self.client[id0].get, 'a')
self.client.clear(block=True)
for i in self.client.ids:
self.assertRaisesRemote(NameError, self.client[i].get, 'a')
def test_get_result(self):
"""test getting results from the Hub."""
c = clientmod.Client(profile='iptest')
t = c.ids[-1]
ar = c[t].apply_async(wait, 1)
# give the monitor time to notice the message
time.sleep(.25)
ahr = self.client.get_result(ar.msg_ids)
self.assertTrue(isinstance(ahr, AsyncHubResult))
self.assertEquals(ahr.get(), ar.get())
ar2 = self.client.get_result(ar.msg_ids)
self.assertFalse(isinstance(ar2, AsyncHubResult))
c.close()
def test_get_execute_result(self):
"""test getting execute results from the Hub."""
c = clientmod.Client(profile='iptest')
t = c.ids[-1]
cell = '\n'.join([
'import time',
'time.sleep(0.25)',
'5'
])
ar = c[t].execute("import time; time.sleep(1)", silent=False)
# give the monitor time to notice the message
time.sleep(.25)
ahr = self.client.get_result(ar.msg_ids)
self.assertTrue(isinstance(ahr, AsyncHubResult))
self.assertEquals(ahr.get().pyout, ar.get().pyout)
ar2 = self.client.get_result(ar.msg_ids)
self.assertFalse(isinstance(ar2, AsyncHubResult))
c.close()
def test_ids_list(self):
"""test client.ids"""
ids = self.client.ids
self.assertEquals(ids, self.client._ids)
self.assertFalse(ids is self.client._ids)
ids.remove(ids[-1])
self.assertNotEquals(ids, self.client._ids)
def test_queue_status(self):
ids = self.client.ids
id0 = ids[0]
qs = self.client.queue_status(targets=id0)
self.assertTrue(isinstance(qs, dict))
self.assertEquals(sorted(qs.keys()), ['completed', 'queue', 'tasks'])
allqs = self.client.queue_status()
self.assertTrue(isinstance(allqs, dict))
intkeys = list(allqs.keys())
intkeys.remove('unassigned')
self.assertEquals(sorted(intkeys), sorted(self.client.ids))
unassigned = allqs.pop('unassigned')
for eid,qs in allqs.items():
self.assertTrue(isinstance(qs, dict))
self.assertEquals(sorted(qs.keys()), ['completed', 'queue', 'tasks'])
def test_shutdown(self):
ids = self.client.ids
id0 = ids[0]
self.client.shutdown(id0, block=True)
while id0 in self.client.ids:
time.sleep(0.1)
self.client.spin()
self.assertRaises(IndexError, lambda : self.client[id0])
def test_result_status(self):
pass
# to be written
def test_db_query_dt(self):
"""test db query by date"""
hist = self.client.hub_history()
middle = self.client.db_query({'msg_id' : hist[len(hist)//2]})[0]
tic = middle['submitted']
before = self.client.db_query({'submitted' : {'$lt' : tic}})
after = self.client.db_query({'submitted' : {'$gte' : tic}})
self.assertEquals(len(before)+len(after),len(hist))
for b in before:
self.assertTrue(b['submitted'] < tic)
for a in after:
self.assertTrue(a['submitted'] >= tic)
same = self.client.db_query({'submitted' : tic})
for s in same:
self.assertTrue(s['submitted'] == tic)
def test_db_query_keys(self):
"""test extracting subset of record keys"""
found = self.client.db_query({'msg_id': {'$ne' : ''}},keys=['submitted', 'completed'])
for rec in found:
self.assertEquals(set(rec.keys()), set(['msg_id', 'submitted', 'completed']))
def test_db_query_default_keys(self):
"""default db_query excludes buffers"""
found = self.client.db_query({'msg_id': {'$ne' : ''}})
for rec in found:
keys = set(rec.keys())
self.assertFalse('buffers' in keys, "'buffers' should not be in: %s" % keys)
self.assertFalse('result_buffers' in keys, "'result_buffers' should not be in: %s" % keys)
def test_db_query_msg_id(self):
"""ensure msg_id is always in db queries"""
found = self.client.db_query({'msg_id': {'$ne' : ''}},keys=['submitted', 'completed'])
for rec in found:
self.assertTrue('msg_id' in rec.keys())
found = self.client.db_query({'msg_id': {'$ne' : ''}},keys=['submitted'])
for rec in found:
self.assertTrue('msg_id' in rec.keys())
found = self.client.db_query({'msg_id': {'$ne' : ''}},keys=['msg_id'])
for rec in found:
self.assertTrue('msg_id' in rec.keys())
def test_db_query_get_result(self):
"""pop in db_query shouldn't pop from result itself"""
self.client[:].apply_sync(lambda : 1)
found = self.client.db_query({'msg_id': {'$ne' : ''}})
rc2 = clientmod.Client(profile='iptest')
# If this bug is not fixed, this call will hang:
ar = rc2.get_result(self.client.history[-1])
ar.wait(2)
self.assertTrue(ar.ready())
ar.get()
rc2.close()
def test_db_query_in(self):
"""test db query with '$in','$nin' operators"""
hist = self.client.hub_history()
even = hist[::2]
odd = hist[1::2]
recs = self.client.db_query({ 'msg_id' : {'$in' : even}})
found = [ r['msg_id'] for r in recs ]
self.assertEquals(set(even), set(found))
recs = self.client.db_query({ 'msg_id' : {'$nin' : even}})
found = [ r['msg_id'] for r in recs ]
self.assertEquals(set(odd), set(found))
def test_hub_history(self):
hist = self.client.hub_history()
recs = self.client.db_query({ 'msg_id' : {"$ne":''}})
recdict = {}
for rec in recs:
recdict[rec['msg_id']] = rec
latest = datetime(1984,1,1)
for msg_id in hist:
rec = recdict[msg_id]
newt = rec['submitted']
self.assertTrue(newt >= latest)
latest = newt
ar = self.client[-1].apply_async(lambda : 1)
ar.get()
time.sleep(0.25)
self.assertEquals(self.client.hub_history()[-1:],ar.msg_ids)
def _wait_for_idle(self):
"""wait for an engine to become idle, according to the Hub"""
rc = self.client
# timeout 5s, polling every 100ms
qs = rc.queue_status()
for i in range(50):
if qs['unassigned'] or any(qs[eid]['tasks'] for eid in rc.ids):
time.sleep(0.1)
qs = rc.queue_status()
else:
break
# ensure Hub up to date:
self.assertEquals(qs['unassigned'], 0)
for eid in rc.ids:
self.assertEquals(qs[eid]['tasks'], 0)
def test_resubmit(self):
def f():
import random
return random.random()
v = self.client.load_balanced_view()
ar = v.apply_async(f)
r1 = ar.get(1)
# give the Hub a chance to notice:
self._wait_for_idle()
ahr = self.client.resubmit(ar.msg_ids)
r2 = ahr.get(1)
self.assertFalse(r1 == r2)
def test_resubmit_chain(self):
"""resubmit resubmitted tasks"""
v = self.client.load_balanced_view()
ar = v.apply_async(lambda x: x, 'x'*1024)
ar.get()
self._wait_for_idle()
ars = [ar]
for i in range(10):
ar = ars[-1]
            ar2 = self.client.resubmit(ar.msg_ids)
            ars.append(ar2)
[ ar.get() for ar in ars ]
def test_resubmit_header(self):
"""resubmit shouldn't clobber the whole header"""
def f():
import random
return random.random()
v = self.client.load_balanced_view()
v.retries = 1
ar = v.apply_async(f)
r1 = ar.get(1)
# give the Hub a chance to notice:
self._wait_for_idle()
ahr = self.client.resubmit(ar.msg_ids)
ahr.get(1)
time.sleep(0.5)
records = self.client.db_query({'msg_id': {'$in': ar.msg_ids + ahr.msg_ids}}, keys='header')
h1,h2 = [ r['header'] for r in records ]
for key in set(h1.keys()).union(set(h2.keys())):
if key in ('msg_id', 'date'):
self.assertNotEquals(h1[key], h2[key])
else:
self.assertEquals(h1[key], h2[key])
def test_resubmit_aborted(self):
def f():
import random
return random.random()
v = self.client.load_balanced_view()
# restrict to one engine, so we can put a sleep
# ahead of the task, so it will get aborted
eid = self.client.ids[-1]
v.targets = [eid]
sleep = v.apply_async(time.sleep, 0.5)
ar = v.apply_async(f)
ar.abort()
self.assertRaises(error.TaskAborted, ar.get)
# Give the Hub a chance to get up to date:
self._wait_for_idle()
ahr = self.client.resubmit(ar.msg_ids)
r2 = ahr.get(1)
def test_resubmit_inflight(self):
"""resubmit of inflight task"""
v = self.client.load_balanced_view()
ar = v.apply_async(time.sleep,1)
# give the message a chance to arrive
time.sleep(0.2)
ahr = self.client.resubmit(ar.msg_ids)
ar.get(2)
ahr.get(2)
def test_resubmit_badkey(self):
"""ensure KeyError on resubmit of nonexistant task"""
self.assertRaisesRemote(KeyError, self.client.resubmit, ['invalid'])
def test_purge_results(self):
# ensure there are some tasks
for i in range(5):
self.client[:].apply_sync(lambda : 1)
# Wait for the Hub to realise the result is done:
# This prevents a race condition, where we
# might purge a result the Hub still thinks is pending.
time.sleep(0.1)
rc2 = clientmod.Client(profile='iptest')
hist = self.client.hub_history()
ahr = rc2.get_result([hist[-1]])
ahr.wait(10)
self.client.purge_results(hist[-1])
newhist = self.client.hub_history()
self.assertEquals(len(newhist)+1,len(hist))
rc2.spin()
rc2.close()
def test_purge_all_results(self):
self.client.purge_results('all')
hist = self.client.hub_history()
self.assertEquals(len(hist), 0)
def test_spin_thread(self):
self.client.spin_thread(0.01)
ar = self.client[-1].apply_async(lambda : 1)
time.sleep(0.1)
self.assertTrue(ar.wall_time < 0.1,
"spin should have kept wall_time < 0.1, but got %f" % ar.wall_time
)
def test_stop_spin_thread(self):
self.client.spin_thread(0.01)
self.client.stop_spin_thread()
ar = self.client[-1].apply_async(lambda : 1)
time.sleep(0.15)
self.assertTrue(ar.wall_time > 0.1,
"Shouldn't be spinning, but got wall_time=%f" % ar.wall_time
)
def test_activate(self):
ip = get_ipython()
magics = ip.magics_manager.magics
self.assertTrue('px' in magics['line'])
self.assertTrue('px' in magics['cell'])
v0 = self.client.activate(-1, '0')
self.assertTrue('px0' in magics['line'])
self.assertTrue('px0' in magics['cell'])
self.assertEquals(v0.targets, self.client.ids[-1])
v0 = self.client.activate('all', 'all')
self.assertTrue('pxall' in magics['line'])
self.assertTrue('pxall' in magics['cell'])
self.assertEquals(v0.targets, 'all')
| cloud9ers/gurumate | environment/lib/python2.7/site-packages/IPython/parallel/tests/test_client.py | Python | lgpl-3.0 | 16,414 | 0.007006 |
# Generated by Django 2.2.16 on 2020-10-01 18:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0064_add_feature_future_default_true'),
]
operations = [
migrations.AlterField(
model_name='project',
name='privacy_level',
field=models.CharField(choices=[('public', 'Public'), ('protected', 'Protected'), ('private', 'Private')], default='public', help_text='Should the project dashboard be public?', max_length=20, verbose_name='Privacy Level'),
),
]
| rtfd/readthedocs.org | readthedocs/projects/migrations/0065_add_feature_future_default_true.py | Python | mit | 594 | 0.001684 |
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.2.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
| socialwifi/jsonapi-requests | setup.py | Python | bsd-3-clause | 1,500 | 0.001333 |
#!/usr/bin/env python
import markdown
from markdown.util import etree
from markdown.blockprocessors import BlockProcessor
import re
class VideoExtension(markdown.Extension):
def __init__(self, js_support=False, **kwargs):
markdown.Extension.__init__(self)
self.config = {
'dailymotion_width': ['480', 'Width for Dailymotion videos'],
'dailymotion_height': ['270', 'Height for Dailymotion videos'],
'vimeo_width': ['500', 'Width for Vimeo videos'],
'vimeo_height': ['281', 'Height for Vimeo videos'],
'yahoo_width': ['624', 'Width for Yahoo! videos'],
'yahoo_height': ['351', 'Height for Yahoo! videos'],
'youtube_width': ['560', 'Width for Youtube videos'],
'youtube_height': ['315', 'Height for Youtube videos'],
'ina_width': ['620', 'Width for INA videos'],
'ina_height': ['349', 'Height for INA videos'],
'jsfiddle': [False, ''],
'jsfiddle_width': ['560', 'Width for jsfiddle'],
'jsfiddle_height': ['560', 'Height for jsfiddle'],
}
self.config['youtube_short_width'] = self.config['youtube_width']
self.config['youtube_short_height'] = self.config['youtube_height']
# Override defaults with user settings
for key, value in kwargs.items():
self.setConfig(key, value)
if js_support:
self.setConfig("jsfiddle", True)
def add_inline(self, md, name, klass, pat):
RE = r'(^|\n)!\(' + pat + r'\)'
md.parser.blockprocessors.add("video-" + name,
klass(md, RE,
self.config["{}_width".format(name)][0],
self.config["{}_height".format(name)][0]),
"_begin")
def extendMarkdown(self, md, md_globals):
self.add_inline(md, 'dailymotion', Dailymotion,
r'https?://www\.dailymotion\.com/video/(?P<dailymotionid>[a-z0-9]+)(_[\w\-]*)?')
self.add_inline(md, 'vimeo', Vimeo,
r'https?://(www.|)vimeo\.com/(?P<vimeoid>\d+)\S*')
self.add_inline(md, 'yahoo', Yahoo,
r'https?://screen\.yahoo\.com/.+/?')
self.add_inline(md, 'youtube', Youtube,
r'https?://(www\.)?youtube\.com/watch\?\S*v=(?P<youtubeid>\S[^&/]+)'
r'(?P<channel>&ab_channel=[\w%]+)?')
self.add_inline(md, 'youtube_short', Youtube,
r'https?://youtu\.be/(?P<youtubeid>\S[^?&/]+)?')
self.add_inline(md, 'ina', Ina,
r'https?://www\.ina\.fr/video/(?P<inaid>[A-Z0-9]+)/([\w\-]*)\.html')
if self.config["jsfiddle"][0]:
self.add_inline(md, 'jsfiddle', JsFiddle,
r'https?://(www.|)jsfiddle\.net(/(?P<jsfiddleuser>\w+))?/'
r'(?P<jsfiddleid>\w+)(/(?P<jsfiddlerev>[0-9]+)|)/?')
class VideoBProcessor(BlockProcessor):
def __init__(self, md, patt, width, height):
BlockProcessor.__init__(self, md.parser)
self.md = md
self.width = width
self.height = height
self.RE = re.compile(patt)
def test(self, parent, block):
return bool(self.RE.search(block))
def run(self, parent, blocks):
m = self.RE.search(blocks[0])
el = self.handle_match(m)
if el is None:
return False
block = blocks.pop(0)
before = block[:m.start()]
after = block[m.end():]
if before: # pragma: no cover
# This should never occur because regex require that the expression is starting the block.
# Do not raise an exception because exception should never be generated.
self.md.parser.parseBlocks(parent, [before])
parent.append(el)
if after:
blocks.insert(0, after)
@staticmethod
def extract_url(m): # pragma: no cover
# Should be overridden in sub-class
return ""
def handle_match(self, m):
url = self.extract_url(m)
if url is None:
return None
return self.render_iframe(url, self.width, self.height)
@staticmethod
def render_iframe(url, width, height):
iframe = etree.Element('iframe')
iframe.set('width', width)
iframe.set('height', height)
iframe.set('src', url)
iframe.set('allowfullscreen', 'true')
iframe.set('frameborder', '0')
return iframe
class Dailymotion(VideoBProcessor):
@staticmethod
def extract_url(m):
return 'https://www.dailymotion.com/embed/video/%s' % m.group('dailymotionid')
class Vimeo(VideoBProcessor):
@staticmethod
def extract_url(m):
return 'https://player.vimeo.com/video/%s' % m.group('vimeoid')
class Yahoo(VideoBProcessor):
@staticmethod
def extract_url(m):
return m.string + '?format=embed&player_autoplay=false'
class Youtube(VideoBProcessor):
@staticmethod
def extract_url(m):
return 'https://www.youtube.com/embed/%s' % m.group('youtubeid')
class Ina(VideoBProcessor):
@staticmethod
def extract_url(m):
return 'http://player.ina.fr/player/embed/%s/1/1b0bd203fbcd702f9bc9b10ac3d0fc21/560/315/1/148db8' % m.group(
'inaid')
class JsFiddle(VideoBProcessor):
@staticmethod
def extract_url(m):
fields = (m.group('jsfiddleuser'), m.group('jsfiddleid'), m.group('jsfiddlerev'))
if fields[0] is not None and fields[2] is None:
# Only two part, revision could be in id pattern
try:
int(fields[1])
# It is a revision !
fields = (None, fields[0], fields[1])
except ValueError:
pass
if fields[0] is not None and fields[1] is not None and fields[2] is None:
# Base version link, should not be allowed because content can be changed externally
return None
base = "https://jsfiddle.net/{}/embedded/result,js,html,css/"
return base.format("/".join([t for t in fields if t is not None]))
def makeExtension(*args, **kwargs):
return VideoExtension(*args, **kwargs)
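if __name__ == "__main__": # pragma: no cover
    # Usage sketch (illustrative only; the URL and sizes below are assumptions,
    # not part of this module): render a bare video link into an embed iframe.
    html = markdown.markdown(
        "!(https://www.youtube.com/watch?v=aqz-KE-bpKQ)",
        extensions=[VideoExtension(youtube_width='640', youtube_height='360')])
    print(html)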
| Situphen/Python-ZMarkdown | markdown/extensions/video.py | Python | bsd-3-clause | 6,325 | 0.002213 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
kivyapp.py
This file defines the KivyApp class, which derives from kivy.app.App. The class is required to
bootstrap an application with Kivy. After instantiating the class, you must set a parent object (through
KivyApp.parent) to receive a callback for every function invoked on this class.
Public methods
(Several, but there is no need to call them directly)
Dependencies (within the project)
'''
from kivy import Config
from kivy.app import App
from kivy.clock import Clock
class KivyApp(App):
parent = None
def build(self):
if self.parent == None: raise KivyAppException("Variable parent not defined in KivyApp")
Config.set('kivy', 'exit_on_escape', 0)
Config.set('kivy', 'log_enable', 0)
Clock.schedule_interval(self.on_update, 0) #Schedule main update
def on_start(self):
return self.parent.on_start()
def on_stop(self):
return self.parent.on_stop()
def on_pause(self):
return self.parent.on_pause()
def on_resume(self):
return self.parent.on_resume()
def on_update(self, *args):
return self.parent.on_update()
def build_settings(self, settings):
self.parent.build_settings(settings)
def build_config(self, config):
self.parent.build_config(config)
def on_config_change(self, config, section, key, value):
self.parent.on_config_change(config, section, key, value)
def on_event(self, *args):
return self.parent.on_event(*args)
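# Usage sketch (hypothetical host object, not part of this module): the parent
# implements the callbacks above and then drives the app.
#
#     class AppHost(object):
#         def on_start(self): return None
#         def on_stop(self): return None
#         def on_pause(self): return True
#         def on_resume(self): return None
#         def on_update(self): return None
#         def on_event(self, *args): return None
#
#     app = KivyApp()
#     app.parent = AppHost()
#     app.run()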
class KivyAppException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value) | mscansian/SigmaWebPlus | plus/kivyapp.py | Python | gpl-3.0 | 1,840 | 0.014706 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A workflow that uses a simple Monte Carlo method to estimate π.
The algorithm computes the fraction of points drawn uniformly within the unit
square that also fall in the quadrant of the unit circle that overlaps the
square. A simple area calculation shows that this fraction should be π/4, so
we multiply our counts ratio by four to estimate π.
"""
from __future__ import absolute_import
import argparse
import json
import logging
import random
import apache_beam as beam
from apache_beam.io import WriteToText
from apache_beam.typehints import Any
from apache_beam.typehints import Iterable
from apache_beam.typehints import Tuple
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
@beam.typehints.with_output_types(Tuple[int, int, int])
@beam.typehints.with_input_types(int)
def run_trials(runs):
"""Run trials and return a 3-tuple representing the results.
Args:
runs: Number of trial runs to be executed.
Returns:
A 3-tuple (total trials, inside trials, 0).
The final zero is needed solely to make sure that the combine_results function
has same type for inputs and outputs (a requirement for combiner functions).
"""
inside_runs = 0
for _ in xrange(runs):
x = random.uniform(0, 1)
y = random.uniform(0, 1)
inside_runs += 1 if x * x + y * y <= 1.0 else 0
return runs, inside_runs, 0
@beam.typehints.with_output_types(Tuple[int, int, float])
@beam.typehints.with_input_types(Iterable[Tuple[int, int, Any]])
def combine_results(results):
"""Combiner function to sum up trials and compute the estimate.
Args:
results: An iterable of 3-tuples (total trials, inside trials, ignored).
Returns:
A 3-tuple containing the sum of total trials, sum of inside trials, and
the probability computed from the two numbers.
"""
# TODO(silviuc): Do we guarantee that argument can be iterated repeatedly?
# Should document one way or the other.
total, inside = sum(r[0] for r in results), sum(r[1] for r in results)
return total, inside, 4 * float(inside) / total
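# Quick sanity check (plain Python, outside any pipeline; the numbers are
# illustrative): two batches of 1000 throws with 785 landing inside each give
#   combine_results([(1000, 785, 0), (1000, 785, 0)]) == (2000, 1570, 3.14)
# since 4 * 1570 / 2000 = 3.14.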
class JsonCoder(object):
"""A JSON coder used to format the final result."""
def encode(self, x):
return json.dumps(x)
class EstimatePiTransform(beam.PTransform):
"""Runs 10M trials, and combine the results to estimate pi."""
def __init__(self, tries_per_work_item=100000):
self.tries_per_work_item = tries_per_work_item
def expand(self, pcoll):
# A hundred work items of a hundred thousand tries each.
return (pcoll
| 'Initialize' >> beam.Create(
[self.tries_per_work_item] * 100).with_output_types(int)
| 'Run trials' >> beam.Map(run_trials)
| 'Sum' >> beam.CombineGlobally(combine_results).without_defaults())
def run(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('--output',
required=True,
help='Output file to write results to.')
known_args, pipeline_args = parser.parse_known_args(argv)
# We use the save_main_session option because one or more DoFn's in this
# workflow rely on global context (e.g., a module imported at module level).
pipeline_options = PipelineOptions(pipeline_args)
pipeline_options.view_as(SetupOptions).save_main_session = True
p = beam.Pipeline(options=pipeline_options)
(p # pylint: disable=expression-not-assigned
| EstimatePiTransform()
| WriteToText(known_args.output, coder=JsonCoder()))
# Actually run the pipeline (all operations above are deferred).
p.run()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
| dhalperi/incubator-beam | sdks/python/apache_beam/examples/complete/estimate_pi.py | Python | apache-2.0 | 4,482 | 0.006475 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
VERSION = "0.3.7"
| lmazuel/azure-sdk-for-python | azure-keyvault/azure/keyvault/version.py | Python | mit | 493 | 0.002028 |
import numpy
import scipy
import matplotlib.pyplot as pyplot
import time
numpy.random.seed(int(time.time()))
PART_1_COEFFICIENTS = numpy.array([-0.1, 4.0, -0.1, 10.0], float)
PART_1_X_LIMITS = [-10.0, 25.0]
def plot_form(axis_handle, x_limit=None, title="", x_label="x", y_label="f(x)"):
if x_limit is not None:
axis_handle.set_xlim(x_limit)
axis_handle.set_title(title)
axis_handle.set_xlabel(x_label)
axis_handle.set_ylabel(y_label)
def part_1_polynomial(x_input):
return numpy.polyval(PART_1_COEFFICIENTS, x_input)
def part_2_plot():
x_limit_min = PART_1_X_LIMITS[0]
x_limit_max = PART_1_X_LIMITS[1]
temp = numpy.linspace(x_limit_min, x_limit_max, 351, dtype=float)
function_handle_1, axis_handle_1 = pyplot.subplots()
axis_handle_1.plot(temp, part_1_polynomial(temp), "b-")
plot_form(axis_handle_1, PART_1_X_LIMITS, "Original Polynomial")
function_handle_1.savefig("figures/hw0_original_polynomial.pdf", bbox_inches="tight")
def part_3():
x_limit_min = PART_1_X_LIMITS[0]
x_limit_max = PART_1_X_LIMITS[1]
bin_width = (x_limit_max-x_limit_min) / 14.0
x_bin = numpy.arange(x_limit_min, x_limit_max, bin_width, float)
y_bin = part_1_polynomial(x_bin)
function_handle, axis_handle = pyplot.subplots()
axis_handle.bar(x_bin + bin_width/2.0, y_bin, width=bin_width, edgecolor="k")
plot_form(axis_handle, PART_1_X_LIMITS, "Discretized Bins")
function_handle.savefig("figures/hw0_discretized_bins.pdf", bbox_inches="tight")
def part_4():
x_limit_min = PART_1_X_LIMITS[0]
x_limit_max = PART_1_X_LIMITS[1]
bin_width = (x_limit_max - x_limit_min) / 14.0
x_bin = numpy.arange(x_limit_min, x_limit_max, bin_width, float)
y_bin = part_1_polynomial(x_bin)
y_bin_normalized = y_bin / y_bin.sum()
function_handle, axis_handle = pyplot.subplots()
axis_handle.bar(x_bin + bin_width / 2.0, y_bin_normalized, width=bin_width, edgecolor="k")
plot_form(axis_handle, PART_1_X_LIMITS, "Discretized Bins (Normalized) sum=%s" % y_bin_normalized.sum(), y_label="p(k)")
function_handle.savefig("figures/hw0_discretized_bins_normalized.pdf", bbox_inches="tight")
def part_5_1():
num_samples = 500
x_rand_values = numpy.arange(1, num_samples+1, 1, int)
y_rand_values = numpy.random.random(num_samples)
function_handle, axis_handle = pyplot.subplots()
pyplot.plot(x_rand_values, y_rand_values, "k+")
plot_form(axis_handle, x_limit=[1, num_samples], title="%s Samples, Uniformly Distributed" % num_samples)
function_handle.savefig("figures/hw0_%s_random_samples.pdf" % num_samples, bbox_inches="tight")
return (x_rand_values, y_rand_values)
def part_5_2(vals):
num_samples = 500
x_limit_min = PART_1_X_LIMITS[0]
x_limit_max = PART_1_X_LIMITS[1]
bin_width = (x_limit_max - x_limit_min) / 14.0
x_bin = numpy.arange(x_limit_min, x_limit_max, bin_width, float)
x_rand_values = vals[0]
y_rand_values = vals[1]
    # Map uniform [0, 1) samples onto the x-range of the bins.
    y_random_scaled = y_rand_values * (x_limit_max - x_limit_min) + x_limit_min
    function_handle, axis_handle = pyplot.subplots()
    pyplot.plot(x_rand_values, y_random_scaled, "k+")
    for i in range(0, len(x_bin)):
        axis_handle.plot([1, num_samples], [x_bin[i], x_bin[i]])
    plot_form(axis_handle, [1, num_samples], "Random Samples Mapped to X Ranges Of Bins")
    function_handle.savefig("figures/hw0_random_bins_to_ranges.pdf", bbox_inches="tight")
    return y_random_scaled
def part_5_3(y_random_scaled):
    x_limit_min = PART_1_X_LIMITS[0]
    x_limit_max = PART_1_X_LIMITS[1]
    bin_width = (x_limit_max - x_limit_min) / 14.0
    x_bin = numpy.arange(x_limit_min, x_limit_max, bin_width, float)
    # Tally the scaled uniform samples into bins; sampling x uniformly ignores
    # the bin heights, which is why this histogram is the "incorrect" one.
    y_count_incorrect = numpy.zeros(x_bin.shape)
    for i in range(0, len(y_random_scaled)):
        for j in range(len(x_bin), 0, -1):
            if y_random_scaled[i] > x_bin[j-1]:
                y_count_incorrect[j-1] += 1
                break
    function_handle, axis_handle = pyplot.subplots()
    pyplot.plot(x_bin + bin_width/2.0, y_count_incorrect, "k+")
    plot_form(axis_handle, PART_1_X_LIMITS, "Samples per bin (incorrect)", y_label="samples")
    function_handle.savefig("figures/hw0_samples_per_bin_incorrect.pdf", bbox_inches="tight")
def part_5_4(vals):
    num_samples = 500
    x_limit_min = PART_1_X_LIMITS[0]
    x_limit_max = PART_1_X_LIMITS[1]
    bin_width = (x_limit_max - x_limit_min) / 14.0
    x_bin = numpy.arange(x_limit_min, x_limit_max, bin_width, float)
    y_bin_normalized = part_1_polynomial(x_bin)
    y_bin_normalized = y_bin_normalized / y_bin_normalized.sum()
    x_rand_values, y_rand_values = vals
    # Accumulate the normalized bin heights into a discrete CDF.
    y_bin_cdf = y_bin_normalized.copy()
    i = 0
    while i < len(y_bin_cdf) - 1:
        i += 1
        y_bin_cdf[i] += y_bin_cdf[i-1]
    function_handle, axis_handle = pyplot.subplots()
    axis_handle.plot(x_rand_values, y_rand_values, "k+")
    for i in range(0, len(y_bin_cdf)):
        axis_handle.plot([1, num_samples], [y_bin_cdf[i], y_bin_cdf[i]])
    axis_handle.set_title("Dividing up the samples according to bin height")
    function_handle.savefig("figures/hw0_correct_sample_division.pdf", bbox_inches="tight")
    # Inverse-transform sampling: each uniform sample falls into the first bin
    # whose cumulative probability exceeds it, so counts follow the bin heights.
    y_count_correct = numpy.zeros(x_bin.shape)
    for i in range(0, len(y_rand_values)):
        for j in range(len(y_bin_cdf)):
            if y_rand_values[i] < y_bin_cdf[j]:
                y_count_correct[j] += 1
                break
    function_handle_1, axis_handle_1 = pyplot.subplots()
    axis_handle_1.bar(x_bin + bin_width/2.0, y_count_correct, width=bin_width, edgecolor="k")
    plot_form(axis_handle_1, PART_1_X_LIMITS, "Samples per bin (correct)", y_label="samples")
    function_handle_1.savefig("figures/hw0_samples_per_bin_correct.pdf", bbox_inches="tight")
def real_part_2():
pass
if __name__ == '__main__':
# part_2_plot()
# part_3()
# part_4()
# vals = part_5_1()
    # y_random_scaled = part_5_2(vals)
    # part_5_3(y_random_scaled)
    # part_5_4(vals)
real_part_2()
pyplot.show()
| caperren/Archives | OSU Coursework/ROB 456 - Intelligent Robotics/Homework 0 - Robotics Probabilities Examples/HW0.py | Python | gpl-3.0 | 5,437 | 0.003862 |
import sys, os
import re
import unittest
import traceback
import pywin32_testutil
# A list of demos that depend on user-interface of *any* kind. Tests listed
# here are not suitable for unattended testing.
ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo
win32gui_dialog win32gui_menu win32gui_taskbar
win32rcparser_demo winprocess win32console_demo
win32gui_devicenotify
NetValidatePasswordPolicy""".split()
# Other demos known as 'bad' (or at least highly unlikely to work)
# cerapi: no CE module is built (CE via pywin32 appears dead)
# desktopmanager: hangs (well, hangs for 60secs or so...)
bad_demos = "cerapi desktopmanager win32comport_demo".split()
argvs = {
"rastest": ("-l",),
}
# re to pull apart an exception line into the exception type and the args.
re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$")
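# For example, a captured traceback line such as
#   pywintypes.error: (5, 'RegOpenKeyEx', 'Access is denied.')
# splits into the dotted exception name and the repr() of its args (the
# concrete values here are illustrative only).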
def find_exception_in_output(data):
have_traceback = False
for line in data.splitlines():
line = line.decode('ascii') # not sure what the correct encoding is...
if line.startswith("Traceback ("):
have_traceback = True
continue
if line.startswith(" "):
continue
if have_traceback:
# first line not starting with a space since the traceback.
# must be the exception!
m = re_exception.match(line)
if m:
exc_type, args = m.groups()
# get hacky - get the *real* exception object from the name.
bits = exc_type.split(".", 1)
if len(bits) > 1:
mod = __import__(bits[0])
exc = getattr(mod, bits[1])
else:
# probably builtin
exc = eval(bits[0])
else:
# hrm - probably just an exception with no args
try:
exc = eval(line.strip())
args = "()"
except:
return None
# try and turn the args into real args.
try:
args = eval(args)
except:
pass
if not isinstance(args, tuple):
args = (args,)
# try and instantiate the exception.
try:
ret = exc(*args)
except:
ret = None
return ret
# apparently not - keep looking...
have_traceback = False
class TestRunner:
def __init__(self, argv):
self.argv = argv
def __call__(self):
try:
import subprocess
p = subprocess.Popen(self.argv,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, _ = p.communicate()
rc = p.returncode
except ImportError:
# py2.3?
fin, fout, ferr = os.popen3(" ".join(self.argv))
fin.close()
output = fout.read() + ferr.read()
fout.close()
rc = ferr.close()
if rc:
base = os.path.basename(self.argv[1])
# See if we can detect and reconstruct an exception in the output.
reconstituted = find_exception_in_output(output)
if reconstituted is not None:
raise reconstituted
raise AssertionError("%s failed with exit code %s. Output is:\n%s" % (base, rc, output))
def get_demo_tests():
import win32api
ret = []
demo_dir = os.path.abspath(os.path.join(os.path.dirname(win32api.__file__), "Demos"))
assert os.path.isdir(demo_dir), demo_dir
for name in os.listdir(demo_dir):
base, ext = os.path.splitext(name)
if ext != ".py" or base in ui_demos or base in bad_demos:
continue
argv = (sys.executable, os.path.join(demo_dir, base+".py")) + \
argvs.get(base, ())
ret.append(unittest.FunctionTestCase(TestRunner(argv), description="win32/demos/" + name))
return ret
def import_all():
# Some hacks for import order - dde depends on win32ui
try:
import win32ui
except ImportError:
pass # 'what-ev-a....'
import win32api
dir = os.path.dirname(win32api.__file__)
num = 0
is_debug = os.path.basename(win32api.__file__).endswith("_d")
for name in os.listdir(dir):
base, ext = os.path.splitext(name)
if (ext==".pyd") and \
name != "_winxptheme.pyd" and \
(is_debug and base.endswith("_d") or \
not is_debug and not base.endswith("_d")):
try:
__import__(base)
except:
print "FAILED to import", name
raise
num += 1
def suite():
# Loop over all .py files here, except me :)
try:
me = __file__
except NameError:
me = sys.argv[0]
me = os.path.abspath(me)
files = os.listdir(os.path.dirname(me))
suite = unittest.TestSuite()
suite.addTest(unittest.FunctionTestCase(import_all))
for file in files:
base, ext = os.path.splitext(file)
if ext=='.py' and os.path.basename(me) != file:
try:
mod = __import__(base)
except:
print "FAILED to import test module %r" % base
traceback.print_exc()
continue
if hasattr(mod, "suite"):
test = mod.suite()
else:
test = unittest.defaultTestLoader.loadTestsFromModule(mod)
suite.addTest(test)
for test in get_demo_tests():
suite.addTest(test)
return suite
class CustomLoader(pywin32_testutil.TestLoader):
def loadTestsFromModule(self, module):
return self.fixupTestsForLeakTests(suite())
if __name__=='__main__':
pywin32_testutil.testmain(testLoader=CustomLoader())
| JulienMcJay/eclock | windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/test/testall.py | Python | gpl-2.0 | 5,957 | 0.004029 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.discord.hooks.discord_webhook`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.discord.hooks.discord_webhook import DiscordWebhookHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.discord.hooks.discord_webhook`.",
DeprecationWarning,
stacklevel=2,
)
| airbnb/airflow | airflow/contrib/hooks/discord_webhook_hook.py | Python | apache-2.0 | 1,175 | 0.001702 |
from django_filters import filters
from djng.forms import fields
class Filter(filters.Filter):
field_class = fields.Field
class CharFilter(filters.CharFilter):
field_class = fields.CharField
class BooleanFilter(filters.BooleanFilter):
field_class = fields.NullBooleanField
class ChoiceFilter(filters.ChoiceFilter):
field_class = fields.ChoiceField
class TypedChoiceFilter(filters.TypedChoiceFilter):
field_class = fields.TypedChoiceField
class UUIDFilter(filters.UUIDFilter):
field_class = fields.UUIDField
class MultipleChoiceFilter(filters.MultipleChoiceFilter):
field_class = fields.MultipleChoiceField
class TypedMultipleChoiceFilter(filters.TypedMultipleChoiceFilter):
field_class = fields.TypedMultipleChoiceField
class DateFilter(filters.DateFilter):
field_class = fields.DateField
class DateTimeFilter(filters.DateTimeFilter):
field_class = fields.DateTimeField
class TimeFilter(filters.TimeFilter):
field_class = fields.TimeField
class DurationFilter(filters.DurationFilter):
field_class = fields.DurationField
class ModelChoiceFilter(filters.ModelChoiceFilter):
field_class = fields.ModelChoiceField
class ModelMultipleChoiceFilter(filters.ModelMultipleChoiceFilter):
field_class = fields.ModelMultipleChoiceField
class NumberFilter(filters.NumberFilter):
field_class = fields.DecimalField
class NumericRangeFilter(filters.NumericRangeFilter):
"""
TODO: we first must redeclare the RangeField
"""
| awesto/django-shop | shop/filters.py | Python | bsd-3-clause | 1,512 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partners Persons Management',
'version': '1.0',
'category': 'Tools',
'sequence': 14,
'summary': '',
'description': """
Partners Persons Management
===========================
OpenERP considers a partner a person when "is_company" is not set to true; those partners can now have:
----------------------------------------------------------------------------------------------------------
* First Name and Last Name
* Birthdate
* Sex
* Mother and Father
* Children
* Age (functional field)
* Nationality
* Husband/Wife
* National Identity
* Passport
* Marital Status
It also adds a configuration menu for choosing which fields you want to see.
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'base',
],
'data': [
'res_partner_view.xml',
'res_config_view.xml',
'security/partner_person_security.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| maljac/odoo-addons | partner_person/__openerp__.py | Python | agpl-3.0 | 2,095 | 0.000955 |
# -*- coding: UTF-8 -*-
"""
Example Forms for Microformats.
Copyright (c) 2009 Nicholas H.Tollervey (http://ntoll.org/contact)
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of ntoll.org nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# Django
from django import forms
from django.forms.util import ErrorList
from django.utils.translation import ugettext as _
# Microformats
from microformats.models import geo, hCard, adr, adr_type, org, email,\
email_type, tel, tel_type, hCalendar, hReview, hListing, hFeed,\
hEntry, hNews
class GeoForm(forms.ModelForm):
"""
A ModelForm for the geo microformat that makes sure the degrees decimal
fields are within the valid ranges:
Latitude: ±90°
Longitude: ±180°
"""
def clean_latitude(self):
"""
±90
"""
value = self.cleaned_data['latitude']
if value < -90.0 or value > 90.0:
raise forms.ValidationError(_(u'Latitude is not within the valid'
u' range (±90)'))
return value
def clean_longitude(self):
"""
±180
"""
value = self.cleaned_data['longitude']
if value < -180.0 or value > 180.0:
raise forms.ValidationError(_(u'Longitude is not within the valid'
u' range (±180)'))
return value
class Meta:
model = geo
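# Range-check sketch (hypothetical data): values outside the ±90/±180 ranges
# fail validation in the clean_* methods above.
#   form = GeoForm({'latitude': '91.0', 'longitude': '0.0'})
#   form.is_valid()            # False
#   form.errors['latitude']    # the out-of-range message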
class LocationAwareForm(forms.ModelForm):
"""
Used in concert with models derived from the LocationAwareMicroformat model.
This form makes sure that the geo information is valid.
"""
def clean(self):
"""
        Checks that if one of longitude or latitude is supplied, the other
        is supplied too.
"""
super(LocationAwareForm, self).clean()
cleaned_data = self.cleaned_data
        # Make sure we have both a longitude and a latitude. Note that 0.0 is
        # a valid coordinate, so test for presence rather than truthiness.
        lat = cleaned_data.get("latitude", None)
        long = cleaned_data.get("longitude", None)
        if long is not None and lat is None:
            self._errors['longitude'] = ErrorList([_("You must supply both a"
                    " longitude and latitude")])
            del cleaned_data['longitude']
        if lat is not None and long is None:
            self._errors['latitude'] = ErrorList([_("You must supply both a"
                    " longitude and latitude")])
            del cleaned_data['latitude']
return cleaned_data
def clean_latitude(self):
"""
±90
"""
value = self.cleaned_data.get('latitude', False)
if value:
if value < -90.0 or value > 90.0:
raise forms.ValidationError(_(u'Latitude is not within the valid'
u' range (±90)'))
return value
def clean_longitude(self):
"""
±180
"""
value = self.cleaned_data.get('longitude', False)
if value:
if value < -180.0 or value > 180.0:
raise forms.ValidationError(_(u'Longitude is not within the valid'
u' range (±180)'))
return value
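# Pairing-rule sketch (hypothetical subclass and data): clean() above rejects
# a latitude without a longitude and vice versa.
#   class PlaceForm(LocationAwareForm):
#       class Meta:
#           model = hCard
#   PlaceForm({'latitude': '10.0'}).is_valid()   # False - longitude missing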
class hCardForm(LocationAwareForm):
"""
A simple form to use for gathering basic information for an hCard. Use in
conjunction with the AdrForm, OrgForm, EmailForm and TelForm to build
something more complex.
Inspired by:
http://microformats.org/code/hcard/creator
"""
def clean(self):
"""
        Checks that you have something useful to use as fn (formatted name)
"""
super(hCardForm, self).clean()
cleaned_data = self.cleaned_data
# Some minimum fields needed to create a fn
org = cleaned_data.get('org', False)
given_name = cleaned_data.get('given_name', False)
family_name = cleaned_data.get('family_name', False)
nickname = cleaned_data.get('nickname', False)
# What the following if statement means:
        # if the user hasn't supplied an organization name, a nickname or a
        # given name, then raise an error
if not (org or nickname or given_name):
raise forms.ValidationError(_("You must supply a name. "\
" (given name, family name, nickname"\
" or an organization name)"))
return cleaned_data
class Meta:
model = hCard
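# fn-rule sketch (hypothetical data): clean() above requires at least one of
# org, nickname or given_name before an fn can be built.
#   hCardForm({'family_name': 'Smith'}).is_valid()   # False - nothing usable as fn
#   hCardForm({'given_name': 'Jane'})                # passes the fn check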
class hCalForm(LocationAwareForm):
"""
A simple form for gathering information for an hCalendar event. Inspired by
the form found here:
http://microformats.org/code/hcalendar/creator
"""
class Meta:
model = hCalendar
exclude = [
'attendees',
'contacts',
'organizers',
]
class hReviewForm(LocationAwareForm):
"""
A simple form for gathering information for an hReview microformat. Inspired
by the form found here:
http://microformats.org/code/hreview/creator
"""
class Meta:
model = hReview
class hListingForm(LocationAwareForm):
"""
    A simple form for gathering information for an hListing microformat.
"""
class Meta:
model = hListing
class hFeedForm(forms.ModelForm):
"""
A simple form for gathering information for the hFeed part of the hAtom
microformat.
"""
class Meta:
model = hFeed
class hEntryForm(forms.ModelForm):
"""
A simple form for gathering information for the hEntry part of the hAtom
microformat.
"""
class Meta:
model = hEntry
class hNewsForm(LocationAwareForm):
"""
A simple form for gathering information for the hNews part of the hEntry
microformat.
"""
class Meta:
model = hNews
class AdrForm(forms.ModelForm):
"""
A simple form to use for gathering basic information for an adr microformat.
Use in conjunction with the hCardForm, OrgForm, EmailForm and TelForm to
build something more complex.
Inspired by:
http://microformats.org/code/hcard/creator
"""
def __init__(self, *args, **kwargs):
super(AdrForm, self).__init__(*args, **kwargs)
if 'types' in self.fields:
self.fields['types'].widget = forms.CheckboxSelectMultiple()
self.fields['types'].label = _('Address Type')
            self.fields['types'].help_text = _('Please select as many as apply')
self.fields['types'].queryset = adr_type.objects.all()
class Meta:
model = adr
exclude = ['hcard', 'post_office_box']
class OrgForm(forms.ModelForm):
"""
A simple form to use for gathering basic information for an organisation
associated with an hCard. Use in conjunction with the AdrForm, EmailForm
and TelForm to build something more complex.
Inspired by:
http://microformats.org/code/hcard/creator
"""
class Meta:
model = org
exclude = ['hcard']
class EmailForm(forms.ModelForm):
"""
A simple form to use for gathering basic email information for an hCard.
Use in conjunction with the hCardForm, AdrForm, OrgForm and TelForm to
build something more complex.
Inspired by:
http://microformats.org/code/hcard/creator
"""
def __init__(self, *args, **kwargs):
super(EmailForm, self).__init__(*args, **kwargs)
self.fields['types'].widget = forms.CheckboxSelectMultiple()
self.fields['types'].label = _('Email Type')
        self.fields['types'].help_text = _('Please select as many as apply')
self.fields['types'].queryset = email_type.objects.all()
class Meta:
model = email
exclude = ['hcard']
class TelForm(forms.ModelForm):
"""
A simple form to use for gathering basic telephone information for an hCard.
Use in conjunction with the hCardForm, AdrForm, OrgForm and EmailForm to
build something more complex.
Inspired by:
http://microformats.org/code/hcard/creator
"""
def __init__(self, *args, **kwargs):
super(TelForm, self).__init__(*args, **kwargs)
self.fields['types'].widget = forms.CheckboxSelectMultiple()
self.fields['types'].label = _('Telephone Type')
        self.fields['types'].help_text = _('Please select as many as apply')
self.fields['types'].queryset = tel_type.objects.all()
class Meta:
model = tel
exclude = ['hcard']
| poswald/microformats | microformats/forms.py | Python | bsd-3-clause | 9,632 | 0.005613 |
import unittest
import os
from unipass.controller import controller
from unipass.model.models import initdb
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class ControllerTest(unittest.TestCase):
def setUp(self):
initdb()
def tearDown(self):
try:
os.remove('sqlite3.db')
os.remove('unipass_export.json')
except OSError:
pass
def test_createAdmin_True(self):
self.assertTrue(controller.create_user('john', 'password'))
def test_addService_True(self):
self.assertTrue(controller.add_service('facebook', 'jherrin@gmail.com', 'password', 'facebook acc'))
def test_login_True(self):
self.assertTrue(controller.create_user('admin', 'password'))
self.assertTrue(controller.login('admin', 'password'))
def test_deleteEntry_True(self):
self.assertTrue(controller.add_service('facebook', 'jherrin@gmail.com', 'password', 'facebook acc'))
serv = controller.find_by_service('facebook')
self.assertTrue(controller.delete_service(serv.uuid))
def test_exportData_True(self):
self.assertTrue(controller.create_user('john', 'password'))
self.assertTrue(controller.export_data())
def test_exportData_False(self):
self.assertTrue(controller.export_data())
def test_importData_False(self):
self.assertFalse(controller.import_data(path=BASE_DIR+'/broken.json'))
def test_importData_True(self):
self.assertTrue(controller.import_data(path=BASE_DIR+'/correct.json'))
def test_generatePassword_True(self):
self.assertTrue(len(controller.generate_password(
True, True, True, True, 10)) == 10)
| jherrlin/unipass | tests/test_controller.py | Python | mit | 1,722 | 0.002323 |
import unittest
from PySide.QtCore import QSizeF
from PySide.QtGui import QGraphicsProxyWidget, QSizePolicy, QPushButton, QGraphicsScene, QGraphicsView
from helper import TimedQApplication
def createItem(minimum, preferred, maximum, name):
w = QGraphicsProxyWidget()
w.setWidget(QPushButton(name))
w.setMinimumSize(minimum)
w.setPreferredSize(preferred)
w.setMaximumSize(maximum)
w.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
return w
class TestBug972 (TimedQApplication):
    # Test if the function QGraphicsProxyWidget.setWidget has the correct behavior
def testIt(self):
scene = QGraphicsScene()
minSize = QSizeF(30, 100)
prefSize = QSizeF(210, 100)
maxSize = QSizeF(300, 100)
a = createItem(minSize, prefSize, maxSize, "A")
b = createItem(minSize, prefSize, maxSize, "B")
c = createItem(minSize, prefSize, maxSize, "C")
d = createItem(minSize, prefSize, maxSize, "D")
view = QGraphicsView(scene)
view.show()
self.app.exec_()
if __name__ == "__main__":
unittest.main()
| M4rtinK/pyside-android | tests/QtGui/bug_972.py | Python | lgpl-2.1 | 1,124 | 0.003559 |
import logging
import datetime
import pandas as pd
import numpy as np
import os
import boto3
from dataactcore.config import CONFIG_BROKER
from dataactcore.interfaces.db import GlobalDB
from dataactcore.logging import configure_logging
from dataactcore.models.stagingModels import DetachedAwardProcurement
from dataactcore.models.jobModels import Submission # noqa
from dataactcore.models.userModel import User # noqa
from dataactvalidator.health_check import create_app
from dataactvalidator.scripts.loader_utils import trim_item
from dataactvalidator.filestreaming.csvReader import CsvReader
from dataactvalidator.filestreaming.csvLocalWriter import CsvLocalWriter
logger = logging.getLogger(__name__)
def get_delete_file():
""" Read the file into a pandas object """
file_name = 'IDV_Deletes.csv'
if CONFIG_BROKER["use_aws"]:
reader = CsvReader()
pa_file = open(reader.get_filename(CONFIG_BROKER['aws_region'], CONFIG_BROKER['sf_133_bucket'], file_name),
encoding='utf-8')
else:
base_path = os.path.join(CONFIG_BROKER["path"], "dataactvalidator", "config")
pa_file = os.path.join(base_path, file_name)
return pa_file
def convert_date(date_string):
""" Converts the date to the same format as our last_modified column """
delete_date = datetime.datetime.strptime(date_string, '%m/%d/%y %I:%M %p')
date_string = delete_date.strftime('%Y-%m-%d %H:%M:%S')
return date_string
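# Illustrative conversion (hypothetical input):
#   convert_date('1/2/19 3:04 PM')  ->  '2019-01-02 15:04:00'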
def convert_unique_key(unique_key):
""" Converts the unique key given by the file into the format we use for our unique key """
unique_key_array = unique_key.split(':')
unique_key = unique_key_array[2] + '_-none-_' + unique_key_array[0] + '_' + unique_key_array[1] + '_-none-_-none-'
return unique_key
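# Illustrative conversion (hypothetical three-part key):
#   convert_unique_key('0001:02:ABCD123')
#   ->  'ABCD123_-none-_0001_02_-none-_-none-'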
def clean_delete_data(data):
""" Clean up the data so it's easier to process """
# Shouldn't be any extra rows, but just in case, drop all with no contents
data.dropna(inplace=True, how='all')
# replace NaN
data = data.replace(np.nan, '', regex=True)
# trim all columns
data = data.applymap(lambda x: trim_item(x) if len(str(x).strip()) else None)
# Convert all dates to the same format as we have in the DB
data['delete_date'] = data['delete_date'].map(lambda x: convert_date(x) if x else None)
# Convert all unique keys to the same format as we have in the DB
data['primary_key'] = data['primary_key'].map(lambda x: convert_unique_key(x) if x else None)
return data
def get_deletes(sess, data):
""" Gets all the values that actually need to be deleted from our database """
model = DetachedAwardProcurement
delete_dict = {}
delete_list = []
row_count = len(data.index)
for index, row in data.iterrows():
unique_string = row['primary_key']
last_modified = row['delete_date']
# Keeping track so we know it isn't spinning its wheels forever
if index % 500 == 0:
logger.info("Checking delete record {} of {}.".format(index, row_count))
existing_item = sess.query(model.last_modified, model.detached_award_procurement_id,
model.detached_award_proc_unique). \
filter_by(detached_award_proc_unique=unique_string).one_or_none()
if existing_item and last_modified > existing_item.last_modified:
delete_list.append(existing_item.detached_award_procurement_id)
delete_dict[existing_item.detached_award_procurement_id] = existing_item.detached_award_proc_unique
return delete_list, delete_dict
def delete_records(sess, delete_list, delete_dict):
""" Delete the records listed and create a file for website deletion. """
# only need to delete values if there's something to delete
if delete_list:
sess.query(DetachedAwardProcurement). \
filter(DetachedAwardProcurement.detached_award_procurement_id.in_(delete_list)). \
delete(synchronize_session=False)
# writing the file
seconds = int((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds())
now = datetime.datetime.now()
file_name = now.strftime('%m-%d-%Y') + "_delete_records_IDV_" + str(seconds) + ".csv"
headers = ["detached_award_procurement_id", "detached_award_proc_unique"]
if CONFIG_BROKER["use_aws"]:
s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
# add headers
contents = bytes((",".join(headers) + "\n").encode())
for key, value in delete_dict.items():
contents += bytes('{},{}\n'.format(key, value).encode())
s3client.put_object(Bucket=CONFIG_BROKER['fpds_delete_bucket'], Key=file_name, Body=contents)
else:
with CsvLocalWriter(file_name, headers) as writer:
for key, value in delete_dict.items():
writer.write([key, value])
writer.finish_batch()
def main():
sess = GlobalDB.db().session
start = datetime.datetime.now()
logger.info("FPDS IDV delete started")
# get and read the file
del_file = get_delete_file()
data = pd.read_csv(del_file, dtype=str, encoding='utf_8_sig')
# Clean up the data so it's usable
data = clean_delete_data(data)
# Gather list of records to delete
gather_start = datetime.datetime.now()
logger.info("Starting gathering of records to delete.")
delete_list, delete_dict = get_deletes(sess, data)
gather_end = datetime.datetime.now()
logger.info("Finished gathering records in {} seconds. Total records to delete: {}".
format(gather_end - gather_start, len(delete_list)))
# Delete records
logger.info("Deleting records")
delete_records(sess, delete_list, delete_dict)
sess.commit()
end = datetime.datetime.now()
logger.info("FPDS IDV delete finished in %s seconds", end - start)
if __name__ == '__main__':
configure_logging()
with create_app().app_context():
main()
| fedspendingtransparency/data-act-broker-backend | dataactcore/scripts/delete_deleted_fpds_idv.py | Python | cc0-1.0 | 6,000 | 0.003 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('tags', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Restaurant',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=128)),
('slug', models.SlugField(default=b'', max_length=128)),
('note', models.IntegerField(max_length=2)),
('date', models.DateTimeField(auto_now_add=True, verbose_name=b"Date d'ajout")),
('reservation', models.BooleanField(default=False)),
('description', models.TextField()),
('phone', models.CharField(default=b'', max_length=16)),
('website', models.CharField(default=b'', max_length=128)),
('address', models.CharField(default=b'', max_length=128)),
('postalcode', models.CharField(default=b'', max_length=16)),
('city', models.CharField(default=b'', max_length=128)),
('tags', models.ManyToManyField(to='tags.Tag')),
],
options={
},
bases=(models.Model,),
),
]
| aroquemaurel/Cuis-In | cuisin/restaurant/migrations/0001_initial.py | Python | gpl-2.0 | 1,387 | 0.001442 |
import Gears as gears
from .. import *
from .SingleShape import *
class FullfieldGradient(SingleShape):
def boot(self,
*,
duration : 'Stimulus time in frames (unless superseded by duration_s).'
= 1,
duration_s : 'Stimulus time in seconds (takes precendece over duration given in frames).'
= 0,
name : 'Stimulus name to display in sequence overview plot.'
= 'gradient',
toneMapping : 'Tone mapping component (Tone.*)'
= Tone.UiConfigured(),
**bargs : Pif.Gradient
):
super().boot(name=name, duration=duration, duration_s=duration_s,
pattern = Pif.Gradient( **bargs ),
toneMapping = toneMapping,
                     )
| szecsi/Gears | GearsPy/Project/Components/Stimulus/FullfieldGradient.py | Python | gpl-2.0 | 898 | 0.040089 |
#!/usr/bin/env python
# _*_ coding: utf-8 _*_
import subprocess
try:
    import simplejson as json
except ImportError:
import json
# -> list ["422e608f9f28cef127b3d5ef93fe9399", ""]
def list_job_ids(host="http://localhost", port=6800, project="default"):
command = "curl %s:%d/listjobs.json?project=%s" % (host, port, project)
command_result = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE).stdout.readline()
running_ids = json.loads(command_result).get("running")
ids = []
for i in running_ids:
ids.append(i["id"])
return ids
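# Shape of the scrapyd listjobs.json payload parsed above (ids are made up):
#   {"status": "ok", "pending": [], "finished": [],
#    "running": [{"id": "422e608f9f28cef127b3d5ef93fe9399", "spider": "s1"}]}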
# str -> list "43242342342354efklajdf14" -> [4234, grep_pid]
def id_to_pid(spider_id):
command = "ps aux | grep %s | grep -v grep | awk '{print $2}'" % spider_id
info = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE).stdout.readlines()
return info
# list -> list ["asdfasdf234234", "a2345asdfaa"] -> [4324, 3453]
def ids_to_pids(spider_ids):
pids = []
for i in spider_ids:
pid = id_to_pid(i)
pids.extend(pid)
return pids
# kill 4323
def kill_spider(pid):
command = "kill -9 %s" % pid
subprocess.Popen(command, shell=True)
# kill [4324, 4234]
def kill_spider_list(pid_list):
for i in pid_list:
kill_spider(i)
if __name__ == "__main__":
ids = list_job_ids()
pids = ids_to_pids(ids)
kill_spider_list(pids)
| qq40660/rss_spider | script/stop_spider.py | Python | gpl-2.0 | 1,370 | 0.00219 |
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_ip_access_list(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
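    # Usage sketch (hypothetical callback): each method below builds an
    # ElementTree <config> payload and hands it to the callback supplied at
    # construction.
    #   def send(config):
    #       return ET.tostring(config)
    #   acl = brocade_ip_access_list(callback=send)
    #   acl.ip_acl_ip_access_list_standard_name(name='mgmt-acl')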
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
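    # --- Editor's note: usage sketch, not generated code --------------------
    # Every method above follows one pattern: build a <config> element for a
    # single YANG leaf of urn:brocade.com:mgmt:brocade-ip-access-list, then
    # hand it to a callback (kwargs['callback'], defaulting to self._callback,
    # which pynos wires to the device's NETCONF session).  The sketch below is
    # assumption-laden: 'acl' stands for an instance of the enclosing class,
    # and debug_callback serializes the document instead of sending it.
    #
    #     import xml.etree.ElementTree as ET
    #
    #     def debug_callback(config):
    #         return ET.tostring(config)
    #
    #     acl.ip_acl_ip_access_list_standard_name(
    #         name='test-acl', callback=debug_callback)
    #     acl.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(
    #         name='test-acl', seq_id='10', action='permit',
    #         callback=debug_callback)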
| BRCDcomm/pynos | pynos/versions/ver_7/ver_7_1_0/yang/brocade_ip_access_list.py | Python | apache-2.0 | 93,629 | 0.003407 |
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import typepy
EXCEPTION_RESULT = "E"
def convert_wrapper(typeobj, method):
try:
return getattr(typeobj, method)()
    except typepy.TypeConversionError:
return EXCEPTION_RESULT
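# Usage sketch (editor's addition): typepy type objects such as
# typepy.Integer expose converter methods like convert(); convert_wrapper
# funnels any TypeConversionError into the sentinel "E" so table-driven
# tests can compare against a single expected value.
#
#     assert convert_wrapper(typepy.Integer(1), "convert") == 1
#     assert convert_wrapper(typepy.Integer("abc"), "convert") == EXCEPTION_RESULT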
| thombashi/typepy | test/converter/_common.py | Python | mit | 276 | 0 |
#!/usr/bin/python
import sys
path = sys.argv[1]
f = open(path)
print '''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml"
xmlns:py="http://genshi.edgewall.org/"
xmlns:xi="http://www.w3.org/2001/XInclude"
py:strip="">
'''
try:
for lang in f:
lang = lang.strip()
if lang and not lang.startswith('#'):
print ' <option value="' + lang + '" py:attrs="{\'selected\': lang == \'' + lang + '\' and \'selected\' or None}">' + lang + '</option>'
finally:
f.close()
print '''</html>
'''
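# Editor's note: the expected input (inferred from the loop above, not from
# any documentation) is a plain-text file with one language code per line;
# blank lines and lines starting with '#' are skipped.  Given, say,
#
#     # languages.txt
#     en_US
#     fr_FR
#
# "python translations.py languages.txt" emits one Genshi <option> element
# per language inside the XHTML wrapper printed above.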
| shreyankg/Dorrie | mckup/build/translations.py | Python | agpl-3.0 | 628 | 0.003185 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--no-lock',
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8"))
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
    :returns: a PantsResult namedtuple of (command, returncode, stdout_data, stderr_data).
IMPORTANT NOTE: The subprocess will be run with --no-lock, so that it doesn't deadlock waiting
for this process to release the workspace lock. It's the caller's responsibility to ensure
that the invoked pants doesn't interact badly with this one.
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def bundle_and_run(self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
optional_args = args
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
      self.assertEqual(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
assertion(value, pants_run.returncode, error_msg)
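# Editor's note: a minimal subclass sketch (the goal and target spec are
# hypothetical) showing the intended use of run_pants/assert_success:
#
#     class ExampleIntegrationTest(PantsRunIntegrationTest):
#       def test_list_examples(self):
#         pants_run = self.run_pants(['list', 'examples::'])
#         self.assert_success(pants_run)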
| tejal29/pants | tests/python/pants_test/pants_run_integration_test.py | Python | apache-2.0 | 6,636 | 0.00859 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
#
# sax encoding/decoding test.
#
from suds.sax.element import Element
from suds.sax.parser import Parser
def basic():
xml = "<a>Me && <b>my</b> shadow's <i>dog</i> love to 'play' and sing "la,la,la";</a>"
p = Parser()
d = p.parse(string=xml)
a = d.root()
print('A(parsed)=\n%s' % a)
assert str(a) == xml
b = Element('a')
b.setText('Me && <b>my</b> shadow\'s <i>dog</i> love to \'play\' and sing "la,la,la";')
print('B(encoded)=\n%s' % b)
assert str(b) == xml
print('A(text-decoded)=\n%s' % a.getText())
print('B(text-decoded)=\n%s' % b.getText())
assert a.getText() == b.getText()
print('test pruning')
j = Element('A')
j.set('n', 1)
j.append(Element('B'))
print(j)
j.prune()
print(j)
def cdata():
xml = '<a><![CDATA[<b>This is my &<tag></b>]]></a>'
p = Parser()
d = p.parse(string=xml)
print(d)
a = d.root()
print(a.getText())
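# Editor's note: basic() round-trips entity encoding -- the escaped markup
# parses and re-serializes to the identical string, and Element.setText()
# applied to the raw text encodes back to that same markup, which is why
# getText() agrees for both objects.  cdata() demonstrates that a CDATA
# section survives parsing with its content available via getText().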
if __name__ == '__main__':
#basic()
cdata()
| USGM/suds | tests/saxenc.py | Python | lgpl-3.0 | 1,896 | 0.003692 |
# -*- coding: utf-8 -*-
#
# The internetarchive module is a Python/CLI interface to Archive.org.
#
# Copyright (C) 2012-2016 Internet Archive
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Configure 'ia' with your Archive.org credentials.
usage:
ia configure [--help]
ia configure [--username=<username> --password=<password>]
options:
-h, --help
-u, --username=<username> Provide username as an option rather than
providing it interactively.
-p, --password=<password> Provide password as an option rather than
providing it interactively.
"""
from __future__ import absolute_import, print_function, unicode_literals
import sys
from docopt import docopt
from internetarchive import configure
from internetarchive.exceptions import AuthenticationError
def main(argv, session):
args = docopt(__doc__, argv=argv)
try:
if args['--username'] and args['--password']:
config_file_path = configure(args['--username'],
args['--password'],
session.config_file)
print('Config saved to: {0}'.format(config_file_path))
else:
print("Enter your Archive.org credentials below to configure 'ia'.\n")
config_file_path = configure(config_file=session.config_file)
print('\nConfig saved to: {0}'.format(config_file_path))
except AuthenticationError as exc:
# TODO: refactor output so we don't have to have special cases
# for adding newlines!
if args['--username']:
print('error: {0}'.format(str(exc)))
else:
print('\nerror: {0}'.format(str(exc)))
sys.exit(1)
| brycedrennan/internetarchive | internetarchive/cli/ia_configure.py | Python | agpl-3.0 | 2,377 | 0.000421 |
from __future__ import absolute_import
from changes.constants import Status, Result
from changes.config import db
from changes.jobs.update_project_stats import (
update_project_stats, update_project_plan_stats
)
from changes.models.project import Project
from changes.testutils import TestCase
class UpdateProjectStatsTest(TestCase):
def test_simple(self):
project = self.create_project()
self.create_build(
project=project,
status=Status.finished,
result=Result.passed,
duration=5050,
)
update_project_stats(project_id=project.id.hex)
db.session.expire(project)
project = Project.query.get(project.id)
assert project.avg_build_time == 5050
class UpdateProjectPlanStatsTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(
project=project,
status=Status.finished,
result=Result.passed,
duration=5050,
)
job = self.create_job(build)
plan = self.create_plan(project)
self.create_job_plan(job, plan)
update_project_plan_stats(
project_id=project.id.hex,
plan_id=plan.id.hex,
)
db.session.expire(plan)
assert plan.avg_build_time == 5050
| dropbox/changes | tests/changes/jobs/test_update_project_stats.py | Python | apache-2.0 | 1,357 | 0 |
"""
Qizx Python API bindings
:copyright: (c) 2015 by Michael Paddon
:license: MIT, see LICENSE for more details.
"""
from .qizx import (
Client, QizxError, QizxBadRequestError, QizxServerError,
QizxNotFoundError, QizxAccessControlError, QizxXMLDataError,
QizxCompilationError, QizxEvaluationError, QizxTimeoutError,
QizxImportError, UnexpectedResponseError, TransactionError
)
__title__ = 'qizx'
__version__ = '1.0.2'
__author__ = "Michael Paddon"
__license__ = 'MIT'
__copyright__ = "Copyright 2015 Michael Paddon"
| qizxdb/qizx-python | qizx/__init__.py | Python | mit | 534 | 0 |
# -*- coding: utf-8 -*-
# Generated by h2py from sensorlab-frame-format.h
FIRST_BYTE = 0
LAST_BYTE = -1
NODE_ID_FIELD_LENGTH = 4
ENTITY_ID_FIELD_LENGTH = 1
EVENT_ID_FIELD_LENGTH = 1
PROPERTIES_COUNT_FIELD_LENGTH = 1
NAME_LENGTH_FIELD_LENGTH = 1
LINK_ID_FIELD_LENGTH = 1
FRAME_ID_FIELD_LENGTH = 1
FRAME_DATA_LENGTH_FIELD_LENGTH = 2
PROPERTY_ID_FIELD_LENGTH = 1
PROPERTY_UNIT_PREFIX_FIELD_LENGTH = 1
PROPERTY_UNIT_FIELD_LENGTH = 1
PROPERTY_TYPE_FIELD_LENGTH = 1
PROPERTY_VALUE_LENGTH_FIELD_LENGTH = 2
SENSORLAB_HEADER_LENGTH = (NODE_ID_FIELD_LENGTH + EVENT_ID_FIELD_LENGTH)
NODE_HEADER_LENGTH = PROPERTIES_COUNT_FIELD_LENGTH
ENTITY_HEADER_LENGTH = (ENTITY_ID_FIELD_LENGTH + PROPERTIES_COUNT_FIELD_LENGTH)
PROPERTY_DECLARATION_HEADER_LENGTH = (PROPERTY_ID_FIELD_LENGTH + PROPERTY_UNIT_PREFIX_FIELD_LENGTH +
PROPERTY_UNIT_FIELD_LENGTH + PROPERTY_TYPE_FIELD_LENGTH +
NAME_LENGTH_FIELD_LENGTH + PROPERTY_VALUE_LENGTH_FIELD_LENGTH)
PROPERTY_UPDATE_HEADER_LENGTH = (PROPERTY_ID_FIELD_LENGTH + PROPERTY_VALUE_LENGTH_FIELD_LENGTH)
SENSORLAB_HEADER = FIRST_BYTE
NODE_ID_FIELD = SENSORLAB_HEADER
EVENT_ID_FIELD = (NODE_ID_FIELD + NODE_ID_FIELD_LENGTH)
EVENT_PAYLOAD = (EVENT_ID_FIELD + EVENT_ID_FIELD_LENGTH)
NODE_ADD_PAYLOAD = EVENT_PAYLOAD
NODE_ADD_PROPERTIES_COUNT_FIELD = NODE_ADD_PAYLOAD
NODE_ADD_PROPERTIES = (NODE_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
NODE_PROPERTY_ADD_PAYLOAD = EVENT_PAYLOAD
NODE_PROPERTY_ADD_PROPERTIES_COUNT_FIELD = NODE_PROPERTY_ADD_PAYLOAD
NODE_PROPERTY_ADD_PROPERTIES = (NODE_PROPERTY_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
NODE_PROPERTY_UPDATE_PAYLOAD = EVENT_PAYLOAD
NODE_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD = NODE_PROPERTY_UPDATE_PAYLOAD
NODE_PROPERTY_UPDATE_PROPERTIES = (NODE_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
NODE_REMOVE_PAYLOAD = EVENT_PAYLOAD
ENTITY_ADD_PAYLOAD = EVENT_PAYLOAD
ENTITY_ADD_ENTITY_ID_FIELD = ENTITY_ADD_PAYLOAD
ENTITY_ADD_NAME_LENGTH_FIELD = (ENTITY_ADD_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
ENTITY_ADD_PROPERTIES_COUNT_FIELD = (ENTITY_ADD_NAME_LENGTH_FIELD + NAME_LENGTH_FIELD_LENGTH)
ENTITY_ADD_NAME_FIELD = (ENTITY_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
ENTITY_PROPERTY_ADD_PAYLOAD = EVENT_PAYLOAD
ENTITY_PROPERTY_ADD_ENTITY_ID_FIELD = ENTITY_PROPERTY_ADD_PAYLOAD
ENTITY_PROPERTY_ADD_PROPERTIES_COUNT_FIELD = (ENTITY_PROPERTY_ADD_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
ENTITY_PROPERTY_ADD_PROPERTIES = (ENTITY_PROPERTY_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
ENTITY_PROPERTY_UPDATE_PAYLOAD = EVENT_PAYLOAD
ENTITY_PROPERTY_UPDATE_ENTITY_ID_FIELD = ENTITY_PROPERTY_UPDATE_PAYLOAD
ENTITY_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD = (ENTITY_PROPERTY_UPDATE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
ENTITY_PROPERTY_UPDATE_PROPERTIES = (ENTITY_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
ENTITY_REMOVE_PAYLOAD = EVENT_PAYLOAD
ENTITY_REMOVE_ENTITY_ID_FIELD = ENTITY_REMOVE_PAYLOAD
LINK_ADD_PAYLOAD = EVENT_PAYLOAD
LINK_ADD_ENTITY_ID_FIELD = LINK_ADD_PAYLOAD
LINK_ADD_ID_FIELD = (LINK_ADD_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
LINK_ADD_SOURCE_PROPERTIES_COUNT_FIELD = (LINK_ADD_ID_FIELD + LINK_ID_FIELD_LENGTH)
LINK_ADD_TARGET_PROPERTIES_COUNT_FIELD = (LINK_ADD_SOURCE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
LINK_ADD_PROPERTIES_COUNT_FIELD = (LINK_ADD_TARGET_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
LINK_ADD_SOURCE_PROPERTIES = (LINK_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
LINK_PROPERTY_ADD_PAYLOAD = EVENT_PAYLOAD
LINK_PROPERTY_ADD_ENTITY_ID_FIELD = LINK_PROPERTY_ADD_PAYLOAD
LINK_PROPERTY_ADD_ID_FIELD = (LINK_PROPERTY_ADD_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
LINK_PROPERTY_ADD_PROPERTIES_COUNT_FIELD = (LINK_PROPERTY_ADD_ID_FIELD + LINK_ID_FIELD_LENGTH)
LINK_PROPERTY_ADD_PROPERTIES = (LINK_PROPERTY_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
LINK_PROPERTY_UPDATE_PAYLOAD = EVENT_PAYLOAD
LINK_PROPERTY_UPDATE_ENTITY_ID_FIELD = LINK_PROPERTY_UPDATE_PAYLOAD
LINK_PROPERTY_UPDATE_ID_FIELD = (LINK_PROPERTY_UPDATE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
LINK_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD = (LINK_PROPERTY_UPDATE_ID_FIELD + LINK_ID_FIELD_LENGTH)
LINK_PROPERTY_UPDATE_PROPERTIES = (LINK_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
LINK_REMOVE_PAYLOAD = EVENT_PAYLOAD
LINK_REMOVE_ENTITY_ID_FIELD = LINK_REMOVE_PAYLOAD
LINK_REMOVE_ID_FIELD = (LINK_REMOVE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_PRODUCE_PAYLOAD = EVENT_PAYLOAD
FRAME_PRODUCE_ENTITY_ID_FIELD = FRAME_PRODUCE_PAYLOAD
FRAME_PRODUCE_ID_FIELD = (FRAME_PRODUCE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_PRODUCE_DATA_LENGTH_FIELD = (FRAME_PRODUCE_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_PRODUCE_PROPERTIES_COUNT_FIELD = (FRAME_PRODUCE_DATA_LENGTH_FIELD + FRAME_DATA_LENGTH_FIELD_LENGTH)
FRAME_PRODUCE_DATA_FIELD = (FRAME_PRODUCE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
FRAME_RX_PAYLOAD = EVENT_PAYLOAD
FRAME_RX_ENTITY_ID_FIELD = FRAME_RX_PAYLOAD
FRAME_RX_ID_FIELD = (FRAME_RX_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_RX_DATA_LENGTH_FIELD = (FRAME_RX_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_RX_PROPERTIES_COUNT_FIELD = (FRAME_RX_DATA_LENGTH_FIELD + FRAME_DATA_LENGTH_FIELD_LENGTH)
FRAME_RX_DATA_FIELD = (FRAME_RX_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
FRAME_PROPERTY_ADD_PAYLOAD = EVENT_PAYLOAD
FRAME_PROPERTY_ADD_ENTITY_ID_FIELD = FRAME_PROPERTY_ADD_PAYLOAD
FRAME_PROPERTY_ADD_ID_FIELD = (FRAME_PROPERTY_ADD_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_PROPERTY_ADD_PROPERTIES_COUNT_FIELD = (FRAME_PROPERTY_ADD_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_PROPERTY_ADD_PROPERTIES = (FRAME_PROPERTY_ADD_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
FRAME_PROPERTY_UPDATE_PAYLOAD = EVENT_PAYLOAD
FRAME_PROPERTY_UPDATE_ENTITY_ID_FIELD = FRAME_PROPERTY_UPDATE_PAYLOAD
FRAME_PROPERTY_UPDATE_ID_FIELD = (FRAME_PROPERTY_UPDATE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD = (FRAME_PROPERTY_UPDATE_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_PROPERTY_UPDATE_PROPERTIES = (FRAME_PROPERTY_UPDATE_PROPERTIES_COUNT_FIELD + PROPERTIES_COUNT_FIELD_LENGTH)
FRAME_DATA_UPDATE_PAYLOAD = EVENT_PAYLOAD
FRAME_DATA_UPDATE_ENTITY_ID_FIELD = FRAME_DATA_UPDATE_PAYLOAD
FRAME_DATA_UPDATE_ID_FIELD = (FRAME_DATA_UPDATE_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_DATA_UPDATE_DATA_LENGTH_FIELD = (FRAME_DATA_UPDATE_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_DATA_UPDATE_DATA_FIELD = (FRAME_DATA_UPDATE_DATA_LENGTH_FIELD + FRAME_DATA_LENGTH_FIELD_LENGTH)
FRAME_TX_PAYLOAD = EVENT_PAYLOAD
FRAME_TX_ENTITY_ID_FIELD = FRAME_TX_PAYLOAD
FRAME_TX_ID_FIELD = (FRAME_TX_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_TX_DATA_LENGTH_FIELD = (FRAME_TX_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_TX_DATA_FIELD = (FRAME_TX_DATA_LENGTH_FIELD + FRAME_DATA_LENGTH_FIELD_LENGTH)
FRAME_CONSUME_PAYLOAD = EVENT_PAYLOAD
FRAME_CONSUME_ENTITY_ID_FIELD = FRAME_CONSUME_PAYLOAD
FRAME_CONSUME_ID_FIELD = (FRAME_CONSUME_ENTITY_ID_FIELD + ENTITY_ID_FIELD_LENGTH)
FRAME_CONSUME_DATA_LENGTH_FIELD = (FRAME_CONSUME_ID_FIELD + FRAME_ID_FIELD_LENGTH)
FRAME_CONSUME_DATA_FIELD = (FRAME_CONSUME_DATA_LENGTH_FIELD + FRAME_DATA_LENGTH_FIELD_LENGTH)
PROPERTY_DECLARATION_ID_FIELD = FIRST_BYTE
PROPERTY_DECLARATION_UNIT_PREFIX_FIELD = (PROPERTY_DECLARATION_ID_FIELD + PROPERTY_ID_FIELD_LENGTH)
PROPERTY_DECLARATION_UNIT_FIELD = (PROPERTY_DECLARATION_UNIT_PREFIX_FIELD + PROPERTY_UNIT_PREFIX_FIELD_LENGTH)
PROPERTY_DECLARATION_TYPE_FIELD = (PROPERTY_DECLARATION_UNIT_FIELD + PROPERTY_UNIT_FIELD_LENGTH)
PROPERTY_DECLARATION_NAME_LENGTH_FIELD = (PROPERTY_DECLARATION_TYPE_FIELD + PROPERTY_TYPE_FIELD_LENGTH)
PROPERTY_DECLARATION_VALUE_LENGTH_FIELD = (PROPERTY_DECLARATION_NAME_LENGTH_FIELD + NAME_LENGTH_FIELD_LENGTH)
PROPERTY_UPDATE_ID_FIELD = FIRST_BYTE
PROPERTY_UPDATE_VALUE_LENGTH_FIELD = (PROPERTY_UPDATE_ID_FIELD + PROPERTY_ID_FIELD_LENGTH)
EVENT_NODE_ADD = 0x00
EVENT_NODE_PROPERTY_ADD = 0x01
EVENT_NODE_PROPERTY_UPDATE = 0x02
EVENT_NODE_REMOVE = 0x03
EVENT_ENTITY_ADD = 0x10
EVENT_ENTITY_PROPERTY_ADD = 0x11
EVENT_ENTITY_PROPERTY_UPDATE = 0x12
EVENT_ENTITY_REMOVE = 0x13
EVENT_LINK_ADD = 0x20
EVENT_LINK_PROPERTY_ADD = 0x21
EVENT_LINK_PROPERTY_UPDATE = 0x22
EVENT_LINK_REMOVE = 0x23
EVENT_FRAME_PRODUCE = 0x30
EVENT_FRAME_PROPERTY_ADD = 0x31
EVENT_FRAME_PROPERTY_UPDATE = 0x32
EVENT_FRAME_DATA_UPDATE = 0x33
EVENT_FRAME_TX = 0x34
EVENT_FRAME_RX = 0x35
EVENT_FRAME_CONSUME = 0x36
TYPE_BOOLEAN = 0x00
TYPE_INT8 = 0x01
TYPE_INT16 = 0x02
TYPE_INT32 = 0x03
TYPE_INT64 = 0x04
TYPE_UINT8 = 0x05
TYPE_UINT16 = 0x06
TYPE_UINT32 = 0x07
TYPE_UINT64 = 0x08
TYPE_FLOAT = 0x09
TYPE_DOUBLE = 0x0A
TYPE_ASCII_ARRAY = 0x0B
TYPE_BYTE_ARRAY = 0x0C
TYPE_INVALID = 0x0D
TYPE_FLOAT_ARRAY = 0x0E
TYPE_DOUBLE_ARRAY = 0x0F
UNIT_NONE = 0x00
UNIT_METRE = 0x01
UNIT_KILOGRAM = 0x02
UNIT_SECOND = 0x03
UNIT_AMPERE = 0x04
UNIT_KELVIN = 0x05
UNIT_MOLE = 0x06
UNIT_CANDELA = 0x07
UNIT_RADIAN = 0x08
UNIT_STERADIAN = 0x09
UNIT_HERTZ = 0x0A
UNIT_NEWTON = 0x0B
UNIT_PASCAL = 0x0C
UNIT_JOULE = 0x0D
UNIT_WATT = 0x0E
UNIT_COULOMB = 0x0F
UNIT_VOLT = 0x10
UNIT_FARAD = 0x11
UNIT_OHM = 0x12
UNIT_SIEMENS = 0x13
UNIT_WEBER = 0x14
UNIT_TESLA = 0x15
UNIT_HENRY = 0x16
UNIT_DEGREECELSIUS = 0x17
UNIT_LUMEN = 0x18
UNIT_LUX = 0x19
UNIT_BECQUEREL = 0x1A
UNIT_GRAY = 0x1B
UNIT_SIEVERT = 0x1C
UNIT_KATAL = 0x1D
UNIT_DB = 0x1E
UNIT_DBW = 0x1F
UNIT_DBM = 0x20
PREFIX_YOTTA = 0x00
PREFIX_ZETTA = 0x01
PREFIX_EXA = 0x02
PREFIX_PETA = 0x03
PREFIX_TERA = 0x04
PREFIX_GIGA = 0x05
PREFIX_MEGA = 0x06
PREFIX_KILO = 0x07
PREFIX_HECTO = 0x08
PREFIX_DECA = 0x09
PREFIX_NONE = 0x0A
PREFIX_DECI = 0x0B
PREFIX_CENTI = 0x0C
PREFIX_MILLI = 0x0D
PREFIX_MICRO = 0x0E
PREFIX_NANO = 0x0F
PREFIX_PICO = 0x10
PREFIX_FEMTO = 0x11
PREFIX_ATTO = 0x12
PREFIX_ZEPTO = 0x13
PREFIX_YOCTO = 0x14
VALUE_FALSE = 0x00
VALUE_TRUE = 0x01
SENSORLAB_LINK_LAYER_TYPE = 147
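# Illustrative sketch (not generated from the header): decoding the common
# SensorLab header from a raw frame using the offsets above. Big-endian byte
# order and Python 3 `bytes` input are assumptions, not guarantees.
def _parse_sensorlab_header_example(frame):
    import struct
    node_id, = struct.unpack_from('>I', frame, NODE_ID_FIELD)
    event_id = frame[EVENT_ID_FIELD]
    return node_id, event_id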
| glarue-ol/sensorlab-observer | observer/m_sensorlab/frame_format.py | Python | mpl-2.0 | 9,968 | 0.004916 |
import time, logging
from artnet import dmx, fixtures, rig
from artnet.dmx import fades
log = logging.getLogger(__name__)
# set up test fixtures
r = rig.get_default_rig()
g = r.groups['all']
def all_red():
"""
Create an all-red frame.
"""
g.setColor('#ff0000')
g.setIntensity(255)
return g.getFrame()
def all_blue():
"""
Create an all-blue frame.
"""
g.setColor('#0000ff')
g.setIntensity(255)
return g.getFrame()
def main(config, controller=None):
log.info("Running script %s" % __name__)
# global g
# g = get_default_fixture_group(config)
q = controller or dmx.Controller(config.get('base', 'address'), bpm=60, nodaemon=True, runout=True)
q.add(fades.create_multifade([
all_red(),
all_blue(),
] * 3, secs=5.0))
if not controller:
q.start()
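# Illustrative usage (sketch, not part of the original script): main() only
# needs an object whose config.get('base', 'address') returns the Art-Net
# node address, so a ConfigParser-backed config is one plausible caller:
#   import configparser
#   cfg = configparser.ConfigParser()
#   cfg.read('artnet.cfg')
#   main(cfg)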
| ScienceWorldCA/domelights | backend/artnet-bridge/artnet/scripts/alternating_color_fades.py | Python | apache-2.0 | 776 | 0.03866 |
#encoding=utf-8
# this is the interface to create your own data source
# this class pings a private / public blockchain to get the balance and code information
from web3 import Web3, KeepAliveRPCProvider
class EthereumData:
def __init__(self):
self.host = 'x.x.x.x'
self.port = '8545'
self.web3 = Web3(KeepAliveRPCProvider(host=self.host, port=self.port))
def getBalance(self, address):
return self.web3.eth.getBalance(address)
def getCode(self, address):
return self.web3.eth.getCode(address)
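# Example usage (illustrative; assumes a reachable JSON-RPC node at the
# host/port configured above — the zero address is only a placeholder):
#   data = EthereumData()
#   wei = data.getBalance('0x0000000000000000000000000000000000000000')
#   code = data.getCode('0x0000000000000000000000000000000000000000')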
| Ganben/solverify | analyze/ethereum_data1.py | Python | gpl-3.0 | 508 | 0.021654 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from typing import (Any, Callable, Optional, Sequence, Union, Tuple)
import warnings
import numpy as np
from jax.interpreters import ad
from jax.interpreters import batching
from jax.interpreters import mlir
from jax.interpreters import xla
from jax import core
from jax.core import (ShapedArray, ConcreteArray)
from jax import tree_util
from jax._src import ad_util
from jax._src import dtypes
import jax._src.lax.lax as lax
import jax._src.lax.convolution as convolution
import jax._src.lax.slicing as slicing
from jax._src.lib.mlir import ir
from jax._src.lib.mlir.dialects import mhlo
from jax._src.lib import xla_bridge
from jax._src.lib import xla_client
import jax._src.util as util
map = util.safe_map
zip = util.safe_zip
xb = xla_bridge
xc = xla_client
xops = xla_client.ops
Array = Any
def reduce_window(operand, init_value, computation: Callable,
window_dimensions: core.Shape, window_strides: Sequence[int],
padding: Union[str, Sequence[Tuple[int, int]]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
"""Wraps XLA's `ReduceWindowWithGeneralPadding
<https://www.tensorflow.org/xla/operation_semantics#reducewindow>`_
operator.
"""
flat_operands, operand_tree = tree_util.tree_flatten(operand)
flat_init_values, init_value_tree = tree_util.tree_flatten(init_value)
if operand_tree != init_value_tree:
raise ValueError('Operands must have the same tree structure as '
f'init_values: {operand_tree} vs. {init_value_tree}')
if len(flat_operands) == 0:
raise ValueError('reduce_window must have at least one operand.')
if len(flat_operands) != len(flat_init_values):
raise ValueError('Must have same total number of operands as init_values: '
f' {len(flat_operands)} vs. {len(flat_init_values)}')
if isinstance(padding, str):
dilated_window_dims = (
window_dimensions if window_dilation is None else
lax._dilate_shape(window_dimensions, window_dilation))
padding = tuple(lax.padtype_to_pads(
flat_operands[0].shape, dilated_window_dims, window_strides, padding))
else:
padding = tuple(padding)
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
monoid_reducer = _get_monoid_window_reducer(computation, flat_init_values)
if monoid_reducer:
return monoid_reducer(operand, window_dimensions, window_strides, padding,
base_dilation, window_dilation)
else:
flat_init_avals = map(lax._abstractify, flat_init_values)
jaxpr, consts, out_tree = lax._variadic_reduction_jaxpr(
computation, tuple(flat_init_avals), init_value_tree)
if operand_tree != out_tree:
raise ValueError(
'reduce_window output must have the same tree structure as the operands'
f' {operand_tree} vs. {out_tree}')
out_flat = reduce_window_p.bind(
*(flat_operands + flat_init_values), jaxpr=jaxpr, consts=consts,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=padding,
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
return tree_util.tree_unflatten(out_tree, out_flat)
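# Illustrative example (kept as a comment so importing this module stays
# side-effect free): a 2x2 sum-pool over a 4x4 array via the generic entry
# point above.
#   import jax.numpy as jnp
#   x = jnp.arange(16.).reshape(4, 4)
#   y = reduce_window(x, 0., lax.add, window_dimensions=(2, 2),
#                     window_strides=(2, 2), padding='VALID')
#   # y == [[10., 18.], [42., 50.]]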
def _get_monoid_window_reducer(monoid_op: Callable,
xs: Sequence[Array]) -> Optional[Callable]:
if len(xs) != 1:
return None
x, = xs
aval = core.get_aval(x)
if (type(aval) is ConcreteArray) and aval.shape == ():
if monoid_op is lax.add:
return aval.val == 0 and _reduce_window_sum
elif monoid_op is lax.max:
return (aval.val == lax._get_max_identity(aval.dtype)
and _reduce_window_max)
elif monoid_op is lax.min:
return (aval.val == lax._get_min_identity(aval.dtype)
and _reduce_window_min)
return None
def _reduce_window_sum(operand: Array, window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_sum_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_prod(operand: Array, window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
init_value = lax._const(operand, 1)
jaxpr, consts = lax._reduction_jaxpr(lax.mul, lax._abstractify(init_value))
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
out, = reduce_window_p.bind(
operand, init_value, jaxpr=jaxpr, consts=consts,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
return out
def _reduce_window_max(operand: Array, window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_max_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_min(operand: Array, window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_min_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _select_and_scatter(operand: Array, select: Callable,
window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]], source: Array,
init_value: Array, scatter: Callable) -> Array:
select_jaxpr, select_consts = lax._reduction_jaxpr(
select, lax._abstractify(init_value))
scatter_jaxpr, scatter_consts = lax._reduction_jaxpr(
scatter, lax._abstractify(init_value))
return select_and_scatter_p.bind(
operand, source, init_value, select_jaxpr=select_jaxpr,
select_consts=select_consts, scatter_jaxpr=scatter_jaxpr,
scatter_consts=scatter_consts, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding))
def _select_and_scatter_add(source: Array, operand: Array,
select_prim: core.Primitive,
window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]]) -> Array:
return select_and_scatter_add_p.bind(
source, operand, select_prim=select_prim,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding))
def _select_and_gather_add(tangents: Array, operand: Array,
select_prim: core.Primitive,
window_dimensions: core.Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Sequence[int],
window_dilation: Sequence[int]) -> Array:
"""Extracts the tangent corresponding to the minimum or maximum element in
each window of the `operand` array.
Wraps XLA's `ReduceWindow
<https://www.tensorflow.org/xla/operation_semantics#reducewindow>`_
operator, which applies a reduction function to all elements in each window of
the input multi-dimensional array. In this case, the input multi-dimensional
array is built by packing each element in the `operand` array with its
corresponding element in the `tangents` array.
Args:
tangents: an array
operand: an array with the same shape as `tangents`
select_prim: a reduction function (restricted to `ge_p` and `le_p`)
window_dimensions: an array of integers for window dimension values
window_strides: an array of integers for window stride values
base_dilation: an array of integers for base dilation values
window_dilation: an array of integers for window dilation values
Returns:
    An array containing the elements in `tangents` corresponding to the output
    of the reduction of `operand` in each window.
"""
return select_and_gather_add_p.bind(
tangents, operand, select_prim=select_prim,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_abstract_eval_rule(
*avals, jaxpr, consts, window_dimensions, window_strides, padding,
base_dilation, window_dilation):
operand_avals, init_val_avals = util.split_list(avals, [len(avals) // 2])
if any(o.dtype != iv.dtype for o, iv in zip(operand_avals, init_val_avals)):
msg = ("reduce_window got inconsistent dtypes for operands and init_values:"
" got operand dtypes {} and init_value dtypes {}.")
raise TypeError(msg.format([o.dtype for o in operand_avals],
[iv.dtype for iv in init_val_avals]))
if any(len(v.shape) != 0 for v in init_val_avals):
msg = ("reduce_window expected init_values to be scalars but init_values "
"have shapes {}.")
raise TypeError(msg.format([v.shape for v in init_val_avals]))
out_shape = _common_reduce_window_shape_rule(
operand_avals[0], window_dimensions, window_strides, padding,
base_dilation, window_dilation)
return tuple(ShapedArray(out_shape, op.dtype) for op in operand_avals)
def _reduce_window_translation_rule(ctx, avals_in, avals_out, *args, jaxpr,
consts, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
operands, init_values = util.split_list(args, [len(args) // 2])
xla_computation = lax._reduction_computation(ctx, jaxpr, consts, init_values,
singleton=False)
return xla.xla_destructure(ctx.builder, xops.ReduceWindowWithGeneralPadding(
operands, init_values, xla_computation, window_dimensions,
window_strides, base_dilation, window_dilation, padding))
def _generic_reduce_window_batch_rule(
batched_args, batch_dims, *, jaxpr, consts, window_dimensions,
window_strides, padding, base_dilation, window_dilation):
num_operands = len(batched_args) // 2
operands, init_values = util.split_list(batched_args, [num_operands])
operand_bdims, init_value_bdims = util.split_list(batch_dims, [num_operands])
if any(init_bdim is not None for init_bdim in init_value_bdims):
raise NotImplementedError("reduce_window batching is not implemented for "
"initial values")
size = next(x.shape[ax] for x, ax in zip(operands, operand_bdims)
if ax is not None)
operands = [batching.bdim_at_front(arg, bdim, size)
for arg, bdim in zip(operands, operand_bdims)]
window_dimensions = (1,) + window_dimensions
window_strides = (1,) + window_strides
padding = ((0, 0),) + padding
base_dilation = (1,) + base_dilation
window_dilation = (1,) + window_dilation
outs = reduce_window_p.bind(
*(operands + init_values), jaxpr=jaxpr, consts=consts,
window_dimensions=window_dimensions, window_strides=window_strides,
padding=padding, base_dilation=base_dilation,
window_dilation=window_dilation)
return outs, (0,) * num_operands
reduce_window_p = core.Primitive('reduce_window')
reduce_window_p.multiple_results = True
reduce_window_p.def_impl(partial(xla.apply_primitive, reduce_window_p))
reduce_window_p.def_abstract_eval(_reduce_window_abstract_eval_rule)
batching.primitive_batchers[reduce_window_p] = _generic_reduce_window_batch_rule
xla.register_translation(reduce_window_p, _reduce_window_translation_rule)
def _generic_reduce_window_lower(ctx, *args, jaxpr, consts,
window_dimensions, window_strides, padding,
base_dilation, window_dilation):
operands, init_values = util.split_list(args, [len(args) // 2])
_, init_value_avals = util.split_list(ctx.avals_in, [len(operands)])
scalar_types = [mlir.aval_to_ir_type(aval) for aval in init_value_avals]
rw = mhlo.ReduceWindowOp(
map(mlir.aval_to_ir_type, ctx.avals_out), operands, init_values,
mlir.dense_int_elements(window_dimensions),
mlir.dense_int_elements(window_strides),
mlir.dense_int_elements(base_dilation),
mlir.dense_int_elements(window_dilation),
ir.DenseIntElementsAttr.get(np.asarray(padding, np.int64)))
reducer = rw.regions[0].blocks.append(*(scalar_types + scalar_types))
with ir.InsertionPoint(reducer):
out_nodes = mlir.jaxpr_subcomp(ctx.module_context, jaxpr, consts,
*([a] for a in reducer.arguments))
mhlo.ReturnOp(util.flatten(out_nodes))
return rw.results
mlir.register_lowering(reduce_window_p, _generic_reduce_window_lower)
def _reduce_window_sum_shape_rule(operand, *, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
if not dtypes.issubdtype(operand.dtype, np.number):
msg = "operand to reduce_window_sum must have a number dtype, got {}"
raise TypeError(msg.format(np.dtype(operand.dtype).name))
return _common_reduce_window_shape_rule(operand, window_dimensions,
window_strides, padding,
base_dilation, window_dilation)
def _reduce_window_sum_translation_rule(ctx, avals_in, avals_out, operand, *,
window_dimensions, window_strides,
padding, base_dilation,
window_dilation):
operand_aval, = avals_in
scalar = ShapedArray((), operand_aval.dtype)
return [xops.ReduceWindowWithGeneralPadding(
operand,
xla.pyval_to_ir_constant(ctx.builder, np.array(0, operand_aval.dtype)),
xla.primitive_subcomputation(ctx.platform, ctx.axis_env, lax.add_p, scalar,
scalar),
window_dimensions,
window_strides, base_dilation, window_dilation, padding)]
def _reduce_window_sum_transpose_rule(cotangent, operand, *, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
assert ad.is_undefined_primal(operand)
input_shape = operand.aval.shape
pads = convolution._conv_general_vjp_lhs_padding(
input_shape, window_dimensions, window_strides, cotangent.shape, padding,
base_dilation, window_dilation)
ones = [1] * len(input_shape)
padding_config = [(lo, hi, stride - 1)
for (lo, hi), stride in zip(pads, window_strides)]
pad_cotangent = lax.pad(cotangent, lax._zero(cotangent), padding_config)
result = _reduce_window_sum(pad_cotangent, window_dimensions, base_dilation,
[(0, 0)] * len(input_shape),
base_dilation=ones,
window_dilation=window_dilation)
assert result.shape == input_shape, (result.shape, input_shape)
return [result]
def _reduce_window_batch_rule(reduce_window, batched_args, bdims, *,
window_dimensions, window_strides, padding,
base_dilation, window_dilation):
operand, = batched_args
bdim, = bdims
if bdim is not None:
window_dimensions = \
window_dimensions[:bdim] + (1,) + window_dimensions[bdim:]
window_strides = window_strides[:bdim] + (1,) + window_strides[bdim:]
padding = padding[:bdim] + ((0, 0),) + padding[bdim:]
base_dilation = base_dilation[:bdim] + (1,) + base_dilation[bdim:]
window_dilation = window_dilation[:bdim] + (1,) + window_dilation[bdim:]
operand = reduce_window(operand, window_dimensions, window_strides, padding,
base_dilation, window_dilation)
return operand, bdim
reduce_window_sum_p = lax.standard_primitive(
_reduce_window_sum_shape_rule, lax._input_dtype, 'reduce_window_sum',
_reduce_window_sum_translation_rule)
ad.deflinear2(reduce_window_sum_p, _reduce_window_sum_transpose_rule)
batching.primitive_batchers[reduce_window_sum_p] = partial(
_reduce_window_batch_rule, _reduce_window_sum)
def _reduce_window_chooser_translation_rule(
prim, identity, ctx, avals_in, avals_out, operand, *, window_dimensions,
window_strides, padding, base_dilation, window_dilation):
operand_aval, = avals_in
scalar = ShapedArray((), operand_aval.dtype)
return [xops.ReduceWindowWithGeneralPadding(
operand,
xla.pyval_to_ir_constant(ctx.builder, identity(operand_aval.dtype)),
xla.primitive_subcomputation(ctx.platform, ctx.axis_env, prim, scalar,
scalar),
window_dimensions,
window_strides, base_dilation, window_dilation, padding)]
def _reduce_window_chooser_jvp_rule(prim, g, operand, *, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
assert prim is lax.max_p or prim is lax.min_p
select_prim = lax.ge_p if prim is lax.max_p else lax.le_p
return _select_and_gather_add(g, operand, select_prim, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
def _common_reduce_window_shape_rule(operand, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
lax._check_shapelike("reduce_window", "window_dimensions", window_dimensions,
non_zero_shape=True)
lax._check_shapelike("reduce_window", "window_strides", window_strides,
non_zero_shape=True)
lax._check_shapelike("reduce_window", "base_dilation", base_dilation)
lax._check_shapelike("reduce_window", "window_dilation", window_dilation)
if operand.ndim != len(window_dimensions):
msg = ("reduce_window got the wrong number of window_dimensions for "
"operand: got operand shape {} with window_dimensions {}.")
raise TypeError(msg.format(operand.shape, window_dimensions))
if len(window_strides) != len(window_dimensions):
msg = ("reduce_window got inconsistent window_strides and "
"window_dimensions: got window_strides {} and window_dimensions {}.")
raise TypeError(msg.format(window_strides, window_dimensions))
if len(base_dilation) != len(window_dimensions):
msg = ("reduce_window got inconsistent base_dilation and "
"window_dimensions: got base_dilation {} and window_dimensions {}.")
raise TypeError(msg.format(base_dilation, window_dimensions))
if len(window_dilation) != len(window_dimensions):
msg = ("reduce_window got inconsistent window_dilation and "
"window_dimensions: got window_dilation {} and window_dimensions "
"{}.")
raise TypeError(msg.format(window_dilation, window_dimensions))
return reduce_window_shape_tuple(operand.shape, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
def reduce_window_shape_tuple(operand_shape, window_dimensions, window_strides,
padding, base_dilation=None,
window_dilation=None):
if base_dilation is not None:
operand_shape = lax._dilate_shape(operand_shape, base_dilation)
if window_dilation is not None:
window_dimensions = lax._dilate_shape(window_dimensions, window_dilation)
pads_lo, pads_hi = zip(*padding)
operand_padded = core.sum_shapes(operand_shape, pads_lo, pads_hi)
return core.stride_shape(operand_padded, window_dimensions, window_strides)
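# Worked example (illustrative): for a length-10 axis with window 3, stride 2
# and padding (1, 1), the padded length is 12 and the output length is
# (12 - 3) // 2 + 1 == 5, i.e.
#   reduce_window_shape_tuple((10,), (3,), (2,), ((1, 1),)) == (5,)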
_reduce_window_max_translation_rule = partial(
_reduce_window_chooser_translation_rule, lax.max_p, lax._get_max_identity)
reduce_window_max_p = lax.standard_primitive(
_common_reduce_window_shape_rule, lax._input_dtype, 'reduce_window_max',
_reduce_window_max_translation_rule)
ad.defjvp(reduce_window_max_p, partial(_reduce_window_chooser_jvp_rule,
lax.max_p))
batching.primitive_batchers[reduce_window_max_p] = partial(
_reduce_window_batch_rule, _reduce_window_max)
_reduce_window_min_translation_rule = partial(
_reduce_window_chooser_translation_rule, lax.min_p, lax._get_min_identity)
reduce_window_min_p = lax.standard_primitive(
_common_reduce_window_shape_rule, lax._input_dtype, 'reduce_window_min',
_reduce_window_min_translation_rule)
ad.defjvp(reduce_window_min_p, partial(_reduce_window_chooser_jvp_rule,
lax.min_p))
_reduce_window_min_batch_rule = partial(_reduce_window_batch_rule,
_reduce_window_min)
batching.primitive_batchers[reduce_window_min_p] = partial(
_reduce_window_batch_rule, _reduce_window_min)
def _reduce_window_lower(
reduce_op, init_value, ctx, operand, *,
window_dimensions, window_strides, padding, base_dilation, window_dilation):
aval_out, = ctx.avals_out
operand_aval, = ctx.avals_in
scalar_aval = operand_aval.update(shape=())
scalar_type = mlir.aval_to_ir_type(scalar_aval)
rw = mhlo.ReduceWindowOp(
mlir.aval_to_ir_types(aval_out), [operand],
[mlir.full_like_aval(init_value(scalar_aval.dtype), scalar_aval)],
mlir.dense_int_elements(window_dimensions),
mlir.dense_int_elements(window_strides),
mlir.dense_int_elements(base_dilation),
mlir.dense_int_elements(window_dilation),
ir.DenseIntElementsAttr.get(np.asarray(padding, np.int64)))
reducer = rw.regions[0].blocks.append(scalar_type, scalar_type)
with ir.InsertionPoint(reducer):
mhlo.ReturnOp(reduce_op(*reducer.arguments))
return rw.results
mlir.register_lowering(reduce_window_sum_p, partial(
_reduce_window_lower, mhlo.AddOp, lambda _: 0))
mlir.register_lowering(reduce_window_min_p, partial(
_reduce_window_lower, mlir.min_mhlo, lax._get_min_identity))
mlir.register_lowering(reduce_window_max_p, partial(
_reduce_window_lower, mlir.max_mhlo, lax._get_max_identity))
def _select_and_scatter_shape_rule(
operand, source, init_value, *, select_jaxpr, select_consts, scatter_jaxpr,
scatter_consts, window_dimensions, window_strides, padding):
lax._check_shapelike("select_and_scatter", "window_dimensions",
window_dimensions)
lax._check_shapelike("select_and_scatter", "window_strides", window_strides)
if len(window_dimensions) != len(window_strides):
msg = ("select_and_scatter got inconsistent window_strides and "
"window_dimensions: got window_strides {} and window_dimensions {}.")
raise TypeError(msg.format(window_strides, window_dimensions))
return operand.shape
def _select_and_scatter_translation(
ctx, avals_in, avals_out, operand, source, init_value, *, select_jaxpr,
select_consts, scatter_jaxpr, scatter_consts, window_dimensions,
window_strides, padding):
select = lax._reduction_computation(ctx, select_jaxpr, select_consts,
init_value)
scatter = lax._reduction_computation(ctx, scatter_jaxpr, scatter_consts,
init_value)
return [xops.SelectAndScatterWithGeneralPadding(
operand, select, window_dimensions, window_strides, padding, source,
init_value, scatter)]
select_and_scatter_p = lax.standard_primitive(
_select_and_scatter_shape_rule, lax._input_dtype, 'select_and_scatter',
_select_and_scatter_translation)
def _select_and_scatter_lower(
ctx, operand, source, init_value, *, select_jaxpr,
select_consts, scatter_jaxpr, scatter_consts, window_dimensions,
window_strides, padding):
operand_aval, source_aval, init_value_aval = ctx.avals_in
aval_out, = ctx.avals_out
scalar_aval = operand_aval.update(shape=())
scalar_type = mlir.aval_to_ir_type(scalar_aval)
op = mhlo.SelectAndScatterOp(
mlir.aval_to_ir_type(aval_out), operand, source,
init_value, mlir.dense_int_elements(window_dimensions),
mlir.dense_int_elements(window_strides),
ir.DenseIntElementsAttr.get(np.asarray(padding, np.int64)))
select = op.select.blocks.append(scalar_type, scalar_type)
with ir.InsertionPoint(select):
out_nodes = mlir.jaxpr_subcomp(ctx.module_context, select_jaxpr,
select_consts,
*([a] for a in select.arguments))
mhlo.ReturnOp(util.flatten(out_nodes))
scatter = op.scatter.blocks.append(scalar_type, scalar_type)
with ir.InsertionPoint(scatter):
out_nodes = mlir.jaxpr_subcomp(ctx.module_context, scatter_jaxpr,
scatter_consts,
*([a] for a in scatter.arguments))
mhlo.ReturnOp(util.flatten(out_nodes))
return op.results
mlir.register_lowering(select_and_scatter_p, _select_and_scatter_lower)
def _select_and_scatter_add_shape_rule(
source, operand, *, select_prim, window_dimensions, window_strides,
padding):
return operand.shape
def _select_and_scatter_add_translation(
ctx, avals_in, avals_out, source, operand, *, select_prim,
window_dimensions, window_strides, padding, expand_padding):
source_aval, operand_aval = avals_in
c = ctx.builder
dtype = operand_aval.dtype
scalar = ShapedArray((), dtype)
select = xla.primitive_subcomputation(
ctx.platform, ctx.axis_env, select_prim, scalar, scalar)
scatter = xla.primitive_subcomputation(
ctx.platform, ctx.axis_env, lax.or_p if dtype == np.bool_ else lax.add_p,
scalar, scalar)
zero = xla.pyval_to_ir_constant(c, np.array(0, dtype))
# TODO(b/161704903): remove this workaround when XLA:CPU bug is fixed.
expand_padding = (expand_padding and
not all(lo == 0 and hi == 0 for (lo, hi) in padding))
if expand_padding:
original_padding = padding
identity = (lax._get_max_identity if select_prim is lax.ge_p
else lax._get_min_identity)
pads = [(lo, hi, 0) for (lo, hi) in padding]
operand = xops.Pad(operand, xla.pyval_to_ir_constant(c, identity(dtype)),
xc.make_padding_config(pads))
padding = [(0, 0) for _ in padding]
output = xops.SelectAndScatterWithGeneralPadding(
operand, select, window_dimensions, window_strides, padding, source, zero,
scatter)
if expand_padding:
start_indices = [lo for (lo, hi) in original_padding]
stop_indices = [lo + d for ((lo, hi), d) in zip(original_padding,
operand_aval.shape)]
output = xops.Slice(output, start_indices, stop_indices,
[1] * len(start_indices))
return [output]
def _select_and_scatter_add_jvp(
primals, tangents, *, select_prim, window_dimensions, window_strides,
padding):
source, operand = primals
g_source, g_operand = tangents
val_out = _select_and_scatter_add(
source, operand, select_prim, window_dimensions, window_strides,
padding)
del g_operand
if type(g_source) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
tangent_out = _select_and_scatter_add(
g_source, operand, select_prim, window_dimensions,
window_strides, padding)
return val_out, tangent_out
def _select_and_scatter_add_transpose(
t, source, operand, *, select_prim, window_dimensions, window_strides,
padding):
assert ad.is_undefined_primal(source) and not ad.is_undefined_primal(operand)
if type(t) is ad_util.Zero:
return [ad_util.Zero(source.aval), None]
ones = (1,) * len(window_dimensions)
source_t = _select_and_gather_add(t, operand, select_prim, window_dimensions,
window_strides, padding, ones, ones)
return [source_t, None]
def _select_and_scatter_add_batch_rule(
batched_args, batch_dims, *, select_prim, window_dimensions, window_strides,
padding):
source, operand = batched_args
s_bdim, o_bdim = batch_dims
size = next(a.shape[bdim] for a, bdim in zip(batched_args, batch_dims)
if bdim is not None)
source = batching.bdim_at_front(source, s_bdim, size)
operand = batching.bdim_at_front(operand, o_bdim, size)
window_dimensions = (1,) + window_dimensions
window_strides = (1,) + window_strides
padding = ((0, 0),) + padding
out = _select_and_scatter_add(source, operand, select_prim, window_dimensions,
window_strides, padding)
return out, 0
select_and_scatter_add_p = lax.standard_primitive(
_select_and_scatter_add_shape_rule, lax._input_dtype,
'select_and_scatter_add',
partial(_select_and_scatter_add_translation, expand_padding=False))
ad.primitive_transposes[select_and_scatter_add_p] = \
_select_and_scatter_add_transpose
ad.primitive_jvps[select_and_scatter_add_p] = _select_and_scatter_add_jvp
batching.primitive_batchers[select_and_scatter_add_p] = \
_select_and_scatter_add_batch_rule
# TODO(b/161704903): workaround for XLA/CPU crash.
xla.register_translation(
select_and_scatter_add_p,
partial(_select_and_scatter_add_translation, expand_padding=True),
platform='cpu')
# TODO(b/182390722): workaround for XLA/GPU crash.
xla.register_translation(
select_and_scatter_add_p,
partial(_select_and_scatter_add_translation, expand_padding=True),
platform='gpu')
def _select_and_scatter_add_impl(source, operand, *,
select_prim, window_dimensions, window_strides,
padding, expand_padding):
dtype = source.dtype
select = lambda x, y: select_prim.bind(x, y)
scatter = lax.bitwise_or if dtype == np.bool_ else lax.add
if expand_padding:
operand_shape = operand.shape
original_padding = padding
identity = (lax._get_max_identity if select_prim is lax.ge_p
else lax._get_min_identity)
pads = [(lo, hi, 0) for (lo, hi) in padding]
operand = lax.pad(operand, identity(dtype), pads)
padding = [(0, 0) for _ in padding]
out = _select_and_scatter(
operand, select, window_dimensions, window_strides, padding, source,
lax._zero(operand), scatter)
if expand_padding:
start_indices = [lo for (lo, hi) in original_padding]
stop_indices = [lo + d for ((lo, hi), d) in zip(original_padding,
operand_shape)]
out = slicing.slice(out, start_indices, stop_indices)
return out
mlir.register_lowering(select_and_scatter_add_p, mlir.lower_fun(
partial(_select_and_scatter_add_impl, expand_padding=False),
multiple_results=False))
mlir.register_lowering(select_and_scatter_add_p, mlir.lower_fun(
partial(_select_and_scatter_add_impl, expand_padding=True),
multiple_results=False), platform='cpu')
mlir.register_lowering(select_and_scatter_add_p, mlir.lower_fun(
partial(_select_and_scatter_add_impl, expand_padding=True),
multiple_results=False), platform='gpu')
def _select_and_gather_add_shape_rule(
tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
if tangents.shape != operand.shape:
msg = ("select_and_gather_add tangents and operand shapes must match, "
"got {} and {}.")
raise TypeError(msg.format(tangents.shape, operand.shape))
return _common_reduce_window_shape_rule(
operand, window_dimensions, window_strides, padding, base_dilation,
window_dilation)
def _select_and_gather_add_translation(
ctx, avals_in, avals_out, tangents, operand, *, select_prim,
window_dimensions, window_strides, padding, base_dilation, window_dilation,
max_bits=64):
c = ctx.builder
tangents_aval, operand_aval, = avals_in
dtype = operand_aval.dtype
etype = xla.dtype_to_primitive_type(dtype)
nbits = dtypes.finfo(dtype).bits
assert nbits <= max_bits
double_word_reduction = nbits * 2 <= max_bits
const = lambda c, dtype, x: xops.Constant(c, np.array(x, dtype=dtype))
if double_word_reduction:
# TODO(b/73062247): XLA doesn't yet implement ReduceWindow on tuples, so
# we implement a pair-wise ReduceWindow by packing two k-bit values into
# 2k-bit unsigned integer using bit tricks.
word_dtype = lax._UINT_DTYPES[nbits]
double_word_dtype = lax._UINT_DTYPES[nbits * 2]
word_type = xla.dtype_to_primitive_type(word_dtype)
double_word_type = xla.dtype_to_primitive_type(double_word_dtype)
# Packs two values into a tuple.
def pack(a, b):
a = xops.BitcastConvertType(a, word_type)
b = xops.BitcastConvertType(b, word_type)
a = xops.ConvertElementType(a, double_word_type)
b = xops.ConvertElementType(b, double_word_type)
a = xops.ShiftLeft(a, const(c, double_word_dtype, nbits))
return xops.Or(a, b)
# Unpacks the first element of a tuple.
def fst(c, t):
st = xops.ShiftRightLogical(t, const(c, double_word_dtype, nbits))
return xops.BitcastConvertType(xops.ConvertElementType(st, word_type),
etype)
# Unpacks the second element of a tuple.
def snd(t):
return xops.BitcastConvertType(xops.ConvertElementType(t, word_type),
etype)
else:
# The double-word trick above only works if we have a sufficiently large
# type. As an alternative, we can pack two half words into a single word,
# at the cost of precision.
# TODO(b/73062247): add support for tuple reductions and remove this case.
warnings.warn("Using reduced precision for gradient of reduce-window "
"min/max operator to work around missing XLA support for "
"pair-reductions. This is likely from a second or "
"higher derivative of a max-pooling operation.")
r_nbits = nbits // 2
# Drop/round the bottom mantissa bits.
nexp = dtypes.finfo(dtype).nexp
nmant = r_nbits - nexp - 1
double_word_dtype = word_dtype = lax._UINT_DTYPES[nbits]
word_type = xla.dtype_to_primitive_type(word_dtype)
# Packs two values into a tuple.
def pack(a, b):
a = xops.ReducePrecision(a, exponent_bits=nexp, mantissa_bits=nmant)
b = xops.ReducePrecision(b, exponent_bits=nexp, mantissa_bits=nmant)
a = xops.BitcastConvertType(a, word_type)
b = xops.BitcastConvertType(b, word_type)
b = xops.ShiftRightLogical(b, const(c, word_dtype, r_nbits))
return xops.Or(a, b)
# Unpacks the first element of a tuple.
def fst(c, t):
st = xops.And(t, const(c, word_dtype, ((1 << r_nbits) - 1) << r_nbits))
return xops.BitcastConvertType(st, etype)
# Unpacks the second element of a tuple.
def snd(t):
return xops.BitcastConvertType(
xops.ShiftLeft(t, const(c, word_dtype, r_nbits)), etype)
def reducer():
c = xc.XlaBuilder("select_and_gather_pair_reducer")
x = xla.parameter(c, 0,
xla_client.Shape.array_shape(np.dtype(double_word_dtype), ()))
y = xla.parameter(c, 1,
xla_client.Shape.array_shape(np.dtype(double_word_dtype), ()))
assert select_prim is lax.ge_p or select_prim is lax.le_p
which = xops.Ge if select_prim is lax.ge_p else xops.Le
xops.Select(which(fst(c, x), fst(c, y)), x, y)
return c.build()
assert select_prim is lax.ge_p or select_prim is lax.le_p, select_prim
init = -np.inf if select_prim is lax.ge_p else np.inf
out = xops.ReduceWindowWithGeneralPadding(
pack(operand, tangents), pack(const(c, dtype, init), const(c, dtype, 0)),
reducer(), window_dimensions, window_strides, base_dilation,
window_dilation, padding)
return [snd(out)]
# TODO(phawkins): use this translation rule on all platforms.
def _select_and_gather_add_using_variadic_reducewindow(
tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
def reducer(x, y):
kx, vx = x
ky, vy = y
which = select_prim.bind(kx, ky)
return (lax.select(which, kx, ky), lax.select(which, vx, vy))
assert select_prim is lax.ge_p or select_prim is lax.le_p, select_prim
init = -np.inf if select_prim is lax.ge_p else np.inf
_, out = reduce_window(
(operand, tangents),
(np.array(init, dtype=operand.dtype), np.array(0, dtype=operand.dtype)),
reducer, window_dimensions, window_strides, padding, base_dilation,
window_dilation)
return out
def _select_and_gather_add_jvp(
primals, tangents, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
source, operand = primals
g_source, g_operand = tangents
val_out = _select_and_gather_add(
source, operand, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation)
del g_operand
if type(g_source) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
tangent_out = _select_and_gather_add(
g_source, operand, select_prim, window_dimensions,
window_strides, padding, base_dilation, window_dilation)
return val_out, tangent_out
def _select_and_gather_add_transpose(
t, tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
assert select_prim in (lax.le_p, lax.ge_p)
assert (ad.is_undefined_primal(tangents) and
not ad.is_undefined_primal(operand))
if any(d != 1 for d in window_dilation):
msg = ("VJP not implemented for select_and_gather (MaxPool) with window "
"dilation, got window_dilation={}.")
raise NotImplementedError(msg.format(window_dilation))
if type(t) is ad_util.Zero:
return [ad_util.Zero(tangents.aval), None]
has_base_dilation = any(d != 1 for d in base_dilation)
if has_base_dilation:
select_identity = (lax._get_max_identity if select_prim is lax.ge_p
else lax._get_min_identity)
operand = lax.pad(operand, select_identity(operand.dtype),
tuple((0, 0, d - 1) for d in base_dilation))
result = _select_and_scatter_add(t, operand, select_prim, window_dimensions,
window_strides, padding)
if has_base_dilation:
result = slicing.slice(result, (0,) * len(result.shape), result.shape,
base_dilation)
return [result, None]
def _select_and_gather_add_batching_rule(
batched_args, batch_dims, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
t, x = batched_args
t_bdim, x_bdim = batch_dims
size = next(a.shape[bdim] for a, bdim in zip(batched_args, batch_dims)
if bdim is not None)
t = batching.bdim_at_front(t, t_bdim, size)
x = batching.bdim_at_front(x, x_bdim, size)
window_dimensions = (1,) + window_dimensions
window_strides = (1,) + window_strides
padding = ((0, 0),) + padding
base_dilation = (1,) + base_dilation
window_dilation = (1,) + window_dilation
out = _select_and_gather_add(t, x, select_prim, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
return (out, 0)
select_and_gather_add_p = lax.standard_primitive(
_select_and_gather_add_shape_rule, lax._input_dtype,
'select_and_gather_add',
xla.lower_fun(_select_and_gather_add_using_variadic_reducewindow,
new_style=True, multiple_results=False))
ad.primitive_jvps[select_and_gather_add_p] = _select_and_gather_add_jvp
ad.primitive_transposes[select_and_gather_add_p] = \
_select_and_gather_add_transpose
batching.primitive_batchers[select_and_gather_add_p] = \
_select_and_gather_add_batching_rule
# TODO(b/183233858): use variadic reducewindow on GPU, when implemented.
xla.register_translation(
select_and_gather_add_p,
_select_and_gather_add_translation,
platform='gpu')
mlir.register_lowering(select_and_gather_add_p, mlir.lower_fun(
_select_and_gather_add_using_variadic_reducewindow,
multiple_results=False))
mlir.register_lowering(
select_and_gather_add_p,
mlir.xla_fallback_lowering(select_and_gather_add_p),
platform="gpu")
| google/jax | jax/_src/lax/windowed_reductions.py | Python | apache-2.0 | 42,865 | 0.007652 |
"""
Task
Students of District College have subscriptions to English and French newspapers. Some students have subscribed only to English, some only to French, and some to both newspapers.
You are given two sets of roll numbers of students who have subscribed to the English and French newspapers. Your task is to find the total number of students who have subscribed to both newspapers.
Input Format
First line contains, number of students who have subscribed to English newspaper.
Second line contains, space separated list of roll numbers of students, who have subscribed to English newspaper.
Third line contains, number of students who have subscribed to French newspaper.
Fourth line contains, space separated list of roll numbers of students, who have subscribed to French newspaper.
Constraints
0 < Total number of students in college < 1000
Output Format
Output the total number of students who have subscriptions to both English and French newspapers.
Sample Input
9
1 2 3 4 5 6 7 8 9
9
10 1 2 3 11 21 55 6 8
Sample Output
5
Explanation
Roll numbers of students who have both subscriptions:
1, 2, 3, 6 and 8.
Hence, total is 5 students.
"""
n1 = int(raw_input())
english = set(map(int, raw_input().split()))
n2 = int(raw_input())
french = set(map(int, raw_input().split()))
print len(english.intersection(french))
| spradeepv/dive-into-python | hackerrank/domain/python/sets/intersection.py | Python | mit | 1,336 | 0.005988 |
#!/usr/bin/env python3
# -----------------------------------------------------------------------------
# semanalyser.py
#
# Author: Ahmad Alhour (aalhour.com).
# Date: TODO
# Description: The Semantic Analyser module. Implements Semantic Analysis and
# Type Checking.
# -----------------------------------------------------------------------------
"""
# Semantic Analysis
## Checks
1. All identifiers are declared.
2. Types.
3. Inheritance relationships.
4. Classes defined only once.
5. Methods in a class defined only once.
6. Reserved identifiers are not misused.
## Scope
### Identifier Bindings:
Cool Identifier Bindings are introduced by:
* Class declarations (introduce class names)
* Method definitions (introduce method names)
* Let expressions (introduce object id’s)
* Formal parameters (introduce object id’s)
* Attribute definitions (introduce object id’s)
* Case expressions (introduce object id’s)
### Class Definitions:
* Cannot be nested.
* Are globally visible throughout the program.
* Class names can be used before they are defined.
### Class Attributes:
* Attribute names are global within the class in which they are defined
### Class Methods:
* Method names have complex rules.
* A method need not be defined in the class in which it is used, but in some parent class.
* Methods may also be redefined (overridden).
## Type System
### Type Operations:
* Type Checking. The process of verifying fully typed programs
* Type Inference. The process of filling in missing type information
### Types in Cool:
1. Class names: Builtins (Int; String; Bool; Object; IO) and User Defined.
2. SELF_TYPE.
### Sub-Typing:
* Types can be thought of as sets of attributes and operations defined on these sets.
* All types are subtypes of the `Object` type.
* Types may also inherit from types other than `Object`.
* No type is allowed to inherit from `Int`, `Bool`, `String` or `SELF_TYPE`.
* All type relations can be thought of as a tree where `Object` is at the root and all other types branching down from
it, this is also called the `inheritance tree`.
* A least upper bound (`lub`) relation of two types is their least common ancestor in the inheritance tree.
* Subclasses only add attributes or methods.
* Methods can be redefined but with same type.
* All operations that can be used on type `C` can also be used on type `C'`, where `C'` <= `C`, meaning `C'` is a
subtype of `C`.
### Typing Methods:
* Method and Object identifiers live in different name spaces.
+ A method `foo` and an object `foo` can coexist in the same scope.
* Logically, Cool Type Checking needs the following 2 Type Environments:
+ `O`: a function providing mapping from types to Object Identifiers and vice versa.
+ `M`: a function providing mapping from types to Method Names and vice versa.
* Due to `SELF_TYPE`, we need to know the class name at all points of Type Checking methods.
+ `C`: a function providing the name of the current class (Type).
### SELF_TYPE:
`SELF_TYPE` is not a Dynamic Type, it is a Static Type.
`SELF_TYPE` is the type of the `self` parameter in an instance. In a method dispatch, `SELF_TYPE` might be a subtype of
the class in which the subject method appears.
#### Usage:
* `SELF_TYPE` can be used with `new T` expressions.
* `SELF_TYPE` can be used as the return type of class methods.
* `SELF_TYPE` can be used as the type of expressions (i.e. let expressions: `let x : T in expr`).
* `SELF_TYPE` can be used as the type of the actual arguments in a method dispatch.
* `SELF_TYPE` can **not** be used as the type of class attributes.
* `SELF_TYPE` can **not** be used with Static Dispatch (i.e. `T` in `m@T(expr1,...,exprN)`).
* `SELF_TYPE` can **not** be used as the type of Formal Parameters.
#### Least-Upper Bound Relations:
* `lub(SELF_TYPE.c, SELF_TYPE.c) = SELF_TYPE.c`.
* `lub(SELF_TYPE.c, T) = lub(C, T)`.
* `lub(T, SELF_TYPE.c) = lub(C, T)`.
## Semantic Analysis Passes
**[incomplete]**
1. Gather all class names.
2. Gather all identifier names.
3. Ensure no undeclared identifier is referenced.
4. Ensure no undeclared class is referenced.
5. Ensure all Scope Rules are satisfied (see: above).
6. Compute Types in a bottom-up pass over the AST.
## Error Recovery
Two solutions:
1. Assign the type `Object` to ill-typed expressions.
2. Introduce a new type called `No_Type` for use with ill-typed expressions.
Solution 1 is easy to implement and preserves the tree structure of the type inheritance and class hierarchies.
Solution 2 introduces further adjustments. First, every operation must be treated as defined for `No_Type`. Second,
the inheritance tree and class hierarchy change from Trees to Graphs: every expression is ultimately of type `Object`
or `No_Type`, so the whole representation becomes a graph with two roots.
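A minimal sketch of solution 1, where `infer_type` is a hypothetical type-inference helper shown only to illustrate
the fallback:
```python
def type_of(expr, env):
    try:
        return infer_type(expr, env)  # hypothetical: may raise on ill-typed input
    except SemanticAnalysisError:
        return "Object"  # solution 1: recover by assigning `Object`
```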
"""
from logging import info, debug, warning, critical
from collections import defaultdict
from typing import Dict, Set, AnyStr, Tuple
import pycoolc.ast as AST
# -----------------------------------------------------------------------------
#
# GLOBALS AND CONSTANTS
#
# -----------------------------------------------------------------------------
# Un-boxed Primitive Value Type
UNBOXED_PRIMITIVE_VALUE_TYPE = "__prim_slot"
IO_CLASS = "IO"
OBJECT_CLASS = "Object"
INTEGER_CLASS = "Int"
BOOLEAN_CLASS = "Bool"
STRING_CLASS = "String"
# -----------------------------------------------------------------------------
#
# HELPERS: Exceptions, Symbol Tables and Setup Methods
#
# -----------------------------------------------------------------------------
class SemanticAnalysisError(Exception):
pass
class SemanticAnalysisWarning(Warning):
pass
# -----------------------------------------------------------------------------
#
# MAIN SEMANTIC ANALYSER API CLASS
#
# -----------------------------------------------------------------------------
class PyCoolSemanticAnalyser(object):
def __init__(self):
"""
Initializes a new semantic analyser with an empty program AST, classes
map and inheritance graph.
:return: None
"""
super(PyCoolSemanticAnalyser, self).__init__()
# Initialize the internal program ast instance.
self._program_ast = None
# Classes Map: maps each class name (key: String) to its class instance (value: AST.Class).
# Dict[AnyStr, AST.Class]
self._classes_map = dict()
# Class Inheritance Graph: maps a parent class (key: String) to a unique collection of its
# children classes (value: set).
# Dict[AnyStr, Set]
self._inheritance_graph = defaultdict(set)
# #########################################################################
# PUBLIC #
# #########################################################################
def transform(self, program_ast: AST.Program) -> AST.Program:
"""
Runs the semantic analysis passes over the given program AST.
:param program_ast: an AST.Program instance to analyse.
:return: the analysed and transformed AST.Program instance.
"""
if program_ast is None:
raise ValueError("Program AST object cannot be None!")
elif not isinstance(program_ast, AST.Program):
raise TypeError("Program AST object is not of type \"AST.Program\"!")
self._init_collections(program_ast)
# Run some passes
self._default_undefined_parent_classes_to_object()
self._invalidate_inheritance_from_builtin_classes()
self._check_cyclic_inheritance_relations()
return self._program_ast
# #########################################################################
# PRIVATE #
# #########################################################################
def _init_collections(self, program_ast: AST.Program) -> None:
"""
Installs the builtin classes into the program AST, then builds the
classes map and the inheritance graph.
:param program_ast: an AST.Program instance.
:return: None
"""
# Install the builtin classes into the internal program_ast private AST instance.
self._program_ast = self._install_builtin_types_to_ast(program_ast)
# Build the inheritance graph and initialize the classes map.
self._classes_map, self._inheritance_graph = \
self._build_classes_map_and_inheritance_graph(self._program_ast)
@staticmethod
def _install_builtin_types_to_ast(program_ast: AST.Program) -> AST.Program:
"""
Initializes the COOL Builtin Classes: Object, IO, Int, Bool and String, and then adds them to the Program AST node.
:param program_ast: an AST.Program class instance, represents a COOL program AST.
:return: a new AST.Program class instance.
"""
global UNBOXED_PRIMITIVE_VALUE_TYPE, OBJECT_CLASS, IO_CLASS, INTEGER_CLASS, STRING_CLASS, BOOLEAN_CLASS
if program_ast is None:
raise SemanticAnalysisError("Program AST cannot be None.")
if not isinstance(program_ast, AST.Program):
raise SemanticAnalysisError("Expected argument to be of type AST.Program, but got {} instead.".
format(type(program_ast)))
# Object Class
object_class = AST.Class(name=OBJECT_CLASS, parent=None, features=[
# Abort method: halts the program.
AST.ClassMethod(name="abort", formal_params=[], return_type="Object", body=None),
# Copy method: copies the object.
AST.ClassMethod(name="copy", formal_params=[], return_type="SELF_TYPE", body=None),
# type_name method: returns a string representation of the class name.
AST.ClassMethod(name="type_name", formal_params=[], return_type="String", body=None)
])
# IO Class
io_class = AST.Class(name=IO_CLASS, parent="Object", features=[
# in_int: reads an integer from stdio
AST.ClassMethod(name="in_int", formal_params=[], return_type="Int", body=None),
# in_string: reads a string from stdio
AST.ClassMethod(name="in_string", formal_params=[], return_type="String", body=None),
# out_int: outputs an integer to stdio
AST.ClassMethod(name="out_int",
formal_params=[AST.FormalParameter("arg", "Int")],
return_type="SELF_TYPE",
body=None),
# out_string: outputs a string to stdio
AST.ClassMethod(name="out_string",
formal_params=[AST.FormalParameter("arg", "String")],
return_type="SELF_TYPE",
body=None)
])
# Int Class
int_class = AST.Class(name=INTEGER_CLASS, parent=object_class.name, features=[
# _val attribute: integer un-boxed value
AST.ClassAttribute(name="_val", attr_type=UNBOXED_PRIMITIVE_VALUE_TYPE, init_expr=None)
])
# Bool Class
bool_class = AST.Class(name=BOOLEAN_CLASS, parent=object_class.name, features=[
# _val attribute: boolean un-boxed value
AST.ClassAttribute(name="_val", attr_type=UNBOXED_PRIMITIVE_VALUE_TYPE, init_expr=None)
])
# String Class
string_class = AST.Class(name=STRING_CLASS, parent=object_class.name, features=[
# _val attribute: string length
AST.ClassAttribute(name='_val', attr_type='Int', init_expr=None),
# _str_field attribute: an un-boxed, untyped string value
AST.ClassAttribute('_str_field', UNBOXED_PRIMITIVE_VALUE_TYPE, None),
# length method: returns the string's length
AST.ClassMethod(name='length', formal_params=[], return_type='Int', body=None),
# concat method: concatenates this string with another
AST.ClassMethod(name='concat',
formal_params=[AST.FormalParameter('arg', 'String')],
return_type='String',
body=None),
# substr method: returns the substring between two integer indices
AST.ClassMethod(name='substr',
formal_params=[AST.FormalParameter('arg1', 'Int'), AST.FormalParameter('arg2', 'Int')],
return_type='String',
body=None)
])
# Built in classes collection
builtin_classes = (object_class, io_class, int_class, bool_class, string_class)
# All classes
all_classes = builtin_classes + program_ast.classes
return AST.Program(classes=all_classes)
@staticmethod
def _build_classes_map_and_inheritance_graph(program_ast: AST.Program) -> Tuple[Dict, Dict]:
"""
Builds a map of class names to their AST nodes, and a graph mapping each
parent class name to the set of its children's class names.
:param program_ast: an AST.Program instance.
:return: a (classes_map, inheritance_graph) tuple.
"""
global OBJECT_CLASS
if program_ast is None:
raise SemanticAnalysisError("Program AST cannot be None.")
if not isinstance(program_ast, AST.Program):
raise SemanticAnalysisError(
"Expected argument to be of type AST.Program, but got {} instead.".format(type(program_ast)))
classes_map = {}
inheritance_graph = defaultdict(set)
for klass in program_ast.classes:
if klass.name in classes_map:
raise SemanticAnalysisError("Class \"{}\" is already defined!".format(klass.name))
classes_map[klass.name] = klass
if klass.name == "Object":
continue
klass.parent = klass.parent if klass.parent else OBJECT_CLASS
inheritance_graph[klass.parent].add(klass.name)
return classes_map, inheritance_graph
def _traverse_inheritance_graph(self, starting_node: AnyStr, seen: Dict) -> bool:
"""
Depth-First Traversal of the Inheritance Graph.
:param starting_node: the class name to start the traversal from.
:param seen: a dict marking visited class names; mutated in place.
:return: True when the traversal completes.
"""
if seen is None:
seen = {}
seen[starting_node] = True
# If the starting node is not a parent class for any child classes, then return!
if starting_node not in self._inheritance_graph:
return True
# Traverse the children of the current node
for child_node in self._inheritance_graph[starting_node]:
self._traverse_inheritance_graph(starting_node=child_node, seen=seen)
return True
def _default_undefined_parent_classes_to_object(self):
"""
Re-parents every class whose declared parent class is undefined to
`Object`, warning about each undefined parent.
:return: None
"""
global OBJECT_CLASS
if not self._inheritance_graph or len(self._inheritance_graph) == 0:
warning("Inheritance Graph is empty!")
if not self._classes_map or len(self._classes_map) == 0:
warning("Classes Map is empty!")
# Assume self._inheritance_graph and self._classes_map are initialized
non_existing_parents = [
klass for klass in self._inheritance_graph.keys()
if klass not in self._classes_map and klass != OBJECT_CLASS
]
for parent_klass in non_existing_parents:
# Warn the user about this
warning(
"Found an undefined parent class: \"{0}\". Defaulting all its children's to the Object parent class."
.format(parent_klass))
# Add the child classes of this nonexisting parent class to the set of classes
# that inherit from the "Object" class.
self._inheritance_graph[OBJECT_CLASS] |= self._inheritance_graph[parent_klass]
# For every child class that inherits from the nonexisting parent, modify their
# parent attribute in their AST Node to have "Object" instead.
for child_klass in self._inheritance_graph[parent_klass]:
self._classes_map[child_klass].parent = OBJECT_CLASS
# Delete this nonexistent parent class from the inheritance map
del self._inheritance_graph[parent_klass]
def _invalidate_inheritance_from_builtin_classes(self):
"""
Raises an error if any class inherits from one of the builtin classes
`Int`, `String` or `Bool`.
:return: None
"""
if not self._inheritance_graph or len(self._inheritance_graph) == 0:
warning("Inheritance Graph is empty!")
if not self._classes_map or len(self._classes_map) == 0:
warning("Classes Map is empty!")
global INTEGER_CLASS, STRING_CLASS, BOOLEAN_CLASS
for parent_klass in [INTEGER_CLASS, STRING_CLASS, BOOLEAN_CLASS]:
for child_klass in self._inheritance_graph[parent_klass]:
raise SemanticAnalysisError(
"Not Allowed! Class \"{0}\" is inheriting from built-in class \"{1}\".".format(
child_klass, parent_klass))
def _check_cyclic_inheritance_relations(self):
"""
Raises an error if the inheritance relations form a cycle, i.e. if any
class is unreachable from `Object` in the inheritance graph.
:return: None
"""
global OBJECT_CLASS
# Mark all classes as not seen
seen = {class_name: False for class_name in self._classes_map.keys()}
# Perform a depth-first traversal of the inheritance graph, mutate the seen dict as you go.
self._traverse_inheritance_graph(OBJECT_CLASS, seen)
for class_name, was_seen in seen.items():
if not was_seen:
raise SemanticAnalysisError("Class \"{0}\" completes an inheritance cycle!".format(class_name))
# -----------------------------------------------------------------------------
#
# Semantic Analyser as a Standalone Python Program
# Usage: ./semanalyser.py cool_program.cl
#
# -----------------------------------------------------------------------------
def make_semantic_analyser(**kwargs):
"""
Utility function.
:return: PyCoolSemanticAnalyser object.
"""
return PyCoolSemanticAnalyser()
if __name__ == '__main__':
import sys
from pycoolc.parser import make_parser
from pycoolc.utils import print_readable_ast
if len(sys.argv) != 2:
print("Usage: ./semanalyser.py program.cl")
exit()
elif not str(sys.argv[1]).endswith(".cl"):
print("Cool program source code files must end with .cl extension.")
print("Usage: ./semanalyser.py program.cl")
exit()
input_file = sys.argv[1]
with open(input_file, encoding="utf-8") as file:
cool_program_code = file.read()
parser = make_parser()
parse_result = parser.parse(cool_program_code)
sema_analyser = make_semantic_analyser()
sema_result = sema_analyser.transform(parse_result)
print_readable_ast(sema_result)
| aalhour/PyCOOLC | pycoolc/semanalyser.py | Python | mit | 18,923 | 0.003701 |
"""Zwave util methods."""
import logging
from . import const
_LOGGER = logging.getLogger(__name__)
def check_node_schema(node, schema):
"""Check if node matches the passed node schema."""
if (const.DISC_NODE_ID in schema and
node.node_id not in schema[const.DISC_NODE_ID]):
_LOGGER.debug("node.node_id %s not in node_id %s",
node.node_id, schema[const.DISC_NODE_ID])
return False
if (const.DISC_GENERIC_DEVICE_CLASS in schema and
node.generic not in schema[const.DISC_GENERIC_DEVICE_CLASS]):
_LOGGER.debug("node.generic %s not in generic_device_class %s",
node.generic, schema[const.DISC_GENERIC_DEVICE_CLASS])
return False
if (const.DISC_SPECIFIC_DEVICE_CLASS in schema and
node.specific not in schema[const.DISC_SPECIFIC_DEVICE_CLASS]):
_LOGGER.debug("node.specific %s not in specific_device_class %s",
node.specific, schema[const.DISC_SPECIFIC_DEVICE_CLASS])
return False
return True
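# For illustration, a minimal (hypothetical) schema for these helpers might
# look like the following; the numeric values are made-up examples, not
# definitive Z-Wave constants:
#
#   schema = {
#       const.DISC_GENERIC_DEVICE_CLASS: [0x10],
#       const.DISC_COMMAND_CLASS: [0x25],
#   }
#
#   if check_node_schema(node, schema) and check_value_schema(value, schema):
#       ...  # the node/value pair matches and can be discovered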
def check_value_schema(value, schema):
"""Check if the value matches the passed value schema."""
if (const.DISC_COMMAND_CLASS in schema and
value.command_class not in schema[const.DISC_COMMAND_CLASS]):
_LOGGER.debug("value.command_class %s not in command_class %s",
value.command_class, schema[const.DISC_COMMAND_CLASS])
return False
if (const.DISC_TYPE in schema and
value.type not in schema[const.DISC_TYPE]):
_LOGGER.debug("value.type %s not in type %s",
value.type, schema[const.DISC_TYPE])
return False
if (const.DISC_GENRE in schema and
value.genre not in schema[const.DISC_GENRE]):
_LOGGER.debug("value.genre %s not in genre %s",
value.genre, schema[const.DISC_GENRE])
return False
if (const.DISC_READONLY in schema and
value.is_read_only is not schema[const.DISC_READONLY]):
_LOGGER.debug("value.is_read_only %s not %s",
value.is_read_only, schema[const.DISC_READONLY])
return False
if (const.DISC_WRITEONLY in schema and
value.is_write_only is not schema[const.DISC_WRITEONLY]):
_LOGGER.debug("value.is_write_only %s not %s",
value.is_write_only, schema[const.DISC_WRITEONLY])
return False
if (const.DISC_LABEL in schema and
value.label not in schema[const.DISC_LABEL]):
_LOGGER.debug("value.label %s not in label %s",
value.label, schema[const.DISC_LABEL])
return False
if (const.DISC_INDEX in schema and
value.index not in schema[const.DISC_INDEX]):
_LOGGER.debug("value.index %s not in index %s",
value.index, schema[const.DISC_INDEX])
return False
if (const.DISC_INSTANCE in schema and
value.instance not in schema[const.DISC_INSTANCE]):
_LOGGER.debug("value.instance %s not in instance %s",
value.instance, schema[const.DISC_INSTANCE])
return False
return True
| miniconfig/home-assistant | homeassistant/components/zwave/util.py | Python | mit | 3,178 | 0 |
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSCrypter import XFSCrypter, create_getInfo
class XFileSharingProFolder(XFSCrypter):
__name__ = "XFileSharingProFolder"
__type__ = "crypter"
__version__ = "0.14"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharingPro dummy folder decrypter plugin for hook"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def _log(self, level, plugintype, pluginname, messages):
return super(XFileSharingProFolder, self)._log(level,
plugintype,
"%s: %s" % (pluginname, self.PLUGIN_NAME),
messages)
def init(self):
super(XFileSharingProFolder, self).init()
self.__pattern__ = self.pyload.pluginManager.crypterPlugins[self.__name__]['pattern']
self.PLUGIN_DOMAIN = re.match(self.__pattern__, self.pyfile.url).group("DOMAIN").lower()
self.PLUGIN_NAME = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', self.PLUGIN_DOMAIN) if part != '.')
def _setup(self):
account_name = self.__name__ if not self.account or self.account.PLUGIN_DOMAIN is None else self.PLUGIN_NAME
self.chunk_limit = 1
self.multiDL = True
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
self.premium = self.account.premium
self.resume_download = self.premium
else:
self.req = self.pyload.requestFactory.getRequest(account_name)
self.premium = False
self.resume_download = False
def load_account(self):
if self.req:
self.req.close()
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.PLUGIN_NAME)
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
if self.account:
if not self.account.PLUGIN_DOMAIN:
self.account.PLUGIN_DOMAIN = self.PLUGIN_DOMAIN
if not self.account.user: #@TODO: Move to `Account` in 0.4.10
self.account.user = self.account.select()[0]
if not self.account.logged:
self.account = False
getInfo = create_getInfo(XFileSharingProFolder)
| joberreiter/pyload | module/plugins/crypter/XFileSharingProFolder.py | Python | gpl-3.0 | 2,857 | 0.011551 |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import call, Popen
from unittest import main, TestCase
from uuid import uuid4
from swiftclient import client
from swift.common import direct_client
from swift.common.exceptions import ClientException
from test.probe.common import kill_server, kill_servers, reset_environment, \
start_server
class TestObjectHandoff(TestCase):
def setUp(self):
(self.pids, self.port2server, self.account_ring, self.container_ring,
self.object_ring, self.url, self.token,
self.account, self.configs) = reset_environment()
def tearDown(self):
kill_servers(self.port2server, self.pids)
def test_main(self):
# Create container
# Kill one container/obj primary server
# Create container/obj (goes to two primary servers and one handoff)
# Kill other two container/obj primary servers
# Indirectly through proxy assert we can get container/obj
# Restart those other two container/obj primary servers
# Directly to handoff server assert we can get container/obj
# Assert container listing (via proxy and directly) has container/obj
# Bring the first container/obj primary server back up
# Assert that it doesn't have container/obj yet
# Run object replication, ensuring we run the handoff node last so it
# should remove its extra handoff partition
# Assert the first container/obj primary server now has container/obj
# Assert the handoff server no longer has container/obj
# Kill the first container/obj primary server again (we have two
# primaries and the handoff up now)
# Delete container/obj
# Assert we can't head container/obj
# Assert container/obj is not in the container listing, both indirectly
# and directly
# Restart the first container/obj primary server again
# Assert it still has container/obj
# Run object replication, ensuring we run the handoff node last so it
# should remove its extra handoff partition
# Assert primary node no longer has container/obj
container = 'container-%s' % uuid4()
client.put_container(self.url, self.token, container)
cpart, cnodes = self.container_ring.get_nodes(self.account, container)
cnode = cnodes[0]
obj = 'object-%s' % uuid4()
opart, onodes = self.object_ring.get_nodes(
self.account, container, obj)
onode = onodes[0]
kill_server(onode['port'], self.port2server, self.pids)
client.put_object(self.url, self.token, container, obj, 'VERIFY')
odata = client.get_object(self.url, self.token, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Object GET did not return VERIFY, instead it '
'returned: %s' % repr(odata))
# Kill all primaries to ensure GET handoff works
for node in onodes[1:]:
kill_server(node['port'], self.port2server, self.pids)
odata = client.get_object(self.url, self.token, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Object GET did not return VERIFY, instead it '
'returned: %s' % repr(odata))
for node in onodes[1:]:
start_server(node['port'], self.port2server, self.pids)
# We've indirectly verified the handoff node has the object, but let's
# directly verify it.
another_onode = self.object_ring.get_more_nodes(opart).next()
odata = direct_client.direct_get_object(
another_onode, opart, self.account, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Direct object GET did not return VERIFY, instead '
'it returned: %s' % repr(odata))
objs = [o['name'] for o in
client.get_container(self.url, self.token, container)[1]]
if obj not in objs:
raise Exception('Container listing did not know about object')
for cnode in cnodes:
objs = [o['name'] for o in
direct_client.direct_get_container(
cnode, cpart, self.account, container)[1]]
if obj not in objs:
raise Exception(
'Container server %s:%s did not know about object' %
(cnode['ip'], cnode['port']))
start_server(onode['port'], self.port2server, self.pids)
exc = None
try:
direct_client.direct_get_object(onode, opart, self.account,
container, obj)
except ClientException as err:
exc = err
self.assertEquals(exc.http_status, 404)
# Run the extra server last so it'll remove its extra partition
processes = []
for node in onodes:
try:
port_num = node['replication_port']
except KeyError:
port_num = node['port']
processes.append(Popen(['swift-object-replicator',
self.configs['object-replicator'] %
((port_num - 6000) / 10),
'once']))
for process in processes:
process.wait()
try:
another_port_num = another_onode['replication_port']
except KeyError:
another_port_num = another_onode['port']
call(['swift-object-replicator',
self.configs['object-replicator'] %
((another_port_num - 6000) / 10), 'once'])
odata = direct_client.direct_get_object(onode, opart, self.account,
container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Direct object GET did not return VERIFY, instead '
'it returned: %s' % repr(odata))
exc = None
try:
direct_client.direct_get_object(another_onode, opart, self.account,
container, obj)
except ClientException as err:
exc = err
self.assertEquals(exc.http_status, 404)
kill_server(onode['port'], self.port2server, self.pids)
client.delete_object(self.url, self.token, container, obj)
exc = None
try:
client.head_object(self.url, self.token, container, obj)
except client.ClientException as err:
exc = err
self.assertEquals(exc.http_status, 404)
objs = [o['name'] for o in
client.get_container(self.url, self.token, container)[1]]
if obj in objs:
raise Exception('Container listing still knew about object')
for cnode in cnodes:
objs = [o['name'] for o in
direct_client.direct_get_container(
cnode, cpart, self.account, container)[1]]
if obj in objs:
raise Exception(
'Container server %s:%s still knew about object' %
(cnode['ip'], cnode['port']))
start_server(onode['port'], self.port2server, self.pids)
direct_client.direct_get_object(onode, opart, self.account, container,
obj)
# Run the extra server last so it'll remove its extra partition
processes = []
for node in onodes:
try:
port_num = node['replication_port']
except KeyError:
port_num = node['port']
processes.append(Popen(['swift-object-replicator',
self.configs['object-replicator'] %
((port_num - 6000) / 10),
'once']))
for process in processes:
process.wait()
call(['swift-object-replicator',
self.configs['object-replicator'] %
((another_port_num - 6000) / 10), 'once'])
exc = None
try:
direct_client.direct_get_object(another_onode, opart, self.account,
container, obj)
except ClientException as err:
exc = err
self.assertEquals(exc.http_status, 404)
if __name__ == '__main__':
main()
| gotostack/swift | test/probe/test_object_handoff.py | Python | apache-2.0 | 9,005 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
dependencies = [
('ccx', '0017_auto_20170721_0437'),
]
operations = [
migrations.AlterField(
model_name='customcourseforedx',
name='time',
field=models.DateTimeField(default=datetime.datetime(2017, 7, 21, 6, 10, 51, 471098)),
),
]
| mbareta/edx-platform-ft | lms/djangoapps/ccx/migrations/0018_auto_20170721_0611.py | Python | agpl-3.0 | 472 | 0.002119 |
#!/usr/bin/env python
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
"""Run an ssh agent and set SSH_AUTH_SOCK so that clients will use it
Example:
with ssh_agent.SshAgent() as agent:
agent.add_key(private_key_string)
# do ssh stuff
# as agent loses scope, the ssh agent is killed
"""
from __future__ import with_statement
import atexit
import tempfile
import os
import sys
import shutil
import subprocess
import random
import time
import datetime
import json
class SshAgentException(Exception):
"""An exception thrown for problems in SshAgent
"""
def __init__(self, message):
# Call the base class constructor with the parameters it needs
super(SshAgentException, self).__init__(message)
class SshAgent(object):
"""Run an ssh agent and set SSH_AUTH_SOCK so that clients will use it.
The running agent can have one or more keys added (via the SshAgent.add_key()
method or via any other method that can find and talk to the running agent.
"""
class Cleanup(object):
"""A helper functor class for SshAgent
An object of this class can be passed
directly to atexit, which will call __call__() when the
program exits
"""
def __init__(self, ssh_agent, ssh_auth_sock_dir):
self.ssh_agent = ssh_agent
self.ssh_auth_sock_dir = ssh_auth_sock_dir
self.cleaned_up = False
self.original_env_var = os.environ.get('SSH_AUTH_SOCK')
def __call__(self):
if self.cleaned_up:
return
self.cleaned_up = True
try:
shutil.rmtree(self.ssh_auth_sock_dir, ignore_errors=True)
except OSError:
pass
try:
self.ssh_agent.kill()
except OSError:
pass
if self.original_env_var:
os.environ['SSH_AUTH_SOCK'] = self.original_env_var
else:
del os.environ['SSH_AUTH_SOCK']
def pass_(self):
"""A function to appease pylint"""
pass
def pass__(self):
"""Another function to appease pylint"""
self.pass_()
def __init__(self):
devnull = open(os.devnull, 'w')
# Start an ssh-agent process and register it to be killed atexit
self.ssh_auth_sock_dir = tempfile.mkdtemp(prefix=os.path.basename(sys.argv[0]) + '.')
self.ssh_auth_sock = os.path.join(self.ssh_auth_sock_dir, "ssh_agent")
self.ssh_agent = subprocess.Popen(["ssh-agent", "-d", "-a", self.ssh_auth_sock], stdout=devnull, stderr=devnull)
self.cleanup = self.Cleanup(self.ssh_agent, self.ssh_auth_sock_dir)
# this is here so that when python exits, we make sure that the agent is killed
# (in case python exits before our __del__() is called
atexit.register(self.cleanup)
os.environ["SSH_AUTH_SOCK"] = self.ssh_auth_sock
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tback):
self.cleanup()
def __del__(self):
self.cleanup()
def kill(self):
'''Explicitly kill the running ssh-agent
It's not necessary to call this function as the agent
will be cleaned up automatically.
'''
self.cleanup()
def add_key(self, key):
"""Add a key to the running agent.
Note:
This function can be called any number of times to add multiple keys.
Args:
key (str): A string containing the ssh private key to be added (the
actual key data, not the filename of a key)
Raises:
SshAgentException: when ssh-add does not immediately return (as in the
case of a private key with a passphrase)
"""
#if self.ssh_agent.poll() is None:
# raise SshAgentException("Unable to add ssh key. Did agent die?")
named_pipe_path = os.path.join(self.ssh_auth_sock_dir, "keypipe." + str(random.getrandbits(64)))
try:
os.mkfifo(named_pipe_path, 0600)
except OSError, exception:
print "Failed to create FIFO: %s" % exception
devnull = open(os.devnull, 'w')
ssh_add = subprocess.Popen(["ssh-add", named_pipe_path], stdout=devnull, stderr=devnull)
fifo = open(named_pipe_path, 'w')
print >> fifo, key
fifo.close()
#Popen.wait() doesn't have a timeout, so we'll implement one using poll() :(
start_time = datetime.datetime.now()
while ssh_add.poll() is None:
if (datetime.datetime.now() - start_time).total_seconds() > 5:
try:
ssh_add.kill()
except OSError:
pass
raise SshAgentException("Unable to add ssh key. Timed out. Does key have a passphrase?")
time.sleep(0.1)
os.remove(named_pipe_path)
# pylint: disable=too-many-lines
# these are already imported inside of the ssh library
#import os
#import subprocess
class GitCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class GitCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
path,
verbose=False,
ssh_key=None,
author=None):
''' Constructor for GitCLI '''
self.path = path
self.verbose = verbose
self.ssh_key = ssh_key
self.author = author
self.environment_vars = os.environ.copy()
if self.author:
author_dict = {}
author_list = author.split('<')
author_dict['GIT_COMMITTER_NAME'] = author_list[0].strip()
if len(author_list) > 1:
    author_dict['GIT_COMMITTER_EMAIL'] = author_list[1].strip().rstrip('>')
self.environment_vars.update(author_dict)
def _add(self, files_to_add=None):
''' git add '''
cmd = ["add", "--no-ignore-removal"]
if files_to_add:
cmd.extend(files_to_add)
else:
cmd.append('.')
results = self.git_cmd(cmd)
return results
def _commit(self, msg, author=None):
''' git commit with message '''
cmd = ["commit", "-m", msg]
if author:
cmd += ["--author", author]
results = self.git_cmd(cmd)
return results
def _clone(self, repo, dest, bare=False):
''' git clone '''
cmd = ["clone"]
if bare:
cmd += ["--bare"]
cmd += [repo, dest]
results = self.git_cmd(cmd)
return results
def _status(self, porcelain=False, show_untracked=True):
''' Do a git status '''
cmd = ["status"]
if porcelain:
cmd.append('--porcelain')
if show_untracked:
cmd.append('--untracked-files=normal')
else:
cmd.append('--untracked-files=no')
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _checkout(self, branch):
''' Do a git checkout to <branch> '''
cmd = ["checkout", branch]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _get_current_branch(self):
''' Do a git checkout to <branch> '''
cmd = ["describe", "--contains", "--all", "HEAD"]
results = self.git_cmd(cmd, output=True, output_type='raw')
results['results'] = results['results'].rstrip()
return results
def _merge(self, merge_id):
''' Do a git checkout to <branch> '''
cmd = ["merge", merge_id]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _push(self, remote, src_branch, dest_branch):
''' Do a git checkout to <branch> '''
push_branches = src_branch + ":" + dest_branch
cmd = ["push", remote, push_branches]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _remote_update(self):
''' Do a git remote update '''
cmd = ["remote", "update"]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _diff(self, diff_branch):
''' Do a git diff diff_branch'''
cmd = ["diff", diff_branch]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _rebase(self, rebase_branch):
''' Do a git rebase rebase_branch'''
cmd = ["rebase", rebase_branch]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def _config(self, get_args):
''' Do a git config --get <get_args> '''
cmd = ["config", '--get', get_args]
results = self.git_cmd(cmd, output=True, output_type='raw')
return results
def git_cmd(self, cmd, output=False, output_type='json'):
'''Base command for git '''
cmds = ['/usr/bin/git']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
if self.ssh_key:
with SshAgent() as agent:
self.environment_vars['SSH_AUTH_SOCK'] = os.environ['SSH_AUTH_SOCK']
agent.add_key(self.ssh_key)
proc = subprocess.Popen(cmds,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.environment_vars)
stdout, stderr = proc.communicate()
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
else:
proc = subprocess.Popen(cmds,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.environment_vars)
stdout, stderr = proc.communicate()
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print stdout
print stderr
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds
})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {},
})
return rval
class GitRebase(GitCLI):
''' Class to wrap the git merge line tools
'''
# pylint: disable=too-many-arguments
def __init__(self,
path,
branch,
rebase_branch,
ssh_key=None):
''' Constructor for GitPush '''
super(GitRebase, self).__init__(path, ssh_key=ssh_key)
self.path = path
self.branch = branch
self.rebase_branch = rebase_branch
self.debug = []
os.chdir(path)
def checkout_branch(self):
''' check out the desired branch '''
current_branch_results = self._get_current_branch()
if current_branch_results['results'] == self.branch:
return True
current_branch_results = self._checkout(self.branch)
self.debug.append(current_branch_results)
if current_branch_results['returncode'] == 0:
return True
return False
def remote_update(self):
''' update the git remotes '''
remote_update_results = self._remote_update()
self.debug.append(remote_update_results)
if remote_update_results['returncode'] == 0:
return True
return False
def need_rebase(self):
''' checks to see if rebase is needed '''
git_diff_results = self._diff(self.rebase_branch)
self.debug.append(git_diff_results)
if git_diff_results['results']:
return True
return False
def rebase(self):
'''perform a git push '''
if self.checkout_branch():
if self.remote_update():
if self.need_rebase():
rebase_results = self._rebase(self.rebase_branch)
rebase_results['debug'] = self.debug
return rebase_results
else:
return {'returncode': 0,
'results': {},
'no_rebase_needed': True
}
return {'returncode': 1,
'results': {},
'debug': self.debug
}
def main():
'''
ansible git module for rebasing
'''
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', type='str', choices=['present']),
path=dict(default=None, required=True, type='str'),
branch=dict(default=None, required=True, type='str'),
rebase_branch=dict(default=None, required=True, type='str'),
ssh_key=dict(default=None, required=False, type='str'),
),
supports_check_mode=False,
)
git = GitRebase(module.params['path'],
module.params['branch'],
module.params['rebase_branch'],
module.params['ssh_key'])
state = module.params['state']
if state == 'present':
results = git.rebase()
if results['returncode'] != 0:
module.fail_json(msg=results)
if results.has_key('no_rebase_needed'):
module.exit_json(changed=False, results=results, state="present")
module.exit_json(changed=True, results=results, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
if __name__ == '__main__':
from ansible.module_utils.basic import *
main()
| joelsmith/openshift-tools | ansible/roles/lib_git/library/git_rebase.py | Python | apache-2.0 | 15,173 | 0.002504 |
from __future__ import print_function
import logging, re, os
import data, parser, util
from pymake.globrelative import hasglob, glob
from pymake import errors
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
_log = logging.getLogger('pymake.data')
_tabwidth = 4
class Location(object):
"""
A location within a makefile.
For the moment, locations are just path/line/column, but in the future
they may reference parent locations for more accurate "included from"
or "evaled at" error reporting.
"""
__slots__ = ('path', 'line', 'column')
def __init__(self, path, line, column):
self.path = path
self.line = line
self.column = column
def offset(self, s, start, end):
"""
Returns a new location offset by
the specified string.
"""
if start == end:
return self
skiplines = s.count('\n', start, end)
line = self.line + skiplines
if skiplines:
lastnl = s.rfind('\n', start, end)
assert lastnl != -1
start = lastnl + 1
column = 0
else:
column = self.column
while True:
j = s.find('\t', start, end)
if j == -1:
column += end - start
break
column += j - start
column += _tabwidth
column -= column % _tabwidth
start = j + 1
return Location(self.path, line, column)
def __str__(self):
return "%s:%s:%s" % (self.path, self.line, self.column)
def _expandwildcards(makefile, tlist):
for t in tlist:
if not hasglob(t):
yield t
else:
l = glob(makefile.workdir, t)
for r in l:
yield r
_flagescape = re.compile(r'([\s\\])')
def parsecommandlineargs(args):
"""
Given a set of arguments from a command-line invocation of make,
parse out the variable definitions and return (stmts, arglist, overridestr)
"""
overrides = []
stmts = StatementList()
r = []
for i in range(0, len(args)):
a = args[i]
vname, t, val = util.strpartition(a, ':=')
if t == '':
vname, t, val = util.strpartition(a, '=')
if t != '':
overrides.append(_flagescape.sub(r'\\\1', a))
vname = vname.strip()
vnameexp = data.Expansion.fromstring(vname, "Command-line argument")
stmts.append(ExportDirective(vnameexp, concurrent_set=True))
stmts.append(SetVariable(vnameexp, token=t,
value=val, valueloc=Location('<command-line>', i, len(vname) + len(t)),
targetexp=None, source=data.Variables.SOURCE_COMMANDLINE))
else:
r.append(data.stripdotslash(a))
return stmts, r, ' '.join(overrides)
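# For illustration (hypothetical invocation, not part of the original module):
# parsecommandlineargs(['FOO=bar', 'all']) returns a StatementList holding an
# ExportDirective and a SetVariable for FOO, the target list ['all'], and the
# override string 'FOO=bar'.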
class Statement(object):
"""
Represents parsed make file syntax.
This is an abstract base class. Child classes are expected to implement
basic methods defined below.
"""
def execute(self, makefile, context):
"""Executes this Statement within a make file execution context."""
raise Exception("%s must implement execute()." % self.__class__)
def to_source(self):
"""Obtain the make file "source" representation of the Statement.
This converts an individual Statement back to a string that can again
be parsed into this Statement.
"""
raise Exception("%s must implement to_source()." % self.__class__)
def __eq__(self, other):
raise Exception("%s must implement __eq__." % self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
class DummyRule(object):
__slots__ = ()
def addcommand(self, r):
pass
class Rule(Statement):
"""
Rules represent how to make specific targets.
See https://www.gnu.org/software/make/manual/make.html#Rules.
An individual rule is composed of a target, dependencies, and a recipe.
This class only contains references to the first 2. The recipe will be
contained in Command classes which follow this one in a stream of Statement
instances.
Instances also contain a boolean property `doublecolon` which says whether
this is a doublecolon rule. Doublecolon rules are rules that are always
executed, if they are evaluated. Normally, rules are only executed if their
target is out of date.
"""
__slots__ = ('targetexp', 'depexp', 'doublecolon')
def __init__(self, targetexp, depexp, doublecolon):
assert isinstance(targetexp, (data.Expansion, data.StringExpansion))
assert isinstance(depexp, (data.Expansion, data.StringExpansion))
self.targetexp = targetexp
self.depexp = depexp
self.doublecolon = doublecolon
def execute(self, makefile, context):
if context.weak:
self._executeweak(makefile, context)
else:
self._execute(makefile, context)
def _executeweak(self, makefile, context):
"""
If the context is weak (we're just handling dependencies) we can make a number of assumptions here.
This lets us go really fast and is generally good.
"""
assert context.weak
deps = self.depexp.resolvesplit(makefile, makefile.variables)
# Skip targets with no rules and no dependencies
if not deps:
return
targets = data.stripdotslashes(self.targetexp.resolvesplit(makefile, makefile.variables))
rule = data.Rule(list(data.stripdotslashes(deps)), self.doublecolon, loc=self.targetexp.loc, weakdeps=True)
for target in targets:
makefile.gettarget(target).addrule(rule)
makefile.foundtarget(target)
context.currule = rule
def _execute(self, makefile, context):
assert not context.weak
atargets = data.stripdotslashes(self.targetexp.resolvesplit(makefile, makefile.variables))
targets = [data.Pattern(p) for p in _expandwildcards(makefile, atargets)]
if not len(targets):
context.currule = DummyRule()
return
ispatterns = set((t.ispattern() for t in targets))
if len(ispatterns) == 2:
raise errors.DataError("Mixed implicit and normal rule", self.targetexp.loc)
ispattern, = ispatterns
deps = list(_expandwildcards(makefile, data.stripdotslashes(self.depexp.resolvesplit(makefile, makefile.variables))))
if ispattern:
prerequisites = [data.Pattern(d) for d in deps]
rule = data.PatternRule(targets, prerequisites, self.doublecolon, loc=self.targetexp.loc)
makefile.appendimplicitrule(rule)
else:
rule = data.Rule(deps, self.doublecolon, loc=self.targetexp.loc, weakdeps=False)
for t in targets:
makefile.gettarget(t.gettarget()).addrule(rule)
makefile.foundtarget(targets[0].gettarget())
context.currule = rule
def dump(self, fd, indent):
print("%sRule %s: %s" % (indent, self.targetexp, self.depexp), file=fd)
def to_source(self):
sep = ':'
if self.doublecolon:
sep = '::'
deps = self.depexp.to_source()
if len(deps) > 0 and not deps[0].isspace():
sep += ' '
return '\n%s%s%s' % (
self.targetexp.to_source(escape_variables=True),
sep,
deps)
def __eq__(self, other):
if not isinstance(other, Rule):
return False
return self.targetexp == other.targetexp \
and self.depexp == other.depexp \
and self.doublecolon == other.doublecolon
class StaticPatternRule(Statement):
"""
Static pattern rules are rules which specify multiple targets based on a
string pattern.
See https://www.gnu.org/software/make/manual/make.html#Static-Pattern
They are like `Rule` instances except an added property, `patternexp` is
present. It contains the Expansion which represents the rule pattern.
"""
__slots__ = ('targetexp', 'patternexp', 'depexp', 'doublecolon')
def __init__(self, targetexp, patternexp, depexp, doublecolon):
assert isinstance(targetexp, (data.Expansion, data.StringExpansion))
assert isinstance(patternexp, (data.Expansion, data.StringExpansion))
assert isinstance(depexp, (data.Expansion, data.StringExpansion))
self.targetexp = targetexp
self.patternexp = patternexp
self.depexp = depexp
self.doublecolon = doublecolon
def execute(self, makefile, context):
if context.weak:
raise errors.DataError("Static pattern rules not allowed in includedeps", self.targetexp.loc)
targets = list(_expandwildcards(makefile, data.stripdotslashes(self.targetexp.resolvesplit(makefile, makefile.variables))))
if not len(targets):
context.currule = DummyRule()
return
patterns = list(data.stripdotslashes(self.patternexp.resolvesplit(makefile, makefile.variables)))
if len(patterns) != 1:
raise errors.DataError("Static pattern rules must have a single pattern", self.patternexp.loc)
pattern = data.Pattern(patterns[0])
deps = [data.Pattern(p) for p in _expandwildcards(makefile, data.stripdotslashes(self.depexp.resolvesplit(makefile, makefile.variables)))]
rule = data.PatternRule([pattern], deps, self.doublecolon, loc=self.targetexp.loc)
for t in targets:
if data.Pattern(t).ispattern():
raise errors.DataError("Target '%s' of a static pattern rule must not be a pattern" % (t,), self.targetexp.loc)
stem = pattern.match(t)
if stem is None:
raise errors.DataError("Target '%s' does not match the static pattern '%s'" % (t, pattern), self.targetexp.loc)
makefile.gettarget(t).addrule(data.PatternRuleInstance(rule, '', stem, pattern.ismatchany()))
makefile.foundtarget(targets[0])
context.currule = rule
def dump(self, fd, indent):
print("%sStaticPatternRule %s: %s: %s" % (indent, self.targetexp, self.patternexp, self.depexp), file=fd)
def to_source(self):
sep = ':'
if self.doublecolon:
sep = '::'
pattern = self.patternexp.to_source()
deps = self.depexp.to_source()
if len(pattern) > 0 and pattern[0] not in (' ', '\t'):
sep += ' '
return '\n%s%s%s:%s' % (
self.targetexp.to_source(escape_variables=True),
sep,
pattern,
deps)
def __eq__(self, other):
if not isinstance(other, StaticPatternRule):
return False
return self.targetexp == other.targetexp \
and self.patternexp == other.patternexp \
and self.depexp == other.depexp \
and self.doublecolon == other.doublecolon
class Command(Statement):
"""
Commands are things that get executed by a rule.
A rule's recipe is composed of 0 or more Commands.
A command is simply an expansion. Commands typically represent strings to
be executed in a shell (e.g. via system()). Although, since make files
allow arbitrary shells to be used for command execution, this isn't a
guarantee.
"""
__slots__ = ('exp',)
def __init__(self, exp):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
def execute(self, makefile, context):
assert context.currule is not None
if context.weak:
raise errors.DataError("rules not allowed in includedeps", self.exp.loc)
context.currule.addcommand(self.exp)
def dump(self, fd, indent):
print("%sCommand %s" % (indent, self.exp,), file=fd)
def to_source(self):
# Commands have some interesting quirks when it comes to source
# formatting. First, they can be multi-line. Second, a tab needs to be
# inserted at the beginning of every line. Finally, there might be
# variable references inside the command. This means we need to escape
# variable references inside command strings. Luckily, this is handled
# by the Expansion.
s = self.exp.to_source(escape_variables=True)
return '\n'.join(['\t%s' % line for line in s.split('\n')])
def __eq__(self, other):
if not isinstance(other, Command):
return False
return self.exp == other.exp
class SetVariable(Statement):
"""
Represents a variable assignment.
Variable assignment comes in two different flavors.
Simple assignment has the form:
<Expansion> <Assignment Token> <string>
e.g. FOO := bar
These correspond to the fields `vnameexp`, `token`, and `value`. In
addition, `valueloc` will be a Location and `source` will be a
pymake.data.Variables.SOURCE_* constant.
There are also target-specific variables. These are variables that only
apply in the context of a specific target. They are like the aforementioned
assignment except the `targetexp` field is set to an Expansion representing
the target they apply to.
"""
__slots__ = ('vnameexp', 'token', 'value', 'valueloc', 'targetexp', 'source')
def __init__(self, vnameexp, token, value, valueloc, targetexp, source=None):
assert isinstance(vnameexp, (data.Expansion, data.StringExpansion))
assert isinstance(value, str)
assert targetexp is None or isinstance(targetexp, (data.Expansion, data.StringExpansion))
if source is None:
source = data.Variables.SOURCE_MAKEFILE
self.vnameexp = vnameexp
self.token = token
self.value = value
self.valueloc = valueloc
self.targetexp = targetexp
self.source = source
def execute(self, makefile, context):
vname = self.vnameexp.resolvestr(makefile, makefile.variables)
if len(vname) == 0:
raise errors.DataError("Empty variable name", self.vnameexp.loc)
if self.targetexp is None:
setvariables = [makefile.variables]
else:
setvariables = []
targets = [data.Pattern(t) for t in data.stripdotslashes(self.targetexp.resolvesplit(makefile, makefile.variables))]
for t in targets:
if t.ispattern():
setvariables.append(makefile.getpatternvariables(t))
else:
setvariables.append(makefile.gettarget(t.gettarget()).variables)
for v in setvariables:
if self.token == '+=':
v.append(vname, self.source, self.value, makefile.variables, makefile)
continue
if self.token == '?=':
flavor = data.Variables.FLAVOR_RECURSIVE
oldflavor, oldsource, oldval = v.get(vname, expand=False)
if oldval is not None:
continue
value = self.value
elif self.token == '=':
flavor = data.Variables.FLAVOR_RECURSIVE
value = self.value
else:
assert self.token == ':='
flavor = data.Variables.FLAVOR_SIMPLE
d = parser.Data.fromstring(self.value, self.valueloc)
e, t, o = parser.parsemakesyntax(d, 0, (), parser.iterdata)
value = e.resolvestr(makefile, makefile.variables)
v.set(vname, flavor, self.source, value)
def dump(self, fd, indent):
print("%sSetVariable<%s> %s %s\n%s %r" % (indent, self.valueloc, self.vnameexp, self.token, indent, self.value), file=fd)
def __eq__(self, other):
if not isinstance(other, SetVariable):
return False
return self.vnameexp == other.vnameexp \
and self.token == other.token \
and self.value == other.value \
and self.targetexp == other.targetexp \
and self.source == other.source
def to_source(self):
chars = []
for i in range(0, len(self.value)):
c = self.value[i]
# Literal # is escaped in variable assignment otherwise it would be
# a comment.
if c == '#':
# If a backslash precedes this, we need to escape it as well.
if i > 0 and self.value[i-1] == '\\':
chars.append('\\')
chars.append('\\#')
continue
chars.append(c)
value = ''.join(chars)
prefix = ''
if self.source == data.Variables.SOURCE_OVERRIDE:
prefix = 'override '
# SetVariable come in two flavors: simple and target-specific.
# We handle the target-specific syntax first.
if self.targetexp is not None:
return '%s: %s %s %s' % (
self.targetexp.to_source(),
self.vnameexp.to_source(),
self.token,
value)
# The variable could be multi-line or have leading whitespace. For
# regular variable assignment, whitespace after the token but before
# the value is ignored. If we see leading whitespace in the value here,
# the variable must have come from a define.
if value.count('\n') > 0 or (len(value) and value[0].isspace()):
# The parser holds the token in vnameexp for whatever reason.
return '%sdefine %s\n%s\nendef' % (
prefix,
self.vnameexp.to_source(),
value)
return '%s%s %s %s' % (
prefix,
self.vnameexp.to_source(),
self.token,
value)
class Condition(object):
"""
An abstract "condition", either ifeq or ifdef, perhaps negated.
See https://www.gnu.org/software/make/manual/make.html#Conditional-Syntax
Subclasses must implement:
def evaluate(self, makefile)
"""
def __eq__(self, other):
raise Exception("%s must implement __eq__." % __class__)
def __ne__(self, other):
return not self.__eq__(other)
class EqCondition(Condition):
"""
Represents an ifeq or ifneq conditional directive.
This directive consists of two Expansions which are compared for equality.
The `expected` field is a bool indicating what the condition must evaluate
to in order for its body to be executed. If True, this is an "ifeq"
conditional directive. If False, an "ifneq."
"""
__slots__ = ('exp1', 'exp2', 'expected')
def __init__(self, exp1, exp2):
assert isinstance(exp1, (data.Expansion, data.StringExpansion))
assert isinstance(exp2, (data.Expansion, data.StringExpansion))
self.expected = True
self.exp1 = exp1
self.exp2 = exp2
def evaluate(self, makefile):
r1 = self.exp1.resolvestr(makefile, makefile.variables)
r2 = self.exp2.resolvestr(makefile, makefile.variables)
return (r1 == r2) == self.expected
def __str__(self):
return "ifeq (expected=%s) %s %s" % (self.expected, self.exp1, self.exp2)
def __eq__(self, other):
if not isinstance(other, EqCondition):
return False
return self.exp1 == other.exp1 \
and self.exp2 == other.exp2 \
and self.expected == other.expected
class IfdefCondition(Condition):
"""
Represents an ifdef or ifndef conditional directive.
This directive consists of a single expansion which represents the name of
a variable (without the leading '$') which will be checked for definition.
The `expected` field is a bool and has the same behavior as EqCondition.
If it is True, this represents a "ifdef" conditional. If False, "ifndef."
"""
__slots__ = ('exp', 'expected')
def __init__(self, exp):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
self.expected = True
def evaluate(self, makefile):
vname = self.exp.resolvestr(makefile, makefile.variables)
flavor, source, value = makefile.variables.get(vname, expand=False)
if value is None:
return not self.expected
return (len(value) > 0) == self.expected
def __str__(self):
return "ifdef (expected=%s) %s" % (self.expected, self.exp)
def __eq__(self, other):
if not isinstance(other, IfdefCondition):
return False
return self.exp == other.exp and self.expected == other.expected
class ElseCondition(Condition):
"""
Represents the transition between branches in a ConditionBlock.
"""
__slots__ = ()
def evaluate(self, makefile):
return True
def __str__(self):
return "else"
def __eq__(self, other):
return isinstance(other, ElseCondition)
class ConditionBlock(Statement):
"""
A set of related Conditions.
This is essentially a list of 2-tuples of (Condition, list(Statement)).
The parser creates a ConditionBlock for all statements related to the same
conditional group. If iterating over the parser's output, where you think
you would see an ifeq, you will see a ConditionBlock containing an IfEq. In
other words, the parser collapses separate statements into this container
class.
ConditionBlock instances may exist within other ConditionBlock if the
conditional logic is multiple levels deep.
"""
__slots__ = ('loc', '_groups')
def __init__(self, loc, condition):
self.loc = loc
self._groups = []
self.addcondition(loc, condition)
def getloc(self):
return self.loc
def addcondition(self, loc, condition):
assert isinstance(condition, Condition)
condition.loc = loc
if len(self._groups) and isinstance(self._groups[-1][0], ElseCondition):
raise errors.SyntaxError("Multiple else conditions for block starting at %s" % self.loc, loc)
self._groups.append((condition, StatementList()))
def append(self, statement):
self._groups[-1][1].append(statement)
def execute(self, makefile, context):
i = 0
for c, statements in self._groups:
if c.evaluate(makefile):
_log.debug("Condition at %s met by clause #%i", self.loc, i)
statements.execute(makefile, context)
return
i += 1
def dump(self, fd, indent):
print("%sConditionBlock" % (indent,), file=fd)
indent2 = indent + ' '
for c, statements in self._groups:
print("%s Condition %s" % (indent, c), file=fd)
statements.dump(fd, indent2)
print("%s ~Condition" % (indent,), file=fd)
print("%s~ConditionBlock" % (indent,), file=fd)
def to_source(self):
lines = []
index = 0
for condition, statements in self:
lines.append(ConditionBlock.condition_source(condition, index))
index += 1
for statement in statements:
lines.append(statement.to_source())
lines.append('endif')
return '\n'.join(lines)
def __eq__(self, other):
if not isinstance(other, ConditionBlock):
return False
if len(self) != len(other):
return False
for i in range(0, len(self)):
our_condition, our_statements = self[i]
other_condition, other_statements = other[i]
if our_condition != other_condition:
return False
if our_statements != other_statements:
return False
return True
@staticmethod
def condition_source(statement, index):
"""Convert a condition to its source representation.
The index argument defines the index of this condition inside a
ConditionBlock. If it is greater than 0, an "else" will be prepended
to the result, if necessary.
"""
prefix = ''
if isinstance(statement, (EqCondition, IfdefCondition)) and index > 0:
prefix = 'else '
if isinstance(statement, IfdefCondition):
s = statement.exp.s
if statement.expected:
return '%sifdef %s' % (prefix, s)
return '%sifndef %s' % (prefix, s)
if isinstance(statement, EqCondition):
args = [
statement.exp1.to_source(escape_comments=True),
statement.exp2.to_source(escape_comments=True)]
use_quotes = False
single_quote_present = False
double_quote_present = False
for i, arg in enumerate(args):
if len(arg) > 0 and (arg[0].isspace() or arg[-1].isspace()):
use_quotes = True
if "'" in arg:
single_quote_present = True
if '"' in arg:
double_quote_present = True
# Quote everything if needed.
if single_quote_present and double_quote_present:
raise Exception('Cannot format condition with multiple quotes.')
if use_quotes:
for i, arg in enumerate(args):
# Double to single quotes.
if single_quote_present:
args[i] = '"' + arg + '"'
else:
args[i] = "'" + arg + "'"
body = None
if use_quotes:
body = ' '.join(args)
else:
body = '(%s)' % ','.join(args)
if statement.expected:
return '%sifeq %s' % (prefix, body)
return '%sifneq %s' % (prefix, body)
if isinstance(statement, ElseCondition):
return 'else'
raise Exception('Unhandled Condition statement: %s' %
statement.__class__)
def __iter__(self):
return iter(self._groups)
def __len__(self):
return len(self._groups)
def __getitem__(self, i):
return self._groups[i]
class Include(Statement):
"""
Represents the include directive.
See https://www.gnu.org/software/make/manual/make.html#Include
The file to be included is represented by the Expansion defined in the
field `exp`. `required` is a bool indicating whether execution should fail
if the specified file could not be processed.
"""
    __slots__ = ('exp', 'required', 'deps', 'weak')
def __init__(self, exp, required, weak):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
self.required = required
self.weak = weak
def execute(self, makefile, context):
files = self.exp.resolvesplit(makefile, makefile.variables)
for f in files:
makefile.include(f, self.required, loc=self.exp.loc, weak=self.weak)
def dump(self, fd, indent):
print("%sInclude %s" % (indent, self.exp), file=fd)
def to_source(self):
prefix = ''
if not self.required:
prefix = '-'
return '%sinclude %s' % (prefix, self.exp.to_source())
def __eq__(self, other):
if not isinstance(other, Include):
return False
return self.exp == other.exp and self.required == other.required
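# Round-trip sketch derived from Include.to_source above: a required include
# renders as "include <exp>" and an optional one as "-include <exp>". Note
# that `weak` is not reflected in the rendered source.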
class VPathDirective(Statement):
"""
Represents the vpath directive.
See https://www.gnu.org/software/make/manual/make.html#Selective-Search
"""
__slots__ = ('exp',)
def __init__(self, exp):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
def execute(self, makefile, context):
words = list(data.stripdotslashes(self.exp.resolvesplit(makefile, makefile.variables)))
if len(words) == 0:
makefile.clearallvpaths()
else:
pattern = data.Pattern(words[0])
mpaths = words[1:]
if len(mpaths) == 0:
makefile.clearvpath(pattern)
else:
dirs = []
for mpath in mpaths:
dirs.extend((dir for dir in mpath.split(os.pathsep)
if dir != ''))
if len(dirs):
makefile.addvpath(pattern, dirs)
def dump(self, fd, indent):
print("%sVPath %s" % (indent, self.exp), file=fd)
def to_source(self):
return 'vpath %s' % self.exp.to_source()
def __eq__(self, other):
if not isinstance(other, VPathDirective):
return False
return self.exp == other.exp
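# Make-level inputs handled by VPathDirective.execute, traced from its
# branches (sketch; assumes a POSIX host where os.pathsep is ':'):
#   vpath %.c src:../src  -> makefile.addvpath(Pattern('%.c'), ['src', '../src'])
#   vpath %.c             -> makefile.clearvpath(Pattern('%.c'))
#   vpath                 -> makefile.clearallvpaths()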
class ExportDirective(Statement):
"""
Represents the "export" directive.
This is used to control exporting variables to sub makes.
See https://www.gnu.org/software/make/manual/make.html#Variables_002fRecursion
The `concurrent_set` field defines whether this statement occurred with or
without a variable assignment. If False, no variable assignment was
present. If True, the SetVariable immediately following this statement
originally came from this export directive (the parser splits it into
multiple statements).
"""
__slots__ = ('exp', 'concurrent_set')
def __init__(self, exp, concurrent_set):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
self.concurrent_set = concurrent_set
def execute(self, makefile, context):
if self.concurrent_set:
vlist = [self.exp.resolvestr(makefile, makefile.variables)]
else:
vlist = list(self.exp.resolvesplit(makefile, makefile.variables))
if not len(vlist):
raise errors.DataError("Exporting all variables is not supported", self.exp.loc)
for v in vlist:
makefile.exportedvars[v] = True
def dump(self, fd, indent):
print("%sExport (single=%s) %s" % (indent, self.single, self.exp), file=fd)
def to_source(self):
return ('export %s' % self.exp.to_source()).rstrip()
def __eq__(self, other):
if not isinstance(other, ExportDirective):
return False
        # concurrent_set is irrelevant because it just says whether the next
        # Statement contains a variable definition.
return self.exp == other.exp
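# Behavioural sketch of ExportDirective.execute, derived from the code above:
#   export FOO BAR   (concurrent_set=False) -> exportedvars['FOO'] = exportedvars['BAR'] = True
#   export FOO = bar (concurrent_set=True)  -> exportedvars['FOO'] = True; the
#                                              assignment itself arrives as the
#                                              following SetVariable statement
#   export           (empty expansion)      -> errors.DataError, since exporting
#                                              all variables is unsupported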
class UnexportDirective(Statement):
"""
Represents the "unexport" directive.
This is the opposite of ExportDirective.
"""
__slots__ = ('exp',)
def __init__(self, exp):
self.exp = exp
def execute(self, makefile, context):
vlist = list(self.exp.resolvesplit(makefile, makefile.variables))
for v in vlist:
makefile.exportedvars[v] = False
def dump(self, fd, indent):
print("%sUnexport %s" % (indent, self.exp), file=fd)
def to_source(self):
return 'unexport %s' % self.exp.to_source()
def __eq__(self, other):
if not isinstance(other, UnexportDirective):
return False
return self.exp == other.exp
class EmptyDirective(Statement):
"""
Represents a standalone statement, usually an Expansion.
You will encounter EmptyDirective instances if there is a function
or similar at the top-level of a make file (e.g. outside of a rule or
variable assignment). You can also find them as the bodies of
ConditionBlock branches.
"""
__slots__ = ('exp',)
def __init__(self, exp):
assert isinstance(exp, (data.Expansion, data.StringExpansion))
self.exp = exp
def execute(self, makefile, context):
v = self.exp.resolvestr(makefile, makefile.variables)
if v.strip() != '':
raise errors.DataError("Line expands to non-empty value", self.exp.loc)
def dump(self, fd, indent):
print("%sEmptyDirective: %s" % (indent, self.exp), file=fd)
def to_source(self):
return self.exp.to_source()
def __eq__(self, other):
if not isinstance(other, EmptyDirective):
return False
return self.exp == other.exp
class _EvalContext(object):
__slots__ = ('currule', 'weak')
def __init__(self, weak):
self.weak = weak
class StatementList(list):
"""
A list of Statement instances.
This is what is generated by the parser when a make file is parsed.
Consumers can iterate over all Statement instances in this collection to
statically inspect (and even modify) make files before they are executed.
"""
__slots__ = ('mtime',)
def append(self, statement):
assert isinstance(statement, Statement)
list.append(self, statement)
def execute(self, makefile, context=None, weak=False):
if context is None:
context = _EvalContext(weak=weak)
for s in self:
s.execute(makefile, context)
def dump(self, fd, indent):
for s in self:
s.dump(fd, indent)
def __str__(self):
fd = StringIO()
self.dump(fd, '')
return fd.getvalue()
def to_source(self):
return '\n'.join([s.to_source() for s in self])
def iterstatements(stmts):
for s in stmts:
yield s
if isinstance(s, ConditionBlock):
for c, sl in s:
                for s2 in iterstatements(sl): yield s2
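# Minimal static-inspection sketch per the StatementList docstring (assumes
# pymake's parser module with its parsestring() helper; names illustrative):
#
#   from pymake import parser
#   stmts = parser.parsestring('ifdef FOO\nX := 1\nendif\n', 'Makefile')
#   for s in iterstatements(stmts):
#       print(type(s).__name__, s.to_source())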
| mozilla/pymake | pymake/parserdata.py | Python | mit | 33,522 | 0.002357 |
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Herald HTTP beans definition
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.0.3
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (0, 0, 3)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Herald HTTP
from . import ACCESS_ID
# Standard library
import functools
# ------------------------------------------------------------------------------
@functools.total_ordering
class HTTPAccess(object):
"""
Description of an HTTP access
"""
def __init__(self, host, port, path):
"""
Sets up the access
:param host: HTTP server host
:param port: HTTP server port
:param path: Path to the Herald service
"""
# Normalize path
        if path.startswith('/'):
path = path[1:]
self.__host = host
self.__port = int(port)
self.__path = path
def __hash__(self):
"""
Hash is based on the access tuple
"""
return hash(self.access)
def __eq__(self, other):
"""
        Equality based on the (host, port, path) access tuple
"""
if isinstance(other, HTTPAccess):
return self.access == other.access
return False
def __lt__(self, other):
"""
        Ordering based on the (host, port, path) access tuple
"""
if isinstance(other, HTTPAccess):
return self.access < other.access
return False
def __str__(self):
"""
String representation
"""
return "http://{0}:{1}/{2}".format(self.__host, self.__port,
self.__path)
@property
def access_id(self):
"""
Retrieves the access ID associated to this kind of access
"""
return ACCESS_ID
@property
def access(self):
"""
Returns the access to the peer as a 3-tuple (host, port, path)
"""
return self.__host, self.__port, self.__path
@property
def address(self):
"""
Returns the address of the HTTP server to access the peer (host, port)
"""
return self.__host, self.__port
@property
def host(self):
"""
Retrieves the host address of the associated peer
"""
return self.__host
@property
def port(self):
"""
Retrieves the host port of the associated peer
"""
return self.__port
@property
def path(self):
"""
Retrieves the path to the Herald service
"""
return self.__path
def dump(self):
"""
Returns the content to store in a directory dump to describe this
access
"""
return self.access
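# Minimal usage sketch (not part of the original module):
#
#   access = HTTPAccess("localhost", 8080, "/herald")
#   str(access)    -> "http://localhost:8080/herald"
#   access.access  -> ("localhost", 8080, "herald")
#
# Note that __init__ strips the leading '/' while __str__ re-inserts it.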
| librallu/cohorte-herald | python/herald/transports/http/beans.py | Python | apache-2.0 | 3,526 | 0 |
import sys
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
build_exe_options = {"packages": ["math", "json"], "excludes": ["tkinter"]}
# GUI applications require a different base on Windows; the default base used
# here yields a console application.
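# Sketch of the GUI variant mentioned above (not used in this build, where the
# console default is kept):
#   base = "Win32GUI" if sys.platform == "win32" else None
#   Executable("ball_follower.py", base=base)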
setup( name = "liac-soccer",
version = "1.0.0",
description = "",
options = {"build_exe": build_exe_options},
executables = [Executable("ball_follower.py")]) | renatopp/liac-soccer | clients/python/setup.py | Python | mit | 495 | 0.024242 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'res/designer/simulations/biomeng321lab1.ui'
#
# Created: Fri Mar 4 13:11:44 2016
# by: pyside-uic 0.2.15 running on PySide 1.2.1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Biomeng321Lab1(object):
def setupUi(self, shared_opengl_widget, Biomeng321Lab1):
Biomeng321Lab1.setObjectName("Biomeng321Lab1")
Biomeng321Lab1.resize(1066, 907)
self.gridLayout_3 = QtGui.QGridLayout(Biomeng321Lab1)
self.gridLayout_3.setObjectName("gridLayout_3")
self.groupBox = QtGui.QGroupBox(Biomeng321Lab1)
self.groupBox.setObjectName("groupBox")
self.gridLayout_4 = QtGui.QGridLayout(self.groupBox)
self.gridLayout_4.setObjectName("gridLayout_4")
self.groupBox_9 = QtGui.QGroupBox(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(2)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_9.sizePolicy().hasHeightForWidth())
self.groupBox_9.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(10)
font.setWeight(75)
font.setBold(True)
self.groupBox_9.setFont(font)
self.groupBox_9.setObjectName("groupBox_9")
self.gridLayout_2 = QtGui.QGridLayout(self.groupBox_9)
self.gridLayout_2.setObjectName("gridLayout_2")
self.groupBox_6 = QtGui.QGroupBox(self.groupBox_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_6.setFont(font)
self.groupBox_6.setObjectName("groupBox_6")
self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox_6)
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtGui.QLabel(self.groupBox_6)
font = QtGui.QFont()
font.setPointSize(12)
font.setWeight(75)
font.setBold(True)
self.label.setFont(font)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.label_2 = QtGui.QLabel(self.groupBox_6)
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.tableWidgetDeformationGradient = QtGui.QTableWidget(self.groupBox_6)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidgetDeformationGradient.sizePolicy().hasHeightForWidth())
self.tableWidgetDeformationGradient.setSizePolicy(sizePolicy)
self.tableWidgetDeformationGradient.setObjectName("tableWidgetDeformationGradient")
self.tableWidgetDeformationGradient.setColumnCount(0)
self.tableWidgetDeformationGradient.setRowCount(0)
self.tableWidgetDeformationGradient.horizontalHeader().setVisible(False)
self.tableWidgetDeformationGradient.verticalHeader().setVisible(False)
self.horizontalLayout.addWidget(self.tableWidgetDeformationGradient)
self.gridLayout_2.addWidget(self.groupBox_6, 0, 0, 1, 1)
self.groupBox_2 = QtGui.QGroupBox(self.groupBox_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_2.setFont(font)
self.groupBox_2.setObjectName("groupBox_2")
self.formLayout = QtGui.QFormLayout(self.groupBox_2)
self.formLayout.setObjectName("formLayout")
self.label_3 = QtGui.QLabel(self.groupBox_2)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_3)
self.lineEditInvariant1 = QtGui.QLineEdit(self.groupBox_2)
self.lineEditInvariant1.setObjectName("lineEditInvariant1")
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.lineEditInvariant1)
self.label_4 = QtGui.QLabel(self.groupBox_2)
self.label_4.setObjectName("label_4")
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_4)
self.lineEditInvariant2 = QtGui.QLineEdit(self.groupBox_2)
self.lineEditInvariant2.setObjectName("lineEditInvariant2")
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.lineEditInvariant2)
self.label_5 = QtGui.QLabel(self.groupBox_2)
self.label_5.setObjectName("label_5")
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_5)
self.lineEditInvariant3 = QtGui.QLineEdit(self.groupBox_2)
self.lineEditInvariant3.setObjectName("lineEditInvariant3")
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.lineEditInvariant3)
self.gridLayout_2.addWidget(self.groupBox_2, 0, 1, 1, 1)
self.groupBox_7 = QtGui.QGroupBox(self.groupBox_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_7.setFont(font)
self.groupBox_7.setObjectName("groupBox_7")
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.groupBox_7)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_7 = QtGui.QLabel(self.groupBox_7)
font = QtGui.QFont()
font.setPointSize(12)
font.setWeight(75)
font.setBold(True)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.horizontalLayout_2.addWidget(self.label_7)
self.label_8 = QtGui.QLabel(self.groupBox_7)
self.label_8.setObjectName("label_8")
self.horizontalLayout_2.addWidget(self.label_8)
self.tableWidgetRightCauchyGreenDeformation = QtGui.QTableWidget(self.groupBox_7)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidgetRightCauchyGreenDeformation.sizePolicy().hasHeightForWidth())
self.tableWidgetRightCauchyGreenDeformation.setSizePolicy(sizePolicy)
self.tableWidgetRightCauchyGreenDeformation.setObjectName("tableWidgetRightCauchyGreenDeformation")
self.tableWidgetRightCauchyGreenDeformation.setColumnCount(0)
self.tableWidgetRightCauchyGreenDeformation.setRowCount(0)
self.tableWidgetRightCauchyGreenDeformation.horizontalHeader().setVisible(False)
self.tableWidgetRightCauchyGreenDeformation.verticalHeader().setVisible(False)
self.horizontalLayout_2.addWidget(self.tableWidgetRightCauchyGreenDeformation)
self.gridLayout_2.addWidget(self.groupBox_7, 1, 0, 1, 1)
self.groupBox_5 = QtGui.QGroupBox(self.groupBox_9)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_5.setFont(font)
self.groupBox_5.setObjectName("groupBox_5")
self.gridLayout_6 = QtGui.QGridLayout(self.groupBox_5)
self.gridLayout_6.setObjectName("gridLayout_6")
self.label_9 = QtGui.QLabel(self.groupBox_5)
font = QtGui.QFont()
font.setPointSize(12)
font.setWeight(75)
font.setBold(True)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.gridLayout_6.addWidget(self.label_9, 0, 0, 1, 1)
self.tableWidgetGreenLagrangeStrain = QtGui.QTableWidget(self.groupBox_5)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidgetGreenLagrangeStrain.sizePolicy().hasHeightForWidth())
self.tableWidgetGreenLagrangeStrain.setSizePolicy(sizePolicy)
self.tableWidgetGreenLagrangeStrain.setObjectName("tableWidgetGreenLagrangeStrain")
self.tableWidgetGreenLagrangeStrain.setColumnCount(0)
self.tableWidgetGreenLagrangeStrain.setRowCount(0)
self.tableWidgetGreenLagrangeStrain.horizontalHeader().setVisible(False)
self.tableWidgetGreenLagrangeStrain.verticalHeader().setVisible(False)
self.gridLayout_6.addWidget(self.tableWidgetGreenLagrangeStrain, 0, 2, 1, 1)
self.label_10 = QtGui.QLabel(self.groupBox_5)
self.label_10.setObjectName("label_10")
self.gridLayout_6.addWidget(self.label_10, 0, 1, 1, 1)
self.gridLayout_2.addWidget(self.groupBox_5, 1, 1, 1, 1)
self.gridLayout_4.addWidget(self.groupBox_9, 0, 0, 1, 1)
self.widgetSceneviewer = SceneviewerWidget(self.groupBox, shared_opengl_widget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(3)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widgetSceneviewer.sizePolicy().hasHeightForWidth())
self.widgetSceneviewer.setSizePolicy(sizePolicy)
self.widgetSceneviewer.setObjectName("widgetSceneviewer")
self.gridLayout_4.addWidget(self.widgetSceneviewer, 0, 1, 2, 1)
self.groupBox_10 = QtGui.QGroupBox(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(2)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_10.sizePolicy().hasHeightForWidth())
self.groupBox_10.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(10)
font.setWeight(75)
font.setBold(True)
self.groupBox_10.setFont(font)
self.groupBox_10.setObjectName("groupBox_10")
self.gridLayout = QtGui.QGridLayout(self.groupBox_10)
self.gridLayout.setObjectName("gridLayout")
self.groupBox_3 = QtGui.QGroupBox(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName("groupBox_3")
self.formLayout_2 = QtGui.QFormLayout(self.groupBox_3)
self.formLayout_2.setObjectName("formLayout_2")
self.label_6 = QtGui.QLabel(self.groupBox_3)
self.label_6.setObjectName("label_6")
self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_6)
self.lineEditHydrostaticPressure = QtGui.QLineEdit(self.groupBox_3)
self.lineEditHydrostaticPressure.setObjectName("lineEditHydrostaticPressure")
self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.lineEditHydrostaticPressure)
self.gridLayout.addWidget(self.groupBox_3, 0, 0, 1, 1)
self.groupBox_8 = QtGui.QGroupBox(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_8.setFont(font)
self.groupBox_8.setObjectName("groupBox_8")
self.gridLayout_5 = QtGui.QGridLayout(self.groupBox_8)
self.gridLayout_5.setObjectName("gridLayout_5")
self.label_11 = QtGui.QLabel(self.groupBox_8)
font = QtGui.QFont()
font.setPointSize(12)
font.setWeight(75)
font.setBold(True)
self.label_11.setFont(font)
self.label_11.setObjectName("label_11")
self.gridLayout_5.addWidget(self.label_11, 0, 0, 1, 1)
self.tableWidgetSecondPiolaKirchoffStress = QtGui.QTableWidget(self.groupBox_8)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidgetSecondPiolaKirchoffStress.sizePolicy().hasHeightForWidth())
self.tableWidgetSecondPiolaKirchoffStress.setSizePolicy(sizePolicy)
self.tableWidgetSecondPiolaKirchoffStress.setObjectName("tableWidgetSecondPiolaKirchoffStress")
self.tableWidgetSecondPiolaKirchoffStress.setColumnCount(0)
self.tableWidgetSecondPiolaKirchoffStress.setRowCount(0)
self.tableWidgetSecondPiolaKirchoffStress.horizontalHeader().setVisible(False)
self.tableWidgetSecondPiolaKirchoffStress.verticalHeader().setVisible(False)
self.gridLayout_5.addWidget(self.tableWidgetSecondPiolaKirchoffStress, 0, 2, 1, 1)
self.label_12 = QtGui.QLabel(self.groupBox_8)
self.label_12.setObjectName("label_12")
self.gridLayout_5.addWidget(self.label_12, 0, 1, 1, 1)
self.gridLayout.addWidget(self.groupBox_8, 1, 0, 1, 1)
self.groupBox_4 = QtGui.QGroupBox(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(8)
font.setWeight(50)
font.setBold(False)
self.groupBox_4.setFont(font)
self.groupBox_4.setObjectName("groupBox_4")
self.gridLayout_7 = QtGui.QGridLayout(self.groupBox_4)
self.gridLayout_7.setObjectName("gridLayout_7")
self.tableWidgetCauchyStress = QtGui.QTableWidget(self.groupBox_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidgetCauchyStress.sizePolicy().hasHeightForWidth())
self.tableWidgetCauchyStress.setSizePolicy(sizePolicy)
self.tableWidgetCauchyStress.setObjectName("tableWidgetCauchyStress")
self.tableWidgetCauchyStress.setColumnCount(0)
self.tableWidgetCauchyStress.setRowCount(0)
self.tableWidgetCauchyStress.horizontalHeader().setVisible(False)
self.tableWidgetCauchyStress.verticalHeader().setVisible(False)
self.gridLayout_7.addWidget(self.tableWidgetCauchyStress, 0, 2, 1, 1)
self.label_13 = QtGui.QLabel(self.groupBox_4)
font = QtGui.QFont()
font.setPointSize(12)
font.setWeight(75)
font.setBold(True)
self.label_13.setFont(font)
self.label_13.setTextFormat(QtCore.Qt.PlainText)
self.label_13.setObjectName("label_13")
self.gridLayout_7.addWidget(self.label_13, 0, 0, 1, 1)
self.label_14 = QtGui.QLabel(self.groupBox_4)
self.label_14.setObjectName("label_14")
self.gridLayout_7.addWidget(self.label_14, 0, 1, 1, 1)
self.gridLayout.addWidget(self.groupBox_4, 1, 1, 1, 1)
self.gridLayout_4.addWidget(self.groupBox_10, 1, 0, 1, 1)
self.gridLayout_3.addWidget(self.groupBox, 0, 0, 1, 1)
self.retranslateUi(Biomeng321Lab1)
QtCore.QMetaObject.connectSlotsByName(Biomeng321Lab1)
def retranslateUi(self, Biomeng321Lab1):
Biomeng321Lab1.setWindowTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Biomeng321 Lab1", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Solution", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_9.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Strain Analysis", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_6.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Deformation Gradient Tensor", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Biomeng321Lab1", "F", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("Biomeng321Lab1", "=", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Invariants", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("Biomeng321Lab1", "I1:", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("Biomeng321Lab1", "I2:", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setText(QtGui.QApplication.translate("Biomeng321Lab1", "I3:", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_7.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Right Cauchy-Green Deformation Tensor", None, QtGui.QApplication.UnicodeUTF8))
self.label_7.setText(QtGui.QApplication.translate("Biomeng321Lab1", "C", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setText(QtGui.QApplication.translate("Biomeng321Lab1", "=", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_5.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Green-Lagrange Strain Tensor", None, QtGui.QApplication.UnicodeUTF8))
self.label_9.setText(QtGui.QApplication.translate("Biomeng321Lab1", "E", None, QtGui.QApplication.UnicodeUTF8))
self.label_10.setText(QtGui.QApplication.translate("Biomeng321Lab1", "=", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_10.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Stress Analysis", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Hydrostatic Pressure", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setText(QtGui.QApplication.translate("Biomeng321Lab1", "Value:", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_8.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Second Piola-Kirchhoff Stress Tensor", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setText(QtGui.QApplication.translate("Biomeng321Lab1", "T", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setText(QtGui.QApplication.translate("Biomeng321Lab1", "=", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_4.setTitle(QtGui.QApplication.translate("Biomeng321Lab1", "Cauchy Stress Tensor", None, QtGui.QApplication.UnicodeUTF8))
self.label_13.setText(QtGui.QApplication.translate("Biomeng321Lab1", "sigma", None, QtGui.QApplication.UnicodeUTF8))
self.label_14.setText(QtGui.QApplication.translate("Biomeng321Lab1", "=", None, QtGui.QApplication.UnicodeUTF8))
from opencmiss.neon.ui.zincwidgets.sceneviewerwidget import SceneviewerWidget
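# Typical wiring for a pyside-uic generated class (illustrative sketch; the
# widget names are assumptions, and the extra shared_opengl_widget argument
# is specific to this customised setupUi):
#
#   window = QtGui.QWidget()
#   ui = Ui_Biomeng321Lab1()
#   ui.setupUi(shared_opengl_widget, window)
#   window.show()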
| OpenCMISS/neon | src/opencmiss/neon/ui/simulations/ui_biomeng321lab1.py | Python | apache-2.0 | 18,237 | 0.003784 |
"""
Test for Nest climate platform for the Smart Device Management API.
These tests fake out the subscriber/devicemanager, and are not using a real
pubsub subscriber.
"""
from google_nest_sdm.device import Device
from google_nest_sdm.event import EventMessage
import pytest
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODES,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_LOW,
FAN_OFF,
FAN_ON,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_ECO,
PRESET_NONE,
PRESET_SLEEP,
)
from homeassistant.const import ATTR_TEMPERATURE
from .common import async_setup_sdm_platform
from tests.components.climate import common
PLATFORM = "climate"
async def setup_climate(hass, raw_traits=None, auth=None):
"""Load Nest climate devices."""
devices = None
if raw_traits:
traits = raw_traits
traits["sdm.devices.traits.Info"] = {"customName": "My Thermostat"}
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": "sdm.devices.types.Thermostat",
"traits": traits,
},
auth=auth,
),
}
return await async_setup_sdm_platform(hass, PLATFORM, devices)
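# The tests below share one pattern (sketch): build a fake device from raw
# SDM traits, read the resulting climate entity state, and, where live
# updates matter, push a simulated pubsub EventMessage through the returned
# subscriber:
#
#   subscriber = await setup_climate(hass, traits, auth=auth)
#   await subscriber.async_receive_event(event)
#   await hass.async_block_till_done()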
async def test_no_devices(hass):
"""Test no devices returned by the api."""
await setup_climate(hass)
assert len(hass.states.async_all()) == 0
async def test_climate_devices(hass):
"""Test no eligible climate devices returned by the api."""
await setup_climate(hass, {"sdm.devices.traits.CameraImage": {}})
assert len(hass.states.async_all()) == 0
async def test_thermostat_off(hass):
"""Test a thermostat that is not running."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
async def test_thermostat_heat(hass):
"""Test a thermostat that is heating."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "HEATING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEAT",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 22.0,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_HEAT
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] == 22.0
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
async def test_thermostat_cool(hass):
"""Test a thermostat that is cooling."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "COOLING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "COOL",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 29.9,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"coolCelsius": 28.0,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_COOL
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 29.9
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] == 28.0
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
async def test_thermostat_heatcool(hass):
"""Test a thermostat that is cooling in heatcool mode."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "COOLING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 29.9,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 22.0,
"coolCelsius": 28.0,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_COOL
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 29.9
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] == 22.0
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] == 28.0
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
async def test_thermostat_eco_off(hass):
"""Test a thermostat cooling with eco off."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "COOLING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["MANUAL_ECO", "OFF"],
"mode": "OFF",
"heatCelsius": 20.0,
"coolCelsius": 22.0,
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 29.9,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 22.0,
"coolCelsius": 28.0,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_COOL
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 29.9
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] == 22.0
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] == 28.0
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_NONE
assert thermostat.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_NONE]
async def test_thermostat_eco_on(hass):
"""Test a thermostat in eco mode."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "COOLING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["MANUAL_ECO", "OFF"],
"mode": "MANUAL_ECO",
"heatCelsius": 21.0,
"coolCelsius": 29.0,
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 29.9,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 22.0,
"coolCelsius": 28.0,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_COOL
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 29.9
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] == 21.0
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] == 29.0
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_ECO
assert thermostat.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_NONE]
async def test_thermostat_eco_heat_only(hass):
"""Test a thermostat in eco mode that only supports heat."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "OFF",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "OFF"],
"mode": "HEAT",
},
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["MANUAL_ECO", "OFF"],
"mode": "MANUAL_ECO",
"heatCelsius": 21.0,
"coolCelsius": 29.0,
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 29.9,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 29.9
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] == 21.0
assert ATTR_TARGET_TEMP_LOW not in thermostat.attributes
assert ATTR_TARGET_TEMP_HIGH not in thermostat.attributes
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_ECO
assert thermostat.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_NONE]
async def test_thermostat_set_hvac_mode(hass, auth):
"""Test a thermostat changing hvac modes."""
subscriber = await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
await common.async_set_hvac_mode(hass, HVAC_MODE_HEAT)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatMode.SetMode",
"params": {"mode": "HEAT"},
}
# Local state does not reflect the update
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
# Simulate pubsub message when mode changes
event = EventMessage(
{
"eventId": "some-event-id",
"timestamp": "2019-01-01T00:00:01Z",
"resourceUpdate": {
"name": "some-device-id",
"traits": {
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEAT",
},
},
},
},
auth=None,
)
await subscriber.async_receive_event(event)
await hass.async_block_till_done() # Process dispatch/update signal
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
# Simulate pubsub message when the thermostat starts heating
event = EventMessage(
{
"eventId": "some-event-id",
"timestamp": "2019-01-01T00:00:01Z",
"resourceUpdate": {
"name": "some-device-id",
"traits": {
"sdm.devices.traits.ThermostatHvac": {
"status": "HEATING",
},
},
},
},
auth=None,
)
await subscriber.async_receive_event(event)
await hass.async_block_till_done() # Process dispatch/update signal
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_HEAT
async def test_thermostat_invalid_hvac_mode(hass, auth):
"""Test setting an hvac_mode that is not supported."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
with pytest.raises(ValueError):
await common.async_set_hvac_mode(hass, HVAC_MODE_DRY)
await hass.async_block_till_done()
assert thermostat.state == HVAC_MODE_OFF
assert auth.method is None # No communication with API
async def test_thermostat_set_eco_preset(hass, auth):
"""Test a thermostat put into eco mode."""
subscriber = await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["MANUAL_ECO", "OFF"],
"mode": "OFF",
"heatCelsius": 15.0,
"coolCelsius": 28.0,
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_NONE
# Turn on eco mode
await common.async_set_preset_mode(hass, PRESET_ECO)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatEco.SetMode",
"params": {"mode": "MANUAL_ECO"},
}
# Local state does not reflect the update
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_NONE
# Simulate pubsub message when mode changes
event = EventMessage(
{
"eventId": "some-event-id",
"timestamp": "2019-01-01T00:00:01Z",
"resourceUpdate": {
"name": "some-device-id",
"traits": {
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "MANUAL_ECO",
"heatCelsius": 15.0,
"coolCelsius": 28.0,
},
},
},
},
auth=auth,
)
await subscriber.async_receive_event(event)
await hass.async_block_till_done() # Process dispatch/update signal
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_ECO
# Turn off eco mode
await common.async_set_preset_mode(hass, PRESET_NONE)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatEco.SetMode",
"params": {"mode": "OFF"},
}
async def test_thermostat_set_cool(hass, auth):
"""Test a thermostat in cool mode with a temperature change."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "COOL",
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"coolCelsius": 25.0,
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_COOL
await common.async_set_temperature(hass, temperature=24.0)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatTemperatureSetpoint.SetCool",
"params": {"coolCelsius": 24.0},
}
async def test_thermostat_set_heat(hass, auth):
"""Test a thermostat heating mode with a temperature change."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEAT",
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 19.0,
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
await common.async_set_temperature(hass, temperature=20.0)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatTemperatureSetpoint.SetHeat",
"params": {"heatCelsius": 20.0},
}
async def test_thermostat_set_heat_cool(hass, auth):
"""Test a thermostat in heatcool mode with a temperature change."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 19.0,
"coolCelsius": 25.0,
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
await common.async_set_temperature(
hass, target_temp_low=20.0, target_temp_high=24.0
)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.ThermostatTemperatureSetpoint.SetRange",
"params": {"heatCelsius": 20.0, "coolCelsius": 24.0},
}
async def test_thermostat_fan_off(hass):
"""Test a thermostat with the fan not running."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "OFF",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_OFF
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
async def test_thermostat_fan_on(hass):
"""Test a thermostat with the fan running."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "ON",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {
"status": "OFF",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_FAN_ONLY
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_ON
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
async def test_thermostat_cool_with_fan(hass):
"""Test a thermostat cooling while the fan is on."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "ON",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {
"status": "OFF",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "COOL",
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_ON
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
async def test_thermostat_set_fan(hass, auth):
"""Test a thermostat enabling the fan."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "ON",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {
"status": "OFF",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_FAN_ONLY
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_ON
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
# Turn off fan mode
await common.async_set_fan_mode(hass, FAN_OFF)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.Fan.SetTimer",
"params": {"timerMode": "OFF"},
}
# Turn on fan mode
await common.async_set_fan_mode(hass, FAN_ON)
await hass.async_block_till_done()
assert auth.method == "post"
assert auth.url == "some-device-id:executeCommand"
assert auth.json == {
"command": "sdm.devices.commands.Fan.SetTimer",
"params": {
"duration": "43200s",
"timerMode": "ON",
},
}
async def test_thermostat_fan_empty(hass):
"""Test a fan trait with an empty response."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {},
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
# Ignores set_fan_mode since it is lacking SUPPORT_FAN_MODE
await common.async_set_fan_mode(hass, FAN_ON)
await hass.async_block_till_done()
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
async def test_thermostat_invalid_fan_mode(hass):
"""Test setting a fan mode that is not supported."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "ON",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 16.2,
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_FAN_ONLY
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] == 16.2
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_ON
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
with pytest.raises(ValueError):
await common.async_set_fan_mode(hass, FAN_LOW)
await hass.async_block_till_done()
async def test_thermostat_set_hvac_fan_only(hass, auth):
"""Test a thermostat enabling the fan via hvac_mode."""
await setup_climate(
hass,
{
"sdm.devices.traits.Fan": {
"timerMode": "OFF",
"timerTimeout": "2019-05-10T03:22:54Z",
},
"sdm.devices.traits.ThermostatHvac": {
"status": "OFF",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "OFF",
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_FAN_MODE] == FAN_OFF
assert thermostat.attributes[ATTR_FAN_MODES] == [FAN_ON, FAN_OFF]
await common.async_set_hvac_mode(hass, HVAC_MODE_FAN_ONLY)
await hass.async_block_till_done()
assert len(auth.captured_requests) == 2
(method, url, json, headers) = auth.captured_requests.pop(0)
assert method == "post"
assert url == "some-device-id:executeCommand"
assert json == {
"command": "sdm.devices.commands.Fan.SetTimer",
"params": {"duration": "43200s", "timerMode": "ON"},
}
(method, url, json, headers) = auth.captured_requests.pop(0)
assert method == "post"
assert url == "some-device-id:executeCommand"
assert json == {
"command": "sdm.devices.commands.ThermostatMode.SetMode",
"params": {"mode": "OFF"},
}
async def test_thermostat_target_temp(hass, auth):
"""Test a thermostat changing hvac modes and affected on target temps."""
subscriber = await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {
"status": "HEATING",
},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEAT",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 20.1,
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 23.0,
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_TEMPERATURE] == 23.0
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
# Simulate pubsub message changing modes
event = EventMessage(
{
"eventId": "some-event-id",
"timestamp": "2019-01-01T00:00:01Z",
"resourceUpdate": {
"name": "some-device-id",
"traits": {
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
"sdm.devices.traits.ThermostatTemperatureSetpoint": {
"heatCelsius": 22.0,
"coolCelsius": 28.0,
},
},
},
},
auth=None,
)
await subscriber.async_receive_event(event)
await hass.async_block_till_done() # Process dispatch/update signal
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] == 22.0
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] == 28.0
assert thermostat.attributes[ATTR_TEMPERATURE] is None
async def test_thermostat_missing_mode_traits(hass):
"""Test a thermostat missing many thermostat traits in api response."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] is None
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == set()
assert ATTR_TEMPERATURE not in thermostat.attributes
assert ATTR_TARGET_TEMP_LOW not in thermostat.attributes
assert ATTR_TARGET_TEMP_HIGH not in thermostat.attributes
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
await common.async_set_temperature(hass, temperature=24.0)
await hass.async_block_till_done()
assert ATTR_TEMPERATURE not in thermostat.attributes
await common.async_set_preset_mode(hass, PRESET_ECO)
await hass.async_block_till_done()
assert ATTR_PRESET_MODE not in thermostat.attributes
async def test_thermostat_missing_temperature_trait(hass):
"""Test a thermostat missing many thermostat traits in api response."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEAT",
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] is None
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
await common.async_set_temperature(hass, temperature=24.0)
await hass.async_block_till_done()
assert thermostat.attributes[ATTR_TEMPERATURE] is None
async def test_thermostat_unexpected_hvac_status(hass):
"""Test a thermostat missing many thermostat traits in api response."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "UNEXPECTED"},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert ATTR_HVAC_ACTION not in thermostat.attributes
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] is None
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == set()
assert ATTR_TEMPERATURE not in thermostat.attributes
assert ATTR_TARGET_TEMP_LOW not in thermostat.attributes
assert ATTR_TARGET_TEMP_HIGH not in thermostat.attributes
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
with pytest.raises(ValueError):
await common.async_set_hvac_mode(hass, HVAC_MODE_DRY)
await hass.async_block_till_done()
assert thermostat.state == HVAC_MODE_OFF
async def test_thermostat_missing_set_point(hass):
"""Test a thermostat missing many thermostat traits in api response."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF"],
"mode": "HEATCOOL",
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_HEAT_COOL
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] is None
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
async def test_thermostat_unexpected_hvac_mode(hass):
    """Test a thermostat reporting an unexpected hvac mode in the api response."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatMode": {
"availableModes": ["HEAT", "COOL", "HEATCOOL", "OFF", "UNEXPECTED"],
"mode": "UNEXPECTED",
},
},
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_OFF
assert thermostat.attributes[ATTR_CURRENT_TEMPERATURE] is None
assert set(thermostat.attributes[ATTR_HVAC_MODES]) == {
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
}
assert thermostat.attributes[ATTR_TEMPERATURE] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_LOW] is None
assert thermostat.attributes[ATTR_TARGET_TEMP_HIGH] is None
assert ATTR_PRESET_MODE not in thermostat.attributes
assert ATTR_PRESET_MODES not in thermostat.attributes
assert ATTR_FAN_MODE not in thermostat.attributes
assert ATTR_FAN_MODES not in thermostat.attributes
async def test_thermostat_invalid_set_preset_mode(hass, auth):
"""Test a thermostat set with an invalid preset mode."""
await setup_climate(
hass,
{
"sdm.devices.traits.ThermostatHvac": {"status": "OFF"},
"sdm.devices.traits.ThermostatEco": {
"availableModes": ["MANUAL_ECO", "OFF"],
"mode": "OFF",
"heatCelsius": 15.0,
"coolCelsius": 28.0,
},
},
auth=auth,
)
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get("climate.my_thermostat")
assert thermostat is not None
assert thermostat.state == HVAC_MODE_OFF
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_NONE
assert thermostat.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_NONE]
# Set preset mode that is invalid
with pytest.raises(ValueError):
await common.async_set_preset_mode(hass, PRESET_SLEEP)
await hass.async_block_till_done()
# No RPC sent
assert auth.method is None
# Preset is unchanged
assert thermostat.attributes[ATTR_PRESET_MODE] == PRESET_NONE
assert thermostat.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_NONE]
| mezz64/home-assistant | tests/components/nest/test_climate_sdm.py | Python | apache-2.0 | 43,509 | 0.000207 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RouteFilterRulesOperations:
"""RouteFilterRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def get(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> "_models.RouteFilterRule":
"""Gets the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilterRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.RouteFilterRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs: Any
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(route_filter_rule_parameters, 'RouteFilterRule')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Creates or updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the create or update route filter
rule operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2018_06_01.models.RouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.RouteFilterRule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def _update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs: Any
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(route_filter_rule_parameters, 'PatchRouteFilterRule')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the update route filter rule
operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2018_06_01.models.PatchRouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.RouteFilterRule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
def list_by_route_filter(
self,
resource_group_name: str,
route_filter_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteFilterRuleListResult"]:
"""Gets all RouteFilterRules in a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.RouteFilterRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_route_filter.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('RouteFilterRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_route_filter.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_route_filter_rules_operations.py | Python | mit | 28,535 | 0.005011 |
#!/usr/bin/python
import time
from pymlab.sensors import Device
#TODO: set only one pin, not all bus
class I2CPWM(Device):
'Python library for I2CPWM01A MLAB module with NXP Semiconductors PCA9531 I2C-bus LED dimmer'
MODES = {
'X': 0b00,
'LOW': 0b01,
'PWM0': 0b10,
'PWM1': 0b11,
}
def __init__(self, parent = None, address = 0b1100011, **kwargs):
Device.__init__(self, parent, address, **kwargs)
'The INPUT register reflects the state of the device pins. Writes to this register will be acknowledged but will have no effect.'
self.PWM_INPUT = 0x00
'PSC0 is used to program the period of the PWM output.'
self.PWM_PSC0 = 0x01
'The PWM0 register determines the duty cycle of BLINK0. The outputs are LOW (LED on) when the count is less than the value in PWM0 and HIGH (LED off) when it is greater. If PWM0 is programmed with 00h, then the PWM0 output is always HIGH (LED off).'
self.PWM_PWM0 = 0x02
'PSC1 is used to program the period of the PWM output.'
self.PWM_PSC1 = 0x03
'The PWM1 register determines the duty cycle of BLINK1. The outputs are LOW (LED on) when the count is less than the value in PWM1 and HIGH (LED off) when it is greater. If PWM1 is programmed with 00h, then the PWM1 output is always HIGH (LED off).'
self.PWM_PWM1 = 0x04
'The LSn LED select registers determine the source of the LED data.'
self.PWM_LS0 = 0x05
self.PWM_LS1 = 0x06
def set_pwm0(self, frequency, duty): # frequency in Hz, Duty cycle in % (0-100)
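        # PCA9531 datasheet: blink period = (PSC + 1) / 152 s, so for a target
        # frequency f the prescaler value is PSC = 152/f - 1; the duty cycle
        # is rescaled from percent to the 8-bit PWM register range (0-255).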
period = int((1.0/float(frequency))*152.0)-1
duty = int((float(duty)/100.0)*255.0)
        self.bus.write_byte_data(self.address, self.PWM_PSC0, period)
self.bus.write_byte_data(self.address, self.PWM_PWM0, duty)
def set_pwm1(self, frequency, duty): # frequency in Hz, Duty cycle in % (0-100)
period = int((1.0/float(frequency))*152.0)-1
duty = int((float(duty)/100.0)*255.0)
self.bus.write_byte_data(self.address, self.PWM_PSC1, period)
self.bus.write_byte_data(self.address, self.PWM_PWM1, duty)
def set_ls0(self, mode):
self.bus.write_byte_data(self.address, self.PWM_LS0, mode)
def set_ls1(self, mode):
self.bus.write_byte_data(self.address, self.PWM_LS1, mode)
def set_output_type(self, mode = ['X','X','X','X','X','X','X','X']):
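        # Each 8-bit LED-select register packs four 2-bit mode fields (see
        # MODES): mode[0..3] fill LS0 and mode[4..7] fill LS1, with mode[0]
        # and mode[4] occupying the two most significant bits.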
        self.set_ls0((self.MODES[mode[0]] << 6) | (self.MODES[mode[1]] << 4) | (self.MODES[mode[2]] << 2) | self.MODES[mode[3]])
        self.set_ls1((self.MODES[mode[4]] << 6) | (self.MODES[mode[5]] << 4) | (self.MODES[mode[6]] << 2) | self.MODES[mode[7]])
def get_input(self):
return self.bus.read_byte_data(self.address, self.PWM_INPUT)
def main():
print(__doc__)
if __name__ == "__main__":
main()
| MLAB-project/pymlab | src/pymlab/sensors/i2cpwm.py | Python | gpl-3.0 | 2,819 | 0.010287 |
from azure.servicebus import ServiceBusService, Message, Queue
#c-types
bus_service = ServiceBusService(
service_namespace='SimplyPaper',
shared_access_key_name='RootManageSharedAccessKey',
shared_access_key_value='1Y4YNh7uQ/buNi1v3xunn6F6vfSsJ5+nrmiwKY2WM04')
#Endpoint=sb://simplypaper.servicebus.windows.net/;
#SharedAccessKeyName=RootManageSharedAccessKey;
#SharedAccessKey=1Y4YNh7uQ/buNi1v3xunn6F6vfSsJ5+nrmiwKY2WM04=
bus_service.create_queue('taskqueue')
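# Optionally recreate the queue with explicit settings; the time-to-live is
# an ISO 8601 duration ('PT1M' = one minute), after which unread messages
# expire.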
queue_options = Queue()
queue_options.max_size_in_megabytes = '5120'
queue_options.default_message_time_to_live = 'PT1M'
bus_service.create_queue('taskqueue', queue_options)
msg = Message(b'Test Message Simply Paper')
bus_service.send_queue_message('taskqueue', msg)
msg = bus_service.receive_queue_message('taskqueue', peek_lock=False)
print(msg.body)
| SimplyPaper/SimplyPaper.github.io | AzureBus.py | Python | apache-2.0 | 842 | 0.008314 |
# -*- coding: utf-8 -*-
"""Framework for importing bank statement files."""
import logging
import base64
from StringIO import StringIO
from zipfile import ZipFile, BadZipfile # BadZipFile in Python >= 3.2
from openerp import api, models, fields
from openerp.tools.translate import _
from openerp.exceptions import Warning as UserError
_logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class AccountBankStatementLine(models.Model):
"""Extend model account.bank.statement.line."""
# pylint: disable=too-many-public-methods
_inherit = "account.bank.statement.line"
# Ensure transactions can be imported only once (if the import format
# provides unique transaction ids)
unique_import_id = fields.Char('Import ID', readonly=True, copy=False)
_sql_constraints = [
('unique_import_id',
'unique (unique_import_id)',
         'A bank account transaction can be imported only once!')
]
class AccountBankStatementImport(models.TransientModel):
"""Extend model account.bank.statement."""
_name = 'account.bank.statement.import'
_description = 'Import Bank Statement'
@api.model
def _get_hide_journal_field(self):
""" Return False if the journal_id can't be provided by the parsed
file and must be provided by the wizard.
See account_bank_statement_import_qif """
# pylint: disable=no-self-use
return True
journal_id = fields.Many2one(
'account.journal', string='Journal',
help='Accounting journal related to the bank statement you\'re '
        'importing. It has to be manually chosen for statement formats which '
        'don\'t allow automatic journal detection (QIF for example).')
hide_journal_field = fields.Boolean(
string='Hide the journal field in the view',
compute='_get_hide_journal_field')
data_file = fields.Binary(
'Bank Statement File', required=True,
        help='Get your bank statements in electronic format from your bank '
'and select them here.')
@api.multi
def import_file(self):
"""Process the file chosen in the wizard, create bank statement(s) and
go to reconciliation."""
self.ensure_one()
data_file = base64.b64decode(self.data_file)
# pylint: disable=protected-access
statement_ids, notifications = self.with_context(
active_id=self.id # pylint: disable=no-member
)._import_file(data_file)
# dispatch to reconciliation interface
action = self.env.ref(
'account.action_bank_reconcile_bank_statements')
return {
'name': action.name,
'tag': action.tag,
'context': {
'statement_ids': statement_ids,
'notifications': notifications
},
'type': 'ir.actions.client',
}
@api.model
def _parse_all_files(self, data_file):
"""Parse one file or multiple files from zip-file.
Return array of statements for further processing.
"""
statements = []
files = [data_file]
try:
with ZipFile(StringIO(data_file), 'r') as archive:
files = [
archive.read(filename) for filename in archive.namelist()
if not filename.endswith('/')
]
except BadZipfile:
pass
# Parse the file(s)
for import_file in files:
# The appropriate implementation module(s) returns the statements.
            # Actually we don't care whether all the files have the same
            # format. Although unlikely, you might mix mt940 and camt files
# in one zipfile.
parse_result = self._parse_file(import_file)
# Check for old version result, with separate currency and account
if isinstance(parse_result, tuple) and len(parse_result) == 3:
(currency_code, account_number, new_statements) = parse_result
for stmt_vals in new_statements:
stmt_vals['currency_code'] = currency_code
stmt_vals['account_number'] = account_number
else:
new_statements = parse_result
statements += new_statements
return statements
@api.model
def _import_file(self, data_file):
""" Create bank statement(s) from file."""
# The appropriate implementation module returns the required data
statement_ids = []
notifications = []
statements = self._parse_all_files(data_file)
# Check raw data:
self._check_parsed_data(statements)
# Import all statements:
for stmt_vals in statements:
(statement_id, new_notifications) = (
self._import_statement(stmt_vals))
if statement_id:
statement_ids.append(statement_id)
notifications.extend(new_notifications)
if len(statement_ids) == 0:
raise UserError(_('You have already imported that file.'))
return statement_ids, notifications
@api.model
def _import_statement(self, stmt_vals):
"""Import a single bank-statement.
Return ids of created statements and notifications.
"""
currency_code = stmt_vals.pop('currency_code')
account_number = stmt_vals.pop('account_number')
# Try to find the bank account and currency in odoo
currency_id = self._find_currency_id(currency_code)
bank_account_id = self._find_bank_account_id(account_number)
if not bank_account_id and account_number:
raise UserError(
_('Can not find the account number %s.') % account_number
)
# Find the bank journal
journal_id = self._get_journal(currency_id, bank_account_id)
# By now journal and account_number must be known
if not journal_id:
raise UserError(_('Can not determine journal for import.'))
# Prepare statement data to be used for bank statements creation
stmt_vals = self._complete_statement(
stmt_vals, journal_id, account_number)
# Create the bank stmt_vals
return self._create_bank_statement(stmt_vals)
@api.model
def _parse_file(self, data_file):
# pylint: disable=no-self-use
# pylint: disable=unused-argument
""" Each module adding a file support must extends this method. It
processes the file if it can, returns super otherwise, resulting in a
chain of responsability.
This method parses the given file and returns the data required by
the bank statement import process, as specified below.
- bank statements data: list of dict containing (optional
items marked by o) :
-o currency code: string (e.g: 'EUR')
The ISO 4217 currency code, case insensitive
-o account number: string (e.g: 'BE1234567890')
The number of the bank account which the statement
belongs to
- 'name': string (e.g: '000000123')
- 'date': date (e.g: 2013-06-26)
-o 'balance_start': float (e.g: 8368.56)
-o 'balance_end_real': float (e.g: 8888.88)
- 'transactions': list of dict containing :
- 'name': string
(e.g: 'KBC-INVESTERINGSKREDIET 787-5562831-01')
- 'date': date
- 'amount': float
- 'unique_import_id': string
-o 'account_number': string
Will be used to find/create the res.partner.bank
in odoo
-o 'note': string
-o 'partner_name': string
-o 'ref': string
"""
raise UserError(_(
'Could not make sense of the given file.\n'
'Did you install the module to support this type of file?'
))
@api.model
def _check_parsed_data(self, statements):
# pylint: disable=no-self-use
""" Basic and structural verifications """
if len(statements) == 0:
raise UserError(_('This file doesn\'t contain any statement.'))
for stmt_vals in statements:
if 'transactions' in stmt_vals and stmt_vals['transactions']:
return
# If we get here, no transaction was found:
raise UserError(_('This file doesn\'t contain any transaction.'))
@api.model
def _find_currency_id(self, currency_code):
""" Get res.currency ID."""
if currency_code:
currency_ids = self.env['res.currency'].search(
[('name', '=ilike', currency_code)])
if currency_ids:
return currency_ids[0].id
else:
raise UserError(_(
'Statement has invalid currency code %s') % currency_code)
# if no currency_code is provided, we'll use the company currency
return self.env.user.company_id.currency_id.id
@api.model
def _find_bank_account_id(self, account_number):
""" Get res.partner.bank ID """
bank_account_id = None
if account_number and len(account_number) > 4:
bank_account_ids = self.env['res.partner.bank'].search(
[('acc_number', '=', account_number)], limit=1)
if bank_account_ids:
bank_account_id = bank_account_ids[0].id
return bank_account_id
@api.model
def _get_journal(self, currency_id, bank_account_id):
""" Find the journal """
bank_model = self.env['res.partner.bank']
# Find the journal from context, wizard or bank account
journal_id = self.env.context.get('journal_id') or self.journal_id.id
if bank_account_id:
bank_account = bank_model.browse(bank_account_id)
if journal_id:
if (bank_account.journal_id.id and
bank_account.journal_id.id != journal_id):
raise UserError(
_('The account of this statement is linked to '
'another journal.'))
if not bank_account.journal_id.id:
                    bank_account.write({'journal_id': journal_id})
else:
if bank_account.journal_id.id:
journal_id = bank_account.journal_id.id
# If importing into an existing journal, its currency must be the same
# as the bank statement. When journal has no currency, currency must
# be equal to company currency.
if journal_id and currency_id:
journal_obj = self.env['account.journal'].browse(journal_id)
if journal_obj.currency:
journal_currency_id = journal_obj.currency.id
if currency_id != journal_currency_id:
                    # Also log a message with ids for technical analysis:
_logger.warn(
_('Statement currency id is %d,'
' but journal currency id = %d.'),
currency_id,
journal_currency_id
)
raise UserError(_(
'The currency of the bank statement is not '
'the same as the currency of the journal !'
))
else:
company_currency_id = self.env.user.company_id.currency_id.id
if currency_id != company_currency_id:
                    # Also log a message with ids for technical analysis:
_logger.warn(
_('Statement currency id is %d,'
' but company currency id = %d.'),
currency_id,
company_currency_id
)
raise UserError(_(
'The currency of the bank statement is not '
'the same as the company currency !'
))
return journal_id
@api.model
@api.returns('res.partner.bank')
def _create_bank_account(
self, account_number, company_id=False, currency_id=False):
"""Automagically create bank account, when not yet existing."""
try:
bank_type = self.env.ref('base.bank_normal')
bank_code = bank_type.code
except ValueError:
bank_code = 'bank'
vals_acc = {
'acc_number': account_number,
'state': bank_code,
}
        # Odoo users' bank accounts (which we import statements from) have
# company_id and journal_id set while 'counterpart' bank accounts
# (from which statement transactions originate) don't.
# Warning : if company_id is set, the method post_write of class
# bank will create a journal
if company_id:
vals = self.env['res.partner.bank'].onchange_company_id(company_id)
vals_acc.update(vals.get('value', {}))
vals_acc['company_id'] = company_id
        # When the journal is created at the same time as the bank account, we need
# to specify the currency to use for the account.account and
# account.journal
return self.env['res.partner.bank'].with_context(
default_currency_id=currency_id,
default_currency=currency_id).create(vals_acc)
@api.model
def _complete_statement(self, stmt_vals, journal_id, account_number):
"""Complete statement from information passed."""
stmt_vals['journal_id'] = journal_id
for line_vals in stmt_vals['transactions']:
unique_import_id = line_vals.get('unique_import_id', False)
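            # The account number prefix keeps ids from different bank
            # accounts from colliding in the unique_import_id constraint.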
if unique_import_id:
line_vals['unique_import_id'] = (
(account_number and account_number + '-' or '') +
unique_import_id
)
if not line_vals.get('bank_account_id'):
# Find the partner and his bank account or create the bank
# account. The partner selected during the reconciliation
# process will be linked to the bank when the statement is
# closed.
partner_id = False
bank_account_id = False
partner_account_number = line_vals.get('account_number')
if partner_account_number:
bank_model = self.env['res.partner.bank']
banks = bank_model.search(
[('acc_number', '=', partner_account_number)], limit=1)
if banks:
bank_account_id = banks[0].id
partner_id = banks[0].partner_id.id
else:
bank_obj = self._create_bank_account(
partner_account_number)
bank_account_id = bank_obj and bank_obj.id or False
line_vals['partner_id'] = partner_id
line_vals['bank_account_id'] = bank_account_id
return stmt_vals
@api.model
def _create_bank_statement(self, stmt_vals):
""" Create bank statement from imported values, filtering out
already imported transactions, and return data used by the
reconciliation widget
"""
bs_model = self.env['account.bank.statement']
bsl_model = self.env['account.bank.statement.line']
# Filter out already imported transactions and create statement
ignored_line_ids = []
filtered_st_lines = []
for line_vals in stmt_vals['transactions']:
unique_id = (
'unique_import_id' in line_vals and
line_vals['unique_import_id']
)
if not unique_id or not bool(bsl_model.sudo().search(
[('unique_import_id', '=', unique_id)], limit=1)):
filtered_st_lines.append(line_vals)
else:
ignored_line_ids.append(unique_id)
statement_id = False
if len(filtered_st_lines) > 0:
# Remove values that won't be used to create records
stmt_vals.pop('transactions', None)
for line_vals in filtered_st_lines:
line_vals.pop('account_number', None)
# Create the statement
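            # (0, False, vals) is the one2many 'create' command in Odoo's
            # ORM: each triple creates a new bank statement line.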
stmt_vals['line_ids'] = [
[0, False, line] for line in filtered_st_lines]
statement_id = bs_model.create(stmt_vals).id
# Prepare import feedback
notifications = []
num_ignored = len(ignored_line_ids)
if num_ignored > 0:
notifications += [{
'type': 'warning',
'message':
_("%d transactions had already been imported and "
"were ignored.") % num_ignored
if num_ignored > 1
else _("1 transaction had already been imported and "
"was ignored."),
'details': {
'name': _('Already imported items'),
'model': 'account.bank.statement.line',
'ids': bsl_model.search(
[('unique_import_id', 'in', ignored_line_ids)]).ids}
}]
return statement_id, notifications
| hbrunn/bank-statement-import | account_bank_statement_import/models/account_bank_statement_import.py | Python | agpl-3.0 | 17,538 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2017-07-05 10:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pipeline', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='tasklog',
name='formatted_tb',
field=models.TextField(null=True),
),
]
| ebmdatalab/openprescribing | openprescribing/pipeline/migrations/0002_tasklog_formatted_tb.py | Python | mit | 402 | 0 |
#
# Copyright (c) 2013 Chris Manton <cmanton@gmail.com> www.onesockoff.org
# See the file LICENSE.txt for your full rights.
#
# Special recognition to Lars de Bruin <l...@larsdebruin.net> for contributing
# packet decoding code.
#
# pylint parameters
# suppress global variable warnings
# pylint: disable-msg=W0603
# suppress weewx driver methods not implemented
# pylint: disable-msg=W0223
# suppress weewx driver methods non-conforming name
# pylint: disable-msg=C0103
# suppress too many lines in module
# pylint: disable-msg=C0302
# suppress too many instance attributes
# pylint: disable-msg=R0902
# suppress too many public methods
# pylint: disable-msg=R0904
# suppress too many statements
# pylint: disable-msg=R0915
# suppress unused arguments e.g. loader(...,engine)
# pylint: disable-msg=W0613
"""Classes and functions to interfacing with an Oregon Scientific WMR200 station
Oregon Scientific
http://us.oregonscientific.com/ulimages/manuals2/WMR200.pdf
Bronberg Weather Station
For a pretty good summary of what's in these packets see
http://www.bashewa.com/wmr200-protocol.php
"""
import select
import socket
import syslog
import threading
import time
import usb
import weewx.drivers
import weeutil.weeutil
DRIVER_NAME = 'WMR200'
DRIVER_VERSION = "3.1"
def loader(config_dict, engine):
return WMR200(**config_dict[DRIVER_NAME])
def confeditor_loader():
return WMR200ConfEditor()
# General decoding sensor maps.
WIND_DIR_MAP = {0: 'N', 1: 'NNE', 2: 'NE', 3: 'ENE',
4: 'E', 5: 'ESE', 6: 'SE', 7: 'SSE',
8: 'S', 9: 'SSW', 10: 'SW', 11: 'WSW',
12: 'W', 13: 'WNW', 14: 'NW', 15: 'NNW'}
FORECAST_MAP = {0: 'Partly Cloudy', 1: 'Rainy', 2: 'Cloudy',
3: 'Sunny', 4: 'Clear Night', 5: 'Snowy',
6: 'Partly Cloudy Night', 7: 'Unknown7'}
TRENDS = {0: 'Stable', 1: 'Rising', 2: 'Falling', 3: 'Undefined'}
# Size of USB frame to read from weather console.
_WMR200_USB_FRAME_SIZE = 8
# Time to sleep in seconds between querying usb device thread
# for data. This should be non-zero and reduces load on the machine.
_WMR200_USB_POLL_INTERVAL = 1
# Time interval in secs to send data to the wmr200 to request live data.
_WMR200_REQUEST_LIVE_DATA_INTERVAL = 30
# Time in secs to block and wait for data from the weather console device.
# Related to time to request live data.
_WMR200_USB_READ_DATA_INTERVAL = _WMR200_REQUEST_LIVE_DATA_INTERVAL / 2
# Time in ms to wait for USB reset to complete.
_WMR200_USB_RESET_TIMEOUT = 1000
# Guessed wmr200 protocol max packet size in bytes.
# This is only a screen to differentiate between good and
# bad packets.
_WMR200_MAX_PACKET_SIZE = 0x80
# Driver name.
_WMR200_DRIVER_NAME = 'wmr200'
# weewx configurable flags for enabling/disabling debug verbosity.
# Prints processed packets with context from console.
DEBUG_PACKETS_COOKED = 0
# Prints raw pre-processed packets from console.
DEBUG_PACKETS_RAW = 0
# Prints respective packets individually.
DEBUG_PACKETS_ARCHIVE = 0
DEBUG_PACKETS_PRESSURE = 0
DEBUG_PACKETS_RAIN = 0
DEBUG_PACKETS_STATUS = 0
DEBUG_PACKETS_TEMP = 0
DEBUG_PACKETS_UVI = 0
DEBUG_PACKETS_WIND = 0
# Print communication messages
DEBUG_COMM = 0
# Print weather station configuration.
DEBUG_CONFIG_DATA = 0
# Print all writes to weather console.
DEBUG_WRITES = 0
DEBUG_READS = 0
DEBUG_CHECKSUM = 0
def logmsg(dst, msg):
"""Base syslog helper"""
syslog.syslog(dst, ('%s: %s: %s' %
(_WMR200_DRIVER_NAME,
threading.currentThread().getName(), msg)))
def logdbg(msg):
"""Debug syslog helper"""
logmsg(syslog.LOG_DEBUG, 'D ' + msg)
def loginf(msg):
"""Info syslog helper"""
logmsg(syslog.LOG_INFO, 'I ' + msg)
def logwar(msg):
"""Warning syslog helper"""
logmsg(syslog.LOG_WARNING, 'W ' + msg)
def logerr(msg):
"""Error syslog helper"""
logmsg(syslog.LOG_ERR, 'E ' + msg)
def logcrt(msg):
"""Critical syslog helper"""
logmsg(syslog.LOG_CRIT, 'C ' + msg)
class WMR200PacketParsingError(Exception):
"""A driver handled recoverable packet parsing error condition."""
def __init__(self, msg):
super(WMR200PacketParsingError, self).__init__()
self._msg = msg
@property
def msg(self):
"""Exception message to be logged to console."""
return self._msg
class WMR200ProtocolError(weewx.WeeWxIOError):
"""Used to signal a protocol error condition"""
def __init__(self, msg):
super(WMR200ProtocolError, self).__init__()
self._msg = msg
logerr(msg)
class UsbDevice(object):
"""General class to handles all access to device via USB bus."""
def __init__(self):
# Polling read timeout.
self.timeout_read = _WMR200_USB_READ_DATA_INTERVAL
# USB device used for libusb
self.dev = None
# Holds device handle for access
self.handle = None
# debug byte count
self.byte_cnt_rd = 0
self.byte_cnt_wr = 0
# default to a sane endpoint
self.in_endpoint = usb.ENDPOINT_IN + 1
# only one interface
self.interface = 0
def find_device(self, vendor_id, product_id):
"""Find the given vendor and product IDs on the USB bus
Returns: True if specified device was found, otherwise false. """
for bus in usb.busses():
for dev in bus.devices:
if dev.idVendor == vendor_id \
and dev.idProduct == product_id:
self.dev = dev
return True
return False
def open_device(self):
"""Opens a USB device and get a handle to read and write.
A specific device must have been found."""
try:
self.handle = self.dev.open()
except usb.USBError, exception:
logcrt(('open_device() Unable to open USB interface.'
' Reason: %s' % exception))
raise weewx.WakeupError(exception)
except AttributeError, exception:
logcrt('open_device() Device not specified.')
raise weewx.WakeupError(exception)
# Detach any old claimed interfaces
try:
self.handle.detachKernelDriver(self.interface)
except usb.USBError:
pass
try:
self.handle.claimInterface(self.interface)
except usb.USBError, exception:
logcrt(('open_device() Unable to'
' claim USB interface. Reason: %s' % exception))
raise weewx.WakeupError(exception)
def close_device(self):
"""Close a device for access.
NOTE(CMM) There is no busses[].devices[].close() so under linux the
file descriptor will remain open for the life of the process.
        An OS independent mechanism is required, as workarounds based on
        'lsof' and friends will not be cross platform."""
try:
self.handle.releaseInterface()
except usb.USBError, exception:
logcrt('close_device() Unable to'
' release device interface. Reason: %s' % exception)
def read_device(self):
"""Read a stream of data bytes from the device.
Returns a list of valid protocol bytes from the device.
The first byte indicates the number of valid bytes following
the first byte that are valid protocol bytes. Only the valid
protocol bytes are returned. """
if not self.handle:
msg = 'read_device() No USB handle for usb_device Read'
logerr(msg)
raise weewx.WeeWxIOError(msg)
report = None
try:
report = self.handle.interruptRead(self.in_endpoint,
_WMR200_USB_FRAME_SIZE,
int(self.timeout_read) * 1000)
# I think this value indicates that the buffer has overflowed.
if report[0] == 8:
msg = 'USB read_device overflow error'
logerr(msg)
raise weewx.WeeWxIOError(msg)
self.byte_cnt_rd += len(report)
# The first byte is the size of valid data following.
# We only want to return the valid data.
if DEBUG_READS:
buf = ''
for byte in report[1:report[0]+1]:
buf += '%02x ' % byte
logdbg('read_device(): %s' % buf)
return report[1:report[0] + 1]
except IndexError, e:
# This indicates we failed an index range above.
            logerr('read_device() Failed the index range %s: %s' % (report, e))
except usb.USBError, ex:
# No data presented on the bus. This is a normal part of
# the process that indicates that the current live records
# have been exhausted. We have to send a heartbeat command
# to tell the weather console to start streaming live data
# again.
if ex.args[0].find('No data available') == -1:
msg = 'read_device() USB Error Reason:%s' % ex
logerr(msg)
raise weewx.WeeWxIOError(msg)
else:
# No data avail...not an error but probably ok.
logdbg(('No data received in'
' %d seconds' % int(self.timeout_read)))
return []
def write_device(self, buf):
"""Writes a command packet to the device."""
# Unclear how to create this number, but is the wValue portion
# of the set_configuration() specified in the USB spec.
value = 0x00000220
if not self.handle:
msg = 'No USB handle for usb_device Write'
logerr(msg)
raise weewx.WeeWxIOError(msg)
try:
if DEBUG_WRITES:
logdbg('write_device(): %s' % buf)
self.byte_cnt_wr += len(buf)
self.handle.controlMsg(
usb.TYPE_CLASS + usb.RECIP_INTERFACE, # requestType
0x0000009, # request
buf,
value, # value
0x0000000, # index
_WMR200_USB_RESET_TIMEOUT) # timeout
except usb.USBError, exception:
msg = ('write_device() Unable to'
' send USB control message %s' % exception)
logerr(msg)
# Convert to a Weewx error:
raise weewx.WeeWxIOError(exception)
class Packet(object):
"""Top level class for all WMR200 packets.
All wmr200 packets inherit from this class. The process() method
is used to provide useful data to the weewx engine. Some packets
require special processing due to discontinuities in the wmr200
protocol."""
pkt_cmd = 0
pkt_name = 'AbstractPacket'
pkt_len = 0
pkt_id = 0
def __init__(self, wmr200):
"""Initialize base elements of the packet parser."""
# Keep reference to the wmr200 for any special considerations
# or options.
self.wmr200 = wmr200
# Accumulated raw byte data from console.
self._pkt_data = []
# Record dictionary to pass to weewx engine.
self._record = {}
# Add the command byte as the first field
self.append_data(self.pkt_cmd)
# Packet identifier
Packet.pkt_id += 1
self.pkt_id = Packet.pkt_id
def append_data(self, char):
"""Appends new data to packet buffer.
Verifies that the size is a reasonable value.
        Upon startup or at other times we may get out
of sync with the weather console."""
self._pkt_data.append(char)
if len(self._pkt_data) == 2 and \
self._pkt_data[1] > _WMR200_MAX_PACKET_SIZE:
raise weewx.WeeWxIOError('Max packet size exceeded')
def size_actual(self):
"""Size of bytes of data in packet received from console."""
return len(self._pkt_data)
def size_expected(self):
"""Expected size of packet from packet protocol field."""
try:
return self._pkt_data[1]
except IndexError:
logerr('Failed to extract size from packet')
return 0
def packet_complete(self):
"""Determines if packet is complete and ready for weewx engine
processing.
This method assumes the packet is at least 2 bytes long"""
if self.size_actual() < 2:
return False
return self.size_actual() == self.size_expected()
def packet_process(self):
"""Process the raw data and creates a record field."""
# Convention is that this driver only works in metric units.
self._record.update({'usUnits': weewx.METRIC})
if DEBUG_PACKETS_RAW or DEBUG_PACKETS_COOKED:
logdbg('Processing %s' % self.pkt_name)
if self.pkt_len and self.pkt_len != self.size_actual():
logwar(('Unexpected packet size act:%d exp:%d' %
(self.size_actual(), self.pkt_len)))
# If applicable calculate time drift between packet and host.
self.calc_time_drift()
def packet_record(self):
"""Returns the dictionary of processed records for this packet."""
return self._record
def record_get(self, key):
"""Returns the record indexed by the key."""
try:
return self._record[key]
except KeyError:
logerr('Record get key not found in record key:%s' % key)
def record_set(self, key, val):
"""Sets the record indexed by the key."""
try:
self._record[key] = val
except KeyError:
logerr('Record set key not found in record key:%s val:%s'
% (key, val))
def record_update(self, record):
"""Updates record dictionary with additional dictionary."""
try:
self._record.update(record)
except (TypeError, KeyError):
logerr('Record update failed to apply record:%s' % record)
def _checksum_calculate(self):
"""Returns the calculated checksum of the current packet.
        If the entire packet has not been received, this will simply
return the checksum of whatever data values exist in the packet."""
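        # Worked example with hypothetical bytes [0xd2, 0x06, 0x07, 0x00,
        # 0xdf, 0x00]: the sum of the first four bytes is 0xdf, which matches
        # the little-endian checksum field 0x00df in the last two bytes.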
try:
cksum = 0
# Checksum is last two bytes in packet.
for byte in self._pkt_data[:-2]:
cksum += byte
return cksum
except IndexError:
msg = 'Packet too small to compute 16 bit checksum'
raise WMR200ProtocolError(msg)
def _checksum_field(self):
"""Returns the checksum field of the current packet.
        If the entire packet has not been received, this will simply
return the last two bytes which are unlikely checksum values."""
try:
return (self._pkt_data[-1] << 8) | self._pkt_data[-2]
except IndexError:
msg = 'Packet too small to contain 16 bit checksum'
raise WMR200ProtocolError(msg)
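# A worked example with made-up bytes (not a real capture): for the packet
# [0xd4, 0x07, 0x10, 0x20, 0x30, 0x3b, 0x01] the checksum covers
# [0xd4, 0x07, 0x10, 0x20, 0x30], which sums to 0x013b. The field is
# stored little-endian in the last two bytes, so _checksum_field()
# returns (0x01 << 8) | 0x3b == 0x013b and the packet verifies.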
def verify_checksum(self):
"""Verifies packet for checksum correctness.
Raises exception upon checksum failure unless configured to drop."""
if self._checksum_calculate() != self._checksum_field():
msg = ('Checksum miscompare act:0x%04x exp:0x%04x' %
(self._checksum_calculate(), self._checksum_field()))
logerr(self.to_string_raw('%s packet:' % msg))
if self.wmr200.ignore_checksum:
raise WMR200PacketParsingError(msg)
raise weewx.CRCError(msg)
# Debug test to force checksum recovery testing.
if DEBUG_CHECKSUM and (self.pkt_id % DEBUG_CHECKSUM) == 0:
raise weewx.CRCError('Debug forced checksum error')
@staticmethod
def timestamp_host():
"""Returns the host epoch timestamp"""
return int(time.time() + 0.5)
def timestamp_record(self):
"""Returns the epoch timestamp in the record."""
try:
return self._record['dateTime']
except KeyError:
msg = 'timestamp_record() Timestamp not set in record'
logerr(msg)
raise weewx.ViolatedPrecondition(msg)
def _timestamp_packet(self, pkt_data):
"""Pulls the epoch timestamp from the packet."""
try:
minute = pkt_data[0]
hour = pkt_data[1]
day = pkt_data[2]
month = pkt_data[3]
year = 2000 + pkt_data[4]
return time.mktime((year, month, day, hour, minute,
0, -1, -1, -1))
except IndexError:
msg = ('Packet length too short to get timestamp len:%d'
% len(pkt_data))
raise WMR200ProtocolError(msg)
except (OverflowError, ValueError), exception:
msg = ('Packet timestamp with bogus fields min:%d hr:%d day:%d'
' m:%d y:%d %s' % (pkt_data[0], pkt_data[1],
pkt_data[2], pkt_data[3], pkt_data[4], exception))
raise WMR200PacketParsingError(msg)
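# Decoding example with hypothetical bytes: pkt_data = [30, 14, 25, 12, 13]
# is read as minute=30, hour=14, day=25, month=12, year=2000+13, i.e.
# time.mktime((2013, 12, 25, 14, 30, 0, -1, -1, -1)) -- 14:30 on
# 2013-12-25 in local time.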
def timestamp_packet(self):
"""Pulls the epoch timestamp from the packet.
Must only be called by packets that carry a timestamp in the
protocol packet."""
return self._timestamp_packet(self._pkt_data[2:7])
def calc_time_drift(self):
"""Calculate time drift between host and packet
Not all packets have a live timestamp so must be implemented
by the packet type."""
pass
def to_string_raw(self, out=''):
"""Returns raw string of this packet appended to optional
input string"""
for byte in self._pkt_data:
out += '%02x ' % byte
return out
def print_cooked(self):
"""Debug method method to print the processed packet.
Must be called after the Process() method."""
try:
out = ' Packet cooked: '
out += 'id:%d ' % self.pkt_id
out += '%s ' % self.pkt_name
out += '%s ' % weeutil.weeutil.timestamp_to_string(
self.timestamp_record())
out += 'len:%d ' % self.size_actual()
out += 'fields:%d ' % len(self._record)
out += str(self._record)
logdbg(out)
except KeyError:
msg = 'print_cooked() called before proper setup'
logerr(msg)
raise weewx.ViolatedPrecondition(msg)
class PacketLive(Packet):
"""Packets with live sensor data from console."""
# Number of live packets received from console.
pkt_rx = 0
# Queue of processed packets to be delivered to weewx.
pkt_queue = []
def __init__(self, wmr200):
super(PacketLive, self).__init__(wmr200)
PacketLive.pkt_rx += 1
@staticmethod
def packet_live_data():
"""Yield live data packets to interface on the weewx engine."""
return True
@staticmethod
def packet_archive_data():
"""Yield archived data packets to interface on the weewx engine."""
return False
def packet_process(self):
"""Returns a records field to be processed by the weewx engine."""
super(PacketLive, self).packet_process()
self._record.update({'dateTime': self.timestamp_live(), })
def calc_time_drift(self):
"""Returns the difference between PC time and the packet timestamp.
This value is approximate as all timestamps from a given archive
interval will be the same while PC time marches onwards.
Only done once upon first live packet received."""
if self.wmr200.time_drift is None:
self.wmr200.time_drift = self.timestamp_host() \
- self.timestamp_packet()
loginf('Time drift between host and console in seconds:%d' %
self.wmr200.time_drift)
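# Example: if the host clock reads epoch 1300000100 when the first live
# packet decodes to epoch 1300000040, time_drift is set to 60 seconds.
# Archive records are later shifted forward by this amount (see
# PacketArchive.timestamp_adjust_drift).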
def timestamp_live(self):
"""Returns the timestamp from a live packet.
Caches the last live timestamp to add to packets that do
not provide timestamps."""
if self.wmr200.use_pc_time:
self.wmr200.last_time_epoch = self.timestamp_host()
else:
self.wmr200.last_time_epoch = self.timestamp_packet()
return self.wmr200.last_time_epoch
class PacketArchive(Packet):
"""Packets with archived sensor data from console."""
# Number of archive packets received from console.
pkt_rx = 0
# Queue of processed packets to be delivered to weewx.
pkt_queue = []
def __init__(self, wmr200):
super(PacketArchive, self).__init__(wmr200)
PacketArchive.pkt_rx += 1
@staticmethod
def packet_live_data():
"""Yield live data packets to interface on the weewx engine."""
return False
@staticmethod
def packet_archive_data():
"""Yield archived data packets to interface on the weewx engine."""
return True
def packet_process(self):
"""Returns a records field to be processed by the weewx engine."""
super(PacketArchive, self).packet_process()
# If PC time is in use, the timestamp will be adjusted later.
self._record.update({'dateTime': self.timestamp_packet(), })
# Archive packets have extra field indicating interval time.
self._record.update({'interval':
int(self.wmr200.archive_interval / 60.0), })
def timestamp_adjust_drift(self):
"""Archive records may need time adjustment when using PC time."""
try:
loginf(('Using pc time adjusting archive record time by %d sec'
' %s => %s' % (self.wmr200.time_drift,
weeutil.weeutil.timestamp_to_string\
(self.timestamp_record()),
weeutil.weeutil.timestamp_to_string\
(self.timestamp_record()
+ int(self.wmr200.time_drift)))))
self._record['dateTime'] += int(self.wmr200.time_drift)
except TypeError:
logerr('timestamp_adjust_drift() called with invalid time drift')
class PacketControl(Packet):
"""Packets with protocol control info from console."""
# Number of control packets received from console.
pkt_rx = 0
def __init__(self, wmr200):
super(PacketControl, self).__init__(wmr200)
PacketControl.pkt_rx += 1
@staticmethod
def packet_live_data():
"""Yield live data packets to interface on the weewx engine."""
return False
@staticmethod
def packet_archive_data():
"""Yield archived data packets to interface on the weewx engine."""
return False
def size_expected(self):
"""Control packets do not have length field and are only one byte."""
return 1
def verify_checksum(self):
"""This packet does not have a checksum."""
pass
def packet_complete(self):
"""Determines if packet is complete and ready for weewx engine
processing."""
if self.size_actual() == 1:
return True
return False
def packet_process(self):
"""Returns a records field to be processed by the weewx engine.
This packet isn't really passed up to weewx but is assigned a
timestamp for completeness."""
self._record.update({'dateTime': self.timestamp_host(), })
def print_cooked(self):
"""Print the processed packet.
This packet consists of a single byte and thus not much to print."""
out = ' Packet cooked: '
out += '%s ' % self.pkt_name
logdbg(out)
class PacketArchiveReady(PacketControl):
"""Packet parser for control command acknowledge."""
pkt_cmd = 0xd1
pkt_name = 'CmdAck'
pkt_len = 1
def __init__(self, wmr200):
super(PacketArchiveReady, self).__init__(wmr200)
def packet_process(self):
"""Returns a records field to be processed by the weewx engine."""
super(PacketArchiveReady, self).packet_process()
# Immediately request to the console a command to send archived data.
self.wmr200.request_archive_data()
class PacketArchiveData(PacketArchive):
"""Packet parser for archived data."""
pkt_cmd = 0xd2
pkt_name = 'Archive Data'
# Initial console rain total value since 2007-1-1.
rain_total_last = None
def __init__(self, wmr200):
super(PacketArchiveData, self).__init__(wmr200)
def packet_process(self):
"""Returns a records field to be processed by the weewx engine."""
super(PacketArchiveData, self).packet_process()
try:
self._record.update(decode_rain(self, self._pkt_data[ 7:20]))
self._record.update(decode_wind(self, self._pkt_data[20:27]))
self._record.update(decode_uvi(self, self._pkt_data[27:28]))
self._record.update(decode_pressure(self, self._pkt_data[28:32]))
# Number of sensors starting at zero inclusive.
num_sensors = self._pkt_data[32]
for i in xrange(0, num_sensors+1):
base = 33 + i*7
self._record.update(decode_temp(self,
self._pkt_data[base:base+7]))
except IndexError:
msg = ('%s decode index failure' % self.pkt_name)
raise WMR200ProtocolError(msg)
# Tell wmr200 console we have processed it and can handle more.
self.wmr200.request_archive_data()
if DEBUG_PACKETS_ARCHIVE:
logdbg(' Archive packet num_temp_sensors:%d' % num_sensors)
def timestamp_last_rain(self):
"""Pulls the epoch timestamp from the packet.
Returns the epoch timestamp since which rainfall has been accumulated."""
return self._timestamp_packet(self._pkt_data[15:20])
def decode_wind(pkt, pkt_data):
"""Decode the wind portion of a wmr200 packet."""
try:
# Low byte of gust speed in 0.1 m/s.
gust_speed = ((((pkt_data[3]) & 0x0f) << 8)
| pkt_data[2]) / 10.0
# High nibble is low nibble of average speed.
# Low nibble of high byte and high nibble of low byte
# of average speed. Value is in 0.1 m/s
avg_speed = ((pkt_data[3] >> 4)
| ((pkt_data[4] << 4))) / 10.0
# Wind direction in steps of 22.5 degrees.
# 0 is N, 1 is NNE and so on. See WIND_DIR_MAP for complete list.
# Default to none unless speed is above zero.
dir_deg = None
if avg_speed > 0.0:
dir_deg = (pkt_data[0] & 0x0f) * 22.5
# Windchill temperature. The value is in degrees F.
# Set default to no windchill as it may not exist.
# Convert to metric for weewx presentation.
windchill = None
if pkt_data[6] != 0x20:
if pkt_data[6] & 0x10:
# Think it's a flag of some sort
pass
elif pkt_data[6] != 0x80:
windchill = (((pkt_data[6] << 8) | pkt_data[5]) - 320) \
* (5.0 / 90.0)
elif pkt_data[6] & 0x80:
windchill = ((((pkt_data[5]) * -1) - 320) * (5.0/90.0))
# The console returns wind speeds in m/s. weewx requires
# kph, so the speeds needs to be converted.
record = {'windSpeed' : avg_speed * 3.60,
'windGust' : gust_speed * 3.60,
'windDir' : dir_deg,
'windchill' : windchill,
}
# Sometimes the station emits a wind gust that is less than the
# average wind. If this happens, ignore the gust since it is
# not meaningful.
if gust_speed < avg_speed:
record['windGust'] = None
record['windGustDir'] = None
else:
# use the regular wind direction for the gust direction
record['windGustDir'] = record['windDir']
if DEBUG_PACKETS_WIND:
logdbg(' Wind Dir: %s' % (WIND_DIR_MAP[pkt_data[0] & 0x0f]))
logdbg(' Gust: %.1f m/s Wind:%.1f m/s' % (gust_speed, avg_speed))
if windchill is not None:
logdbg(' Windchill: %.1f C' % (windchill))
return record
except IndexError:
msg = ('%s decode index failure' % pkt.pkt_name)
raise WMR200ProtocolError(msg)
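# Nibble-packing example for decode_wind (hypothetical bytes):
# pkt_data = [0x04, 0x00, 0x19, 0x30, 0x00, 0x00, 0x20]
#   gust = (((0x30 & 0x0f) << 8) | 0x19) / 10.0 = 2.5 m/s
#   avg  = ((0x30 >> 4) | (0x00 << 4)) / 10.0   = 0.3 m/s
#   dir  = (0x04 & 0x0f) * 22.5 = 90.0 degrees (E in the usual 16-point map)
#   byte 6 == 0x20 means no windchill is reported.
# The returned record scales both speeds by 3.6 into km/h.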
class PacketWind(PacketLive):
"""Packet parser for wind."""
pkt_cmd = 0xd3
pkt_name = 'Wind'
pkt_len = 0x10
def __init__(self, wmr200):
super(PacketWind, self).__init__(wmr200)
def packet_process(self):
"""Decode a wind packet. Wind speed will be in kph
Returns a packet that can be processed by the weewx engine."""
super(PacketWind, self).packet_process()
self._record.update(decode_wind(self, self._pkt_data[7:14]))
def decode_rain(pkt, pkt_data):
"""Decode the rain portion of a wmr200 packet."""
try:
# Bytes 0 and 1: high and low byte encode the current rainfall rate
# in 0.01 in/h. Convert into metric.
rain_rate = (((pkt_data[1] & 0x0f) << 8) | pkt_data[0]) / 100.0 * 2.54
# Bytes 2 and 3: high and low byte encode rain of the last hour in 0.01in
# Convert into metric.
rain_hour = ((pkt_data[3] << 8) | pkt_data[2]) / 100.0 * 2.54
# Bytes 4 and 5: high and low byte encode rain of the last 24 hours,
# excluding the current hour, in 0.01in
# Convert into metric.
rain_day = ((pkt_data[5] << 8) | pkt_data[4]) / 100.0 * 2.54
# Bytes 6 and 7: high and low byte encode the total rainfall in 0.01in.
# Convert into metric.
rain_total = ((pkt_data[7] << 8) | pkt_data[6]) / 100.0 * 2.54
record = {'rainRate' : rain_rate,
'hourRain' : rain_hour,
'rain24' : rain_day + rain_hour,
'totalRain' : rain_total}
if DEBUG_PACKETS_RAIN:
try:
formatted = ["0x%02x" % x for x in pkt_data]
logdbg(' Rain packets:' + ', '.join(formatted))
logdbg(' Rain rate:%.02f; hour_rain:%.02f; day_rain:%.02f' %
(rain_rate, rain_hour, rain_day))
logdbg(' Total rain_total:%.02f' % (rain_total))
logdbg(' Last rain %s' %
weeutil.weeutil.timestamp_to_string\
(pkt.timestamp_last_rain()))
except Exception:
pass
return record
except IndexError:
msg = ('%s decode index failure' % pkt.pkt_name)
raise WMR200ProtocolError(msg)
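# Unit-conversion example for decode_rain (hypothetical bytes): rate bytes
# [0x64, 0x00] decode as ((0x00 & 0x0f) << 8 | 0x64) / 100.0 * 2.54 = 2.54,
# i.e. 1.00 in/h expressed as 2.54 cm/h. All four rain fields use the same
# 0.01 inch resolution and the same metric conversion.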
def adjust_rain(pkt, packet):
"""Calculate rainfall per poll interval.
Because the WMR does not offer anything like bucket tips, we must
calculate it by looking for the change in total rain.
After driver startup we need to initialize the total rain presented
by the console.
There are two different rain total last values kept. One for archive
data and one for live loop data. They are addressed using a static
variable within the scope of the respective class name."""
record = {}
# Get the total current rain field from the console.
rain_total = pkt.record_get('totalRain')
# Calculate the amount of rain occurring for this interval.
try:
rain_interval = rain_total - packet.rain_total_last
except TypeError:
rain_interval = 0.0
record['rain'] = rain_interval
record['totalRainLast'] = packet.rain_total_last
try:
logdbg(' adjust_rain rain_total:%.02f %s.rain_total_last:%.02f'
' rain_interval:%.02f' % (rain_total, packet.pkt_name,
packet.rain_total_last, rain_interval))
except TypeError:
logdbg(' Initializing %s.rain_total_last to %.02f' %
(packet.pkt_name, rain_total))
packet.rain_total_last = rain_total
return record
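# Example: with PacketRain.rain_total_last at 12.70 and a new totalRain of
# 12.95, the record gains rain=0.25 for this interval and rain_total_last
# becomes 12.95. On the very first packet after startup rain_total_last is
# None, so the TypeError path reports rain=0.0 and simply seeds the total.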
class PacketRain(PacketLive):
"""Packet parser for rain."""
pkt_cmd = 0xd4
pkt_name = 'Rain'
pkt_len = 0x16
# Initial console rain total value since 2007-1-1.
rain_total_last = None
def __init__(self, wmr200):
super(PacketRain, self).__init__(wmr200)
def packet_process(self):
"""Returns a packet that can be processed by the weewx engine."""
super(PacketRain, self).packet_process()
self._record.update(decode_rain(self, self._pkt_data[7:20]))
self._record.update(adjust_rain(self, PacketRain))
def timestamp_last_rain(self):
"""Pulls the epoch timestamp from the packet.
Returns the epoch timestamp since which rainfall has been accumulated."""
return self._timestamp_packet(self._pkt_data[15:20])
def decode_uvi(pkt, pkt_data):
"""Decode the uvi portion of a wmr200 packet."""
try:
record = {'UV': pkt_data[0] & 0x0f}
if DEBUG_PACKETS_UVI:
logdbg(" UV index:%s\n" % record['UV'])
return record
except IndexError:
msg = ('%s decode index failure' % pkt.pkt_name)
raise WMR200ProtocolError(msg)
class PacketUvi(PacketLive):
"""Packet parser for ultra violet sensor."""
pkt_cmd = 0xd5
pkt_name = 'UVI'
pkt_len = 0x0a
def __init__(self, wmr200):
super(PacketUvi, self).__init__(wmr200)
def packet_process(self):
"""Returns a packet that can be processed by the weewx engine."""
super(PacketUvi, self).packet_process()
self._record.update(decode_uvi(self, self._pkt_data[7:8]))
def decode_pressure(pkt, pkt_data):
"""Decode the pressure portion of a wmr200 packet."""
try:
# Byte 0: low byte of the pressure. Value is in hPa.
# Byte 1: high nibble is the forecast, low nibble is the
# high byte of the pressure.
# Unfortunately, we do not know if this is MSLP corrected pressure,
# or "gauge" pressure. We will assume the latter.
pressure = float(((pkt_data[1] & 0x0f) << 8) | pkt_data[0])
forecast = (pkt_data[1] >> 4) & 0x7
# Similar to bytes 0 and 1, but altitude corrected
# pressure. Upper nibble of byte 3 is still unknown. Seems to
# be always 3.
altimeter = float(((pkt_data[3] & 0x0f) << 8)
| pkt_data[2])
unknown_nibble = (pkt_data[3] >> 4)
record = {'pressure' : pressure,
'altimeter' : altimeter,
'forecastIcon': forecast}
if DEBUG_PACKETS_PRESSURE:
logdbg(' Forecast: %s' % FORECAST_MAP[forecast])
logdbg(' Raw pressure: %.02f hPa' % pressure)
if unknown_nibble != 3:
logdbg(' Pressure unknown nibble: 0x%x' % unknown_nibble)
logdbg(' Altitude corrected pressure: %.02f hPa console' %
altimeter)
return record
except IndexError:
msg = ('%s decode index failure' % pkt.pkt_name)
raise WMR200ProtocolError(msg)
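# Decoding example for decode_pressure (hypothetical bytes):
# pkt_data = [0xe8, 0x33, 0xf2, 0x33]
#   pressure  = ((0x33 & 0x0f) << 8) | 0xe8 = 0x3e8 = 1000.0 hPa
#   forecast  = (0x33 >> 4) & 0x7 = 3 (see FORECAST_MAP)
#   altimeter = ((0x33 & 0x0f) << 8) | 0xf2 = 0x3f2 = 1010.0 hPa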
class PacketPressure(PacketLive):
"""Packet parser for barometer sensor."""
pkt_cmd = 0xd6
pkt_name = 'Pressure'
pkt_len = 0x0d
def __init__(self, wmr200):
super(PacketPressure, self).__init__(wmr200)
def packet_process(self):
"""Returns a packet that can be processed by the weewx engine."""
super(PacketPressure, self).packet_process()
self._record.update(decode_pressure(self, self._pkt_data[7:11]))
def decode_temp(pkt, pkt_data):
"""Decode the temperature portion of a wmr200 packet."""
try:
record = {}
# The historic data can contain data from multiple sensors. I'm not
# sure if the 0xD7 frames can do too. I've never seen a frame with
# multiple sensors. But historic data bundles data for multiple
# sensors.
# Byte 0: low nibble contains sensor ID. 0 for base station.
sensor_id = pkt_data[0] & 0x0f
# '00 Temp steady
# '01 Temp rising
# '10 Temp falling
temp_trend = (pkt_data[0] >> 6) & 0x3
# '00 Humidity steady
# '01 Humidity rising
# '10 Humidity falling
hum_trend = (pkt_data[0] >> 4) & 0x3
# Byte 1: low byte of the temperature.
# Byte 2: high nibble contains the sign indicator; low nibble
# is the high byte of the temperature. The value is in 1/10
# degrees centigrade.
temp = (((pkt_data[2] & 0x0f) << 8) | pkt_data[1]) / 10.0
if pkt_data[2] & 0x80:
temp *= -1
# The humidity in percent.
humidity = pkt_data[3]
# Bytes 4 and 5: dew point, encoded like the temperature above.
# The value is in 1/10 degrees centigrade.
dew_point = (((pkt_data[5] & 0x0f) << 8)
| pkt_data[4]) / 10.0
if pkt_data[5] & 0x80:
dew_point *= -1
# Heat index reported by console.
heat_index = None
if pkt_data[6] != 0:
# For some strange reason it's reported in degF so convert
# to metric.
heat_index = (pkt_data[6] - 32) / (9.0 / 5.0)
if sensor_id == 0:
# Indoor temperature sensor.
record['inTemp'] = temp
record['inHumidity'] = humidity
elif sensor_id == 1:
# Outdoor temperature sensor.
record['outTemp'] = temp
record['outHumidity'] = humidity
record['heatindex'] = heat_index
elif sensor_id >= 2:
# Extra temperature sensors.
# If additional temperature sensors exist (channel>=2), then
# use observation types 'extraTemp1', 'extraTemp2', etc.
record['extraTemp%d' % sensor_id] = temp
record['extraHumid%d' % sensor_id] = humidity
if DEBUG_PACKETS_TEMP:
logdbg(' Temperature id:%d %.1f C trend: %s'
% (sensor_id, temp, TRENDS[temp_trend]))
logdbg(' Humidity id:%d %d%% trend: %s'
% (sensor_id, humidity, TRENDS[hum_trend]))
logdbg((' Dew point id:%d: %.1f C' % (sensor_id, dew_point)))
if heat_index:
logdbg(' Heat id:%d index:%d' % (sensor_id, heat_index))
return record
except IndexError:
msg = ('%s decode index failure' % pkt.pkt_name)
raise WMR200ProtocolError(msg)
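# Sign-handling example for decode_temp (hypothetical bytes): with
# pkt_data[1] = 0xe6 and pkt_data[2] = 0x80, temp decodes as
# ((0x80 & 0x0f) << 8 | 0xe6) / 10.0 = 23.0, then the 0x80 sign bit
# negates it to -23.0 C. sensor_id 0 maps to inTemp/inHumidity,
# 1 to outTemp/outHumidity, and 2+ to extraTempN/extraHumidN.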
class PacketTemperature(PacketLive):
"""Packet parser for temperature and humidity sensor."""
pkt_cmd = 0xd7
pkt_name = 'Temperature'
pkt_len = 0x10
def __init__(self, wmr200):
super(PacketTemperature, self).__init__(wmr200)
def packet_process(self):
"""Returns a packet that can be processed by the weewx engine."""
super(PacketTemperature, self).packet_process()
self._record.update(decode_temp(self, self._pkt_data[7:14]))
# Save the temp record for possible windchill calculation.
self.wmr200.last_temp_record = self._record
class PacketStatus(PacketLive):
"""Packet parser for console sensor status."""
pkt_cmd = 0xd9
pkt_name = 'Status'
pkt_len = 0x08
def __init__(self, wmr200):
super(PacketStatus, self).__init__(wmr200)
def timestamp_live(self):
"""Return timestamp of packet.
This packet does not have a timestamp so we just return the
previous cached timestamp from the last live packet.
Note: If there is no previous cached timestamp then we
return the initial PC timestamp. This would occur quite early
in the driver startup and this time may be quite out of
sequence from the rest of the packets. Another option would be
to simply discard this status packet at this time."""
return self.wmr200.last_time_epoch
def packet_process(self):
"""Returns a packet that can be processed by the weewx engine.
Not all console status aligns with the weewx API but we try
to make it fit."""
super(PacketStatus, self).packet_process()
# Setup defaults as good status.
self._record.update({'outTempFault' : 0,
'windFault' : 0,
'uvFault' : 0,
'rainFault' : 0,
'clockUnsynchronized' : 0,
'outTempBatteryStatus' : 1.0,
'windBatteryStatus' : 1.0,
'uvBatteryStatus' : 1.0,
'rainBatteryStatus' : 1.0,
})
# This information may be sent to syslog
msg_status = []
if self._pkt_data[2] & 0x02:
msg_status.append('Temp outdoor sensor fault')
self._record['outTempFault'] = 1
if self._pkt_data[2] & 0x01:
msg_status.append('Wind sensor fault')
self._record['windFault'] = 1
if self._pkt_data[3] & 0x20:
msg_status.append('UV Sensor fault')
self._record['uvFault'] = 1
if self._pkt_data[3] & 0x10:
msg_status.append('Rain sensor fault')
self._record['rainFault'] = 1
if self._pkt_data[4] & 0x80:
msg_status.append('Clock time unsynchronized')
self._record['clockUnsynchronized'] = 1
if self._pkt_data[4] & 0x02:
msg_status.append('Temp outdoor sensor: Battery low')
self._record['outTempBatteryStatus'] = 0.0
if self._pkt_data[4] & 0x01:
msg_status.append('Wind sensor: Battery low')
self._record['windBatteryStatus'] = 0.0
if self._pkt_data[5] & 0x20:
msg_status.append('UV sensor: Battery low')
self._record['uvBatteryStatus'] = 0.0
if self._pkt_data[5] & 0x10:
msg_status.append('Rain sensor: Battery low')
self._record['rainBatteryStatus'] = 0.0
if self.wmr200.sensor_stat:
while msg_status:
msg = msg_status.pop(0)
logwar(msg)
# Output packet to try to understand other fields.
if DEBUG_PACKETS_STATUS:
logdbg(self.to_string_raw(' Sensor packet:'))
def calc_time_drift(self):
"""Returns the difference between PC time and the packet timestamp.
This packet has no timestamp so cannot be used to calculate."""
pass
class PacketEraseAcknowledgement(PacketControl):
"""Packet parser for archived data is ready to receive."""
pkt_cmd = 0xdb
pkt_name = 'Erase Acknowledgement'
pkt_len = 0x01
def __init__(self, wmr200):
super(PacketEraseAcknowledgement, self).__init__(wmr200)
class PacketFactory(object):
"""Factory to create proper packet from first command byte from device."""
def __init__(self, *subclass_list):
self.subclass = dict((s.pkt_cmd, s) for s in subclass_list)
self.skipped_bytes = 0
def num_packets(self):
"""Returns the number of packets handled by the factory."""
return len(self.subclass)
def get_packet(self, pkt_cmd, wmr200):
"""Returns a protocol packet instance from initial packet command byte.
Returns None if there was no mapping for the protocol command.
Upon startup we may read partial packets. We need to resync to a
valid packet command from the weather console device if we start
reading in the middle of a previous packet.
We may also get out of sync during operation."""
if pkt_cmd in self.subclass:
if self.skipped_bytes:
logwar(('Skipped bytes before resync:%d' %
self.skipped_bytes))
self.skipped_bytes = 0
return self.subclass[pkt_cmd](wmr200)
self.skipped_bytes += 1
return None
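# Illustrative use: PACKET_FACTORY.get_packet(0xd3, wmr200) returns a new
# PacketWind bound to this driver instance, while an unrecognized byte
# returns None and bumps skipped_bytes until a known command resyncs us.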
# Packet factory parser for each packet presented by weather console.
PACKET_FACTORY = PacketFactory(
PacketArchiveReady,
PacketArchiveData,
PacketWind,
PacketRain,
PacketPressure,
PacketUvi,
PacketTemperature,
PacketStatus,
PacketEraseAcknowledgement,
)
# Count of restarts
STAT_RESTART = 0
class RequestLiveData(threading.Thread):
"""Watchdog thread to poke the console requesting live data.
If the console does not receive a request or heartbeat periodically
for live data then it automatically resets into archive mode."""
def __init__(self, kwargs):
super(RequestLiveData, self).__init__()
self.wmr200 = kwargs['wmr200']
self.poke_time = kwargs['poke_time']
self.sock_rd = kwargs['sock_rd']
loginf(('Created watchdog thread to poke for live data every %d'
' seconds') % self.poke_time)
def run(self):
"""Periodically inform the main driver thread to request live data.
When its time to shutdown this thread, the main thread will send any
string across the socket. This both wakes up this timer thread and
also tells it to expire."""
loginf('Started watchdog thread live data')
while True:
self.wmr200.ready_to_poke(True)
main_thread_comm = \
select.select([self.sock_rd], [], [], self.poke_time)
if main_thread_comm[0]:
# Data is ready to read on socket to indicate thread teardown.
buf = self.sock_rd.recv(4096)
loginf('Watchdog received %s' % buf)
break
loginf('Watchdog thread exiting')
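# Shutdown handshake example: the main thread writes any string to its end
# of the socketpair (see WMR200.closePort), which makes the select() above
# wake immediately and ends this thread.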
class PollUsbDevice(threading.Thread):
"""A thread continually polls for data with blocking read from a device.
Some devices may overflow buffers if not drained within a timely manner.
This thread will read block on the USB port and buffer data from the
device for consumption."""
def __init__(self, kwargs):
super(PollUsbDevice, self).__init__()
self.wmr200 = kwargs['wmr200']
self.usb_device = self.wmr200.usb_device
# Buffer list to read data from weather console
self._buf = []
# Lock to wrap around the buffer
self._lock_poll = threading.Lock()
# Conditional variable to gate thread after reset applied.
# We don't want to read previous data, if any, until a reset
# has been sent.
self._cv_poll = threading.Condition()
# Gates initial entry into reading from device
self._ok_to_read = False
loginf('Created USB polling thread to read block on device')
def run(self):
"""Polling function to block read the USB device.
This method appends new data after previous buffer
data in preparation for reads to the main driver
thread.
Once this thread is started it will be gated by
a reset to the weather console device to sync it
up."""
loginf('USB polling device thread for live data launched')
# Wait for the main thread to indicate it's safe to read.
self._cv_poll.acquire()
while not self._ok_to_read:
self._cv_poll.wait()
self._cv_poll.release()
loginf('USB polling device thread signaled to start')
# Read and discard next data from weather console device.
_ = self.usb_device.read_device()
read_timeout_cnt = 0
read_reset_cnt = 0
# Loop indefinitely until main thread indicates time to expire.
while self.wmr200.poll_usb_device_enable():
try:
buf = self.usb_device.read_device()
if buf:
self._append_usb_device(buf)
read_timeout_cnt = 0
read_reset_cnt = 0
else:
# We timed out here. We should poke the device
# after a read timeout, and also prepare for more
# serious measures.
self.wmr200.ready_to_poke(True)
read_timeout_cnt += 1
# If we don't receive any data from the console
# after several attempts, send down a reset.
if read_timeout_cnt == 4:
self.reset_console()
read_timeout_cnt = 0
read_reset_cnt += 1
# If we have sent several resets with no data,
# give up and abort.
if read_reset_cnt == 2:
msg = ('Device unresponsive after multiple resets')
logerr(msg)
raise weewx.RetriesExceeded(msg)
except:
logerr('USB device read error')
raise
loginf('USB polling device thread exiting')
def _append_usb_device(self, buf):
"""Appends data from USB device to shared buffer.
Called from child thread."""
self._lock_poll.acquire()
# Append the list of bytes to this buffer.
self._buf.append(buf)
self._lock_poll.release()
def read_usb_device(self):
"""Reads the buffered USB device data.
Called from main thread.
Returns a list of bytes."""
buf = []
self._lock_poll.acquire()
if len(self._buf):
buf = self._buf.pop(0)
self._lock_poll.release()
return buf
def flush_usb_device(self):
"""Flush any previous USB device data.
Called from main thread."""
self._lock_poll.acquire()
self._buf = []
self._lock_poll.release()
loginf('Flushed USB device')
def reset_console(self):
"""Send a reset to wake up the weather console device
Called from main thread or child thread."""
buf = [0x20, 0x00, 0x08, 0x01, 0x00, 0x00, 0x00, 0x00]
try:
self.usb_device.write_device(buf)
loginf('Reset console device')
self._ok_to_read = True
time.sleep(1)
except usb.USBError, exception:
msg = ('reset_console() Unable to send USB control'
' message %s' % exception)
logerr(msg)
# Convert to a Weewx error:
raise weewx.WeeWxIOError(exception)
def notify(self):
"""Gates thread to read of the device.
Called from main thread."""
self._cv_poll.acquire()
self._cv_poll.notify()
self._cv_poll.release()
class WMR200(weewx.drivers.AbstractDevice):
"""Driver for the Oregon Scientific WMR200 station."""
def __init__(self, **stn_dict):
"""Initialize the wmr200 driver.
NAMED ARGUMENTS:
model: Which station model is this? [Optional]
sensor_status: Print sensor faults or failures to syslog. [Optional]
use_pc_time: Use the PC timestamp instead of the console timestamp. [Optional]
erase_archive: Erase archive upon startup. [Optional]
archive_interval: Time in seconds between intervals [Optional]
archive_threshold: Max time in seconds between valid archive packets [Optional]
ignore_checksum: Ignore checksum failures and drop packet.
archive_startup: Time after startup to await archive data draining.
--- User should not typically change anything below here ---
vendor_id: The USB vendor ID for the WMR [Optional]
product_id: The USB product ID for the WMR [Optional]
interface: The USB interface [Optional]
in_endpoint: The IN USB endpoint used by the WMR [Optional]
"""
super(WMR200, self).__init__()
## User configurable options
self._model = stn_dict.get('model', 'WMR200')
# Provide sensor faults in syslog.
self._sensor_stat = weeutil.weeutil.tobool(stn_dict.get('sensor_status',
True))
# Use pc timestamps or weather console timestamps.
self._use_pc_time = \
weeutil.weeutil.tobool(stn_dict.get('use_pc_time', True))
# Use archive data when possible.
self._erase_archive = \
weeutil.weeutil.tobool(stn_dict.get('erase_archive', False))
# Archive interval in seconds.
self._archive_interval = int(stn_dict.get('archive_interval', 60))
if self._archive_interval not in [60, 300]:
logwar('Unverified archive interval:%d sec'
% self._archive_interval)
# Archive threshold in seconds between archive packets before dropping.
self._archive_threshold = int(stn_dict.get('archive_threshold',
3600*24*7))
# Ignore checksum errors.
self._ignore_checksum = \
weeutil.weeutil.tobool(stn_dict.get('ignore_checksum', False))
# Archive startup time in seconds.
self._archive_startup = int(stn_dict.get('archive_startup', 120))
# Device specific hardware options.
vendor_id = int(stn_dict.get('vendor_id', '0x0fde'), 0)
product_id = int(stn_dict.get('product_id', '0xca01'), 0)
interface = int(stn_dict.get('interface', 0))
in_endpoint = int(stn_dict.get('IN_endpoint',
usb.ENDPOINT_IN + 1))
# Buffer of bytes read from weather console device.
self._buf = []
# Packet created from the buffer data read from the weather console
# device.
self._pkt = None
# Setup the generator to get a byte stream from the console.
self.gen_byte = self._generate_bytestream
# Calculate time delta in seconds between host and console.
self.time_drift = None
# Create USB accessor to communicate with weather console device.
self.usb_device = UsbDevice()
# Pass USB parameters to the USB device accessor.
self.usb_device.in_endpoint = in_endpoint
self.usb_device.interface = interface
# Locate the weather console device on the USB bus.
if not self.usb_device.find_device(vendor_id, product_id):
logcrt('Unable to find device with VendorID=%04x ProductID=%04x' %
(vendor_id, product_id))
raise weewx.WeeWxIOError("Unable to find USB device")
# Open the weather console USB device for read and writes.
self.usb_device.open_device()
# Initialize watchdog to poke device to request live
# data stream.
self._rdy_to_poke = True
# Create the lock to sync between main thread and watchdog thread.
self._poke_lock = threading.Lock()
# Create a socket pair to communicate with the watchdog thread.
(self.sock_rd, self.sock_wr) = \
socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM, 0)
# Create the watchdog thread to request live data.
self._thread_watchdog = RequestLiveData(
kwargs = {'wmr200' : self,
'poke_time' : _WMR200_REQUEST_LIVE_DATA_INTERVAL,
'sock_rd' : self.sock_rd})
# Create the usb polling device thread.
self._thread_usb_poll = PollUsbDevice(kwargs={'wmr200': self})
# Start the usb polling device thread.
self._poll_device_enable = True
self._thread_usb_poll.start()
# Send the device a reset
self._thread_usb_poll.reset_console()
self._thread_usb_poll.notify()
# Start the watchdog for live data thread.
self._thread_watchdog.start()
# Not all packets from the wmr200 have timestamps, yet weewx requires
# timestamps on all packets passed up the stack. So we will use the
# timestamp from the most recent packet, but still need to see an
# initial timestamp, so we'll seed this with current PC time.
self.last_time_epoch = int(time.time() + 0.5)
# Restart counter when driver crashes and is restarted by the
# weewx engine.
global STAT_RESTART
STAT_RESTART += 1
if STAT_RESTART > 1:
logwar(('Restart count: %d') % STAT_RESTART)
# Reset any other state during startup or after a crash.
PacketArchiveData.rain_total_last = None
# Debugging flags
global DEBUG_WRITES
DEBUG_WRITES = int(stn_dict.get('debug_writes', 0))
global DEBUG_COMM
DEBUG_COMM = int(stn_dict.get('debug_comm', 0))
global DEBUG_CONFIG_DATA
DEBUG_CONFIG_DATA = int(stn_dict.get('debug_config_data', 1))
global DEBUG_PACKETS_RAW
DEBUG_PACKETS_RAW = int(stn_dict.get('debug_packets_raw', 0))
global DEBUG_PACKETS_COOKED
DEBUG_PACKETS_COOKED = int(stn_dict.get('debug_packets_cooked', 0))
global DEBUG_PACKETS_ARCHIVE
DEBUG_PACKETS_ARCHIVE = int(stn_dict.get('debug_packets_archive', 0))
global DEBUG_PACKETS_TEMP
DEBUG_PACKETS_TEMP = int(stn_dict.get('debug_packets_temp', 0))
global DEBUG_PACKETS_RAIN
DEBUG_PACKETS_RAIN = int(stn_dict.get('debug_packets_rain', 0))
global DEBUG_PACKETS_WIND
DEBUG_PACKETS_WIND = int(stn_dict.get('debug_packets_wind', 0))
global DEBUG_PACKETS_STATUS
DEBUG_PACKETS_STATUS = int(stn_dict.get('debug_packets_status', 0))
global DEBUG_PACKETS_PRESSURE
DEBUG_PACKETS_PRESSURE = int(stn_dict.get('debug_packets_pressure', 0))
global DEBUG_CHECKSUM
DEBUG_CHECKSUM = int(stn_dict.get('debug_checksum', 0))
if DEBUG_CONFIG_DATA:
logdbg('Configuration setup')
logdbg(' Log sensor faults: %s' % self._sensor_stat)
logdbg(' Using PC Time: %s' % self._use_pc_time)
logdbg(' Erase archive data: %s' % self._erase_archive)
logdbg(' Archive interval: %d' % self._archive_interval)
logdbg(' Archive threshold: %d' % self._archive_threshold)
@property
def hardware_name(self):
"""weewx api."""
return self._model
@property
def sensor_stat(self):
"""Return if sensor status is enabled for device."""
return self._sensor_stat
@property
def use_pc_time(self):
"""Flag to use pc time rather than weather console time."""
return self._use_pc_time
@property
def archive_interval(self):
"""weewx api. Time in seconds between archive intervals."""
return self._archive_interval
@property
def ignore_checksum(self):
"""Flag to drop rather than fail on checksum errors."""
return self._ignore_checksum
def ready_to_poke(self, val):
"""Set info that device is ready to be poked."""
self._poke_lock.acquire()
self._rdy_to_poke = val
self._poke_lock.release()
def is_ready_to_poke(self):
"""Get info that device is ready to be poked."""
self._poke_lock.acquire()
val = self._rdy_to_poke
self._poke_lock.release()
return val
def poll_usb_device_enable(self):
"""The USB thread calls this to enable data reads from the console."""
return self._poll_device_enable
def _write_cmd(self, cmd):
"""Writes a single command to the wmr200 console."""
buf = [0x01, cmd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
try:
self.usb_device.write_device(buf)
except usb.USBError, exception:
msg = (('_write_cmd() Unable to send USB cmd:0x%02x control'
' message' % cmd))
logerr(msg)
# Convert to a Weewx error:
raise weewx.WeeWxIOError(exception)
def _poke_console(self):
"""Send a heartbeat command to the weather console.
This is used to inform the weather console to continue streaming
live data across the USB bus. Otherwise it enters archive mode
were data is stored on the weather console."""
self._write_cmd(0xD0)
if self._erase_archive:
self._write_cmd(0xDB)
# Reset the ready to poke flag.
self.ready_to_poke(False)
if DEBUG_COMM:
logdbg('Poked device for live data')
def _generate_bytestream(self):
"""Generator to provide byte stream to packet collector.
We need to return occasionally to handle both reading data
from the weather console and handing that data."""
while True:
# Read WMR200 protocol bytes from the weather console
# via a proxy thread that ensures we drain the USB
# fifo data from the weather console.
buf = self._thread_usb_poll.read_usb_device()
# Add list of new USB bytes to previous buffer byte
# array, if any.
if buf:
self._buf.extend(buf)
while self._buf:
# Generate one byte at a time.
yield self._buf.pop(0)
# Bail if there is a lull in data from the weather console
# If we don't bail we won't be able to do other processing
# required to keep the weather console operating.
# e.g. poking the console to maintain live data stream.
if not buf and not self._buf:
return
def _poll_for_data(self):
"""Poll for data from the weather console device.
Read a byte from the weather console. If we are starting
a new packet, get one using that byte from the packet factory.
Otherwise add the byte to the current packet.
Each USB packet may straddle a protocol packet, so make sure
we assign the data appropriately."""
if not self._thread_usb_poll.is_alive():
msg = 'USB polling thread unexpectedly terminated'
logerr(msg)
raise weewx.WeeWxIOError(msg)
for byte in self.gen_byte():
if self._pkt:
self._pkt.append_data(byte)
else:
# This may return None if we are out of sync
# with the console.
self._pkt = PACKET_FACTORY.get_packet(byte, self)
if self._pkt is not None and self._pkt.packet_complete():
# If we have a complete packet then bail to handle it.
return
# Prevent busy loop by suspending process a bit to
# wait for usb read thread to accumulate data from the
# weather console.
time.sleep(_WMR200_USB_POLL_INTERVAL)
def request_archive_data(self):
"""Request archive packets from console."""
self._write_cmd(0xDA)
def print_stats(self):
"""Print summary of driver statistics."""
loginf(('Received packet count live:%d archive:%d'
' control:%d') % (PacketLive.pkt_rx,
PacketArchive.pkt_rx,
PacketControl.pkt_rx))
loginf('Received bytes:%d sent bytes:%d' %
(self.usb_device.byte_cnt_rd,
self.usb_device.byte_cnt_wr))
loginf('Packet archive queue len:%d live queue len:%d'
% (len(PacketArchive.pkt_queue), len(PacketLive.pkt_queue)))
def _process_packet_complete(self):
"""Process a completed packet from the wmr200 console."""
if DEBUG_PACKETS_RAW:
logdbg(self._pkt.to_string_raw('Packet raw:'))
# This will raise exception if checksum fails.
self._pkt.verify_checksum()
try:
# Process the actual packet.
self._pkt.packet_process()
if self._pkt.packet_live_data():
PacketLive.pkt_queue.append(self._pkt)
logdbg(' Queuing live packet rx:%d live_queue_len:%d' %
(PacketLive.pkt_rx, len(PacketLive.pkt_queue)))
elif self._pkt.packet_archive_data():
PacketArchive.pkt_queue.append(self._pkt)
logdbg(' Queuing archive packet rx:%d archive_queue_len:%d'
% (PacketArchive.pkt_rx, len(PacketArchive.pkt_queue)))
else:
logdbg((' Acknowledged control packet'
' rx:%d') % PacketControl.pkt_rx)
except WMR200PacketParsingError, e:
# Drop any bogus packets.
logerr(self._pkt.to_string_raw('Discarding bogus packet: %s '
% e.msg))
# Reset this packet to get ready for next one
self._pkt = None
def genLoopPackets(self):
"""Main generator function that continuously returns loop packets
weewx api to return live records."""
# Reset the current packet upon entry.
self._pkt = None
logdbg('genLoop() phase getting live packets')
while True:
# Loop through indefinitely generating records to the
# weewx engine. This loop may resume at the yield()
# or upon entry during any exception, even an exception
# not generated from this driver. e.g. weewx.service.
if self._pkt is not None and self._pkt.packet_complete():
self._process_packet_complete()
# If it's time to poke the console and we are not
# in the middle of collecting a packet then do it here.
if self.is_ready_to_poke() and self._pkt is None:
self._poke_console()
# Pull data from the weather console.
# This may create a packet or append data to existing packet.
self._poll_for_data()
# Yield any live packets we may have obtained from this callback
# or queued from other driver callback services.
while PacketLive.pkt_queue:
pkt = PacketLive.pkt_queue.pop(0)
if DEBUG_PACKETS_COOKED:
pkt.print_cooked()
logdbg('genLoop() Yielding live queued packet id:%d'
% pkt.pkt_id)
yield pkt.packet_record()
def XXXgenArchiveRecords(self, since_ts=0):
"""A generator function to return archive packets from the wmr200.
weewx api to return archive records.
since_ts: A timestamp in database time. All data since but not
including this time will be returned.
Pass in None for all data
NOTE: This API is disabled so that the weewx engine will default
to using software archive generation. There may be a way
to use hardware generation if one plays with not poking the console,
which would allow archive packets to be created.
yields: a sequence of dictionary records containing the console
data."""
logdbg('genArchive() phase getting archive packets since %s'
% weeutil.weeutil.timestamp_to_string(since_ts))
if self.use_pc_time and self.time_drift is None:
loginf(('genArchive() Unable to process archive packets'
' until live packet received'))
return
while True:
# Loop through indefinitely generating records to the
# weewx engine. This loop may resume at the yield()
# or upon entry during any exception, even an exception
# not generated from this driver. e.g. weewx.service.
if self._pkt is not None and self._pkt.packet_complete():
self._process_packet_complete()
# If it's time to poke the console and we are not
# in the middle of collecting a packet then do it here.
if self.is_ready_to_poke() and self._pkt is None:
self._poke_console()
# Pull data from the weather console.
# This may create a packet or append data to existing packet.
self._poll_for_data()
# Yield any archive packets we may have obtained from this callback
# or queued from other driver callback services.
while PacketArchive.pkt_queue:
pkt = PacketArchive.pkt_queue.pop(0)
# If we are using PC time we need to adjust the record timestamp
# with the PC drift.
if self.use_pc_time:
pkt.timestamp_adjust_drift()
if DEBUG_PACKETS_COOKED:
pkt.print_cooked()
if pkt.timestamp_record() > since_ts:
logdbg(('genArchive() Yielding received archive record'
' after requested timestamp'))
yield pkt.packet_record()
else:
loginf(('genArchive() Ignoring received archive record'
' before requested timestamp'))
def genStartupRecords(self, since_ts=0):
"""A generator function to present archive packets on start.
weewx api to return archive records."""
logdbg('genStartup() phase getting archive packets since %s'
% weeutil.weeutil.timestamp_to_string(since_ts))
# Reset the current packet upon entry.
self._pkt = None
# Time after last archive packet to indicate there are
# likely no more archive packets left to drain.
timestamp_last_archive_rx = int(time.time() + 0.5)
# Statistics to calculate time spent in this phase.
timestamp_packet_first = None
timestamp_packet_current = None
timestamp_packet_previous = None
cnt = 0
# If there is no previous database, this parameter is passed as None.
# Convert it to a numerical value representing the start of the unix epoch.
if since_ts is None:
loginf('genStartup() Database initialization')
since_ts = 0
while True:
# Loop through indefinitely generating archive records to the
# weewx engine. This loop may resume at the yield()
# or upon entry during any exception, even an exception
# not generated from this driver. e.g. weewx.service.
if self._pkt is not None and self._pkt.packet_complete():
self._process_packet_complete()
# If it's time to poke the console and we are not
# in the middle of collecting a packet then do it here.
if self.is_ready_to_poke() and self._pkt is None:
self._poke_console()
# Pull data from the weather console.
# This may create a packet or append data to existing packet.
self._poll_for_data()
# If we have archive packets in the queue then yield them here.
while PacketArchive.pkt_queue:
timestamp_last_archive_rx = int(time.time() + 0.5)
# Present archive packets
# If PC time is set, we must have at least one
# live packet to calculate timestamps in PC time.
if self.use_pc_time and self.time_drift is None:
loginf(('genStartup() Delaying archive packet processing'
' until live packet received'))
break
loginf(('genStartup() Still receiving archive packets'
' cnt:%d len:%d') % (cnt, len(PacketArchive.pkt_queue)))
pkt = PacketArchive.pkt_queue.pop(0)
# If we are using PC time we need to adjust the
# record timestamp with the PC drift.
if self.use_pc_time:
pkt.timestamp_adjust_drift()
# Statistics indicating packets sent in this phase.
if timestamp_packet_first is None:
timestamp_packet_first = pkt.timestamp_record()
if timestamp_packet_previous is None:
if since_ts == 0:
timestamp_packet_previous = pkt.timestamp_record()
else:
timestamp_packet_previous = since_ts
timestamp_packet_current = pkt.timestamp_record()
# Calculate time interval between archive packets.
timestamp_packet_interval = timestamp_packet_current \
- timestamp_packet_previous
if pkt.timestamp_record() > (timestamp_packet_previous
+ self._archive_threshold):
loginf(('genStartup() Discarding received archive'
' record exceeding archive interval cnt:%d'
' threshold:%d timestamp:%s')
% (cnt, self._archive_threshold,
weeutil.weeutil.timestamp_to_string\
(pkt.timestamp_record())))
elif pkt.timestamp_record() > since_ts:
# Calculate the rain accumulation between valid archive
# packets.
pkt.record_update(adjust_rain(pkt, PacketArchiveData))
timestamp_packet_previous = timestamp_packet_current
cnt += 1
logdbg(('genStartup() Yielding received archive'
' record cnt:%d after requested timestamp'
':%d pkt_interval:%d pkt:%s')
% (cnt, since_ts, timestamp_packet_interval,
weeutil.weeutil.timestamp_to_string\
(pkt.timestamp_record())))
if DEBUG_PACKETS_COOKED:
pkt.print_cooked()
yield pkt.packet_record()
else:
timestamp_packet_previous = timestamp_packet_current
loginf(('genStartup() Discarding received archive'
' record before time requested cnt:%d'
' timestamp:%s')
% (cnt, weeutil.weeutil.timestamp_to_string\
(since_ts)))
# Return if we have received no more archive packets within a given
# time interval.
if (int(time.time() + 0.5) - timestamp_last_archive_rx >
self._archive_startup):
loginf(('genStartup() phase exiting since looks like all'
' archive packets have been retrieved after %d'
' sec cnt:%d')
% (self._archive_startup, cnt))
if timestamp_packet_first is not None:
startup_time = timestamp_packet_current \
- timestamp_packet_first
loginf(('genStartup() Yielded %d packets in %d sec '
' between these dates %s ==> %s' %
(cnt, startup_time,
weeutil.weeutil.timestamp_to_string\
(timestamp_packet_first),
weeutil.weeutil.timestamp_to_string\
(timestamp_packet_current))))
if startup_time > 0:
loginf(('genStartup() Average packets per minute:%f' %
(cnt/(startup_time/60.0))))
return
def closePort(self):
"""Closes the USB port to the device.
weewx api to shutdown the weather console."""
# Send a command to the wmr200 console indicating
# we are leaving.
self._write_cmd(0xDF)
# Let the polling thread die off.
self._poll_device_enable = False
# Join with the polling thread.
self._thread_usb_poll.join()
if self._thread_usb_poll.is_alive():
logerr('USB polling thread still alive')
else:
loginf('USB polling thread expired')
# Shutdown the watchdog thread.
self.sock_wr.send('shutdown')
# Join with the watchdog thread.
self._thread_watchdog.join()
if self._thread_watchdog.is_alive():
logerr('Watchdog thread still alive')
else:
loginf('Watchdog thread expired')
self.print_stats()
# Indicate if queues have not been drained.
if len(PacketArchive.pkt_queue):
logwar('Exiting with packets still in archive queue cnt:%d' %
len(PacketArchive.pkt_queue))
if len(PacketLive.pkt_queue):
logwar('Exiting with packets still in live queue cnt:%d' %
len(PacketLive.pkt_queue))
# Shutdown the USB access to the weather console device.
self.usb_device.close_device()
loginf('Driver gracefully exiting')
class WMR200ConfEditor(weewx.drivers.AbstractConfEditor):
@property
def default_stanza(self):
return """
[WMR200]
# This section is for the Oregon Scientific WMR200
# The station model, e.g., WMR200, WMR200A, Radio Shack W200
model = WMR200
# The driver to use:
driver = weewx.drivers.wmr200
"""
| tony-rasskazov/meteo | weewx/bin/weewx/drivers/wmr200.py | Python | mit | 77,782 | 0.002031 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django_busybody
--------------------
Tests for `django_busybody` models module.
"""
from __future__ import unicode_literals
import datetime
import json
import uuid
from mock import patch
from django.test import TestCase
# from django.conf import settings
from django.core.files.storage import default_storage
from django.contrib.contenttypes.models import ContentType
from django.core.files.base import ContentFile
from django.test.client import RequestFactory
from django.contrib.auth import get_user_model
from django.utils import timezone
import django_busybody.models as bb_models
from django_busybody import easy_crypto
from django_busybody.middlewares import GlobalRequestMiddleware
from . import models
class TestDjango_busybody(TestCase):
def setUp(self):
self.obj = models.EncryptTest.objects.create(
without_encrypt='1',
with_encrypt='1',
without_encrypt_with_log='1',
with_encrypt_with_log='1')
def test_get(self):
print("=" * 120)
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
print("=" * 80)
self.assertEqual(obj.without_encrypt, '1')
self.assertEqual(obj.with_encrypt, '1')
self.assertEqual(obj.without_encrypt_with_log, '1')
self.assertEqual(obj.with_encrypt_with_log, '1')
def test_get_and_save(self):
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
self.assertEqual(obj.without_encrypt, '1')
self.assertEqual(obj.with_encrypt, '1')
self.assertEqual(obj.without_encrypt_with_log, '1')
self.assertEqual(obj.with_encrypt_with_log, '1')
obj.save()
self.assertEqual(obj.without_encrypt, '1')
self.assertEqual(obj.with_encrypt, '1')
self.assertEqual(obj.without_encrypt_with_log, '1')
self.assertEqual(obj.with_encrypt_with_log, '1')
self.assertEqual(models.EncryptTest.objects.filter(without_encrypt__exact='1').count(), 1)
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt__exact='1').count(), 0)
self.assertEqual(models.EncryptTest.objects.filter(without_encrypt_with_log__exact='1').count(), 1)
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt_with_log__exact='1').count(), 0)
def test_encrypt(self):
self.assertEqual(models.EncryptTest.objects.filter(without_encrypt__exact='1').count(), 1)
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt__exact='1').count(), 0)
self.assertEqual(models.EncryptTest.objects.filter(without_encrypt_with_log__exact='1').count(), 1)
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt_with_log__exact='1').count(), 0)
def test_unicode(self):
obj = models.EncryptTest.objects.create(
without_encrypt='日本語',
with_encrypt='日本語',
without_encrypt_with_log='日本語',
with_encrypt_with_log='日本語')
obj = models.EncryptTest.objects.get(pk=obj.pk)
self.assertEqual(obj.without_encrypt, '日本語')
self.assertEqual(obj.with_encrypt, '日本語')
self.assertEqual(obj.without_encrypt_with_log, '日本語')
self.assertEqual(obj.with_encrypt_with_log, '日本語')
def test_invalid_decrypt(self):
models.EncryptTest.objects.filter(pk=self.obj.pk).update(with_encrypt='no_encrypt')
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt__exact='no_encrypt').count(), 1)
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
self.assertEqual(obj.with_encrypt, 'no_encrypt')
def test_invalid_decrypt2(self):
models.EncryptTest.objects.filter(pk=self.obj.pk).update(with_encrypt='日本語')
self.assertEqual(models.EncryptTest.objects.filter(with_encrypt__exact='日本語').count(), 1)
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
self.assertEqual(obj.with_encrypt, '日本語')
def test_invalid_decrypt3(self):
import base64
from Crypto.Cipher import AES
iv = b'\xf2\xae' * 8
raw = '日本語' * 16
cipher = AES.new(easy_crypto._cipher.key, AES.MODE_CBC, iv)
value = base64.b64encode(iv + cipher.encrypt(raw.encode('utf-8')))
models.EncryptTest.objects.filter(pk=self.obj.pk).update(with_encrypt=value)
models.EncryptTest.objects.get(pk=self.obj.pk)
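# No assertion needed above: the point is that get() must survive a
# payload that is a well-formed AES envelope but decrypts to nonsense.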
def tearDown(self):
models.EncryptTest.objects.get(pk=self.obj.pk).delete()
class TestDjango_history(TestCase):
def setUp(self):
self.user = get_user_model().objects.create(username='test')
self.obj = models.AllField.objects.create(
big_integer=0,
binary=b"",
boolean=True,
char="",
date=timezone.now(),
date_time=timezone.now(),
decimal=0,
duration=datetime.timedelta(seconds=1),
email="hoge@email.com",
_file=default_storage.save("hello.txt", ContentFile("hello world")),
file_path="hoge.txt",
_float=0.0,
integer=0,
generic_ip_address="0.0.0.0",
null_boolean=None,
positive_integer=1,
positive_small_integer=1,
slug="slug",
small_integer=0,
text="text",
time=timezone.now(),
url="http://hoge.com",
uuid=uuid.uuid4().hex,
foreign_key=self.user,
one_to_one=self.user)
@property
def latest_history(self):
return bb_models.History.objects.all().order_by('-changed_at')[0]
def test_history_bool(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.boolean = False
obj.null_boolean = True
obj.save()
def test_history_integer(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.big_integer = 10
obj.decimal = 10
obj._float = 0.1
obj.integer = 10
obj.positive_integer = 10
obj.positive_small_integer = 10
obj.small_integer = 1
obj.save()
def test_history_binary(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.binary = b"binary_value"
obj.save()
def test_history_string(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.char = "char"
obj.email = "hoge2@email.com"
obj.file_path = "hoge2.txt"
obj.generic_ip_address = "0.0.0.1"
obj.slug = "slug1"
obj.text = "text1"
obj.url = "http://hoge1.com"
obj.uuid = uuid.uuid4().hex
obj.save()
def test_history_datetime(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.date = timezone.now()
obj.date_time = timezone.now()
obj.duration = datetime.timedelta(seconds=2)
obj.time = timezone.now()
obj.save()
def test_history_file(self):
obj = models.AllField.objects.get(pk=self.obj.pk)
obj._file.save("hello2.txt",
ContentFile("hello world2"), save=True)
obj2 = models.AllField.objects.create()
obj2._file.save("hello2.txt",
ContentFile("hello world2"), save=True)
obj2 = models.AllField.objects.create()
obj2._file.save("hello2.txt",
ContentFile("hello world2"), save=True)
def test_history_key(self):
new_user = get_user_model().objects.create(username='test2')
obj = models.AllField.objects.get(pk=self.obj.pk)
obj.foreign_key = new_user
obj.one_to_one = new_user
obj.save()
class TestDjango_history_encrypt(TestCase):
def setUp(self):
self.obj = models.EncryptTest.objects.create(
without_encrypt='1',
with_encrypt='1',
without_encrypt_with_log='1',
with_encrypt_with_log='1')
def tearDown(self):
bb_models.History.objects.all().delete()
models.EncryptTest.objects.get(pk=self.obj.pk).delete()
def check_history(self, obj, key='without_encrypt_with_log'):
history = bb_models.History.objects.filter(
target_type=ContentType.objects.get_for_model(models.EncryptTest),
target_object_id=obj.pk).order_by('-changed_at')[0]
j = json.loads(history.changes)
self.assertEqual(eval(j[key][0]), "1")
self.assertEqual(eval(j[key][1]), "2")
return history
def test_history(self):
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
obj.without_encrypt_with_log = '2'
obj.save()
history = self.check_history(obj)
self.assertEqual(history.target, obj)
def test_history_encrypted(self):
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
obj.with_encrypt_with_log = '2'
obj.save()
history = self.check_history(obj, 'with_encrypt_with_log')
self.assertEqual(history.target, obj)
def test_history_with_request(self):
request = RequestFactory().get('/customer/details')
GlobalRequestMiddleware.thread_local.request = request
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
obj.without_encrypt_with_log = '2'
obj.save()
self.check_history(obj)
def test_history_with_request_user(self):
request = RequestFactory().get('/customer/details')
request.user = get_user_model().objects.create(username='test')
GlobalRequestMiddleware.thread_local.request = request
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
obj.without_encrypt_with_log = '2'
obj.save()
history = self.check_history(obj)
self.assertEqual(history.who, request.user)
def test_history_without_request(self):
if hasattr(GlobalRequestMiddleware.thread_local, 'request'):
delattr(GlobalRequestMiddleware.thread_local, 'request')
obj = models.EncryptTest.objects.get(pk=self.obj.pk)
obj.without_encrypt_with_log = '2'
obj.save()
self.check_history(obj)
class TestDjango_encrypt(TestCase):
def setUp(self):
self.cipher0 = easy_crypto.AESCipher('key' * 32)
self.cipher1 = easy_crypto.AESCipher('key')
def test_encrypt(self):
values = [
b'binary',
'ascii',
1,
[1, 'str'],
[1, '日本語'],
]
for cipher in [self.cipher0, self.cipher1]:
for value in values:
encrypted = cipher.encrypt(value)
decrypted = cipher.decrypt(encrypted)
self.assertEqual(value, decrypted)
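# MyTime.time starts out as a bound method returning ``self.time``. Once a
# test assigns ``my_time.time = <number>``, that instance attribute shadows
# the method, so the bound method captured by @patch below returns the number
# instead -- a small trick for faking the clock in the timeout test.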
class MyTime(object):
def time(self):
return self.time
my_time = MyTime()
class NaiveLockTest(TestCase):
def test_lock_work(self):
bb_models.NaiveLock.acquire('test')
def test_lock_work_with(self):
with bb_models.NaiveLock.acquire('test'):
pass
def test_lock_failure(self):
with bb_models.NaiveLock.acquire('test'):
try:
bb_models.NaiveLock.acquire('test')
except bb_models.LockError:
pass
else:
                self.fail("acquiring an already-held lock should raise LockError")
@patch('django_busybody.models.NaiveLock.get_current_time', my_time.time)
def test_lock_timeout(self):
my_time.time = 0
with bb_models.NaiveLock.acquire('test', timeout=100):
my_time.time = 1000
bb_models.NaiveLock.acquire('test')
| s1s5/django_busybody | tests/test_models.py | Python | mit | 11,615 | 0.001213 |
'''
Example of a spike receptor (only receives spikes)
In this example, spikes are received and processed, producing a raster plot at the end of the simulation.
'''
from brian import *
import numpy
from brian_multiprocess_udp import BrianConnectUDP
# The main function with the NeuronGroup(s) and Synapse(s) must be named "main_NeuronGroup".
# It receives two objects: input_Neuron_Group and the simulation_clock. The input_Neuron_Group
# supplies the input spikes to the network. The size of the spike train received equals NumOfNeuronsInput.
# The size of the output spike train equals NumOfNeuronsOutput and must match the size of the NeuronGroup
# that interfaces with the rest of the system to send spikes.
# The function must return all the NeuronGroup, Synapse and Monitor objects this way:
# ([list of all NeuronGroups],[list of all Synapses],[list of all Monitors])
# and the FIRST (index 0) NeuronGroup in the list MUST be the one from which the simulation takes the OUTPUT spikes.
#
# It is also possible to use "dummy" NeuronGroups only to receive and/or send spikes.
my_neuron_input_number = 100
def main_NeuronGroup(input_Neuron_Group, simulation_clock):
print "main_NeuronGroup!" #DEBUG!
simclock = simulation_clock
Nr=NeuronGroup(my_neuron_input_number, model='v:1', reset=0, threshold=0.5, clock=simclock)
Nr.v=0
# SYNAPSES BETWEEN REAL NEURON NETWORK AND THE INPUT
Syn_iNG_Nr=Synapses(input_Neuron_Group, Nr, model='w:1', pre='v+=w', clock=simclock)
Syn_iNG_Nr[:,:]='i==j'
print "Total Number of Synapses:", len(Syn_iNG_Nr) #DEBUG!
Syn_iNG_Nr.w=1
MExt=SpikeMonitor(Nr) # Spikes sent by UDP
Mdummy=SpikeMonitor(input_Neuron_Group) # Spikes received by UDP
return ([Nr],[Syn_iNG_Nr],[MExt,Mdummy])
def post_simulation_function(input_NG, simulation_NG, simulation_SYN, simulation_MN):
"""
input_NG: the neuron group that receives the input spikes
simulation_NG: the neuron groups list passed to the system by the user function (main_NeuronGroup)
simulation_SYN: the synapses list passed to the system by the user function (main_NeuronGroup)
simulation_MN: the monitors list passed to the system by the user function (main_NeuronGroup)
This way it is possible to plot, save or do whatever you want with these objects after the end of the simulation!
"""
figure()
raster_plot(simulation_MN[1])
title("Spikes Received by UDP")
show(block=True)
# savefig('output.pdf')
if __name__=="__main__":
my_simulation = BrianConnectUDP(main_NeuronGroup, NumOfNeuronsInput=my_neuron_input_number, post_simulation_function=post_simulation_function,
input_addresses=[("127.0.0.1", 14141, 40),("127.0.0.1", 16161, 60)], simclock_dt=1, inputclock_dt=2, TotalSimulationTime=10000, sim_repetitions=0, brian_address=2)
| ricardodeazambuja/BrianConnectUDP | examples/OutputNeuronGroup_multiple_inputs.py | Python | cc0-1.0 | 2,862 | 0.014675 |
#!/usr/bin/env python3
from multiprocessing import Process, Pool
import os, time
def proc(name):
print(time.asctime(), 'child process(name: %s) id %s. ppid %s' % (name, os.getpid(), os.getppid()))
time.sleep(3)
print(time.asctime(), 'child process end')
if __name__ == '__main__':
p = Process(target = proc, args = ('child',))
print(time.asctime(), 'child process will start')
p.start()
p.join()
print('first child process end')
pl = Pool(4)
for index in range(4):
pl.apply_async(proc, args = (index,))
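    # close() stops the pool from accepting new tasks; it must be called
    # before join(), which blocks until every submitted task has finished.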
pl.close()
pl.join()
print(time.asctime(), 'parent process end')
| JShadowMan/package | python/multi-process-thread/multiprocess.py | Python | mit | 660 | 0.019697 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-15 09:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('scoping', '0128_auto_20170808_0954'),
]
operations = [
migrations.CreateModel(
name='ProjectRoles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role', models.CharField(choices=[('OW', 'Owner'), ('AD', 'Admin'), ('RE', 'Reviewer'), ('VE', 'Viewer')], max_length=2)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.RemoveField(
model_name='project',
name='owner',
),
migrations.AddField(
model_name='projectroles',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scoping.Project'),
),
migrations.AddField(
model_name='project',
name='users',
field=models.ManyToManyField(through='scoping.ProjectRoles', to=settings.AUTH_USER_MODEL),
),
]
| mcallaghan/tmv | BasicBrowser/scoping/migrations/0129_auto_20170815_0946.py | Python | gpl-3.0 | 1,435 | 0.003484 |
__author__ = 'PARKJINSANG'
from SIMON.algorithms.genetic.SIMONGeneticAlgorithm import SIMONGeneticAlgorithm
#
# genetic algorithm for learning and evaluating
#
#
def run_genetic_algorithm(group, actionPool, propertyPool):
algorithm = SIMONGeneticAlgorithm()
for actionName, actionDnaList in actionPool.items():
if(len(actionDnaList) < 1):
continue
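        # classic GA pipeline per action: selection -> crossover -> mutation,
        # then the evolved DNA is written back into the group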
selectedPool = algorithm.selection_action(group, actionName, actionDnaList)
crossedPool = algorithm.crossover_action(selectedPool)
mutatedPool = algorithm.mutation_action(crossedPool)
update_action(group, actionName, mutatedPool)
selectedPool = algorithm.selection_property(group, propertyPool)
crossedPool = algorithm.crossover_property(selectedPool)
mutatedPool = algorithm.mutation_property(crossedPool)
update_property(group, mutatedPool)
#
# update action dna in the group
#
#
def update_action(group, actionName, actionPool=None):
import random
for element in group:
if(len(actionPool)-1 < 0):
continue
evolve_idx = random.randint(0, len(actionPool)-1)
if(element.Actions.__contains__(actionName)):
element.Actions[actionName].ActionDNA = actionPool[evolve_idx]
#
# update property dna in the group
#
#
def update_property(group, propertyPool=None):
import random
for prop_list in propertyPool:
for element in group:
if(len(prop_list)-1 < 0):
continue
update_idx = random.randint(0, len(prop_list)-1)
for key_prop_list, element_prop_list in prop_list[update_idx].items():
if(element.PropertyDNA.__contains__(key_prop_list)):
element.PropertyDNA[key_prop_list] = element_prop_list
element.Properties[key_prop_list] = element_prop_list
| ParkJinSang/SIMONFramework | src/SIMON_Py/SIMON/algorithms/SIMONAlgorithmMain.py | Python | apache-2.0 | 1,860 | 0.003763 |
from traitlets import Unicode, Bool
from textwrap import dedent
from .. import utils
from . import NbGraderPreprocessor
class ClearSolutions(NbGraderPreprocessor):
code_stub = Unicode(
"# YOUR CODE HERE\nraise NotImplementedError()",
config=True,
help="The code snippet that will replace code solutions")
text_stub = Unicode(
"YOUR ANSWER HERE",
config=True,
help="The text snippet that will replace written solutions")
comment_mark = Unicode(
"#",
config=True,
help="The comment mark to prefix solution delimiters")
begin_solution_delimeter = Unicode(
"## BEGIN SOLUTION",
config=True,
help="The delimiter marking the beginning of a solution (excluding comment mark)")
end_solution_delimeter = Unicode(
"## END SOLUTION",
config=True,
help="The delimiter marking the end of a solution (excluding comment mark)")
enforce_metadata = Bool(
True,
config=True,
help=dedent(
"""
            Whether or not to complain if cells containing solution regions are
not marked as solution cells. WARNING: this will potentially cause
things to break if you are using the full nbgrader pipeline. ONLY
disable this option if you are only ever planning to use nbgrader
assign.
"""
)
)
@property
def begin_solution(self):
return "{}{}".format(self.comment_mark, self.begin_solution_delimeter)
@property
def end_solution(self):
return "{}{}".format(self.comment_mark, self.end_solution_delimeter)
def _replace_solution_region(self, cell):
"""Find a region in the cell that is delimeted by
`self.begin_solution` and `self.end_solution` (e.g. ### BEGIN
SOLUTION and ### END SOLUTION). Replace that region either
with the code stub or text stub, depending the cell type.
This modifies the cell in place, and then returns True if a
solution region was replaced, and False otherwise.
"""
# pull out the cell input/source
lines = cell.source.split("\n")
if cell.cell_type == "code":
stub_lines = self.code_stub.split("\n")
else:
stub_lines = self.text_stub.split("\n")
new_lines = []
in_solution = False
replaced_solution = False
for line in lines:
# begin the solution area
if line.strip() == self.begin_solution:
# check to make sure this isn't a nested BEGIN
# SOLUTION region
if in_solution:
raise RuntimeError(
"encountered nested begin solution statements")
in_solution = True
replaced_solution = True
# replace it with the stub, indented as necessary
indent = line[:line.find(self.begin_solution)]
for stub_line in stub_lines:
new_lines.append(indent + stub_line)
# end the solution area
elif line.strip() == self.end_solution:
in_solution = False
# add lines as long as it's not in the solution area
elif not in_solution:
new_lines.append(line)
# we finished going through all the lines, but didn't find a
        # matching END SOLUTION statement
if in_solution:
raise RuntimeError("no end solution statement found")
# replace the cell source
cell.source = "\n".join(new_lines)
return replaced_solution
def preprocess(self, nb, resources):
nb, resources = super(ClearSolutions, self).preprocess(nb, resources)
if 'celltoolbar' in nb.metadata:
del nb.metadata['celltoolbar']
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
# replace solution regions with the relevant stubs
replaced_solution = self._replace_solution_region(cell)
# determine whether the cell is a solution/grade cell
is_solution = utils.is_solution(cell)
# check that it is marked as a solution cell if we replaced a solution
# region -- if it's not, then this is a problem, because the cell needs
# to be given an id
if not is_solution and replaced_solution:
if self.enforce_metadata:
raise RuntimeError(
"Solution region detected in a non-solution cell; please make sure "
"all solution regions are within solution cells."
)
# replace solution cells with the code/text stub -- but not if
# we already replaced a solution region, because that means
# there are parts of the cells that should be preserved
if is_solution and not replaced_solution:
if cell.cell_type == 'code':
cell.source = self.code_stub
else:
cell.source = self.text_stub
return cell, resources
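if __name__ == "__main__":
    # Minimal usage sketch (run via ``python -m`` so the relative imports at
    # the top of this module resolve). Assumes nbformat is installed and that
    # ClearSolutions instantiates with its default configuration.
    from nbformat.v4 import new_code_cell
    cell = new_code_cell(source=(
        "def add(a, b):\n"
        "    ### BEGIN SOLUTION\n"
        "    return a + b\n"
        "    ### END SOLUTION"))
    ClearSolutions()._replace_solution_region(cell)
    print(cell.source)
    # def add(a, b):
    #     # YOUR CODE HERE
    #     raise NotImplementedError()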
| EdwardJKim/nbgrader | nbgrader/preprocessors/clearsolutions.py | Python | bsd-3-clause | 5,142 | 0.000778 |
# encoding: utf-8
from django.conf.urls import url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = [
url(r'^new/$', PictureCreateView.as_view(), name='upload-new'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
]
| semitki/canales | fileupload/urls.py | Python | mit | 514 | 0.001946 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
def indicator_links(apps, schema_editor):
""" Migration generating foreign keys from indicators and indicator periods in child results
frameworks to parents of the same object type in the parent results framework
"""
Result = apps.get_model('rsr', 'Result')
Indicator = apps.get_model('rsr', 'Indicator')
IndicatorPeriod = apps.get_model('rsr', 'IndicatorPeriod')
parent_results = Result.objects.annotate(
children=models.Count('child_results')
).exclude(children=0)
for result in parent_results:
child_results = result.child_results.all()
# Find all indicators for the current Result
parent_indicators = Indicator.objects.filter(result=result)
for parent_indicator in parent_indicators:
            # Child indicators have the same title etc. as the parent indicator, and a result
            # that's a child of the current result
child_indicators = Indicator.objects.filter(
result__in=child_results,
title=parent_indicator.title,
measure=parent_indicator.measure,
ascending=parent_indicator.ascending
)
# Set FK in child indicators to parent indicator
for child_indicator in child_indicators:
child_indicator.parent_indicator = parent_indicator
# basic saving only
super(Indicator, child_indicator).save()
# Same pattern applies to IndicatorPeriods
parent_periods = IndicatorPeriod.objects.filter(indicator__result=result)
for parent_period in parent_periods:
child_periods = IndicatorPeriod.objects.filter(
indicator__result__in=child_results,
indicator__title=parent_period.indicator.title,
period_start=parent_period.period_start,
period_end=parent_period.period_end
)
for child_period in child_periods:
child_period.parent_period = parent_period
super(IndicatorPeriod, child_period).save()
class Migration(migrations.Migration):
dependencies = [
('rsr', '0090_auto_20170207_2235'),
]
operations = [
migrations.RunPython(indicator_links, reverse_code=lambda x, y: None),
]
| akvo/akvo-rsr | akvo/rsr/migrations/0091_auto_20170208_1035.py | Python | agpl-3.0 | 2,382 | 0.001679 |
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('experiments.stable_bucketing', 'lms.djangoapps.experiments.stable_bucketing')
from lms.djangoapps.experiments.stable_bucketing import *
| eduNEXT/edunext-platform | import_shims/lms/experiments/stable_bucketing.py | Python | agpl-3.0 | 407 | 0.009828 |
import os
import cPickle as pkl
from collections import namedtuple
import requests
from bs4 import BeautifulSoup
Song = namedtuple('Song', ['title', 'artist', 'album', 'length'])
class Playlist(object):
def __init__(self, title, url):
self.title = title
self.file_name = title.lower().replace(' ', '-') + '.pkl'
self.url = url
if os.path.isfile(self.file_name):
self.load_from_pickle()
else:
self.songs = []
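        # download_data() caches the scraped songs to <title>.pkl, so an
        # existing cache file is loaded instead of hitting the network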
def load_from_pickle(self):
with open(self.file_name, 'rb') as in_file:
self.songs = pkl.load(in_file)
def download_data(self):
url = self.url
resp = requests.get(url)
soup = BeautifulSoup(resp.text)
for song_elem in (soup.find(class_='songs')
.find_all(class_='media-body')):
title = song_elem.h4.text
ps = song_elem.find_all('p')
artist, album = ps[0].text.split(u' \xb7 ')
length = ps[1].text
song = Song(title, artist, album, length)
self.songs.append(song)
with open(self.file_name, 'wb') as out:
pkl.dump(self.songs, out)
ambient_bass = Playlist(
'ambient bass',
'http://www.playlister.io/items/playlist/1472493/ambient-bass/#')
beats = Playlist(
'Blissed-Out Beats',
'http://www.playlister.io/items/playlist/1682151/')
liquid = Playlist(
'Liquid Dubstep',
'http://www.playlister.io/items/playlist/1404323/')
liquid.download_data() | mshuffett/MetaPyMusic | playlister.py | Python | gpl-2.0 | 1,557 | 0.001285 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from nose.tools import assert_true, assert_false, assert_equal
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import grant_access
from desktop.models import Settings
from oozie.tests import OozieBase
class TestAboutBase(object):
def setUp(self):
self.client = make_logged_in_client(username="about", is_superuser=False)
grant_access("about", "about", "about")
self.client_admin = make_logged_in_client(username="about_admin", is_superuser=True)
grant_access("about_admin", "about_admin", "about")
class TestAbout(TestAboutBase, OozieBase):
def test_admin_wizard_permissions(self):
response = self.client_admin.get(reverse('about:index'))
assert_true('Check Configuration' in response.content, response.content)
response = self.client.get(reverse('about:index'))
assert_false('Check Configuration' in response.content, response.content)
class TestAboutWithNoCluster(TestAboutBase):
def test_dump_config(self):
# Exception raised if bad unicode
self.client_admin.get(reverse('about:index'), HTTP_ACCEPT_LANGUAGE='fr-fr')
def test_collect_usage(self):
collect_usage = Settings.get_settings().collect_usage
try:
response = self.client.post(reverse('about:update_preferences'), {'collect_usage': False})
data = json.loads(response.content)
assert_equal(data['status'], 0)
assert_false(data['collect_usage'] == True) # Weird but works
response = self.client.post(reverse('about:update_preferences'), {'collect_usage': True})
data = json.loads(response.content)
assert_equal(data['status'], 0)
assert_true(data['collect_usage'])
finally:
settings = Settings.get_settings()
settings.collect_usage = collect_usage
settings.save()
| jayceyxc/hue | apps/about/src/about/tests.py | Python | apache-2.0 | 2,701 | 0.007775 |
import bpy
from bpy.props import *
from ... sockets.info import toListDataType
from ... base_types.node import AnimationNode
class FloatRangeListNode(bpy.types.Node, AnimationNode):
bl_idname = "an_FloatRangeListNode"
bl_label = "Number Range"
dynamicLabelType = "ALWAYS"
onlySearchTags = True
searchTags = [ ("Float Range", {"dataType" : repr("Float")}),
("Integer Range", {"dataType" : repr("Integer")}) ]
def dataTypeChanged(self, context):
self.generateSockets()
dataType = StringProperty(default = "Float", update = dataTypeChanged)
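    # changing the data type rebuilds all sockets so the node switches
    # cleanly between Float and Integer ranges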
def create(self):
self.generateSockets()
def drawLabel(self):
return self.inputs[1].dataType + " Range"
def generateSockets(self):
self.inputs.clear()
self.outputs.clear()
self.newInput("Integer", "Amount", "amount", value = 5)
self.newInput(self.dataType, "Start", "start")
self.newInput(self.dataType, "Step", "step", value = 1)
self.newOutput(toListDataType(self.dataType), "List", "list")
def getExecutionCode(self):
if self.dataType == "Float":
return "list = [start + i * step for i in range(amount)]"
if self.dataType == "Integer":
return "list = [int(start + i * step) for i in range(amount)]"
| Thortoise/Super-Snake | Blender/animation_nodes-master/nodes/number/float_range_list.py | Python | gpl-3.0 | 1,324 | 0.009819 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from airflow.contrib.hooks.gcp_video_intelligence_hook import CloudVideoIntelligenceHook
from google.cloud.videointelligence_v1 import enums
from tests.contrib.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
from tests.compat import mock
INPUT_URI = "gs://bucket-name/input-file"
OUTPUT_URI = "gs://bucket-name/output-file"
FEATURES = [enums.Feature.LABEL_DETECTION]
ANNOTATE_VIDEO_RESPONSE = {'test': 'test'}
class CloudVideoIntelligenceHookTestCase(unittest.TestCase):
def setUp(self):
with mock.patch(
"airflow.contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook.__init__",
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudVideoIntelligenceHook(gcp_conn_id="test")
@mock.patch("airflow.contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook.get_conn")
def test_annotate_video(self, get_conn):
# Given
annotate_video_method = get_conn.return_value.annotate_video
get_conn.return_value.annotate_video.return_value = ANNOTATE_VIDEO_RESPONSE
# When
result = self.hook.annotate_video(input_uri=INPUT_URI, features=FEATURES)
# Then
self.assertIs(result, ANNOTATE_VIDEO_RESPONSE)
annotate_video_method.assert_called_once_with(
input_uri=INPUT_URI,
input_content=None,
features=FEATURES,
video_context=None,
output_uri=None,
location_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook.get_conn")
def test_annotate_video_with_output_uri(self, get_conn):
# Given
annotate_video_method = get_conn.return_value.annotate_video
get_conn.return_value.annotate_video.return_value = ANNOTATE_VIDEO_RESPONSE
# When
result = self.hook.annotate_video(input_uri=INPUT_URI, output_uri=OUTPUT_URI, features=FEATURES)
# Then
self.assertIs(result, ANNOTATE_VIDEO_RESPONSE)
annotate_video_method.assert_called_once_with(
input_uri=INPUT_URI,
output_uri=OUTPUT_URI,
input_content=None,
features=FEATURES,
video_context=None,
location_id=None,
retry=None,
timeout=None,
metadata=None,
)
| owlabs/incubator-airflow | tests/contrib/hooks/test_gcp_video_intelligence_hook.py | Python | apache-2.0 | 3,290 | 0.002736 |
from django import http
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from lib.misc.urlconf_decorator import decorate
import amo
from amo.decorators import write
from amo.urlresolvers import reverse
from mkt.api.base import SubRouter
from mkt.developers.api import ContentRatingList, ContentRatingsPingback
from mkt.developers.api_payments import (
AddonPaymentAccountViewSet, PaymentAccountViewSet, PaymentCheckViewSet,
PaymentDebugViewSet, PaymentViewSet, UpsellViewSet)
from mkt.developers.decorators import use_apps
from mkt.receipts.urls import test_patterns
from . import views
from . import views_payments
def provider_patterns(prefix):
return patterns('',
url('^accounts$', views_payments.payment_accounts,
name='mkt.developers.%s.payment_accounts' % prefix),
url('^accounts/form$', views_payments.payment_accounts_form,
name='mkt.developers.%s.payment_accounts_form' % prefix),
url('^accounts/add$', views_payments.payments_accounts_add,
name='mkt.developers.%s.add_payment_account' % prefix),
url('^accounts/(?P<id>\d+)/delete$',
views_payments.payments_accounts_delete,
name='mkt.developers.%s.delete_payment_account' % prefix),
url('^accounts/(?P<id>\d+)$',
views_payments.payments_account,
name='mkt.developers.%s.payment_account' % prefix),
url('^accounts/(?P<id>\d+)/agreement/$', views_payments.agreement,
name='mkt.developers.%s.agreement' % prefix)
)
# These will all start with /app/<app_slug>/
app_detail_patterns = patterns('',
# Redirect people who go to / instead of /edit.
('^$', lambda r, app_slug: http.HttpResponseRedirect(
reverse('mkt.developers.apps.edit', args=[app_slug]))),
url('^edit$', views.edit, name='mkt.developers.apps.edit'),
url('^edit_(?P<section>[^/]+)(?:/(?P<editable>[^/]+))?$',
views.addons_section, name='mkt.developers.apps.section'),
url('^refresh_manifest$', views.refresh_manifest,
name='mkt.developers.apps.refresh_manifest'),
url('^ownership$', views.ownership, name='mkt.developers.apps.owner'),
url('^enable$', views.enable, name='mkt.developers.apps.enable'),
url('^delete$', views.delete, name='mkt.developers.apps.delete'),
url('^disable$', views.disable, name='mkt.developers.apps.disable'),
url('^publicise$', views.publicise, name='mkt.developers.apps.publicise'),
url('^status$', views.status, name='mkt.developers.apps.versions'),
url('^blocklist$', views.blocklist, name='mkt.developers.apps.blocklist'),
# IARC content ratings.
url('^content_ratings$', views.content_ratings,
name='mkt.developers.apps.ratings'),
url('^content_ratings/edit$', views.content_ratings_edit,
name='mkt.developers.apps.ratings_edit'),
url('^status/preload$', views.preload_home,
name='mkt.developers.apps.preload_home'),
url('^status/preload/submit$', views.preload_submit,
name='mkt.developers.apps.preload_submit'),
# TODO: '^versions/$'
url('^versions/(?P<version_id>\d+)$', views.version_edit,
name='mkt.developers.apps.versions.edit'),
url('^versions/delete$', views.version_delete,
name='mkt.developers.apps.versions.delete'),
url('^versions/publicise$', views.version_publicise,
name='mkt.developers.apps.versions.publicise'),
url('^payments/$', views_payments.payments,
name='mkt.developers.apps.payments'),
url('^payments/disable$', views_payments.disable_payments,
name='mkt.developers.apps.payments.disable'),
url('^payments/bango-portal$', views_payments.bango_portal_from_addon,
name='mkt.developers.apps.payments.bango_portal_from_addon'),
# in-app payments.
url('^in-app-config/$', views_payments.in_app_config,
name='mkt.developers.apps.in_app_config'),
url('^in-app-secret/$', views_payments.in_app_secret,
name='mkt.developers.apps.in_app_secret'),
# Old stuff.
url('^upload_preview$', views.upload_media, {'upload_type': 'preview'},
name='mkt.developers.apps.upload_preview'),
url('^upload_icon$', views.upload_media, {'upload_type': 'icon'},
name='mkt.developers.apps.upload_icon'),
url('^upload_image$', views.upload_media, {'upload_type': 'image'},
name='mkt.developers.apps.upload_image'),
url('^rmlocale$', views.remove_locale,
name='mkt.developers.apps.remove-locale'),
# Not apps-specific (yet).
url('^file/(?P<file_id>[^/]+)/validation$', views.file_validation,
name='mkt.developers.apps.file_validation'),
url('^file/(?P<file_id>[^/]+)/validation.json$',
views.json_file_validation,
name='mkt.developers.apps.json_file_validation'),
url('^upload$', views.upload_for_addon,
name='mkt.developers.upload_for_addon'),
url('^upload/(?P<uuid>[^/]+)$', views.upload_detail_for_addon,
name='mkt.developers.upload_detail_for_addon'),
)
# These will all start with /ajax/app/<app_slug>/
ajax_patterns = patterns('',
url('^image/status$', views.image_status,
name='mkt.developers.apps.ajax.image.status'),
)
urlpatterns = decorate(write, patterns('',
# Redirect people who have /apps/ instead of /app/.
('^apps/\d+/.*',
lambda r: http.HttpResponseRedirect(r.path.replace('apps', 'app', 1))),
# Standalone validator:
url('^validator/?$', views.validate_addon,
name='mkt.developers.validate_addon'),
# Redirect to /addons/ at the base.
url('^submissions$', use_apps(views.dashboard),
name='mkt.developers.apps'),
url('^upload$', views.upload_new, name='mkt.developers.upload'),
url('^upload/([^/]+)(?:/([^/]+))?$', views.upload_detail,
name='mkt.developers.upload_detail'),
url('^standalone-hosted-upload$', views.standalone_hosted_upload,
name='mkt.developers.standalone_hosted_upload'),
url('^standalone-packaged-upload$', views.standalone_packaged_upload,
name='mkt.developers.standalone_packaged_upload'),
url('^standalone-(hosted|packaged)-upload/([^/]+)$',
views.standalone_upload_detail,
name='mkt.developers.standalone_upload_detail'),
# Standalone tools.
url('^upload-manifest$', views.upload_manifest,
name='mkt.developers.upload_manifest'),
url('^in-app-keys/$', views_payments.in_app_keys,
name='mkt.developers.apps.in_app_keys'),
url('^in-app-key-secret/([^/]+)$', views_payments.in_app_key_secret,
name='mkt.developers.apps.in_app_key_secret'),
# URLs for a single app.
url('^app/%s/' % amo.APP_SLUG, include(app_detail_patterns)),
url('^ajax/app/%s/' % amo.APP_SLUG, include(ajax_patterns)),
url('^terms$', views.terms, name='mkt.developers.apps.terms'),
url('^api$', views.api, name='mkt.developers.apps.api'),
# Developer docs
url('docs/(?P<doc_name>[-_\w]+)?$',
views.docs, name='mkt.developers.docs'),
url('docs/(?P<doc_name>[-_\w]+)/(?P<doc_page>[-_\w]+)',
views.docs, name='mkt.developers.docs'),
url('^transactions/', views.transactions,
name='mkt.developers.transactions'),
# Bango-specific stuff.
url('^provider/', include(provider_patterns('provider'))),
url('^test/$', views.testing, name='mkt.developers.apps.testing'),
url('^test/receipts/', include(test_patterns)),
))
api_payments = SimpleRouter()
api_payments.register(r'account', PaymentAccountViewSet,
base_name='payment-account')
api_payments.register(r'upsell', UpsellViewSet, base_name='app-upsell')
api_payments.register(r'app', AddonPaymentAccountViewSet,
base_name='app-payment-account')
app_payments = SubRouter()
app_payments.register(r'payments', PaymentViewSet, base_name='app-payments')
app_payments.register(r'payments/status', PaymentCheckViewSet,
base_name='app-payments-status')
app_payments.register(r'payments/debug', PaymentDebugViewSet,
base_name='app-payments-debug')
payments_api_patterns = patterns('',
url(r'^payments/', include(api_payments.urls)),
url(r'^apps/app/', include(app_payments.urls)),
)
dev_api_patterns = patterns('',
url(r'^apps/app/(?P<pk>[^/<>"\']+)/content-ratings/pingback/',
ContentRatingsPingback.as_view(), name='content-ratings-pingback'),
url(r'^apps/app/(?P<pk>[^/<>"\']+)/content-ratings/',
ContentRatingList.as_view(), name='content-ratings-list'),
)
| wagnerand/zamboni | mkt/developers/urls.py | Python | bsd-3-clause | 8,583 | 0.001864 |
# Copyright (c) 2015, Artem Osadchyi
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import glanceclient as glance_client
from keystoneclient.v2_0 import client as identity_client
from novaclient import client as compute_client
from saharaclient.api import client as data_processing_client
GLANCE_VERSION = 1
NOVA_VERSION = 2
def client(credentials):
return OpenStack(credentials)
def identity(credentials):
return identity_client.Client(
username=credentials.user_name,
password=credentials.password,
tenant_name=credentials.tenant,
auth_url=credentials.auth_url,
)
def compute(credentials):
return compute_client.Client(
version=NOVA_VERSION,
username=credentials.user_name,
api_key=credentials.password,
project_id=credentials.tenant,
auth_url=credentials.auth_url,
)
def images(credentials):
return glance_client.Client(
version=GLANCE_VERSION,
endpoint=_get_url("image", credentials),
token=credentials.auth_token,
)
def data_processing(credentials):
sahara_url = _get_url("data-processing", credentials)
sahara_url += "/" + credentials.tenant_id
return data_processing_client.Client(
input_auth_token=credentials.auth_token,
project_name=credentials.tenant,
sahara_url=sahara_url,
)
def _get_url(service_type, credentials):
i_client = identity(credentials)
service = i_client.services.find(type=service_type)
endpoint = i_client.endpoints.find(service_id=service.id)
return endpoint.publicurl
class OpenStack(object):
def __init__(self, credentials):
self._credentials = credentials
self._compute = None
self._images = None
self._identity = None
self._data_processing = None
self._auth_token = None
@property
def compute(self):
if not self._compute:
self._compute = compute(self._credentials)
return self._compute
@property
def images(self):
if not self._images:
self._images = images(self._credentials)
return self._images
@property
def identity(self):
if not self._identity:
self._identity = identity(self._credentials)
return self._identity
@property
def data_processing(self):
if not self._data_processing:
self._data_processing = data_processing(self._credentials)
return self._data_processing
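if __name__ == "__main__":
    # Usage sketch with placeholder values; a reachable Keystone endpoint is
    # required for the calls below to actually succeed. The Credentials tuple
    # is hypothetical -- any object exposing these attributes works.
    from collections import namedtuple
    Credentials = namedtuple(
        "Credentials",
        ["user_name", "password", "tenant", "tenant_id",
         "auth_url", "auth_token"])
    creds = Credentials(
        user_name="demo", password="secret", tenant="demo",
        tenant_id="demo-tenant-id",
        auth_url="http://keystone.example.com:5000/v2.0",
        auth_token="auth-token")
    os_client = client(creds)
    for server in os_client.compute.servers.list():
        print(server.name)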
| smithsane/openstack-env | openstack_env/openstack.py | Python | bsd-3-clause | 3,248 | 0 |
# test_help.py - Ensure scripts can run --help.
#
# Copyright (C) 2010, Stefano Rivera <stefanor@ubuntu.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
import fcntl
import os
import select
import signal
import subprocess
import time
import unittest
from . import SCRIPTS
TIMEOUT = 5
def load_tests(loader, tests, pattern): # pylint: disable=unused-argument
"Give HelpTestCase a chance to populate before loading its test cases"
suite = unittest.TestSuite()
HelpTestCase.populate()
suite.addTests(loader.loadTestsFromTestCase(HelpTestCase))
return suite
class HelpTestCase(unittest.TestCase):
@classmethod
def populate(cls):
for script in SCRIPTS:
setattr(cls, 'test_' + script, cls.make_help_tester(script))
@classmethod
def make_help_tester(cls, script):
def tester(self):
with open('/dev/null', 'r') as null:
process = subprocess.Popen(['./' + script, '--help'],
close_fds=True, stdin=null,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
started = time.time()
out = []
fds = [process.stdout.fileno(), process.stderr.fileno()]
for fd in fds:
fcntl.fcntl(fd, fcntl.F_SETFL,
fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
while time.time() - started < TIMEOUT:
for fd in select.select(fds, [], fds, TIMEOUT)[0]:
out.append(os.read(fd, 1024))
if process.poll() is not None:
break
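            # Still running after TIMEOUT: try SIGTERM first, then SIGKILL.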
if process.poll() is None:
os.kill(process.pid, signal.SIGTERM)
time.sleep(1)
if process.poll() is None:
os.kill(process.pid, signal.SIGKILL)
self.assertEqual(process.poll(), 0,
"%s failed to return usage within %i seconds.\n"
"Output:\n%s"
% (script, TIMEOUT, ''.encode('ascii').join(out)))
return tester
| Debian/devscripts | scripts/devscripts/test/test_help.py | Python | gpl-2.0 | 2,924 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VaultCertificate(Model):
"""Describes a single certificate reference in a Key Vault, and where the
certificate should reside on the VM.
:param certificate_url: This is the URL of a certificate that has been
uploaded to Key Vault as a secret. For adding a secret to the Key Vault,
see [Add a key or secret to the key
vault](https://docs.microsoft.com/azure/key-vault/key-vault-get-started/#add).
    In this case, your certificate needs to be the Base64 encoding of
    the following JSON Object, which is encoded in UTF-8: <br><br> {<br>
"data":"<Base64-encoded-certificate>",<br> "dataType":"pfx",<br>
"password":"<pfx-file-password>"<br>}
:type certificate_url: str
:param certificate_store: For Windows VMs, specifies the certificate store
on the Virtual Machine to which the certificate should be added. The
specified certificate store is implicitly in the LocalMachine account.
<br><br>For Linux VMs, the certificate file is placed under the
/var/lib/waagent directory, with the file name <UppercaseThumbprint>.crt
for the X509 certificate file and <UppercaseThumbpring>.prv for private
key. Both of these files are .pem formatted.
:type certificate_store: str
"""
_attribute_map = {
'certificate_url': {'key': 'certificateUrl', 'type': 'str'},
'certificate_store': {'key': 'certificateStore', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VaultCertificate, self).__init__(**kwargs)
self.certificate_url = kwargs.get('certificate_url', None)
self.certificate_store = kwargs.get('certificate_store', None)
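if __name__ == "__main__":
    # Sketch (not part of the generated client): builds the Base64-encoded
    # JSON secret described in the class docstring from raw PFX bytes.
    # ``encode_pfx_secret`` and all values below are hypothetical.
    import base64
    import json

    def encode_pfx_secret(pfx_bytes, password):
        payload = json.dumps({
            "data": base64.b64encode(pfx_bytes).decode("ascii"),
            "dataType": "pfx",
            "password": password,
        })
        return base64.b64encode(payload.encode("utf-8")).decode("ascii")

    secret_value = encode_pfx_secret(b"placeholder-pfx-bytes", "pfx-password")
    vault_cert = VaultCertificate(
        certificate_url="https://myvault.vault.azure.net/secrets/mycert",
        certificate_store="My")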
| lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/vault_certificate.py | Python | mit | 2,185 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-02-17 01:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('address', models.CharField(max_length=100)),
],
),
]
| fernandolobato/balarco | clients/migrations/0001_initial.py | Python | mit | 614 | 0.001629 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ActionGroupsOperations:
"""ActionGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.monitor.v2018_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def create_or_update(
self,
resource_group_name: str,
action_group_name: str,
action_group: "_models.ActionGroupResource",
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Create a new action group or update an existing one.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param action_group: The action group to create or use for the update.
        :type action_group: ~azure.mgmt.monitor.v2018_03_01.models.ActionGroupResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
        :rtype: ~azure.mgmt.monitor.v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(action_group, 'ActionGroupResource')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def get(
self,
resource_group_name: str,
action_group_name: str,
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Get an action group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
        :rtype: ~azure.mgmt.monitor.v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def delete(
self,
resource_group_name: str,
action_group_name: str,
**kwargs: Any
) -> None:
"""Delete an action group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def update(
self,
resource_group_name: str,
action_group_name: str,
action_group_patch: "_models.ActionGroupPatchBody",
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Updates an existing action group's tags. To update other fields use the CreateOrUpdate method.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param action_group_patch: Parameters supplied to the operation.
        :type action_group_patch: ~azure.mgmt.monitor.v2018_03_01.models.ActionGroupPatchBody
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
        :rtype: ~azure.mgmt.monitor.v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(action_group_patch, 'ActionGroupPatchBody')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
def list_by_subscription_id(
self,
**kwargs: Any
) -> AsyncIterable["_models.ActionGroupList"]:
"""Get a list of all action groups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ActionGroupList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.monitor.v2018_03_01.models.ActionGroupList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription_id.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ActionGroupList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_subscription_id.metadata = {'url': '/subscriptions/{subscriptionId}/providers/microsoft.insights/actionGroups'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ActionGroupList"]:
"""Get a list of all action groups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ActionGroupList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.monitor.v2018_03_01.models.ActionGroupList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ActionGroupList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups'} # type: ignore
async def enable_receiver(
self,
resource_group_name: str,
action_group_name: str,
enable_request: "_models.EnableRequest",
**kwargs: Any
) -> None:
"""Enable a receiver in an action group. This changes the receiver's status from Disabled to
Enabled. This operation is only supported for Email or SMS receivers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param enable_request: The receiver to re-enable.
:type enable_request: ~$(python-base-namespace).v2018_03_01.models.EnableRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.enable_receiver.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(enable_request, 'EnableRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 409]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
enable_receiver.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}/subscribe'} # type: ignore
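# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the generated file above. It shows
# how the paging helpers compose: AsyncItemPaged(get_next, extract_data)
# drives prepare_request() for the first page and again for every next_link,
# so a caller only iterates. The 'client' object, its 'action_groups'
# attribute, and the ARM-style 'name' field below are assumptions for the
# example, not defined in this module.
async def _example_list_action_groups(client, resource_group_name):
    names = []
    # Each iterated item is one element of ActionGroupList.value, i.e. a
    # single action-group model, not the whole page.
    async for group in client.action_groups.list_by_resource_group(
        resource_group_name
    ):
        names.append(group.name)
    return names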
| Azure/azure-sdk-for-python | sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2018_03_01/aio/operations/_action_groups_operations.py | Python | mit | 24,867 | 0.005027 |
import sys, getopt
import errno
import os.path
import epub
import lxml
from bs4 import BeautifulSoup
class EPubToTxtParser:
# Epub parsing specific code
def get_linear_items_data( self, in_file_name ):
book_items = []
book = epub.open_epub( in_file_name )
for item_id, linear in book.opf.spine.itemrefs:
item = book.get_item( item_id )
if linear:
data = book.read_item( item )
book_items.append( data )
return book_items
def get_narrative( self, linear_items_data ):
avg_len = 0
count = 0
for data in linear_items_data:
count += 1
avg_len = ( ( avg_len * ( count - 1 ) ) + len( data ) ) / count
book_narrative = [ data for data in linear_items_data if len(data) >= avg_len ]
return book_narrative
def extract_paragraph_text( self, book_narrative ):
paragraph_text = ''
for data in book_narrative:
soup = BeautifulSoup( data, "lxml" )
paragraphs = soup.find_all( 'p' )
# Thanks to Eric Storms for finding the solution
# to some 0kB parse results…
if paragraphs == []:
paragraphs = soup.find_all( 'div' )
for paragraph in paragraphs:
paragraph_text += ( paragraph.get_text() + '\n' )
return paragraph_text
def narrative_from_epub_to_txt( self, in_file_name ):
if os.path.isfile( in_file_name ):
book_items = self.get_linear_items_data( in_file_name )
book_narrative = self.get_narrative( book_items )
paragraph_text = self.extract_paragraph_text( book_narrative )
return( paragraph_text )
else:
raise FileNotFoundError( errno.ENOENT, os.strerror( errno.ENOENT ), in_file_name )
# Command line usage stuff
def print_usage_and_exit():
print( "Usage: %s -i epub_file_in -o txt_file_out" % sys.argv[ 0 ] )
sys.exit( 2 )
def parse_opts( opts ):
in_file_name, out_file_name = None, None
for o, a in opts:
if o == '-i':
in_file_name = a
elif o == '-o':
out_file_name = a
return ( in_file_name, out_file_name )
# Main
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "i:o:")
assert( len(opts) != 0 )
in_file_name, out_file_name = parse_opts( opts )
except getopt.GetoptError as e:
print( str( e ) )
print_usage_and_exit()
except AssertionError:
print_usage_and_exit()
try:
parser = EPubToTxtParser()
narrative_text = parser.narrative_from_epub_to_txt( in_file_name )
if( out_file_name != None ):
            with open( out_file_name, "w" ) as out_file:
                out_file.write( narrative_text )
else:
print( narrative_text )
except FileNotFoundError:
print( "File not found: {file_name}".format( file_name = in_file_name ) )
| jorisvanzundert/sfsf | sfsf/epub_to_txt_parser.py | Python | mit | 3,042 | 0.029934 |
import math
import sys
# read FILE with CVs and weights
FILENAME_ = sys.argv[1]
# number of CVs for FES
NCV_ = int(sys.argv[2])
# read minimum, maximum and number of bins for FES grid
gmin = []; gmax = []; nbin = []
for i in range(0, NCV_):
i0 = 3*i + 3
gmin.append(float(sys.argv[i0]))
gmax.append(float(sys.argv[i0+1]))
nbin.append(int(sys.argv[i0+2]))
# read KBT_
KBT_ = float(sys.argv[3*NCV_+3])
# read output fes
FESFILE_ = sys.argv[3*NCV_+4]
def get_indexes_from_index(index, nbin):
indexes = []
# get first index
indexes.append(index%nbin[0])
# loop
kk = index
for i in range(1, len(nbin)-1):
        kk = ( kk - indexes[i-1] ) // nbin[i-1]
indexes.append(kk%nbin[i])
if(len(nbin)>=2):
        indexes.append( ( kk - indexes[len(nbin)-2] ) // nbin[len(nbin)-2] )
return indexes
def get_indexes_from_cvs(cvs, gmin, dx):
keys = []
for i in range(0, len(cvs)):
keys.append(int( round( ( cvs[i] - gmin[i] ) / dx[i] ) ))
return tuple(keys)
def get_points(key, gmin, dx):
xs = []
for i in range(0, len(key)):
xs.append(gmin[i] + float(key[i]) * dx[i])
return xs
# define bin size
dx = []
for i in range(0, NCV_):
dx.append( (gmax[i]-gmin[i])/float(nbin[i]-1) )
# create histogram
histo = {}
# read file and fill in histogram
for lines in open(FILENAME_, "r").readlines():
riga = lines.strip().split()
# check format
if(len(riga)!=NCV_ and len(riga)!=NCV_+1):
        print(FILENAME_ + " is in the wrong format!")
exit()
# read CVs
cvs = []
for i in range(0, NCV_):
cvs.append(float(riga[i]))
# get indexes
key = get_indexes_from_cvs(cvs, gmin, dx)
if(len(riga)==NCV_+1):
# read weight
w = float(riga[NCV_])
else: w = 1.0
# update histogram
if key in histo: histo[key] += w
else: histo[key] = w
# calculate free-energy and minimum value
min_fes = 1.0e+15
for key in histo:
histo[key] = -KBT_ * math.log(histo[key])
if(histo[key] < min_fes): min_fes = histo[key]
# total numbers of bins
nbins = 1
for i in range(0, len(nbin)): nbins *= nbin[i]
# print out FES
log = open(FESFILE_, "w")
# this is needed to add a blank line
xs_old = []
for i in range(0, nbins):
# get the indexes in the multi-dimensional grid
key = tuple(get_indexes_from_index(i, nbin))
# get CV values for that grid point
xs = get_points(key, gmin, dx)
# add a blank line for gnuplot
if(i == 0):
xs_old = xs[:]
else:
flag = 0
for j in range(1,len(xs)):
if(xs[j] != xs_old[j]):
flag = 1
xs_old = xs[:]
if (flag == 1): log.write("\n")
# print value of CVs
for x in xs:
log.write("%12.6lf " % x)
# print FES
if key in histo:
fes = histo[key]-min_fes
log.write(" %12.6lf\n" % fes)
else:
log.write(" Infinity\n")
log.close()
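# Illustrative invocation (file names and grid values here are assumptions):
#
#   python do_fes.py COLVAR 2  0.0 3.0 50  0.0 3.0 50  2.494339 fes.dat
#
# With NCV_=2 this reads two CV columns (plus an optional weight column) from
# COLVAR, accumulates a 50x50 weighted histogram on [0,3]x[0,3], and writes
# F(s) = -KBT_ * ln(H(s)), shifted so the minimum is zero, to fes.dat;
# unvisited bins are written as "Infinity".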
| valsson/plumed2 | user-doc/tutorials/old_tutorials/munster/SCRIPTS/do_fes.py | Python | lgpl-3.0 | 2,940 | 0.019388 |
import re
import xml.dom.minidom
from typing import List, Any, Optional
from pcs import utils
from pcs.cli.reports.output import warn
from pcs.common import (
const,
pacemaker,
)
from pcs.common.str_tools import format_list_custom_last_separator
# pylint: disable=not-callable
# main functions
def parse_argv(argv, extra_options=None):
"""
Commandline options: no options
"""
options = {"id": None, "role": None, "score": None, "score-attribute": None}
if extra_options:
options.update(dict(extra_options))
# parse options
while argv:
found = False
option = argv.pop(0)
for name in options:
if option.startswith(name + "="):
options[name] = option.split("=", 1)[1]
found = True
break
if not found:
argv.insert(0, option)
break
return options, argv
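# For example (the tokens below are illustrative):
#   parse_argv(["score=100", "#uname", "eq", "node1"])
# returns
#   ({"id": None, "role": None, "score": "100", "score-attribute": None},
#    ["#uname", "eq", "node1"])
# Leading name=value tokens are consumed until the first token that is not a
# known option; the remainder is left for the rule parser.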
def dom_rule_add(dom_element, options, rule_argv, cib_schema_version):
# pylint: disable=too-many-branches
"""
Commandline options: no options
"""
# validate options
if options.get("score") and options.get("score-attribute"):
utils.err("can not specify both score and score-attribute")
if options.get("score") and not utils.is_score(options["score"]):
# preserving legacy behaviour
print(
"Warning: invalid score '%s', setting score-attribute=pingd instead"
% options["score"]
)
warn(
"Converting invalid score to score-attribute=pingd is deprecated "
"and will be removed.",
stderr=True,
)
options["score-attribute"] = "pingd"
options["score"] = None
if options.get("role"):
role = options["role"].capitalize()
supported_roles = (
const.PCMK_ROLES_PROMOTED + const.PCMK_ROLES_UNPROMOTED
)
if role not in supported_roles:
utils.err(
"invalid role '{role}', use {supported_roles}".format(
role=options["role"],
supported_roles=format_list_custom_last_separator(
list(supported_roles), " or "
),
)
)
options["role"] = pacemaker.role.get_value_for_cib(
role,
cib_schema_version >= const.PCMK_NEW_ROLES_CIB_VERSION,
)
if options.get("id"):
id_valid, id_error = utils.validate_xml_id(options["id"], "rule id")
if not id_valid:
utils.err(id_error)
if utils.does_id_exist(dom_element.ownerDocument, options["id"]):
utils.err(
"id '%s' is already in use, please specify another one"
% options["id"]
)
# parse rule
if not rule_argv:
utils.err("no rule expression was specified")
try:
preprocessor = TokenPreprocessor()
dom_rule = CibBuilder(cib_schema_version).build(
dom_element,
RuleParser().parse(preprocessor.run(rule_argv)),
options.get("id"),
)
except SyntaxError as e:
utils.err(
"'%s' is not a valid rule expression: %s" % (" ".join(rule_argv), e)
)
except UnexpectedEndOfInput as e:
utils.err(
"'%s' is not a valid rule expression: unexpected end of rule"
% " ".join(rule_argv)
)
except (ParserException, CibBuilderException) as e:
utils.err("'%s' is not a valid rule expression" % " ".join(rule_argv))
for msg in preprocessor.warning_list:
warn(msg, stderr=True)
# add options into rule xml
if not options.get("score") and not options.get("score-attribute"):
options["score"] = "INFINITY"
for name, value in options.items():
if name != "id" and value is not None:
dom_rule.setAttribute(name, value)
# score or score-attribute is required for the nested rules in order to have
# valid CIB, pacemaker does not use the score of the nested rules
for rule in dom_rule.getElementsByTagName("rule"):
rule.setAttribute("score", "0")
if dom_element.hasAttribute("score"):
dom_element.removeAttribute("score")
if dom_element.hasAttribute("node"):
dom_element.removeAttribute("node")
return dom_element
class ExportDetailed:
def __init__(self):
self.show_detail = False
self.rule_expired = False
def get_string(self, rule, rule_expired, show_detail, indent=""):
self.show_detail = show_detail
self.rule_expired = rule_expired
return indent + ("\n" + indent).join(self.list_rule(rule))
def list_rule(self, rule):
rule_parts = [
"Rule{0}: {1}".format(
" (expired)" if self.rule_expired else "",
" ".join(self._list_attributes(rule)),
)
]
for child in rule.childNodes:
if child.nodeType == xml.dom.minidom.Node.TEXT_NODE:
continue
if child.tagName == "expression":
self.indent_append(rule_parts, self.list_expression(child))
elif child.tagName == "date_expression":
self.indent_append(rule_parts, self.list_date_expression(child))
elif child.tagName == "rule":
self.indent_append(rule_parts, self.list_rule(child))
return rule_parts
def list_expression(self, expression):
if "value" in expression.attributes.keys():
exp_parts = [
expression.getAttribute("attribute"),
expression.getAttribute("operation"),
]
if expression.hasAttribute("type"):
exp_parts.append(expression.getAttribute("type"))
exp_parts.append(expression.getAttribute("value"))
else:
exp_parts = [
expression.getAttribute("operation"),
expression.getAttribute("attribute"),
]
if self.show_detail:
exp_parts.append("(id:%s)" % expression.getAttribute("id"))
return ["Expression: %s" % " ".join(exp_parts)]
def list_date_expression(self, expression):
operation = expression.getAttribute("operation")
if operation == "date_spec":
date_spec_parts = self._list_attributes(
expression.getElementsByTagName("date_spec")[0]
)
exp_parts = ["Expression:"]
if self.show_detail:
exp_parts.append("(id:%s)" % expression.getAttribute("id"))
return self.indent_append(
[" ".join(exp_parts)],
["Date Spec: %s" % " ".join(date_spec_parts)],
)
if operation == "in_range":
exp_parts = ["date", "in_range"]
if expression.hasAttribute("start"):
exp_parts.extend([expression.getAttribute("start"), "to"])
if expression.hasAttribute("end"):
exp_parts.append(expression.getAttribute("end"))
durations = expression.getElementsByTagName("duration")
if durations:
exp_parts.append("duration")
duration_parts = self._list_attributes(durations[0])
if self.show_detail:
exp_parts.append("(id:%s)" % expression.getAttribute("id"))
result = ["Expression: %s" % " ".join(exp_parts)]
if durations:
self.indent_append(
result, ["Duration: %s" % " ".join(duration_parts)]
)
return result
exp_parts = ["date", expression.getAttribute("operation")]
if expression.hasAttribute("start"):
exp_parts.append(expression.getAttribute("start"))
if expression.hasAttribute("end"):
exp_parts.append(expression.getAttribute("end"))
if self.show_detail:
exp_parts.append("(id:%s)" % expression.getAttribute("id"))
return ["Expression: " + " ".join(exp_parts)]
def _list_attributes(self, element):
attributes = utils.dom_attrs_to_list(element, with_id=False)
if self.show_detail:
attributes.append("(id:%s)" % (element.getAttribute("id")))
return attributes
@staticmethod
def indent_append(target, source, indent=" "):
for part in source:
target.append(indent + part)
return target
class ExportAsExpression:
def __init__(self):
self.normalize = False
def get_string(self, rule, normalize=False):
self.normalize = normalize
return self.string_rule(rule)
def string_rule(self, rule):
boolean_op = rule.getAttribute("boolean-op") or "or"
rule_parts = []
for child in rule.childNodes:
if child.nodeType == xml.dom.minidom.Node.TEXT_NODE:
continue
if child.tagName == "expression":
rule_parts.append(self.string_expression(child))
elif child.tagName == "date_expression":
rule_parts.append(self.string_date_expression(child))
elif child.tagName == "rule":
rule_parts.append("(%s)" % self.string_rule(child))
if self.normalize:
rule_parts.sort()
return (" %s " % boolean_op).join(rule_parts)
def string_expression(self, expression):
if "value" in expression.attributes.keys():
exp_parts = [
expression.getAttribute("attribute"),
expression.getAttribute("operation"),
]
if expression.hasAttribute("type"):
exp_parts.append(expression.getAttribute("type"))
elif self.normalize:
exp_parts.append("string")
value = expression.getAttribute("value")
if " " in value:
value = '"%s"' % value
exp_parts.append(value)
else:
exp_parts = [
expression.getAttribute("operation"),
expression.getAttribute("attribute"),
]
return " ".join(exp_parts)
def string_date_expression(self, expression):
operation = expression.getAttribute("operation")
if operation == "date_spec":
exp_parts = ["date-spec"] + self._list_attributes(
expression.getElementsByTagName("date_spec")[0]
)
return " ".join(exp_parts)
if operation == "in_range":
exp_parts = ["date", "in_range"]
if expression.hasAttribute("start"):
exp_parts.extend([expression.getAttribute("start"), "to"])
if expression.hasAttribute("end"):
exp_parts.append(expression.getAttribute("end"))
durations = expression.getElementsByTagName("duration")
if durations:
exp_parts.append("duration")
exp_parts.extend(self._list_attributes(durations[0]))
return " ".join(exp_parts)
exp_parts = ["date", expression.getAttribute("operation")]
if expression.hasAttribute("start"):
exp_parts.append(expression.getAttribute("start"))
if expression.hasAttribute("end"):
exp_parts.append(expression.getAttribute("end"))
return " ".join(exp_parts)
@staticmethod
def _list_attributes(element):
attributes = utils.dom_attrs_to_list(element, with_id=False)
# sort it always to get the same output for the same input as dict is
# unordered
attributes.sort()
return attributes
def has_node_attr_expr_with_type_integer(rule_tree):
if isinstance(rule_tree, SymbolOperator):
if rule_tree.symbol_id in RuleParser.boolean_list:
return any(
has_node_attr_expr_with_type_integer(child)
for child in rule_tree.children
)
if (
rule_tree.symbol_id in RuleParser.date_comparison_list
and rule_tree.children[0].value == "date"
and rule_tree.children[1].is_literal()
) or (
isinstance(rule_tree, SymbolTypeDateCommon)
and rule_tree.date_value_class == DateSpecValue
):
return False
if isinstance(rule_tree, SymbolPrefix):
return False
child = rule_tree.children[1]
if isinstance(child, SymbolType) and child.symbol_id == "integer":
return True
return False
return False
# generic parser
class SymbolBase:
END = "{end}"
LITERAL = "{literal}"
symbol_id = None
left_binding_power = 0
def null_denotation(self):
raise SyntaxError("unexpected '%s'" % self.label())
def left_denotation(self, left):
raise SyntaxError(
"unexpected '%s' after '%s'" % (self.label(), left.label())
)
def is_end(self):
return self.symbol_id == SymbolBase.END
def is_literal(self):
return self.symbol_id == SymbolBase.LITERAL
def label(self):
return self.symbol_id
def __str__(self):
return "(%s)" % self.symbol_id
class SymbolLiteral(SymbolBase):
def __init__(self, value):
self.value = value
def null_denotation(self):
return self
def label(self):
return "end" if self.is_end() else str(self.value)
def __str__(self):
return "(end)" if self.is_end() else "(literal %s)" % self.value
class SymbolParenthesisOpen(SymbolBase):
expression_func = None
advance_func = None
close_symbol_id = None
def null_denotation(self):
expression = self.expression_func()
self.advance_func(self.close_symbol_id)
return expression
class SymbolOperator(SymbolBase):
expression_func = None
# Note: not properly typed
allowed_child_ids: List[Any] = []
def __init__(self):
self.children = []
def is_allowed_child(self, child_symbol, child_position):
return (
not self.allowed_child_ids
or not self.allowed_child_ids[child_position]
or child_symbol.symbol_id in self.allowed_child_ids[child_position]
)
def __str__(self):
string = " ".join(
[str(part) for part in [self.symbol_id] + self.children]
)
return "(" + string + ")"
class SymbolPrefix(SymbolOperator):
def null_denotation(self):
self.children.append(self.expression_func(self.left_binding_power))
if not self.is_allowed_child(self.children[0], 0):
raise SyntaxError(
"unexpected '%s' after '%s'"
% (self.children[0].label(), self.symbol_id)
)
return self
class SymbolType(SymbolPrefix):
value_re = None
def null_denotation(self):
super().null_denotation()
if self.value_re and not self.value_re.match(self.children[0].value):
raise SyntaxError(
"invalid %s value '%s'"
% (self.symbol_id, self.children[0].value)
)
return self
class SymbolInfix(SymbolOperator):
def left_denotation(self, left):
self.children.append(left)
if not self.is_allowed_child(self.children[0], 0):
raise SyntaxError(
"unexpected '%s' before '%s'" % (left.label(), self.symbol_id)
)
self.children.append(self.expression_func(self.left_binding_power))
if not self.is_allowed_child(self.children[1], 1):
raise SyntaxError(
"unexpected '%s' after '%s'"
% (self.children[1].label(), self.symbol_id)
)
return self
class SymbolTernary(SymbolOperator):
advance_func = None
symbol_second_id: Optional[str] = None
def left_denotation(self, left):
self.children.append(left)
if not self.is_allowed_child(self.children[0], 0):
raise SyntaxError(
"unexpected '%s' before '%s'" % (left.label(), self.symbol_id)
)
self.children.append(self.expression_func(self.left_binding_power))
if not self.is_allowed_child(self.children[1], 1):
raise SyntaxError(
"unexpected '%s' after '%s'"
% (self.children[1].label(), self.symbol_id)
)
self.advance_func(self.symbol_second_id)
self.children.append(self.expression_func(self.left_binding_power))
if not self.is_allowed_child(self.children[2], 2):
raise SyntaxError(
"unexpected '%s' after '%s ... %s'"
% (
self.children[2].label(),
self.symbol_id,
self.symbol_second_id,
)
)
return self
class SymbolTable:
def __init__(self):
self.table = dict()
def has_symbol(self, symbol_id):
return symbol_id in self.table
def get_symbol(self, symbol_id):
return self.table[symbol_id]
def new_symbol(
self,
symbol_id,
superclass,
binding_power=0,
expression_func=None,
advance_func=None,
):
if not self.has_symbol(symbol_id):
class SymbolClass(superclass):
pass
# enforce str to be both python2 and python3 compatible
SymbolClass.__name__ = str("symbol_" + symbol_id)
SymbolClass.symbol_id = symbol_id
SymbolClass.left_binding_power = binding_power
if expression_func:
SymbolClass.expression_func = expression_func
if advance_func:
SymbolClass.advance_func = advance_func
self.table[symbol_id] = SymbolClass
return SymbolClass
return self.get_symbol(symbol_id)
class Parser:
def __init__(self):
self.current_symbol = None
self.current_symbol_index = -1
self.program = list()
self.symbol_table = SymbolTable()
self.new_symbol_literal(SymbolBase.LITERAL)
self.new_symbol_literal(SymbolBase.END)
def new_symbol_literal(self, symbol_id):
return self.symbol_table.new_symbol(symbol_id, SymbolLiteral)
def new_symbol_prefix(self, symbol_id, binding_power):
return self.symbol_table.new_symbol(
symbol_id, SymbolPrefix, binding_power, self.expression
)
def new_symbol_type(self, symbol_id, binding_power):
return self.symbol_table.new_symbol(
symbol_id, SymbolType, binding_power, self.expression
)
def new_symbol_infix(self, symbol_id, binding_power):
return self.symbol_table.new_symbol(
symbol_id, SymbolInfix, binding_power, self.expression
)
def new_symbol_ternary(self, symbol_id, second_id, binding_power):
self.symbol_table.new_symbol(second_id, SymbolBase)
symbol_class = self.symbol_table.new_symbol(
symbol_id,
SymbolTernary,
binding_power,
self.expression,
self.advance,
)
symbol_class.symbol_second_id = second_id
return symbol_class
def new_symbol_parenthesis(self, symbol_id, closing_id):
self.symbol_table.new_symbol(closing_id, SymbolBase)
symbol_class = self.symbol_table.new_symbol(
symbol_id, SymbolParenthesisOpen, 0, self.expression, self.advance
)
symbol_class.close_symbol_id = closing_id
return symbol_class
def symbolize(self, program):
symbolized_program = list()
literal_class = self.symbol_table.get_symbol(SymbolBase.LITERAL)
for token in program:
if self.symbol_table.has_symbol(token) and (
not symbolized_program
or not isinstance(symbolized_program[-1], SymbolType)
):
symbolized = self.symbol_table.get_symbol(token)()
else:
symbolized = literal_class(token)
symbolized_program.append(symbolized)
symbolized_program.append(
self.symbol_table.get_symbol(SymbolBase.END)(None)
)
return symbolized_program
def advance(self, expected_symbol_id=None):
if (
expected_symbol_id
and self.current_symbol.symbol_id != expected_symbol_id
):
if self.current_symbol.is_end():
raise SyntaxError("missing '%s'" % expected_symbol_id)
raise SyntaxError(
"expecting '%s', got '%s'"
% (expected_symbol_id, self.current_symbol.label())
)
self.current_symbol_index += 1
if self.current_symbol_index >= len(self.program):
raise UnexpectedEndOfInput()
self.current_symbol = self.program[self.current_symbol_index]
return self
def expression(self, right_binding_power=0):
symbol = self.current_symbol
self.advance()
left = symbol.null_denotation()
while right_binding_power < self.current_symbol.left_binding_power:
symbol = self.current_symbol
self.advance()
left = symbol.left_denotation(left)
return left
def parse(self, program):
self.current_symbol = None
self.current_symbol_index = -1
self.program = self.symbolize(program)
self.advance()
result = self.expression()
symbol = self.current_symbol
if not symbol.is_end():
raise SyntaxError("unexpected '%s'" % symbol.label())
return result
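# Illustrative note: Parser implements Pratt (top-down operator-precedence)
# parsing; binding powers decide how operators nest. A toy setup (the symbol
# registration below is an assumption for the example, not something this
# module does):
#
#   p = Parser()
#   p.new_symbol_infix("and", 40)
#   str(p.parse(["a", "and", "b", "and", "c"]))
#   # -> '(and (and (literal a) (literal b)) (literal c))'
#
# Equal binding power on both sides makes the operator left-associative.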
class ParserException(Exception):
pass
class UnexpectedEndOfInput(ParserException):
pass
class SyntaxError(ParserException):
# pylint: disable=redefined-builtin
pass
# rule parser specific code
class DateCommonValue:
allowed_items = [
"hours",
"monthdays",
"weekdays",
"yeardays",
"months",
"weeks",
"years",
"weekyears",
"moon",
]
KEYWORD: Optional[str] = None
def __init__(self, parts_string, keyword=None):
self.parts = dict()
for part in parts_string.split():
if not self.accepts_part(part):
raise SyntaxError("unexpected '%s' in %s" % (part, keyword))
if "=" not in part:
raise SyntaxError(
"missing =value after '%s' in %s" % (part, keyword)
)
name, value = part.split("=", 1)
if value == "":
raise SyntaxError(
"missing value after '%s' in %s" % (part, keyword)
)
self.parts[name] = value
if not self.parts:
raise SyntaxError(
"missing one of '%s=' in %s"
% ("=', '".join(DateCommonValue.allowed_items), keyword)
)
self.validate()
def validate(self):
return self
@classmethod
def accepts_part(cls, part):
for name in cls.allowed_items:
if part == name or part.startswith(name + "="):
return True
return False
def __str__(self):
# sort it always to get the same output for the same input as dict is
# unordered
return " ".join(
[
"%s=%s" % (name, value)
for name, value in sorted(self.parts.items())
]
)
class DateSpecValue(DateCommonValue):
KEYWORD = "date-spec"
part_re = re.compile(r"^(?P<since>\d+)(-(?P<until>\d+))?$")
part_limits = {
"hours": (0, 23),
"monthdays": (0, 31),
"weekdays": (1, 7),
"yeardays": (1, 366),
"months": (1, 12),
"weeks": (1, 53),
"weekyears": (1, 53),
"moon": (0, 7),
}
def __init__(self, parts_string):
super().__init__(parts_string, self.KEYWORD)
def validate(self):
for name, value in self.parts.items():
if not self.valid_part(name, value):
raise SyntaxError(
"invalid %s '%s' in '%s'"
% (name, value, DateSpecValue.KEYWORD)
)
return self
def valid_part(self, name, value):
match = DateSpecValue.part_re.match(value)
if not match:
return False
match_dict = match.groupdict()
if not self.valid_part_limits(name, match_dict["since"]):
return False
if match_dict["until"]:
            if not self.valid_part_limits(name, match_dict["until"]):
return False
if int(match_dict["since"]) >= int(match_dict["until"]):
return False
return True
@staticmethod
def valid_part_limits(name, value):
if name not in DateSpecValue.part_limits:
return True
limits = DateSpecValue.part_limits[name]
return limits[0] <= int(value) <= limits[1]
class DateDurationValue(DateCommonValue):
KEYWORD = "duration"
def __init__(self, parts_string):
super().__init__(parts_string, self.KEYWORD)
def validate(self):
for name, value in self.parts.items():
if not value.isdigit():
raise SyntaxError(
"invalid %s '%s' in '%s'"
% (name, value, DateDurationValue.KEYWORD)
)
return self
class SymbolTypeDateCommon(SymbolType):
date_value_class = None
def null_denotation(self):
symbol = self.expression_func(self.left_binding_power)
symbol.value = self.date_value_class(symbol.value)
self.children.append(symbol)
return self
class SymbolTernaryInRange(SymbolTernary):
allowed_child_ids = [
[SymbolBase.LITERAL],
[SymbolBase.LITERAL],
[SymbolBase.LITERAL, DateDurationValue.KEYWORD],
]
symbol_second_id = "to"
def is_allowed_child(self, child_symbol, child_position):
return super().is_allowed_child(child_symbol, child_position) and (
child_position != 0 or child_symbol.value == "date"
)
def left_denotation(self, left):
super().left_denotation(left)
for child in self.children[1:]:
if child.is_literal() and not utils.is_iso8601_date(child.value):
raise SyntaxError(
"invalid date '%s' in 'in_range ... to'" % child.value
)
return self
class RuleParser(Parser):
comparison_list = ["eq", "ne", "lt", "gt", "lte", "gte", "in_range"]
date_comparison_list = ["gt", "lt", "in_range"]
prefix_list = ["defined", "not_defined"]
boolean_list = ["and", "or"]
simple_type_list = ["string", "integer", "number", "version"]
parenthesis_open = "("
parenthesis_close = ")"
def __init__(self):
super().__init__()
for operator in RuleParser.comparison_list:
if operator == "in_range":
continue
symbol_class = self.new_symbol_infix(operator, 50)
symbol_class.allowed_child_ids = [
[SymbolBase.LITERAL],
[SymbolBase.LITERAL] + RuleParser.simple_type_list,
]
self.symbol_table.new_symbol(
"in_range", SymbolTernaryInRange, 50, self.expression, self.advance
)
self.symbol_table.new_symbol("to", SymbolBase)
for operator in RuleParser.prefix_list:
symbol_class = self.new_symbol_prefix(operator, 60)
symbol_class.allowed_child_ids = [[SymbolBase.LITERAL]]
for operator in RuleParser.simple_type_list:
symbol_class = self.new_symbol_type(operator, 70)
self.symbol_table.get_symbol("integer").value_re = re.compile(
r"^[-+]?\d+$"
)
# rhbz#1869399
# Originally, pacemaker only supported 'number', treated it as an
# integer and documented it as 'integer'. With CIB schema 3.5.0+,
# 'integer' is supported as well. With crm_feature_set 3.5.0+, 'number'
# is treated as a floating point number.
# Since pcs never supported 'number' until the above changes in
# pacemaker happened and pacemaker was able to handle floating point
# numbers before (even though truncating them to integers), we'll just
# check for a float here. If that's not good enough, we can fix it
# later and validate the value as integer when crm_feature_set < 3.5.0.
self.symbol_table.get_symbol("number").value_re = re.compile(
r"^[-+]?(\d+|(\d*\.\d+)|(\d+\.\d*))([eE][+-]?\d+)?$"
)
self.symbol_table.get_symbol("version").value_re = re.compile(
r"^\d+(\.\d+)*$"
)
symbol_class = self.new_symbol_type_date(DateSpecValue, 70)
symbol_class = self.new_symbol_type_date(DateDurationValue, 70)
for operator in RuleParser.boolean_list:
symbol_class = self.new_symbol_infix(operator, 40)
symbol_class.allowed_child_ids = [
RuleParser.comparison_list
+ RuleParser.prefix_list
+ [DateSpecValue.KEYWORD]
+ RuleParser.boolean_list
] * 2
self.new_symbol_parenthesis(
RuleParser.parenthesis_open, RuleParser.parenthesis_close
)
def parse(self, program):
syntactic_tree = super().parse(program)
if syntactic_tree.is_literal() or (
isinstance(syntactic_tree, SymbolType)
and not (
isinstance(syntactic_tree, SymbolTypeDateCommon)
and syntactic_tree.date_value_class == DateSpecValue
)
):
raise SyntaxError(
"missing one of '%s'"
% "', '".join(
RuleParser.comparison_list
+ RuleParser.prefix_list
+ [DateSpecValue.KEYWORD]
)
)
return syntactic_tree
def new_symbol_type_date(self, date_value_class, binding_power):
symbol_class = self.symbol_table.new_symbol(
date_value_class.KEYWORD,
SymbolTypeDateCommon,
binding_power,
self.expression,
)
symbol_class.date_value_class = date_value_class
return symbol_class
# cib builder
class CibBuilder:
def __init__(self, cib_schema_version):
self.cib_schema_version = cib_schema_version
def build(self, dom_element, syntactic_tree, rule_id=None):
dom_rule = self.add_element(
dom_element,
"rule",
rule_id if rule_id else dom_element.getAttribute("id") + "-rule",
)
self.build_rule(dom_rule, syntactic_tree)
return dom_rule
def build_rule(self, dom_rule, syntactic_tree):
if isinstance(syntactic_tree, SymbolOperator):
if syntactic_tree.symbol_id in RuleParser.boolean_list:
self.build_boolean(dom_rule, syntactic_tree)
elif (
syntactic_tree.symbol_id in RuleParser.date_comparison_list
and syntactic_tree.children[0].value == "date"
and syntactic_tree.children[1].is_literal()
):
self.build_date_expression(dom_rule, syntactic_tree)
elif (
isinstance(syntactic_tree, SymbolTypeDateCommon)
and syntactic_tree.date_value_class == DateSpecValue
):
self.build_datespec(dom_rule, syntactic_tree)
else:
self.build_expression(dom_rule, syntactic_tree)
else:
raise InvalidSyntacticTree(syntactic_tree)
def build_datespec(self, dom_element, syntactic_tree):
dom_expression = self.add_element(
dom_element,
"date_expression",
dom_element.getAttribute("id") + "-expr",
)
dom_expression.setAttribute("operation", "date_spec")
dom_datespec = self.add_element(
dom_expression,
"date_spec",
dom_expression.getAttribute("id") + "-datespec",
)
for key, value in syntactic_tree.children[0].value.parts.items():
dom_datespec.setAttribute(key, value)
def build_expression(self, dom_element, syntactic_tree):
dom_expression = self.add_element(
dom_element, "expression", dom_element.getAttribute("id") + "-expr"
)
dom_expression.setAttribute("operation", syntactic_tree.symbol_id)
dom_expression.setAttribute(
"attribute", syntactic_tree.children[0].value
)
if not isinstance(syntactic_tree, SymbolPrefix):
child = syntactic_tree.children[1]
if isinstance(child, SymbolType):
# rhbz#1869399
# Pcs was always accepting 'integer', while CIB was only
# supporting 'number' (and 'string' and 'version'). Pacemaker
# was documenting it as 'integer' and was treating it as
# integer (not float). With CIB schema 3.5.0, both 'integer'
# and 'number' are accepted by CIB. For older schemas, we turn
# 'integer' to 'number'.
if (
self.cib_schema_version
< const.PCMK_RULES_NODE_ATTR_EXPR_WITH_INT_TYPE_CIB_VERSION
and child.symbol_id == "integer"
):
dom_expression.setAttribute("type", "number")
else:
dom_expression.setAttribute("type", child.symbol_id)
child = child.children[0]
dom_expression.setAttribute("value", child.value)
def build_date_expression(self, dom_element, syntactic_tree):
dom_expression = self.add_element(
dom_element,
"date_expression",
dom_element.getAttribute("id") + "-expr",
)
dom_expression.setAttribute("operation", syntactic_tree.symbol_id)
if syntactic_tree.symbol_id == "gt":
dom_expression.setAttribute(
"start", syntactic_tree.children[1].value
)
elif syntactic_tree.symbol_id == "lt":
dom_expression.setAttribute("end", syntactic_tree.children[1].value)
elif syntactic_tree.symbol_id == "in_range":
dom_expression.setAttribute(
"start", syntactic_tree.children[1].value
)
if (
isinstance(syntactic_tree.children[2], SymbolTypeDateCommon)
and syntactic_tree.children[2].date_value_class
== DateDurationValue
):
dom_duration = self.add_element(
dom_expression,
"duration",
dom_expression.getAttribute("id") + "-duration",
)
duration = syntactic_tree.children[2].children[0].value
for key, value in duration.parts.items():
dom_duration.setAttribute(key, value)
else:
dom_expression.setAttribute(
"end", syntactic_tree.children[2].value
)
def build_boolean(self, dom_element, syntactic_tree):
dom_element.setAttribute("boolean-op", syntactic_tree.symbol_id)
for subtree in syntactic_tree.children:
if (
subtree.symbol_id in RuleParser.boolean_list
and subtree.symbol_id != syntactic_tree.symbol_id
):
self.build(
dom_element,
subtree,
dom_element.getAttribute("id") + "-rule",
)
else:
self.build_rule(dom_element, subtree)
@staticmethod
def add_element(parent, tag_name, element_id):
dom = parent.ownerDocument
child = parent.appendChild(dom.createElement(tag_name))
child.setAttribute("id", utils.find_unique_id(dom, element_id))
return child
class CibBuilderException(Exception):
pass
class InvalidSyntacticTree(CibBuilderException):
pass
# token preprocessing
class TokenPreprocessor:
def __init__(self):
self._warning_list = []
def run(self, token_list):
self._warning_list = []
return self.convert_legacy_date(
self.join_date_common(self.separate_parenthesis(token_list))
)
@property
def warning_list(self):
return self._warning_list
@staticmethod
def separate_parenthesis(input_list):
output_list = []
for token in input_list:
if not (
RuleParser.parenthesis_open in token
or RuleParser.parenthesis_close in token
):
output_list.append(token)
else:
part = []
for char in token:
if char in [
RuleParser.parenthesis_open,
RuleParser.parenthesis_close,
]:
if part:
output_list.append("".join(part))
part = []
output_list.append(char)
else:
part.append(char)
if part:
output_list.append("".join(part))
return output_list
def join_date_common(self, input_list):
output_list = []
token_parts = []
in_datecommon = False
for token in input_list:
if in_datecommon:
if DateCommonValue.accepts_part(token):
token_parts.append(token)
elif (
token == "operation=date_spec"
and token_parts[0] == DateSpecValue.KEYWORD
):
self._warning_list.append(
"Syntax 'operation=date_spec' "
"is deprecated and will be removed. Please use "
"'date-spec <date-spec options>'."
)
else:
in_datecommon = False
output_list.append(token_parts[0])
if len(token_parts) > 1:
output_list.append(" ".join(token_parts[1:]))
output_list.append(token)
token_parts = []
elif token in [DateSpecValue.KEYWORD, DateDurationValue.KEYWORD]:
in_datecommon = True
token_parts = [token]
else:
output_list.append(token)
if token_parts:
output_list.append(token_parts[0])
if len(token_parts) > 1:
output_list.append(" ".join(token_parts[1:]))
return output_list
def convert_legacy_date(self, input_list):
output_list = []
in_date = False
date_start = ""
date_end = ""
token_parts = []
for token in input_list:
if in_date:
token_parts.append(token)
if token.startswith("start="):
date_start = token[len("start=") :]
elif token.startswith("end="):
date_end = token[len("end=") :]
else:
if token == "gt" and date_start and not date_end:
output_list.extend(["date", "gt", date_start])
self._warning_list.append(
"Syntax 'date start=<date> gt' "
"is deprecated and will be removed. Please use "
"'date gt <date>'."
)
elif token == "lt" and not date_start and date_end:
output_list.extend(["date", "lt", date_end])
self._warning_list.append(
"Syntax 'date end=<date> lt' "
"is deprecated and will be removed. Please use "
"'date lt <date>'."
)
elif token == "in_range" and date_start and date_end:
output_list.extend(
["date", "in_range", date_start, "to", date_end]
)
self._warning_list.append(
"Syntax 'date start=<date> end=<date> in_range' "
"is deprecated and will be removed. Please use "
"'date in_range <date> to <date>'."
)
else:
output_list.extend(token_parts)
token_parts = []
date_start = date_end = ""
in_date = False
elif token == "date":
token_parts = ["date"]
in_date = True
else:
output_list.append(token)
if token_parts:
output_list.extend(token_parts)
return output_list
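# Illustrative end-to-end sketch (the token lists are assumptions):
#
#   tokens = TokenPreprocessor().run(["date", "start=2014-06-26", "gt"])
#   # -> ["date", "gt", "2014-06-26"], plus a deprecation warning from
#   #    convert_legacy_date() above
#   tree = RuleParser().parse(tokens)
#   str(tree)  # -> '(gt (literal date) (literal 2014-06-26))'
#
# dom_rule_add() then feeds such a tree to CibBuilder.build(), which emits
# the corresponding <rule>/<date_expression> CIB subtree.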
| feist/pcs | pcs/rule.py | Python | gpl-2.0 | 41,255 | 0.000194 |
#!/usr/bin/python
################################################################################
#
# Module: cmtest.py
#
# Author: Paul Tindle ( mailto:Paul@Tindle.org )
#
# Descr: Main Test executive
#
# Version: 0.1 $Id$
#
# Changes: 05/18/17 Conversion from perl - JSW
#
# Still ToDo:
#
# License: This software is subject to and may be distributed under the
# terms of the GNU General Public License as described in the
# file License.html found in this or a parent directory.
# Forward any and all validated updates to Paul@Tindle.org
#
# Copyright (c) 1995 - 2005 Paul Tindle. All rights reserved.
# Copyright (c) 2005-2008 Stoke. All rights reserved.
# Copyright (c) 2017 Joe White. All rights reserved.
#
################################################################################
VER= 'v0.1 5/9/2017'; # Conversion to Python from Perl 050917 JSW
CVS_VER = ' [ CVS: $Id: Logs.pm,v 1.10 2011/01/21 18:38:56 joe Exp $ ]';
global CMtestVersion;
if not "CMtestVersion" in globals() : CMtestVersion={}
CMtestVersion['cmtest'] = VER + CVS_VER
import Globals
#from Globals import *
import Util
import Init
import FileOp
import Logs
import Connect
import Menu
import sys
sys.path.append("../lib;")
import os
import os.path
from os.path import expanduser
import socket
from optparse import OptionParser
#import lib # import private library functions for CMtestpy, see lib/__init__.py
#import Globals
#print(globals())
#from lib.Util import Abort
#from lib.Globals import Myglobals
#import Mav
#import Power
#import lib.GUI
import re # used by _Init when matching /etc/*release contents
import glob # used by _Init to resolve the /etc/*release pattern
import sys, traceback # catch keyboard interrupt
import platform
from os.path import isfile, join
#__________________________________________________________________________
def main():
#Globals.Myglobals()
#import Globals
print (Globals.Debug)
#print("My Debug = %i" % Debug)
print(globals())
#Debug flag
#global Debug
#Debug = 1
#global Verbose
#global Menu1
#global session
#global Cfg_File
#global Tmp
#global CmdFilePath
#global Version
#global Session
#global SessionForce
#global CMPipe; CMPipe=os.getenv('CmTest_Release_Pipe', "No_Pipe")
#global UserID
#global Out_File
#global Loop_overide
#global shucks; shucks = 0
#global GlobalVar
#print (global())
#Get input from command line
usage = "usage: %prog session#"
parser = OptionParser(usage)
parser.add_option("-d", "--debug", action="count", dest="Debug", default=0,
help="Turn on Debug Stetments")
parser.add_option("-v", "--verbose", action="count", dest="Verbose", default=0,
help="Turn on more output")
parser.add_option("-B", "--Batch", dest="Menu1", default="",
help="Batch Mode - no Menu prompt, does not support multi level menu" )
parser.add_option("-s", "--session", dest="Session", type="int", default=0,
help="Set Sesion #, Default is first avaiable")
parser.add_option("-L", "--Loop", dest="Loop", type="int", default=0,
help="Overide all Loop counts(seconds)")
parser.add_option("-F", "--Force", dest="Force", type="int", default=0,
help="Force Session #")
parser.add_option("-r", "--regress", dest="Regress", default="null",
help="Directly execute a subroutine")
parser.add_option("-U", "--User", dest="User", default="None",
help="Set User ID")
parser.add_option("-O", "--Output", dest="Output", default=r"cmtest.xml",
help="Set Output XML file, will default to tmp/cmtest.xml")
(options, args) = parser.parse_args()
#if not options.Session :
#parser.error("-s session# required")
Globals.Debug += options.Debug
Globals.Verbose += options.Verbose
Globals.Menu1 = options.Menu1
Globals.Regress = options.Regress
Globals.Session = options.Session
Globals.SessionForce = options.Force
Globals.Force = options.Force
Globals.CurrentUserID = options.User
Globals.Out_File = options.Output
Globals.Loop_overide = options.Loop
OS = os.name
if os.name == "nt":
Globals.OS = "NT"
else:
Globals.OS = "Linux"
if Globals.Debug : print ("Detected OS: %s " % Globals.OS)
#Get our base directory and find the Station Config File
File = os.path.abspath(__file__)
Globals.GP_Path = FileOp.fnstrip(File,1)
PPATH = FileOp.fnstrip(File,1)
if Globals.Debug : print ("OS path detected is: %s " % Globals.GP_Path)
if Globals.GP_Path == '': Globals.GP_Path = ".."
if Globals.OS == "NT":
Globals.Cfg_File = join(Globals.GP_Path,"cfgfiles","testctrl.defaults.cfg")
#Globals[LogPath] = "\Logs"
TmpDir = expanduser("~")
else:
Globals.Cfg_File = '/usr/local/cmtest/testctrl.cfg'
#Globals[LogPath] = r"/var/local/cmtest/logs"
TmpDir = expanduser("~") + "/tmp"
if Globals.Debug : print ("Config path detected is: %s " % Globals.Cfg_File)
#if OS == 'nt':
#Cfg_File = PPath + "/" + "cfgfiles/testctrl.defaults.cfg"
#Tmp = os.getenv('TMP', "NO_TMP")
#else :
#Cfg_File = r'/usr/local/cmtest/testctrl.cfg'
#Tmp = os.getenv(expanduser("~") + "/tmp", "NO_TMP")
Globals.CmdFilePath = r"../" + PPATH +r"/cmdfiles"
Util.ASCIIColor('reset')
_Init()
GUI = 0
    # unneeded Perl &GUI_Init if $GUI;
Quiet = 0; # Don't allow since we only have a char menu right now
    global shucks
    shucks = 0
try:
Menu.Menu_main() # Bring up menu and start excution
except KeyboardInterrupt:
print( "Shutdown requested...exiting")
_catch_zap()
except Exception:
traceback.print_exc(file=sys.stdout)
sys.exit(0)
if not Quiet : print("done\n")
Util.Exit(0)
#_____________________________________________________________________________
def _Init():
"Initialize Cmtest"
if Globals.Debug : print("In this Function %s" % __name__)
global Linux_gbl
global Erc
global Force
if not os.name == "nt" :
Linux_gbl = 'Ubuntu'; # Added 3/4/10 to support Ubuntu install
        try:
            # "/etc/*release" is a glob pattern, not a literal path, so resolve
            # it first; the open() mode must also be the string "r".
            release_file = glob.glob("/etc/*release")[0]
            with open(release_file, "r") as fh:
                for line in fh:
                    if re.search(r"Ubuntu", line): Linux_gbl = 'Ubuntu'
                    elif re.search(r"Fedora", line): Linux_gbl = 'Fedora'
                    elif re.search(r"CentOS", line): Linux_gbl = 'CentOS'
                    else:
                        Linux_gbl = 'unknown'
                        print ("Unsupported linux type found, I am going to die now")
                        exit()
        except Exception:
            print ("Unsupported linux type found, are we Windows? I am going to die now")
            if not Globals.Debug : exit()
#else we are NT
print ("Debug in _Init %i" % Globals.Debug)
Init.Init_All (0)
Globals.Erc = 101
Globals.Erc = 0
Init.Init_Also(0)
return
#____________________________________________________________________________________
def _catch_zap():
    global shucks
    shucks += 1
    # NOTE: the Power module import near the top is commented out; this call
    # keeps the original power-off intent and assumes that module is available.
    Power('OFF')
    Globals.Stats['Status'] = 'Aborted'
    Util.Exit(998, "<Ctrl>-C Aborted")
#____________________________________________________________________________________
if __name__ == "__main__":
    main()
| white111/CMtestpy | bin/cmtest.py | Python | lgpl-2.1 | 7,516 | 0.023018 |
# This import fixes sys.path issues
from . import parentpath
from datawrap import tablewrap
import unittest
class TableWrapTest(unittest.TestCase):
'''
Tests the capability to wrap 2D objects in Tables and transpose them.
'''
def setUp(self):
# self.table doesn't need the tablewrap.Table object to work
# but this tests both wrappers at once
self.table = tablewrap.Table([[1,2,3,4,5], [6,7,8,9,10], ['a','b','c','d','e']])
self.transpose = tablewrap.TableTranspose(self.table)
def test_table_transpose(self):
self.assertEqual(self.transpose[0][0], self.table[0][0])
self.assertEqual(self.transpose[4][0], self.table[0][4])
self.assertEqual(self.transpose[0][2], self.table[2][0])
self.assertEqual(self.transpose[4][2], self.table[2][4])
self.assertEqual(self.transpose[-1][-1], self.table[-1][-1])
self.assertEqual(self.transpose[-2][-3], self.table[-3][-2])
for c,col in enumerate(self.transpose):
for r,elem in enumerate(col):
self.assertEqual(elem, self.table[r][c])
def test_table_slice(self):
# Try copy slice requests
self.assertEqual(self.transpose[:][0][0], self.table[0][0])
self.assertEqual(self.transpose[:][4][0], self.table[0][4])
self.assertEqual(self.transpose[:][0][2], self.table[2][0])
self.assertEqual(self.transpose[:][4][2], self.table[2][4])
# Make a change
self.transpose[:][0][2] = 'new'
self.assertEqual(self.transpose[0][2], 'new')
self.assertEqual(self.table[2][0], 'new')
# Try some different slices
tslice = slice(1, None, 2)
translice = self.transpose[tslice]
self.assertEqual(len(translice), 2)
self.assertEqual(len(translice[0]), 3)
translice[0][0] = 'new2'
self.assertEqual(translice[0][0], self.table[0][tslice][0])
self.assertEqual(translice[1][0], self.table[0][tslice][1])
self.assertEqual(translice[0][2], self.table[2][tslice][0])
self.assertEqual(translice[1][2], self.table[2][tslice][1])
tslice = slice(None, 1, None)
translice = self.transpose[tslice]
self.assertEqual(len(translice), 1)
self.assertEqual(len(translice[0]), 3)
translice[0][0] = 'new3'
self.assertEqual(translice[0][0], self.table[0][tslice][0])
self.assertEqual(translice[0][2], self.table[2][tslice][0])
def test_verify(self):
# Check that valid finds bad tables
bad_table = [[1, 2, 3], ['a', 'b'], [4, 5, 6]]
self.assertRaises(ValueError, lambda: tablewrap.Table(bad_table, True, False))
self.assertRaises(ValueError, lambda: tablewrap.TableTranspose(bad_table, True, False))
bad_table = [[1], ['a', 'b'], [4, 5, 6]]
self.assertRaises(ValueError, lambda: tablewrap.Table(bad_table, True, False))
self.assertRaises(ValueError, lambda: tablewrap.TableTranspose(bad_table, True, False))
bad_table = [[1, 2], ['a'], [4]]
self.assertRaises(ValueError, lambda: tablewrap.Table(bad_table, True, False))
self.assertRaises(ValueError, lambda: tablewrap.TableTranspose(bad_table, True, False))
bad_table = [[1, 2], ['a'], [4]]
noCheck = tablewrap.TableTranspose(bad_table, False, False)
# If we don't do validity checks and instead access a bad index...
self.assertRaises(IndexError, lambda: noCheck[2][1])
def test_repair(self):
# Check that valid finds bad tables
bad_table = [[1, 2, 3], ['a', 'b'], [4, 5, 6]]
self.assertRaises(ValueError, lambda: tablewrap.Table(bad_table, True, False))
        # Neither of these should explode
table = tablewrap.Table(bad_table, False, True)
self.assertIsNone(table[1][2])
self.assertIsNone(bad_table[1][2])
bad_table = [[1, 2, 3], ['a', 'b'], [4, 5, 6]]
table = tablewrap.Table(bad_table, True, True)
self.assertIsNone(table[1][2])
self.assertIsNone(bad_table[1][2])
# Check that valid finds bad tables
bad_table = [[1, 2, 3], ['a', 'b'], [4, 5, 6]]
self.assertRaises(ValueError, lambda: tablewrap.TableTranspose(bad_table, True, False))
        # Neither of these should explode
transpose = tablewrap.TableTranspose(bad_table, False, True)
self.assertIsNone(transpose[2][1])
self.assertIsNone(bad_table[1][2])
bad_table = [[1, 2, 3], ['a', 'b'], [4, 5, 6]]
transpose = tablewrap.TableTranspose(bad_table, True, True)
self.assertIsNone(transpose[2][1])
self.assertIsNone(bad_table[1][2])
if __name__ == '__main__':
unittest.main()
| OpenGov/python_data_wrap | tests/table_wrap_test.py | Python | lgpl-2.1 | 4,724 | 0.005292 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import, division, unicode_literals
from mo_dots import Data, wrap
from mo_files import File
import mo_json
from mo_logs import Log
from mo_logs.exceptions import suppress_exception
from mo_math.randoms import Random
from mo_threads import Lock, Signal, THREAD_STOP
DEBUG = True
class PersistentQueue(object):
"""
THREAD-SAFE, PERSISTENT QUEUE
CAN HANDLE MANY PRODUCERS, BUT THE pop(), commit() IDIOM CAN HANDLE ONLY
ONE CONSUMER.
IT IS IMPORTANT YOU commit() or close(), OTHERWISE NOTHING COMES OFF THE QUEUE
"""
def __init__(self, _file):
"""
file - USES FILE FOR PERSISTENCE
"""
self.file = File.new_instance(_file)
self.lock = Lock("lock for persistent queue using file " + self.file.name)
self.please_stop = Signal()
self.db = Data()
self.pending = []
if self.file.exists:
for line in self.file:
with suppress_exception:
delta = mo_json.json2value(line)
apply_delta(self.db, delta)
if self.db.status.start == None: # HAPPENS WHEN ONLY ADDED TO QUEUE, THEN CRASH
self.db.status.start = 0
self.start = self.db.status.start
# SCRUB LOST VALUES
lost = 0
for k in self.db.keys():
with suppress_exception:
if k!="status" and int(k) < self.start:
self.db[k] = None
lost += 1
# HAPPENS FOR self.db.status, BUT MAYBE OTHER PROPERTIES TOO
if lost:
Log.warning("queue file had {{num}} items lost", num= lost)
DEBUG and Log.note("Persistent queue {{name}} found with {{num}} items", name=self.file.abspath, num=len(self))
else:
self.db.status = Data(
start=0,
end=0
)
self.start = self.db.status.start
DEBUG and Log.note("New persistent queue {{name}}", name=self.file.abspath)
def _add_pending(self, delta):
delta = wrap(delta)
self.pending.append(delta)
def _apply_pending(self):
for delta in self.pending:
apply_delta(self.db, delta)
self.pending = []
def __iter__(self):
"""
BLOCKING ITERATOR
"""
while not self.please_stop:
try:
value = self.pop()
if value is not THREAD_STOP:
yield value
except Exception as e:
Log.warning("Tell me about what happened here", cause=e)
def add(self, value):
with self.lock:
if self.closed:
Log.error("Queue is closed")
if value is THREAD_STOP:
DEBUG and Log.note("Stop is seen in persistent queue")
self.please_stop.go()
return
self._add_pending({"add": {str(self.db.status.end): value}})
self.db.status.end += 1
self._add_pending({"add": {"status.end": self.db.status.end}})
self._commit()
return self
def __len__(self):
with self.lock:
return self.db.status.end - self.start
def __getitem__(self, item):
return self.db[str(item + self.start)]
def pop(self, timeout=None):
"""
:param timeout: OPTIONAL DURATION
:return: None, IF timeout PASSES
"""
with self.lock:
while not self.please_stop:
if self.db.status.end > self.start:
value = self.db[str(self.start)]
self.start += 1
return value
if timeout is not None:
with suppress_exception:
self.lock.wait(timeout=timeout)
if self.db.status.end <= self.start:
return None
else:
self.lock.wait()
DEBUG and Log.note("persistent queue already stopped")
return THREAD_STOP
def pop_all(self):
"""
NON-BLOCKING POP ALL IN QUEUE, IF ANY
"""
with self.lock:
if self.please_stop:
return [THREAD_STOP]
if self.db.status.end == self.start:
return []
output = []
for i in range(self.start, self.db.status.end):
output.append(self.db[str(i)])
self.start = self.db.status.end
return output
def rollback(self):
with self.lock:
if self.closed:
return
self.start = self.db.status.start
self.pending = []
def commit(self):
with self.lock:
if self.closed:
Log.error("Queue is closed, commit not allowed")
try:
self._add_pending({"add": {"status.start": self.start}})
for i in range(self.db.status.start, self.start):
self._add_pending({"remove": str(i)})
if self.db.status.end - self.start < 10 or Random.range(0, 1000) == 0: # FORCE RE-WRITE TO LIMIT FILE SIZE
# SIMPLY RE-WRITE FILE
if DEBUG:
Log.note("Re-write {{num_keys}} keys to persistent queue", num_keys=self.db.status.end - self.start)
for k in self.db.keys():
if k == "status" or int(k) >= self.db.status.start:
continue
Log.error("Not expecting {{key}}", key=k)
self._commit()
self.file.write(mo_json.value2json({"add": self.db}) + "\n")
else:
self._commit()
except Exception as e:
raise e
def _commit(self):
self.file.append("\n".join(mo_json.value2json(p) for p in self.pending))
self._apply_pending()
def close(self):
self.please_stop.go()
with self.lock:
if self.db is None:
return
self.add(THREAD_STOP)
if self.db.status.end == self.start:
DEBUG and Log.note("persistent queue clear and closed")
self.file.delete()
else:
DEBUG and Log.note("persistent queue closed with {{num}} items left", num=len(self))
try:
self._add_pending({"add": {"status.start": self.start}})
for i in range(self.db.status.start, self.start):
self._add_pending({"remove": str(i)})
self.file.write(mo_json.value2json({"add": self.db}) + "\n" + ("\n".join(mo_json.value2json(p) for p in self.pending)) + "\n")
self._apply_pending()
except Exception as e:
raise e
self.db = None
@property
def closed(self):
with self.lock:
return self.db is None
def apply_delta(value, delta):
if delta.add:
for k, v in delta.add.items():
value[k] = v
elif delta.remove:
value[delta.remove] = None
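# Illustrative sketch of the single-consumer idiom from the class docstring;
# the file name is an assumption. pop() alone does not make removal durable:
# commit() (or close()) persists it, and rollback() undoes uncommitted pops.
def _example_queue_roundtrip(path="queue.journal"):
    q = PersistentQueue(path)
    q.add({"job": 1})
    item = q.pop()  # blocks until an item (or THREAD_STOP) is available
    if item is not THREAD_STOP:
        q.commit()
    q.close()
    return item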
| klahnakoski/TestLog-ETL | vendor/mo_collections/persistent_queue.py | Python | mpl-2.0 | 7,580 | 0.002111 |
from challenge47 import bleichenbacher
def crack():
bleichenbacher(768)
| troismph/matasano-challenges | src/challenge48.py | Python | gpl-3.0 | 78 | 0 |
# Copyright 2015. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cfgm_common import exceptions as vnc_exc
import contrail_res_handler as res_handler
import netaddr
from neutron.common import constants as n_constants
from neutron.common import exceptions as n_exceptions
import subnet_res_handler as subnet_handler
import vmi_res_handler as vmi_handler
from vnc_api import vnc_api
SNAT_SERVICE_TEMPLATE_FQ_NAME = ['default-domain', 'netns-snat-template']
class LogicalRouterMixin(object):
@staticmethod
def _get_external_gateway_info(rtr_obj):
vn_refs = rtr_obj.get_virtual_network_refs()
if vn_refs:
return vn_refs[0]['uuid']
def _neutron_dict_to_rtr_obj(self, router_q, rtr_obj):
rtr_name = router_q.get('name')
id_perms = rtr_obj.get_id_perms()
if 'admin_state_up' in router_q:
id_perms.enable = router_q['admin_state_up']
rtr_obj.set_id_perms(id_perms)
if rtr_name:
rtr_obj.display_name = rtr_name
return rtr_obj
def _rtr_obj_to_neutron_dict(self, rtr_obj,
contrail_extensions_enabled=True,
fields=None):
rtr_q_dict = {}
rtr_q_dict['id'] = rtr_obj.uuid
if not rtr_obj.display_name:
rtr_q_dict['name'] = rtr_obj.get_fq_name()[-1]
else:
rtr_q_dict['name'] = rtr_obj.display_name
rtr_q_dict['tenant_id'] = self._project_id_vnc_to_neutron(
rtr_obj.parent_uuid)
rtr_q_dict['admin_state_up'] = rtr_obj.get_id_perms().enable
rtr_q_dict['shared'] = False
rtr_q_dict['status'] = n_constants.NET_STATUS_ACTIVE
rtr_q_dict['gw_port_id'] = None
ext_net_uuid = self._get_external_gateway_info(rtr_obj)
if not ext_net_uuid:
rtr_q_dict['external_gateway_info'] = None
else:
rtr_q_dict['external_gateway_info'] = {'network_id': ext_net_uuid,
'enable_snat': True}
if contrail_extensions_enabled:
rtr_q_dict.update({'contrail:fq_name': rtr_obj.get_fq_name()})
if fields:
rtr_q_dict = self._filter_res_dict(rtr_q_dict, fields)
return rtr_q_dict
def _router_add_gateway(self, router_q, rtr_obj):
ext_gateway = router_q.get('external_gateway_info')
old_ext_gateway = self._get_external_gateway_info(rtr_obj)
if ext_gateway or old_ext_gateway:
network_id = None
if ext_gateway:
network_id = ext_gateway.get('network_id')
if network_id:
if old_ext_gateway and network_id == old_ext_gateway:
return
try:
vn_obj = self._vnc_lib.virtual_network_read(id=network_id)
if not vn_obj.get_router_external():
self._raise_contrail_exception(
'BadRequest', resource='router',
msg="Network %s is not a valid "
"external network" % network_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception('NetworkNotFound',
net_id=network_id)
self._router_set_external_gateway(rtr_obj, vn_obj)
else:
self._router_clear_external_gateway(rtr_obj)
def _router_set_external_gateway(self, router_obj, ext_net_obj):
router_obj.set_virtual_network(ext_net_obj)
self._vnc_lib.logical_router_update(router_obj)
def _router_clear_external_gateway(self, router_obj):
router_obj.set_virtual_network_list([])
self._vnc_lib.logical_router_update(router_obj)
def _set_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
except vnc_exc.NoIdError:
            # No route table is associated with this router, so the gateway is not set
return
try:
vn_obj = self._vnc_lib.virtual_network_read(id=network_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception(
'NetworkNotFound', net_id=network_id)
vn_obj.set_route_table(rt_obj)
self._vnc_lib.virtual_network_update(vn_obj)
def _clear_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
except vnc_exc.NoIdError:
            # No route table is associated with this router, so the gateway is not set
return
try:
vn_obj = self._vnc_lib.virtual_network_read(id=network_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception(
'NetworkNotFound', net_id=network_id)
vn_obj.del_route_table(rt_obj)
self._vnc_lib.virtual_network_update(vn_obj)
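# Hypothetical, framework-free sketch of the naming convention the SNAT
# helpers above share: one route table per logical router, scoped under the
# router's project.
def snat_route_table_fq_name(project_fq_name, router_uuid):
    return list(project_fq_name) + ['rt_' + router_uuid]

# e.g. snat_route_table_fq_name(['default-domain', 'demo'], '42') gives
# ['default-domain', 'demo', 'rt_42'], the fq_name that both
# _set_snat_routing_table and _clear_snat_routing_table try to read.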
class LogicalRouterCreateHandler(res_handler.ResourceCreateHandler,
LogicalRouterMixin):
resource_create_method = 'logical_router_create'
def _create_router(self, router_q):
project_id = self._project_id_neutron_to_vnc(router_q['tenant_id'])
project_obj = self._project_read(proj_id=project_id)
id_perms = vnc_api.IdPermsType(enable=True)
return vnc_api.LogicalRouter(router_q.get('name'), project_obj,
id_perms=id_perms)
def resource_create(self, context, router_q):
rtr_obj = self._neutron_dict_to_rtr_obj(
router_q, self._create_router(router_q))
rtr_uuid = self._resource_create(rtr_obj)
contrail_extensions_enabled = self._kwargs.get(
'contrail_extensions_enabled', False)
# read it back to update id perms
rtr_obj = self._resource_get(id=rtr_uuid)
self._router_add_gateway(router_q, rtr_obj)
return self._rtr_obj_to_neutron_dict(
rtr_obj, contrail_extensions_enabled=contrail_extensions_enabled)
class LogicalRouterDeleteHandler(res_handler.ResourceDeleteHandler,
LogicalRouterMixin):
resource_delete_method = 'logical_router_delete'
def resource_delete(self, context, rtr_id):
try:
rtr_obj = self._resource_get(id=rtr_id)
if rtr_obj.get_virtual_machine_interface_refs():
self._raise_contrail_exception('RouterInUse',
router_id=rtr_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception('RouterNotFound',
router_id=rtr_id)
self._router_clear_external_gateway(rtr_obj)
try:
self._resource_delete(id=rtr_id)
except vnc_exc.RefsExistError:
self._raise_contrail_exception('RouterInUse', router_id=rtr_id)
class LogicalRouterUpdateHandler(res_handler.ResourceUpdateHandler,
LogicalRouterMixin):
resource_update_method = 'logical_router_update'
def _get_rtr_obj(self, router_q):
return self._resource_get(id=router_q.get('id'))
def resource_update(self, context, rtr_id, router_q):
router_q['id'] = rtr_id
rtr_obj = self._neutron_dict_to_rtr_obj(
router_q, self._get_rtr_obj(router_q))
self._resource_update(rtr_obj)
self._router_add_gateway(router_q, rtr_obj)
return self._rtr_obj_to_neutron_dict(rtr_obj)
class LogicalRouterGetHandler(res_handler.ResourceGetHandler,
LogicalRouterMixin):
resource_get_method = 'logical_router_read'
resource_list_method = 'logical_routers_list'
def _router_list_project(self, project_id=None, detail=False):
if project_id:
try:
project_uuid = self._project_id_neutron_to_vnc(project_id)
except Exception:
return []
else:
project_uuid = None
resp = self._resource_list(parent_id=project_uuid,
detail=detail)
if detail:
return resp
return resp['logical-routers']
def _get_router_list_for_ids(self, rtr_ids, extensions_enabled=True):
ret_list = []
for rtr_id in rtr_ids or []:
try:
rtr_obj = self._resource_get(id=rtr_id)
rtr_info = self._rtr_obj_to_neutron_dict(
rtr_obj,
contrail_extensions_enabled=extensions_enabled)
ret_list.append(rtr_info)
except vnc_exc.NoIdError:
pass
return ret_list
def _get_router_list_for_project(self, project_id=None):
project_rtrs = self._router_list_project(project_id=project_id)
rtr_uuids = [rtr['uuid'] for rtr in project_rtrs]
return self._get_router_list_for_ids(rtr_uuids)
def _fip_pool_ref_routers(self, project_id):
"""TODO."""
return []
def get_vmi_obj_router_id(self, vmi_obj, project_id=None):
vmi_get_handler = vmi_handler.VMInterfaceGetHandler(
self._vnc_lib)
port_net_id = vmi_obj.get_virtual_network_refs()[0]['uuid']
# find router_id from port
router_list = self._router_list_project(project_id=project_id,
detail=True)
for router_obj in router_list or []:
for vmi in (router_obj.get_virtual_machine_interface_refs()
or []):
vmi_obj = vmi_get_handler.get_vmi_obj(vmi['uuid'])
if (vmi_obj.get_virtual_network_refs()[0]['uuid'] ==
port_net_id):
return router_obj.uuid
def resource_get(self, context, rtr_uuid, fields=None):
try:
rtr_obj = self._resource_get(id=rtr_uuid)
except vnc_exc.NoIdError:
self._raise_contrail_exception('RouterNotFound',
router_id=rtr_uuid)
return self._rtr_obj_to_neutron_dict(rtr_obj, fields=fields)
def resource_list(self, context, filters, fields=None):
extensions_enabled = self._kwargs.get(
'contrail_extensions_enabled', False)
ret_list = []
if filters and 'shared' in filters:
if filters['shared'][0]:
# no support for shared routers
return ret_list
if not filters:
return self._get_router_list_for_project()
        all_rtrs = []  # all routers in all projects
if 'id' in filters:
return self._get_router_list_for_ids(filters['id'],
extensions_enabled)
if 'tenant_id' in filters:
# read all routers in project, and prune below
project_ids = self._validate_project_ids(
context, project_ids=filters['tenant_id'])
for p_id in project_ids:
if 'router:external' in filters:
all_rtrs.append(self._fip_pool_ref_routers(p_id))
else:
project_rtrs = self._router_list_project(p_id)
all_rtrs.append(project_rtrs)
else:
# read all routers in all projects
project_rtrs = self._router_list_project()
all_rtrs.append(project_rtrs)
# prune phase
for project_rtrs in all_rtrs:
for proj_rtr in project_rtrs:
proj_rtr_id = proj_rtr['uuid']
if not self._filters_is_present(filters, 'id', proj_rtr_id):
continue
proj_rtr_fq_name = unicode(proj_rtr['fq_name'])
if not self._filters_is_present(filters, 'contrail:fq_name',
proj_rtr_fq_name):
continue
try:
rtr_obj = self._resource_get(id=proj_rtr['uuid'])
if not self._filters_is_present(
filters, 'name',
rtr_obj.get_display_name() or rtr_obj.name):
continue
rtr_info = self._rtr_obj_to_neutron_dict(
rtr_obj,
contrail_extensions_enabled=extensions_enabled,
fields=fields)
ret_list.append(rtr_info)
except vnc_exc.NoIdError:
continue
return ret_list
def resource_count(self, context, filters=None):
count = self._resource_count_optimized(filters)
if count is not None:
return count
        rtrs_info = self.resource_list(context, filters=filters)
return len(rtrs_info)
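# Hedged sketch of the filter predicate the prune phase above relies on; the
# real _filters_is_present comes from the shared handler base class (not shown
# here), so this is only an assumed approximation: a resource passes when the
# key is absent from the filters or its value is among the requested ones.
def filters_is_present(filters, key, value):
    if not filters or key not in filters:
        return True
    return value in filters[key]

# e.g. filters_is_present({'id': ['a', 'b']}, 'id', 'c') is False, so that
# router would be pruned from the result list.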
class LogicalRouterInterfaceHandler(res_handler.ResourceGetHandler,
res_handler.ResourceUpdateHandler,
LogicalRouterMixin):
resource_get_method = 'logical_router_read'
resource_list_method = 'logical_routers_list'
resource_update_method = 'logical_router_update'
def __init__(self, vnc_lib):
super(LogicalRouterInterfaceHandler, self).__init__(vnc_lib)
self._vmi_handler = vmi_handler.VMInterfaceHandler(
self._vnc_lib)
self._subnet_handler = subnet_handler.SubnetHandler(self._vnc_lib)
def _get_subnet_cidr(self, subnet_id, subnet_dict):
for subnet in subnet_dict:
if subnet['id'] == subnet_id:
return subnet['cidr']
def _check_for_dup_router_subnet(self, router_id, subnet_id,
subnet_cidr):
try:
router_vmi_objs = self._vmi_handler.get_vmi_list(
back_ref_id=[router_id])
# It's possible router ports are on the same network, but
# different subnets.
new_ipnet = netaddr.IPNetwork(subnet_cidr)
port_req_memo = {'virtual-machines': {},
'instance-ips': {},
'subnets': {}}
for vmi_obj in router_vmi_objs:
net_id = self._vmi_handler.get_vmi_net_id(vmi_obj)
vn_obj = self._vnc_lib.virtual_network_read(id=net_id)
fixed_ips = self._vmi_handler.get_vmi_ip_dict(vmi_obj, vn_obj,
port_req_memo)
vn_subnets = (
subnet_handler.ContrailSubnetHandler.get_vn_subnets(
vn_obj))
for ip in fixed_ips:
if ip['subnet_id'] == subnet_id:
msg = ("Router %s already has a port on subnet %s"
% (router_id, subnet_id))
self._raise_contrail_exception(
'BadRequest', resource='router', msg=msg)
sub_id = ip['subnet_id']
cidr = self._get_subnet_cidr(sub_id, vn_subnets)
ipnet = netaddr.IPNetwork(cidr)
match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
if match1 or match2:
data = {'subnet_cidr': subnet_cidr,
'subnet_id': subnet_id,
'cidr': cidr,
'sub_id': sub_id}
msg = (("Cidr %(subnet_cidr)s of subnet "
"%(subnet_id)s overlaps with cidr %(cidr)s "
"of subnet %(sub_id)s") % data)
self._raise_contrail_exception(
'BadRequest', resource='router', msg=msg)
except vnc_exc.NoIdError:
pass
def _get_router_iface_vnc_info(self, context, router_id, port_id=None,
subnet_id=None):
if port_id:
vmi_obj, vn_obj, rtr_uuid, fixed_ips = self._get_vmi_info(port_id)
net_id = vn_obj.uuid
if rtr_uuid:
self._raise_contrail_exception('PortInUse',
net_id=net_id,
port_id=port_id,
device_id=rtr_uuid)
if len(fixed_ips) != 1:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Router port must have exactly one fixed IP')
subnet_id = fixed_ips[0]['subnet_id']
subnet_vnc = self._subnet_handler._subnet_read(subnet_id=subnet_id)
if not subnet_vnc.default_gateway:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Subnet for router interface must have a gateway IP')
subnet_cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
subnet_vnc.subnet.get_ip_prefix_len())
self._check_for_dup_router_subnet(router_id, subnet_id, subnet_cidr)
if not port_id:
vn_obj = self._subnet_handler.get_vn_obj_for_subnet_id(subnet_id)
fixed_ip = {'ip_address': subnet_vnc.default_gateway,
'subnet_id': subnet_id}
port_q = {
'tenant_id': self._project_id_vnc_to_neutron(
vn_obj.parent_uuid),
'network_id': vn_obj.uuid,
'fixed_ips': [fixed_ip],
'admin_state_up': True,
'device_id': router_id,
'device_owner': n_constants.DEVICE_OWNER_ROUTER_INTF,
'name': ''}
port = self._vmi_handler.resource_create(context=context,
port_q=port_q)
vmi_obj = self._vmi_handler.get_vmi_obj(port['id'])
return vmi_obj, vn_obj, subnet_id
def _update_snat_routing_table(self, router_obj, network_id,
set_snat=True):
project_obj = self._vnc_lib.project_read(id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
except vnc_exc.NoIdError:
            # No route table is associated with this router, so the gateway is not set
return
try:
vn_obj = self._vnc_lib.virtual_network_read(id=network_id)
except vnc_exc.NoIdError:
raise n_exceptions.NetworkNotFound(net_id=network_id)
if set_snat:
vn_obj.set_route_table(rt_obj)
else:
vn_obj.del_route_table(rt_obj)
self._vnc_lib.virtual_network_update(vn_obj)
def _get_vmi_info(self, port_id):
vmi_obj = self._vmi_handler.get_vmi_obj(
port_id, fields=['logical_router_back_refs',
'instance_ip_back_refs'])
net_id = self._vmi_handler.get_vmi_net_id(vmi_obj)
port_req_memo = {'virtual-machines': {},
'instance-ips': {},
'subnets': {}}
router_refs = getattr(vmi_obj, 'logical_router_back_refs', None)
if router_refs:
rtr_uuid = router_refs[0]['uuid']
else:
vm_ref = vmi_obj.get_virtual_machine_refs()
if vm_ref:
rtr_uuid = self._vmi_handler.get_port_gw_id(vm_ref[0],
port_req_memo)
else:
rtr_uuid = None
vn_obj = self._vnc_lib.virtual_network_read(id=net_id)
fixed_ips = self._vmi_handler.get_vmi_ip_dict(vmi_obj, vn_obj,
port_req_memo)
return vmi_obj, vn_obj, rtr_uuid, fixed_ips
def add_router_interface(self, context, router_id, port_id=None,
subnet_id=None):
router_obj = self._resource_get(id=router_id)
if not port_id and not subnet_id:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Either port or subnet must be specified')
vmi_obj, vn_obj, subnet_id = self._get_router_iface_vnc_info(
context, router_id, port_id=port_id, subnet_id=subnet_id)
self._update_snat_routing_table(router_obj, vn_obj.uuid)
vmi_obj.set_virtual_machine_interface_device_owner(
n_constants.DEVICE_OWNER_ROUTER_INTF)
self._vnc_lib.virtual_machine_interface_update(vmi_obj)
router_obj.add_virtual_machine_interface(vmi_obj)
self._resource_update(router_obj)
info = {
'id': router_id,
'tenant_id': self._project_id_vnc_to_neutron(vn_obj.parent_uuid),
'port_id': vmi_obj.uuid,
'subnet_id': subnet_id}
return info
def remove_router_interface(self, context, router_id, port_id=None,
subnet_id=None):
router_obj = self._resource_get(id=router_id)
tenant_id = None
vmi_obj = None
if port_id:
vmi_obj, vn_obj, rtr_uuid, fixed_ips = self._get_vmi_info(port_id)
if not rtr_uuid:
self._raise_contrail_exception('RouterInterfaceNotFound',
router_id=router_id,
port_id=port_id)
port_subnet_id = fixed_ips[0]['subnet_id']
if subnet_id and (port_subnet_id != subnet_id):
self._raise_contrail_exception('SubnetMismatchForPort',
port_id=port_id,
subnet_id=subnet_id)
subnet_id = port_subnet_id
elif subnet_id:
vn_obj = self._subnet_handler.get_vn_obj_for_subnet_id(subnet_id)
for intf in router_obj.get_virtual_machine_interface_refs() or []:
port_id = intf['uuid']
_, _, _, fixed_ips = self._get_vmi_info(port_id)
if subnet_id == fixed_ips[0]['subnet_id']:
break
else:
                msg = ("Subnet %s not connected to router %s "
                       % (subnet_id, router_id))
self._raise_contrail_exception('BadRequest',
resource='router', msg=msg)
network_id = vn_obj.uuid
tenant_id = self._project_id_vnc_to_neutron(vn_obj.parent_uuid)
self._update_snat_routing_table(router_obj, network_id, set_snat=False)
if not vmi_obj:
vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=port_id)
router_obj.del_virtual_machine_interface(vmi_obj)
self._vnc_lib.logical_router_update(router_obj)
self._vmi_handler.resource_delete(context, port_id=port_id)
info = {'id': router_id,
'tenant_id': tenant_id,
'port_id': port_id,
'subnet_id': subnet_id}
return info
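# Self-contained sketch of the CIDR overlap test performed by
# _check_for_dup_router_subnet with netaddr (the helper name is illustrative):
# two subnets clash when either network matches the other's CIDR.
def cidrs_overlap(cidr_a, cidr_b):
    net_a = netaddr.IPNetwork(cidr_a)
    net_b = netaddr.IPNetwork(cidr_b)
    return bool(netaddr.all_matching_cidrs(net_a, [cidr_b]) or
                netaddr.all_matching_cidrs(net_b, [cidr_a]))

# e.g. cidrs_overlap('10.0.0.0/24', '10.0.0.128/25') is True, which would
# trigger the BadRequest raised above when adding a second interface.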
class LogicalRouterHandler(LogicalRouterGetHandler,
LogicalRouterCreateHandler,
LogicalRouterDeleteHandler,
LogicalRouterUpdateHandler):
pass
| vpramo/contrail-neutron-plugin | neutron_plugin_contrail/plugins/opencontrail/vnc_client/router_res_handler.py | Python | apache-2.0 | 24,484 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
from openerp import workflow
class res_company(osv.Model):
_inherit = "res.company"
_columns = {
'sale_note': fields.text('Default Terms and Conditions', translate=True, help="Default terms and conditions for quotations."),
}
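# Illustrative, framework-free sketch (hypothetical helper, not part of the
# module): the one2many command tuples that onchange_fiscal_position further
# below dispatches on -- (0, 0, vals) create, (1, id, vals) update,
# (4, id) link an existing record, (6, 0, ids) replace the whole set.
def _describe_o2m_command(command):
    names = {0: 'create', 1: 'update', 2: 'delete', 3: 'unlink',
             4: 'link', 5: 'clear', 6: 'replace'}
    return names.get(command[0], 'unknown')

# e.g. _describe_o2m_command((6, 0, [1, 2, 3])) == 'replace'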
class sale_order(osv.osv):
_name = "sale.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Sales Order"
_track = {
'state': {
'sale.mt_order_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state in ['manual'],
'sale.mt_order_sent': lambda self, cr, uid, obj, ctx=None: obj.state in ['sent']
},
}
def _amount_line_tax(self, cr, uid, line, context=None):
val = 0.0
for c in self.pool.get('account.tax').compute_all(cr, uid, line.tax_id, line.price_unit * (1-(line.discount or 0.0)/100.0), line.product_uom_qty, line.product_id, line.order_id.partner_id)['taxes']:
val += c.get('amount', 0.0)
return val
def _amount_all_wrapper(self, cr, uid, ids, field_name, arg, context=None):
""" Wrapper because of direct method passing as parameter for function fields """
return self._amount_all(cr, uid, ids, field_name, arg, context=context)
def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
cur_obj = self.pool.get('res.currency')
res = {}
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = {
'amount_untaxed': 0.0,
'amount_tax': 0.0,
'amount_total': 0.0,
}
val = val1 = 0.0
cur = order.pricelist_id.currency_id
for line in order.order_line:
val1 += line.price_subtotal
val += self._amount_line_tax(cr, uid, line, context=context)
res[order.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val)
res[order.id]['amount_untaxed'] = cur_obj.round(cr, uid, cur, val1)
res[order.id]['amount_total'] = res[order.id]['amount_untaxed'] + res[order.id]['amount_tax']
return res
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
if sale.invoiced:
res[sale.id] = 100.0
continue
tot = 0.0
for invoice in sale.invoice_ids:
if invoice.state not in ('draft', 'cancel'):
tot += invoice.amount_untaxed
if tot:
res[sale.id] = min(100.0, tot * 100.0 / (sale.amount_untaxed or 1.00))
else:
res[sale.id] = 0.0
return res
def _invoice_exists(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = False
if sale.invoice_ids:
res[sale.id] = True
return res
def _invoiced(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = True
invoice_existence = False
for invoice in sale.invoice_ids:
if invoice.state!='cancel':
invoice_existence = True
if invoice.state != 'paid':
res[sale.id] = False
break
if not invoice_existence or sale.state == 'manual':
res[sale.id] = False
return res
def _invoiced_search(self, cursor, user, obj, name, args, context=None):
if not len(args):
return []
clause = ''
sale_clause = ''
no_invoiced = False
for arg in args:
if (arg[1] == '=' and arg[2]) or (arg[1] == '!=' and not arg[2]):
clause += 'AND inv.state = \'paid\''
else:
clause += 'AND inv.state != \'cancel\' AND sale.state != \'cancel\' AND inv.state <> \'paid\' AND rel.order_id = sale.id '
sale_clause = ', sale_order AS sale '
no_invoiced = True
cursor.execute('SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \
'WHERE rel.invoice_id = inv.id ' + clause)
res = cursor.fetchall()
if no_invoiced:
cursor.execute('SELECT sale.id ' \
'FROM sale_order AS sale ' \
'WHERE sale.id NOT IN ' \
'(SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel) and sale.state != \'cancel\'')
res.extend(cursor.fetchall())
if not res:
return [('id', '=', 0)]
return [('id', 'in', [x[0] for x in res])]
def _get_order(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
result[line.order_id.id] = True
return result.keys()
def _get_default_company(self, cr, uid, context=None):
company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
if not company_id:
raise osv.except_osv(_('Error!'), _('There is no default company for the current user!'))
return company_id
def _get_default_section_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
if not section_id:
section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
return section_id
def _resolve_section_id_from_context(self, cr, uid, context=None):
""" Returns ID of section based on the value of 'section_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
if type(context.get('default_section_id')) in (int, long):
return context.get('default_section_id')
if isinstance(context.get('default_section_id'), basestring):
section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
if len(section_ids) == 1:
return int(section_ids[0][0])
return None
    #This function returns the EUR currency record to use as the default.
    def _get_default_currency(self, cr, uid, context=None):
        res = self.pool.get('res.currency').search(cr, uid, [('name', '=', 'EUR')], context=context)
        return res and res[0] or False
_columns = {
        #Many2one to res.currency used to pick the invoicing currency (defaults to EUR).
'currency_id_invoices': fields.many2one('res.currency', string="Valuta", required=True),
'name': fields.char('Order Reference', required=True, copy=False,
readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True),
'origin': fields.char('Source Document', help="Reference of the document that generated this sales order request."),
'client_order_ref': fields.char('Reference/Description', copy=False),
'state': fields.selection([
('draft', 'Draft Quotation'),
('sent', 'Quotation Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Sales Order'),
('manual', 'Sale to Invoice'),
('shipping_except', 'Shipping Exception'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, copy=False, help="Gives the status of the quotation or sales order.\
\nThe exception status is automatically set when a cancel operation occurs \
in the invoice validation (Invoice Exception) or in the picking list process (Shipping Exception).\nThe 'Waiting Schedule' status is set when the invoice is confirmed\
but waiting for the scheduler to run on the order date.", select=True),
'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False),
'create_date': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which sales order is created."),
'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed.", copy=False),
'user_id': fields.many2one('res.users', 'Salesperson', states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True, track_visibility='onchange'),
'partner_id': fields.many2one('res.partner', 'Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, select=True, track_visibility='always'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order."),
'partner_shipping_id': fields.many2one('res.partner', 'Delivery Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Delivery address for current sales order."),
'order_policy': fields.selection([
('manual', 'On Demand'),
], 'Create Invoice', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""This field controls how invoice and delivery operations are synchronized."""),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order."),
'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=True),
'project_id': fields.many2one('account.analytic.account', 'Contract / Analytic', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order."),
'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, copy=False, help="This is the list of invoices that have been generated for this sales order. The same sales order may have been invoiced in several times (by line for example)."),
'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced Ratio', type='float'),
'invoiced': fields.function(_invoiced, string='Paid',
fnct_search=_invoiced_search, type='boolean', help="It indicates that an invoice has been paid."),
'invoice_exists': fields.function(_invoice_exists, string='Invoiced',
fnct_search=_invoiced_search, type='boolean', help="It indicates that sales order has at least one invoice."),
'note': fields.text('Terms and conditions'),
'amount_untaxed': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Untaxed Amount',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The amount without tax.", track_visibility='always'),
'amount_tax': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Taxes',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The tax amount."),
'amount_total': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Total',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The total amount."),
'payment_term': fields.many2one('account.payment.term', 'Payment Term'),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'),
'company_id': fields.many2one('res.company', 'Company'),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False),
'product_id': fields.related('order_line', 'product_id', type='many2one', relation='product.product', string='Product'),
}
_defaults = {
        #Defaults the invoicing currency to EUR via _get_default_currency.
'currency_id_invoices': _get_default_currency,
'date_order': fields.datetime.now,
'order_policy': 'manual',
'company_id': _get_default_company,
'state': 'draft',
'user_id': lambda obj, cr, uid, context: uid,
'name': lambda obj, cr, uid, context: '/',
'partner_invoice_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['invoice'])['invoice'],
'partner_shipping_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['delivery'])['delivery'],
'note': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.sale_note,
'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c),
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Order Reference must be unique per Company!'),
]
_order = 'date_order desc, id desc'
# Form filling
def unlink(self, cr, uid, ids, context=None):
sale_orders = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in sale_orders:
if s['state'] in ['draft', 'cancel']:
unlink_ids.append(s['id'])
else:
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a confirmed sales order, you must cancel it before!'))
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
def copy_quotation(self, cr, uid, ids, context=None):
id = self.copy(cr, uid, ids[0], context=context)
view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'sale', 'view_order_form')
        view_id = view_ref and view_ref[1] or False
return {
'type': 'ir.actions.act_window',
'name': _('Sales Order'),
'res_model': 'sale.order',
'res_id': id,
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'target': 'current',
'nodestroy': True,
}
def onchange_pricelist_id(self, cr, uid, ids, pricelist_id, order_lines, context=None):
context = context or {}
if not pricelist_id:
return {}
value = {
'currency_id': self.pool.get('product.pricelist').browse(cr, uid, pricelist_id, context=context).currency_id.id
}
if not order_lines or order_lines == [(6, 0, [])]:
return {'value': value}
warning = {
'title': _('Pricelist Warning!'),
'message' : _('If you change the pricelist of this order (and eventually the currency), prices of existing order lines will not be updated.')
}
return {'warning': warning, 'value': value}
def get_salenote(self, cr, uid, ids, partner_id, context=None):
context_lang = context.copy()
if partner_id:
partner_lang = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).lang
context_lang.update({'lang': partner_lang})
return self.pool.get('res.users').browse(cr, uid, uid, context=context_lang).company_id.sale_note
def onchange_delivery_id(self, cr, uid, ids, company_id, partner_id, delivery_id, fiscal_position, context=None):
r = {'value': {}}
if not fiscal_position:
if not company_id:
company_id = self._get_default_company(cr, uid, context=context)
fiscal_position = self.pool['account.fiscal.position'].get_fiscal_position(cr, uid, company_id, partner_id, delivery_id, context=context)
if fiscal_position:
r['value']['fiscal_position'] = fiscal_position
return r
def onchange_partner_id(self, cr, uid, ids, part, context=None):
if not part:
return {'value': {'partner_invoice_id': False, 'partner_shipping_id': False, 'payment_term': False, 'fiscal_position': False}}
part = self.pool.get('res.partner').browse(cr, uid, part, context=context)
addr = self.pool.get('res.partner').address_get(cr, uid, [part.id], ['delivery', 'invoice', 'contact'])
pricelist = part.property_product_pricelist and part.property_product_pricelist.id or False
payment_term = part.property_payment_term and part.property_payment_term.id or False
dedicated_salesman = part.user_id and part.user_id.id or uid
val = {
'partner_invoice_id': addr['invoice'],
'partner_shipping_id': addr['delivery'],
'payment_term': payment_term,
'user_id': dedicated_salesman,
}
delivery_onchange = self.onchange_delivery_id(cr, uid, ids, False, part.id, addr['delivery'], False, context=context)
val.update(delivery_onchange['value'])
if pricelist:
val['pricelist_id'] = pricelist
sale_note = self.get_salenote(cr, uid, ids, part.id, context=context)
if sale_note: val.update({'note': sale_note})
return {'value': val}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals.get('name', '/') == '/':
vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'sale.order') or '/'
if vals.get('partner_id') and any(f not in vals for f in ['partner_invoice_id', 'partner_shipping_id', 'pricelist_id', 'fiscal_position']):
defaults = self.onchange_partner_id(cr, uid, [], vals['partner_id'], context=context)['value']
if not vals.get('fiscal_position') and vals.get('partner_shipping_id'):
delivery_onchange = self.onchange_delivery_id(cr, uid, [], vals.get('company_id'), None, vals['partner_id'], vals.get('partner_shipping_id'), context=context)
defaults.update(delivery_onchange['value'])
vals = dict(defaults, **vals)
ctx = dict(context or {}, mail_create_nolog=True)
new_id = super(sale_order, self).create(cr, uid, vals, context=ctx)
self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=ctx)
return new_id
def button_dummy(self, cr, uid, ids, context=None):
return True
# FIXME: deprecated method, overriders should be using _prepare_invoice() instead.
# can be removed after 6.1.
def _inv_get(self, cr, uid, order, context=None):
return {}
def _prepare_invoice(self, cr, uid, order, lines, context=None):
"""Prepare the dict of values to create the new invoice for a
sales order. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record order: sale.order record to invoice
        :param list(int) lines: list of invoice line IDs that must be
attached to the invoice
:return: dict of value to create() the invoice
"""
if context is None:
context = {}
journal_ids = self.pool.get('account.journal').search(cr, uid,
[('type', '=', 'sale'), ('company_id', '=', order.company_id.id)],
limit=1)
if not journal_ids:
raise osv.except_osv(_('Error!'),
_('Please define sales journal for this company: "%s" (id:%d).') % (order.company_id.name, order.company_id.id))
invoice_vals = {
'name': order.client_order_ref or '',
'origin': order.name,
'type': 'out_invoice',
'reference': order.client_order_ref or order.name,
'account_id': order.partner_id.property_account_receivable.id,
'partner_id': order.partner_invoice_id.id,
'journal_id': journal_ids[0],
'invoice_line': [(6, 0, lines)],
'currency_id': order.pricelist_id.currency_id.id,
'comment': order.note,
'payment_term': order.payment_term and order.payment_term.id or False,
'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,
'date_invoice': context.get('date_invoice', False),
'company_id': order.company_id.id,
'user_id': order.user_id and order.user_id.id or False,
'section_id' : order.section_id.id
}
# Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1
invoice_vals.update(self._inv_get(cr, uid, order, context=context))
return invoice_vals
def _make_invoice(self, cr, uid, order, lines, context=None):
inv_obj = self.pool.get('account.invoice')
obj_invoice_line = self.pool.get('account.invoice.line')
if context is None:
context = {}
invoiced_sale_line_ids = self.pool.get('sale.order.line').search(cr, uid, [('order_id', '=', order.id), ('invoiced', '=', True)], context=context)
from_line_invoice_ids = []
for invoiced_sale_line_id in self.pool.get('sale.order.line').browse(cr, uid, invoiced_sale_line_ids, context=context):
for invoice_line_id in invoiced_sale_line_id.invoice_lines:
if invoice_line_id.invoice_id.id not in from_line_invoice_ids:
from_line_invoice_ids.append(invoice_line_id.invoice_id.id)
for preinv in order.invoice_ids:
if preinv.state not in ('cancel',) and preinv.id not in from_line_invoice_ids:
for preline in preinv.invoice_line:
inv_line_id = obj_invoice_line.copy(cr, uid, preline.id, {'invoice_id': False, 'price_unit': -preline.price_unit})
lines.append(inv_line_id)
inv = self._prepare_invoice(cr, uid, order, lines, context=context)
inv_id = inv_obj.create(cr, uid, inv, context=context)
data = inv_obj.onchange_payment_term_date_invoice(cr, uid, [inv_id], inv['payment_term'], time.strftime(DEFAULT_SERVER_DATE_FORMAT))
if data.get('value', False):
inv_obj.write(cr, uid, [inv_id], data['value'], context=context)
inv_obj.button_compute(cr, uid, [inv_id])
return inv_id
def print_quotation(self, cr, uid, ids, context=None):
'''
        This function prints the sales order and marks it as sent, so that the next step of the workflow is easier to see
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time'
self.signal_workflow(cr, uid, ids, 'quotation_sent')
return self.pool['report'].get_action(cr, uid, ids, 'sale.report_saleorder', context=context)
def manual_invoice(self, cr, uid, ids, context=None):
""" create invoices for the given sales orders (ids), and open the form
view of one of the newly created invoices
"""
mod_obj = self.pool.get('ir.model.data')
# create invoices through the sales orders' workflow
inv_ids0 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
self.signal_workflow(cr, uid, ids, 'manual_invoice')
inv_ids1 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
# determine newly created invoices
new_inv_ids = list(inv_ids1 - inv_ids0)
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
        res_id = res and res[1] or False
return {
'name': _('Customer Invoices'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id],
'res_model': 'account.invoice',
'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': new_inv_ids and new_inv_ids[0] or False,
}
def action_view_invoice(self, cr, uid, ids, context=None):
'''
        This function returns an action that displays the existing invoices of the given sales order ids, either in a list view or in a form view if there is only one invoice to show.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_invoice_tree1')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
#compute the number of invoices to display
inv_ids = []
for so in self.browse(cr, uid, ids, context=context):
inv_ids += [invoice.id for invoice in so.invoice_ids]
#choose the view_mode accordingly
if len(inv_ids)>1:
result['domain'] = "[('id','in',["+','.join(map(str, inv_ids))+"])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
result['views'] = [(res and res[1] or False, 'form')]
result['res_id'] = inv_ids and inv_ids[0] or False
return result
def test_no_product(self, cr, uid, order, context):
for line in order.order_line:
            if line.product_id and (line.product_id.type != 'service'):
return False
return True
def action_invoice_create(self, cr, uid, ids, grouped=False, states=None, date_invoice = False, context=None):
if states is None:
states = ['confirmed', 'done', 'exception']
res = False
invoices = {}
invoice_ids = []
invoice = self.pool.get('account.invoice')
obj_sale_order_line = self.pool.get('sale.order.line')
partner_currency = {}
        # If a date was specified, use it as the invoice date; useful when invoices are
        # generated this month with the last day of the previous month as invoice date
if date_invoice:
context = dict(context or {}, date_invoice=date_invoice)
for o in self.browse(cr, uid, ids, context=context):
currency_id = o.pricelist_id.currency_id.id
            if (o.partner_id.id in partner_currency) and (partner_currency[o.partner_id.id] != currency_id):
raise osv.except_osv(
_('Error!'),
_('You cannot group sales having different currencies for the same partner.'))
partner_currency[o.partner_id.id] = currency_id
lines = []
for line in o.order_line:
if line.invoiced:
continue
elif (line.state in states):
lines.append(line.id)
created_lines = obj_sale_order_line.invoice_line_create(cr, uid, lines)
if created_lines:
invoices.setdefault(o.partner_invoice_id.id or o.partner_id.id, []).append((o, created_lines))
if not invoices:
for o in self.browse(cr, uid, ids, context=context):
for i in o.invoice_ids:
if i.state == 'draft':
return i.id
for val in invoices.values():
if grouped:
res = self._make_invoice(cr, uid, val[0][0], reduce(lambda x, y: x + y, [l for o, l in val], []), context=context)
invoice_ref = ''
origin_ref = ''
for o, l in val:
invoice_ref += (o.client_order_ref or o.name) + '|'
origin_ref += (o.origin or o.name) + '|'
self.write(cr, uid, [o.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [o.id], context=context)
                #remove the trailing '|' from invoice_ref and origin_ref
if len(invoice_ref) >= 1:
invoice_ref = invoice_ref[:-1]
if len(origin_ref) >= 1:
origin_ref = origin_ref[:-1]
invoice.write(cr, uid, [res], {'origin': origin_ref, 'name': invoice_ref})
else:
for order, il in val:
res = self._make_invoice(cr, uid, order, il, context=context)
invoice_ids.append(res)
self.write(cr, uid, [order.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context)
return res
def action_invoice_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'invoice_except'}, context=context)
return True
def action_invoice_end(self, cr, uid, ids, context=None):
for this in self.browse(cr, uid, ids, context=context):
for line in this.order_line:
if line.state == 'exception':
line.write({'state': 'confirmed'})
if this.state == 'invoice_except':
this.write({'state': 'progress'})
return True
def action_cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
sale_order_line_obj = self.pool.get('sale.order.line')
account_invoice_obj = self.pool.get('account.invoice')
for sale in self.browse(cr, uid, ids, context=context):
for inv in sale.invoice_ids:
if inv.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Cannot cancel this sales order!'),
_('First cancel all invoices attached to this sales order.'))
inv.signal_workflow('invoice_cancel')
sale_order_line_obj.write(cr, uid, [l.id for l in sale.order_line],
{'state': 'cancel'})
self.write(cr, uid, ids, {'state': 'cancel'})
return True
def action_button_confirm(self, cr, uid, ids, context=None):
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
self.signal_workflow(cr, uid, ids, 'order_confirm')
return True
def action_wait(self, cr, uid, ids, context=None):
context = context or {}
for o in self.browse(cr, uid, ids):
if not o.order_line:
raise osv.except_osv(_('Error!'),_('You cannot confirm a sales order which has no line.'))
noprod = self.test_no_product(cr, uid, o, context)
if (o.order_policy == 'manual') or noprod:
self.write(cr, uid, [o.id], {'state': 'manual', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
else:
self.write(cr, uid, [o.id], {'state': 'progress', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
self.pool.get('sale.order.line').button_confirm(cr, uid, [x.id for x in o.order_line])
return True
def action_quotation_send(self, cr, uid, ids, context=None):
'''
This function opens a window to compose an email, with the edi sale template message loaded by default
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
ir_model_data = self.pool.get('ir.model.data')
try:
template_id = ir_model_data.get_object_reference(cr, uid, 'sale', 'email_template_edi_sale')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict()
ctx.update({
'default_model': 'sale.order',
'default_res_id': ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
return {
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form_id, 'form')],
'view_id': compose_form_id,
'target': 'new',
'context': ctx,
}
def action_done(self, cr, uid, ids, context=None):
for order in self.browse(cr, uid, ids, context=context):
self.pool.get('sale.order.line').write(cr, uid, [line.id for line in order.order_line], {'state': 'done'}, context=context)
return self.write(cr, uid, ids, {'state': 'done'}, context=context)
def _prepare_order_line_procurement(self, cr, uid, order, line, group_id=False, context=None):
date_planned = self._get_date_planned(cr, uid, order, line, order.date_order, context=context)
return {
'name': line.name,
'origin': order.name,
'date_planned': date_planned,
'product_id': line.product_id.id,
'product_qty': line.product_uom_qty,
'product_uom': line.product_uom.id,
'product_uos_qty': (line.product_uos and line.product_uos_qty) or line.product_uom_qty,
'product_uos': (line.product_uos and line.product_uos.id) or line.product_uom.id,
'company_id': order.company_id.id,
'group_id': group_id,
'invoice_state': (order.order_policy == 'picking') and '2binvoiced' or 'none',
'sale_line_id': line.id
}
def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
date_planned = datetime.strptime(start_date, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(days=line.delay or 0.0)
return date_planned
def _prepare_procurement_group(self, cr, uid, order, context=None):
return {'name': order.name, 'partner_id': order.partner_shipping_id.id}
def procurement_needed(self, cr, uid, ids, context=None):
        #when only the sale module is installed there is no need to create procurements;
        #modules installed on top of it (sale_service, sale_stock) change this behaviour.
sale_line_obj = self.pool.get('sale.order.line')
res = []
for order in self.browse(cr, uid, ids, context=context):
res.append(sale_line_obj.need_procurement(cr, uid, [line.id for line in order.order_line], context=context))
return any(res)
def action_ignore_delivery_exception(self, cr, uid, ids, context=None):
for sale_order in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, ids, {'state': 'progress' if sale_order.invoice_exists else 'manual'}, context=context)
return True
def action_ship_create(self, cr, uid, ids, context=None):
"""Create the required procurements to supply sales order lines, also connecting
the procurements to appropriate stock moves in order to bring the goods to the
sales order's requested location.
:return: True
"""
procurement_obj = self.pool.get('procurement.order')
sale_line_obj = self.pool.get('sale.order.line')
for order in self.browse(cr, uid, ids, context=context):
proc_ids = []
vals = self._prepare_procurement_group(cr, uid, order, context=context)
if not order.procurement_group_id:
group_id = self.pool.get("procurement.group").create(cr, uid, vals, context=context)
order.write({'procurement_group_id': group_id}, context=context)
for line in order.order_line:
                #Try to fix procurements in exception (possible when, after a shipping exception, the user chooses to recreate)
if line.procurement_ids:
#first check them to see if they are in exception or not (one of the related moves is cancelled)
procurement_obj.check(cr, uid, [x.id for x in line.procurement_ids if x.state not in ['cancel', 'done']])
line.refresh()
#run again procurement that are in exception in order to trigger another move
proc_ids += [x.id for x in line.procurement_ids if x.state in ('exception', 'cancel')]
elif sale_line_obj.need_procurement(cr, uid, [line.id], context=context):
if (line.state == 'done') or not line.product_id:
continue
vals = self._prepare_order_line_procurement(cr, uid, order, line, group_id=group_id, context=context)
proc_id = procurement_obj.create(cr, uid, vals, context=context)
proc_ids.append(proc_id)
#Confirm procurement order such that rules will be applied on it
            #note that the workflow normally ensures proc_ids isn't an empty list
procurement_obj.run(cr, uid, proc_ids, context=context)
            #if shipping was in exception and the user chose to recreate the delivery order, write the new status of the SO
if order.state == 'shipping_except':
val = {'state': 'progress', 'shipped': False}
if (order.order_policy == 'manual'):
for line in order.order_line:
if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
val['state'] = 'manual'
break
order.write(val)
return True
def onchange_fiscal_position(self, cr, uid, ids, fiscal_position, order_lines, context=None):
'''Update taxes of order lines for each line where a product is defined
:param list ids: not used
:param int fiscal_position: sale order fiscal position
:param list order_lines: command list for one2many write method
'''
order_line = []
fiscal_obj = self.pool.get('account.fiscal.position')
product_obj = self.pool.get('product.product')
line_obj = self.pool.get('sale.order.line')
fpos = False
if fiscal_position:
fpos = fiscal_obj.browse(cr, uid, fiscal_position, context=context)
for line in order_lines:
# create (0, 0, { fields })
# update (1, ID, { fields })
if line[0] in [0, 1]:
prod = None
if line[2].get('product_id'):
prod = product_obj.browse(cr, uid, line[2]['product_id'], context=context)
elif line[1]:
prod = line_obj.browse(cr, uid, line[1], context=context).product_id
if prod and prod.taxes_id:
line[2]['tax_id'] = [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]
order_line.append(line)
# link (4, ID)
# link all (6, 0, IDS)
elif line[0] in [4, 6]:
line_ids = line[0] == 4 and [line[1]] or line[2]
for line_id in line_ids:
prod = line_obj.browse(cr, uid, line_id, context=context).product_id
if prod and prod.taxes_id:
order_line.append([1, line_id, {'tax_id': [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]}])
else:
order_line.append([4, line_id])
else:
order_line.append(line)
return {'value': {'order_line': order_line}}
def test_procurements_done(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if not all([x.state == 'done' for x in line.procurement_ids]):
return False
return True
def test_procurements_except(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if any([x.state == 'cancel' for x in line.procurement_ids]):
return True
return False
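# Framework-free sketch (hypothetical helper, not part of the module) of the
# unit-price arithmetic shared by _amount_line_tax above and _amount_line in
# sale.order.line below: the percentage discount is applied before quantity
# and taxes.
def _discounted_subtotal(price_unit, discount_pct, qty):
    return price_unit * (1.0 - (discount_pct or 0.0) / 100.0) * qty

# e.g. _discounted_subtotal(100.0, 10.0, 3) == 270.0; account.tax.compute_all
# then applies the line's taxes on top of this base.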
# TODO add a field price_unit_uos
# - update it on change product and unit price
# - use it in report if there is a uos
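# Hedged override sketch (this class is hypothetical, not part of the module):
# the _prepare_invoice docstring above invites custom modules to extend it,
# provided super() keeps the extension chain intact.
class sale_order_custom(osv.osv):
    _inherit = 'sale.order'

    def _prepare_invoice(self, cr, uid, order, lines, context=None):
        vals = super(sale_order_custom, self)._prepare_invoice(
            cr, uid, order, lines, context=context)
        # append a note to the generated invoice without touching other values
        vals['comment'] = (vals.get('comment') or '') + '\nGenerated from %s' % order.name
        return vals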
class sale_order_line(osv.osv):
def need_procurement(self, cr, uid, ids, context=None):
        #when only the sale module is installed there is no need to create procurements;
        #modules installed on top of it (sale_service, sale_stock) change this behaviour.
prod_obj = self.pool.get('product.product')
for line in self.browse(cr, uid, ids, context=context):
if prod_obj.need_procurement(cr, uid, [line.product_id.id], context=context):
return True
return False
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
res = {}
if context is None:
context = {}
for line in self.browse(cr, uid, ids, context=context):
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = tax_obj.compute_all(cr, uid, line.tax_id, price, line.product_uom_qty, line.product_id, line.order_id.partner_id)
cur = line.order_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
return res
def _get_uom_id(self, cr, uid, *args):
try:
proxy = self.pool.get('ir.model.data')
result = proxy.get_object_reference(cr, uid, 'product', 'product_uom_unit')
return result[1]
        except Exception:
return False
def _fnct_line_invoiced(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, False)
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = this.invoice_lines and \
all(iline.invoice_id.state != 'cancel' for iline in this.invoice_lines)
return res
def _order_lines_from_invoice(self, cr, uid, ids, context=None):
        # direct access to the m2m table is the least convoluted way to achieve this (and is ok ACL-wise)
cr.execute("""SELECT DISTINCT sol.id FROM sale_order_invoice_rel rel JOIN
sale_order_line sol ON (sol.order_id = rel.order_id)
WHERE rel.invoice_id = ANY(%s)""", (list(ids),))
return [i[0] for i in cr.fetchall()]
_name = 'sale.order.line'
_description = 'Sales Order Line'
_columns = {
'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."),
'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True, readonly=True, states={'draft': [('readonly', False)]}, ondelete='restrict'),
'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True, copy=False),
'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean',
store={
'account.invoice': (_order_lines_from_invoice, ['state'], 10),
'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)
}),
'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')),
'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}),
'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner',help="A partner to whom the particular product needs to be allotted."),
'product_uom_qty': fields.float('Quantity', digits_compute= dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uom': fields.many2one('product.uom', 'Unit of Measure ', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uos_qty': fields.float('Quantity (UoS)' ,digits_compute= dp.get_precision('Product UoS'), readonly=True, states={'draft': [('readonly', False)]}),
'product_uos': fields.many2one('product.uom', 'Product UoS'),
'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}),
'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}),
'state': fields.selection(
[('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')],
'Status', required=True, readonly=True, copy=False,
help='* The \'Draft\' status is set when the related sales order in draft status. \
\n* The \'Confirmed\' status is set when the related sales order is confirmed. \
\n* The \'Exception\' status is set when the related sales order is set as exception. \
\n* The \'Done\' status is set when the sales order line has been picked. \
\n* The \'Cancelled\' status is set when a user cancel the sales order related.'),
'order_partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', store=True, string='Customer'),
'salesman_id':fields.related('order_id', 'user_id', type='many2one', relation='res.users', store=True, string='Salesperson'),
'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'delay': fields.float('Delivery Lead Time', required=True, help="Number of days between the order confirmation and the shipping of the products to the customer", readonly=True, states={'draft': [('readonly', False)]}),
'procurement_ids': fields.one2many('procurement.order', 'sale_line_id', 'Procurements'),
}
_order = 'order_id desc, sequence, id'
_defaults = {
'product_uom' : _get_uom_id,
'discount': 0.0,
'product_uom_qty': 1,
'product_uos_qty': 1,
'sequence': 10,
'state': 'draft',
'price_unit': 0.0,
'delay': 0.0,
}
def _get_line_qty(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos_qty or 0.0
return line.product_uom_qty
def _get_line_uom(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos.id
return line.product_uom.id
def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
"""Prepare the dict of values to create the new invoice line for a
sales order line. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record line: sale.order.line record to invoice
:param int account_id: optional ID of a G/L account to force
(this is used for returning products including service)
:return: dict of values to create() the invoice line
"""
res = {}
if not line.invoiced:
if not account_id:
if line.product_id:
account_id = line.product_id.property_account_income.id
if not account_id:
account_id = line.product_id.categ_id.property_account_income_categ.id
if not account_id:
raise osv.except_osv(_('Error!'),
_('Please define income account for this product: "%s" (id:%d).') % \
(line.product_id.name, line.product_id.id,))
else:
prop = self.pool.get('ir.property').get(cr, uid,
'property_account_income_categ', 'product.category',
context=context)
account_id = prop and prop.id or False
uosqty = self._get_line_qty(cr, uid, line, context=context)
uos_id = self._get_line_uom(cr, uid, line, context=context)
pu = 0.0
if uosqty:
pu = round(line.price_unit * line.product_uom_qty / uosqty,
self.pool.get('decimal.precision').precision_get(cr, uid, 'Product Price'))
fpos = line.order_id.fiscal_position or False
account_id = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, account_id)
if not account_id:
raise osv.except_osv(_('Error!'),
_('There is no Fiscal Position defined or Income category account defined for default properties of Product categories.'))
res = {
'name': line.name,
'sequence': line.sequence,
'origin': line.order_id.name,
'account_id': account_id,
'price_unit': pu,
'quantity': uosqty,
'discount': line.discount,
'uos_id': uos_id,
'product_id': line.product_id.id or False,
'invoice_line_tax_id': [(6, 0, [x.id for x in line.tax_id])],
'account_analytic_id': line.order_id.project_id and line.order_id.project_id.id or False,
}
return res
def invoice_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
create_ids = []
sales = set()
for line in self.browse(cr, uid, ids, context=context):
vals = self._prepare_order_line_invoice_line(cr, uid, line, False, context)
if vals:
inv_id = self.pool.get('account.invoice.line').create(cr, uid, vals, context=context)
self.write(cr, uid, [line.id], {'invoice_lines': [(4, inv_id)]}, context=context)
sales.add(line.order_id.id)
create_ids.append(inv_id)
# Trigger workflow events
for sale_id in sales:
workflow.trg_write(uid, 'sale.order', sale_id, cr)
return create_ids
def button_cancel(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if line.invoiced:
raise osv.except_osv(_('Invalid Action!'), _('You cannot cancel a sales order line that has already been invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def button_confirm(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'confirmed'})
def button_done(self, cr, uid, ids, context=None):
res = self.write(cr, uid, ids, {'state': 'done'})
for line in self.browse(cr, uid, ids, context=context):
workflow.trg_write(uid, 'sale.order', line.order_id.id, cr)
return res
def uos_change(self, cr, uid, ids, product_uos, product_uos_qty=0, product_id=None):
product_obj = self.pool.get('product.product')
if not product_id:
return {'value': {'product_uom': product_uos,
'product_uom_qty': product_uos_qty}, 'domain': {}}
product = product_obj.browse(cr, uid, product_id)
value = {
'product_uom': product.uom_id.id,
}
# FIXME must depend on uos/uom of the product and not only of the coeff.
try:
value.update({
'product_uom_qty': product_uos_qty / product.uos_coeff,
'th_weight': product_uos_qty / product.uos_coeff * product.weight
})
except ZeroDivisionError:
pass
return {'value': value}
def create(self, cr, uid, values, context=None):
if values.get('order_id') and values.get('product_id') and any(f not in values for f in ['name', 'price_unit', 'type', 'product_uom_qty', 'product_uom']):
order = self.pool['sale.order'].read(cr, uid, values['order_id'], ['pricelist_id', 'partner_id', 'date_order', 'fiscal_position'], context=context)
defaults = self.product_id_change(cr, uid, [], order['pricelist_id'][0], values['product_id'],
qty=float(values.get('product_uom_qty', False)),
uom=values.get('product_uom', False),
qty_uos=float(values.get('product_uos_qty', False)),
uos=values.get('product_uos', False),
name=values.get('name', False),
partner_id=order['partner_id'][0],
date_order=order['date_order'],
fiscal_position=order['fiscal_position'][0] if order['fiscal_position'] else False,
flag=False, # Force name update
context=context
)['value']
if defaults.get('tax_id'):
defaults['tax_id'] = [[6, 0, defaults['tax_id']]]
values = dict(defaults, **values)
return super(sale_order_line, self).create(cr, uid, values, context=context)
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
context = context or {}
lang = lang or context.get('lang', False)
if not partner_id:
raise osv.except_osv(_('No Customer Defined!'), _('Before choosing a product,\n select a customer in the sales form.'))
warning = False
product_uom_obj = self.pool.get('product.uom')
partner_obj = self.pool.get('res.partner')
product_obj = self.pool.get('product.product')
context = {'lang': lang, 'partner_id': partner_id}
partner = partner_obj.browse(cr, uid, partner_id)
lang = partner.lang
context_partner = {'lang': lang, 'partner_id': partner_id}
if not product:
return {'value': {'th_weight': 0,
'product_uos_qty': qty}, 'domain': {'product_uom': [],
'product_uos': []}}
if not date_order:
date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
result = {}
warning_msgs = ''
product_obj = product_obj.browse(cr, uid, product, context=context_partner)
uom2 = False
if uom:
uom2 = product_uom_obj.browse(cr, uid, uom)
if product_obj.uom_id.category_id.id != uom2.category_id.id:
uom = False
if uos:
if product_obj.uos_id:
uos2 = product_uom_obj.browse(cr, uid, uos)
if product_obj.uos_id.category_id.id != uos2.category_id.id:
uos = False
else:
uos = False
fpos = False
if not fiscal_position:
fpos = partner.property_account_position or False
else:
fpos = self.pool.get('account.fiscal.position').browse(cr, uid, fiscal_position)
        if update_tax:  # not needed when only the quantity changed
result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, product_obj.taxes_id)
if not flag:
result['name'] = self.pool.get('product.product').name_get(cr, uid, [product_obj.id], context=context_partner)[0][1]
if product_obj.description_sale:
result['name'] += '\n'+product_obj.description_sale
domain = {}
if (not uom) and (not uos):
result['product_uom'] = product_obj.uom_id.id
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
uos_category_id = product_obj.uos_id.category_id.id
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
uos_category_id = False
result['th_weight'] = qty * product_obj.weight
domain = {'product_uom':
[('category_id', '=', product_obj.uom_id.category_id.id)],
'product_uos':
[('category_id', '=', uos_category_id)]}
elif uos and not uom: # only happens if uom is False
result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id
result['product_uom_qty'] = qty_uos / product_obj.uos_coeff
result['th_weight'] = result['product_uom_qty'] * product_obj.weight
elif uom: # whether uos is set or not
default_uom = product_obj.uom_id and product_obj.uom_id.id
q = product_uom_obj._compute_qty(cr, uid, uom, qty, default_uom)
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
            result['th_weight'] = q * product_obj.weight  # weight based on the quantity converted to the default UoM
if not uom2:
uom2 = product_obj.uom_id
# get unit price
if not pricelist:
warn_msg = _('You have to select a pricelist or a customer in the sales form !\n'
'Please set one before choosing a product.')
warning_msgs += _("No Pricelist ! : ") + warn_msg +"\n\n"
else:
price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
product, qty or 1.0, partner_id, {
'uom': uom or result.get('product_uom'),
'date': date_order,
})[pricelist]
if price is False:
warn_msg = _("Cannot find a pricelist line matching this product and quantity.\n"
"You have to change either the product, the quantity or the pricelist.")
warning_msgs += _("No valid pricelist line found ! :") + warn_msg +"\n\n"
else:
result.update({'price_unit': price})
if warning_msgs:
warning = {
'title': _('Configuration Error!'),
'message' : warning_msgs
}
return {'value': result, 'domain': domain, 'warning': warning}
def product_uom_change(self, cursor, user, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, context=None):
context = context or {}
        lang = lang or context.get('lang', False)
if not uom:
return {'value': {'price_unit': 0.0, 'product_uom' : uom or False}}
return self.product_id_change(cursor, user, ids, pricelist, product,
qty=qty, uom=uom, qty_uos=qty_uos, uos=uos, name=name,
partner_id=partner_id, lang=lang, update_tax=update_tax,
date_order=date_order, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Allow deleting sales order lines only in the draft and cancel states."""
        if context is None:
            context = {}
        for rec in self.browse(cr, uid, ids, context=context):
            if rec.state not in ['draft', 'cancel']:
                raise osv.except_osv(_('Invalid Action!'), _('Cannot delete a sales order line which is in state \'%s\'.') % (rec.state,))
        return super(sale_order_line, self).unlink(cr, uid, ids, context=context)
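# Illustrative sketch (not part of the original module): the docstring of
# _prepare_order_line_invoice_line() above notes that it may be overridden to
# customise invoice generation, provided super() is called. The class name and
# the description tweak below are assumptions, for illustration only.
class sale_order_line_custom_invoice(osv.osv):
    _inherit = 'sale.order.line'
    def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
        # Build the standard invoice line values first, then decorate them.
        res = super(sale_order_line_custom_invoice, self)._prepare_order_line_invoice_line(
            cr, uid, line, account_id=account_id, context=context)
        if res:
            res['name'] = '%s (order %s)' % (res['name'], line.order_id.name)
        return res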
class mail_compose_message(osv.Model):
_inherit = 'mail.compose.message'
def send_mail(self, cr, uid, ids, context=None):
context = context or {}
if context.get('default_model') == 'sale.order' and context.get('default_res_id') and context.get('mark_so_as_sent'):
context = dict(context, mail_post_autofollow=True)
self.pool.get('sale.order').signal_workflow(cr, uid, [context['default_res_id']], 'quotation_sent')
return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context)
class account_invoice(osv.Model):
_inherit = 'account.invoice'
def _get_default_section_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
if not section_id:
section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
return section_id
def _resolve_section_id_from_context(self, cr, uid, context=None):
""" Returns ID of section based on the value of 'section_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
if type(context.get('default_section_id')) in (int, long):
return context.get('default_section_id')
if isinstance(context.get('default_section_id'), basestring):
section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
if len(section_ids) == 1:
return int(section_ids[0][0])
return None
_columns = {
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
}
_defaults = {
'section_id': lambda self, cr, uid, c=None: self._get_default_section_id(cr, uid, context=c)
}
def confirm_paid(self, cr, uid, ids, context=None):
sale_order_obj = self.pool.get('sale.order')
res = super(account_invoice, self).confirm_paid(cr, uid, ids, context=context)
so_ids = sale_order_obj.search(cr, uid, [('invoice_ids', 'in', ids)], context=context)
for so_id in so_ids:
sale_order_obj.message_post(cr, uid, so_id, body=_("Invoice paid"), context=context)
return res
def unlink(self, cr, uid, ids, context=None):
""" Overwrite unlink method of account invoice to send a trigger to the sale workflow upon invoice deletion """
invoice_ids = self.search(cr, uid, [('id', 'in', ids), ('state', 'in', ['draft', 'cancel'])], context=context)
#if we can't cancel all invoices, do nothing
if len(invoice_ids) == len(ids):
#Cancel invoice(s) first before deleting them so that if any sale order is associated with them
#it will trigger the workflow to put the sale order in an 'invoice exception' state
for id in ids:
workflow.trg_validate(uid, 'account.invoice', id, 'invoice_cancel', cr)
return super(account_invoice, self).unlink(cr, uid, ids, context=context)
class procurement_order(osv.osv):
_inherit = 'procurement.order'
_columns = {
'sale_line_id': fields.many2one('sale.order.line', string='Sale Order Line'),
}
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
        res = super(procurement_order, self).write(cr, uid, ids, vals, context=context)
        if vals.get('state') in ['done', 'cancel', 'exception']:
for proc in self.browse(cr, uid, ids, context=context):
if proc.sale_line_id and proc.sale_line_id.order_id:
order_id = proc.sale_line_id.order_id.id
if self.pool.get('sale.order').test_procurements_done(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_end', cr)
if self.pool.get('sale.order').test_procurements_except(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_except', cr)
return res
class product_product(osv.Model):
_inherit = 'product.product'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
SaleOrderLine = self.pool['sale.order.line']
return {
            product_id: SaleOrderLine.search_count(cr, uid, [('product_id', '=', product_id)], context=context)
for product_id in ids
}
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
class product_template(osv.Model):
_inherit = 'product.template'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0)
for template in self.browse(cr, uid, ids, context=context):
res[template.id] = sum([p.sales_count for p in template.product_variant_ids])
return res
def action_view_sales(self, cr, uid, ids, context=None):
act_obj = self.pool.get('ir.actions.act_window')
mod_obj = self.pool.get('ir.model.data')
product_ids = []
for template in self.browse(cr, uid, ids, context=context):
product_ids += [x.id for x in template.product_variant_ids]
        result = mod_obj.xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree', raise_if_not_found=True)
result = act_obj.read(cr, uid, [result], context=context)[0]
result['domain'] = "[('product_id','in',[" + ','.join(map(str, product_ids)) + "])]"
return result
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Yenthe666/Odoo_Samples | sale/sale.py | Python | agpl-3.0 | 69,258 | 0.005761 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudfuzzy.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
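# Typical invocations of this entry point (standard Django management
# commands; the available set depends on the installed Django version):
#   python manage.py runserver
#   python manage.py test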
| arruda/cloudfuzzy | manage.py | Python | mit | 253 | 0 |
""" Define a Check monad and corresponding functions.
"""
from functools import (reduce, partial)
class Check:
""" This super class is not really necessary but helps make the structure
clear.
data Check a = Pass a | Fail Message
"""
pass
class Pass(Check):
def __init__(self, value):
self.value = value
class Fail(Check):
def __init__(self, message):
self.message = message
def is_(t, x):
""" Check whether the type of a given x is a given type t.
"""
return type(x) is t
is_check = partial(is_, Check)
is_pass = partial(is_, Pass)
is_fail = partial(is_, Fail)
def return_(x):
""" Monadic return for the Check monad.
return :: a -> m a
return = Pass
"""
return Pass(x)
def bind(f):
""" Monadic bind for the Check monad.
(>>=) :: m a -> (a -> m b) -> m b
Fail x >>= f = Fail x
Pass x >>= f = f x
"""
def bind_impl(x):
if is_fail(x):
return x
if is_pass(x):
return f(x.value)
raise ValueError('Check has to be of type Pass | Fail.')
return bind_impl
def compose(f, g):
""" Kleisli composition of two (Check-)monadic functions f and g.
(>=>) :: (a -> m b) -> (b -> m c) -> (a -> m c)
"""
def compose_impl(x):
return bind(g)(f(x))
return compose_impl
def compose_many(*fs):
""" Reduces a variable number of functions with composition.
Same as repeatedly calling `compose` on pairs.
"""
return reduce(compose, fs)
def lift(f, message):
""" Lifts a boolean function into the realm of the Check monad.
lift :: (a -> bool) -> String -> (a -> Check a)
"""
def lift_impl(x):
if f(x):
return return_(x)
return Fail(message)
return lift_impl
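# Minimal usage sketch (added for illustration; the predicates and messages
# are assumptions, not part of the original API): lift plain boolean checks
# into the Check monad and chain them with Kleisli composition.
if __name__ == '__main__':
    is_int = lift(lambda x: isinstance(x, int), 'not an integer')
    is_positive = lift(lambda x: x > 0, 'not positive')
    check_positive_int = compose_many(is_int, is_positive)
    assert is_pass(check_positive_int(42))
    assert is_fail(check_positive_int(-1))
    assert check_positive_int(-1).message == 'not positive'
    assert check_positive_int('x').message == 'not an integer'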
| kdungs/python-mcheck | mcheck/__init__.py | Python | mit | 1,840 | 0 |
from textblob import TextBlob, Word
def correct(text):
t = TextBlob(text)
return str(t.correct())
def spellcheck(text):
    # Print spelling suggestions (candidate, confidence) for each word of the text.
    for w in TextBlob(text).words:
        word = Word(w)
        print(word.spellcheck())
| 2B5/ia-3B5 | module3/preprocessing/errorCorrect.py | Python | mit | 232 | 0.038793 |
from __future__ import division
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from acoustics.power import lw_iso3746
@pytest.mark.parametrize("background_noise, expected", [
(79, 91.153934187),
(83, 90.187405234),
(88, 88.153934187),
])
def test_lw_iso3746(background_noise, expected):
LpAi = np.array([90, 90, 90, 90])
LpAiB = background_noise * np.ones(4)
S = 10
alpha = np.array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1])
surfaces = np.array([10, 10, 10, 10, 10, 10])
calculated = lw_iso3746(LpAi, LpAiB, S, alpha, surfaces)
assert_almost_equal(calculated, expected)
| FRidh/python-acoustics | tests/test_power.py | Python | bsd-3-clause | 637 | 0 |
import random
from numpy.random import normal, uniform
import numpy as np
import math
from heapq import heapify, heappush, heappop
import os
MIN = 0
MAX = 10000000
POINTS_COUNT = 1000000
QUERIES_COUNT = 200000
def save_dataset(filename, intervals, queries):
intervals_copy = [value for value in intervals]
queries_copy = [value for value in queries]
random.shuffle(intervals_copy)
random.shuffle(queries_copy)
out = open(filename, "w")
out.write(str(len(intervals_copy)) + "\n")
for index in xrange(len(intervals_copy)):
start, length = intervals_copy[index]
out.write(str(start) + "\t" + str(start + length) + "\t" + str(index + 1) + "\n")
out.write(str(len(queries_copy)) + "\n")
for start, length in queries_copy:
out.write(str(start) + "\t" + str(start + length) + "\n")
out.close()
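# Illustrative helper (added for clarity; the blocks below inline this same
# sweep several times): walk the intervals in sorted order, keep a min-heap of
# the right endpoints of intervals that are still open, and record how many
# are open when each new interval starts. Assumes a non-empty interval list.
def average_overlapping(sorted_intervals):
    started = []
    overlappings = []
    for start, length in sorted_intervals:
        # Drop intervals that ended strictly before this one starts.
        while started and started[0] < start:
            heappop(started)
        overlappings.append(len(started))
        heappush(started, start + length)
    return sum(overlappings) / float(len(overlappings))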
if 1:
# chi_time_mem
len_mean = 100
len_stdev = 10
intervals = []
queries = []
lengths = [length >=0 and length or 0.0 for length in normal(len_mean, len_stdev, POINTS_COUNT)]
for point_index in xrange(POINTS_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = lengths[point_index]
intervals += [(start, length)]
intervals.sort()
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print "avg overlapping", avg_overlapping
QUERIES_COUNT_SPEC = 1000000
query_len_mean = 100
lengths = normal(query_len_mean, len_stdev, QUERIES_COUNT_SPEC)
queries = []
for point_index in xrange(QUERIES_COUNT_SPEC):
start = random.random() * (MAX - MIN) + MIN
queries += [(start, lengths[point_index])]
save_dataset("datasets/chi_time_mem_1M_100_1M_100.txt", intervals, queries)
if 1:
# query_len
len_mean = 100
len_stdev = 10
query_len = 1
intervals = []
queries = []
lengths = [length >=0 and length or 0.0 for length in normal(len_mean, len_stdev, POINTS_COUNT)]
for point_index in xrange(POINTS_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = lengths[point_index]
intervals += [(start, length)]
intervals.sort()
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
lengths = normal(100, 10, QUERIES_COUNT)
DATASETS_COUNT = 30
query_length = 10
    factor = math.exp(math.log(10000 / float(query_length)) / (DATASETS_COUNT - 1))
for length_factor in xrange(DATASETS_COUNT):
queries = []
for point_index in xrange(QUERIES_COUNT):
start = random.random() * (MAX - MIN) + MIN
queries += [(start, query_length)]
save_dataset("datasets/query_len/dataset_query_len_%d.txt" % (query_length), intervals, queries)
print query_length
query_length = math.ceil(query_length * factor)
if 0:
# avg_overlapping
queries = []
for query_index in xrange(QUERIES_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = 100
queries += [(start, length)]
len_mean = 1
max_len = 100000
DATASETS_COUNT = 30
    factor = math.exp(math.log(max_len / float(len_mean)) / (DATASETS_COUNT - 1))
while len_mean <= 100000:
print "mean len:", len_mean
if 1:
intervals = []
lengths = [length >=0 and length or 0.0 for length in normal(len_mean, len_mean / 20.0, POINTS_COUNT)]
if len_mean == 1: #here we want overlapping to be zero
lengths = [0 for l in lengths]
for interval_index in xrange(POINTS_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = lengths[interval_index]
intervals += [(start, length)]
intervals.sort()
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print sum(overlappings)
print "avg. overlapping", avg_overlapping
save_dataset("../datasets/avg_overlapping/%f.txt" % (avg_overlapping), intervals, queries)
len_mean = math.ceil(len_mean * factor)
if 0:
# avg_overlapping standard deviation
queries = []
for query_index in xrange(QUERIES_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = 100
queries += [(start, length)]
len_mean = 10000
DATASETS_COUNT = 30
radius = 0
max_radius = len_mean
delta = (max_radius - radius) / (float(DATASETS_COUNT - 1))
for _ in xrange(20):
print "radius:", radius
if 0:
intervals = []
lengths = [length >=0 and length or 0.0 for length in uniform(len_mean - radius, len_mean + radius, POINTS_COUNT)]
print min(lengths), lengths[:15]
for interval_index in xrange(POINTS_COUNT):
start = random.random() * (MAX - MIN) + MIN
length = lengths[interval_index]
intervals += [(start, length)]
intervals.sort()
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print sum(overlappings)
print "avg. overlapping", avg_overlapping
save_dataset("../datasets/avg_overlapping_stdev/%f.txt" % (2 * radius), intervals, queries)
radius += delta
if 0:
# different number of intervals
intervals_counts = [10000]
for _ in xrange(50):
intervals_counts += [int(1.15 * intervals_counts[-1])]
max_values = [counts for counts in intervals_counts]
interval_length = 10
for dataset_index in xrange(len(intervals_counts)):
intervals_count = intervals_counts[dataset_index]
MAX = max_values[dataset_index]
intervals = []
for _ in xrange(intervals_count):
start = random.random() * MAX
intervals += [(start, interval_length)]
if intervals_count < 10000000:
intervals.sort()
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print sum(overlappings)
print "avg. overlapping", avg_overlapping
queries = []
for query_index in xrange(QUERIES_COUNT):
start = random.random() * MAX
length = 1000
queries += [(start, length)]
print "intervals_count", intervals_count
save_dataset("../datasets/intervals_count/%d.txt" % (intervals_count), intervals, queries)
if 0:
#real: exome
queries = []
for line in open("../datasets/exome_alignement/20130108.exome.targets.bed"):
_, start, end = [float(item) for item in line.split("\t")]
queries += [(start, end - start)]
queries.sort()
intervals = []
for line in open("../datasets/exome_alignement/exome.bed"):
start, end = [float(item) for item in line.split("\t")[1:3]]
intervals += [(start, end - start)]
intervals.sort()
if 1:
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print sum(overlappings)
print "avg. overlapping", avg_overlapping
save_dataset("../datasets/exome_alignement/dataset.txt", intervals, queries)
if 1:
#real: time intervals
import os
path = "../datasets/time_intervals/"
files = [fname for fname in os.listdir(path) if fname.startswith("all_intervals")]
id = 0
intervals = []
for file in files:
file = path + file
for line in open(file):
start, end = [float(item) for item in line.split("\t")[1:-1]]
intervals += [(start, end - start)]
min_left = min(left for left, _ in intervals )
max_right = max(left + length for left, length in intervals)
max_right -= min_left
intervals = [(start - min_left, length) for start, length in intervals]
queries = []
for query_index in xrange(100000):
start = random.random() * max_right
length = 100
queries += [(start, length)]
save_dataset("../datasets/time_intervals/dataset.txt", intervals, queries)
print len(intervals)
intervals.sort()
out = open("t", "w")
for start, end in intervals:
out.write(str(start) + "\t" + str(end) + "\n")
out.close()
if 1:
overlappings = []
started = []
for start, length in intervals:
while started:
right_border = heappop(started)
if right_border >= start:
heappush(started, right_border)
break
overlappings += [len(started)]
heappush(started, start + length)
avg_overlapping = sum(overlappings) / float(len(overlappings))
print sum(overlappings)
print "avg. overlapping", avg_overlapping
#save_dataset("../datasets/exome_alignement/dataset.txt", intervals, queries)
exit()  # everything below this point is leftover experimentation and never runs
queries = []
for _ in xrange(QUERIES_COUNT):
start = random.randint(MIN, MAX)
    length = random.randint(1, MAX_LENGTH)  # NOTE: MAX_LENGTH is never defined; this unreachable code would raise NameError
queries += [(start, start + length)]
queries.sort()
queries_start_indices = []
if not queries_start_indices:
interval_index = 0
for query_index in xrange(QUERIES_COUNT):
query_start = queries[query_index][0]
added = False
while True:
if interval_index == POINTS_COUNT:
break
if query_start <= intervals[interval_index][0]:
queries_start_indices += [interval_index]
added = True
break
else:
interval_index += 1
if not added:
queries_start_indices += [POINTS_COUNT]
chipseqs = []
total_size = 0
checkpoint_intervals_values = [[] for _ in xrange(len(intervals))]
for chi_value in xrange(1, len(intervals)):
offset = 0
while offset < len(intervals):
checkpoint_intervals_values[offset] += [chi_value]
offset += chi_value
total_size += 1
print POINTS_COUNT, math.log(POINTS_COUNT) * POINTS_COUNT, total_size
steps = 0
overheads = [0 for _ in xrange(POINTS_COUNT)]
for query_index in xrange(len(queries)):
query_start, query_end = queries[query_index]
query_start_index = queries_start_indices[query_index]
overhead = 0
for checkpoint_position in xrange(query_start_index - 1, query_start_index / 2 - 1, -1):
if intervals[checkpoint_position][1] <= query_start:
overhead += 1
for chi_value_index in xrange(len(checkpoint_intervals_values[checkpoint_position]) - 1, -1, -1):
checkpoint_value = checkpoint_intervals_values[checkpoint_position][chi_value_index]
if checkpoint_position + checkpoint_value < query_start_index:
break
overheads[checkpoint_value] += overhead
steps += 1
print steps, POINTS_COUNT * POINTS_COUNT
print overheads[:100]
overheads_other = [0]
if 1:
for checkpoint_interval in xrange(1, 100):
checkpoint_overheads = []
for query_index in xrange(QUERIES_COUNT):
overhead = 0
query_start = queries[query_index][0]
query_start_index = queries_start_indices[query_index]
if not query_start_index:
checkpoint_overheads += [0.0]
continue
closest_checkpoint = (query_start_index - 1) - ((query_start_index - 1) % checkpoint_interval)
for interval_index in xrange(closest_checkpoint, query_start_index):
if intervals[interval_index][1] <= query_start:
overhead += 1
path_size = query_start_index - closest_checkpoint
if path_size:
checkpoint_overheads += [float(overhead)]
else:
checkpoint_overheads += [0.0]
overheads_other += [int(sum(checkpoint_overheads))]
print overheads_other
| mavlyutovrus/interval_index | python/create_datasets.py | Python | apache-2.0 | 14,048 | 0.003702 |
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
ERP+
"""
__author__ = 'CVtek dev'
__credits__ = []
__version__ = "1.0"
__maintainer__ = "CVTek dev"
__status__ = "Development"
__model_name__ = 'sr_mulher.SRMulher'
import auth, base_models
from orm import *
from form import *
class SRMulher(Model, View):
def __init__(self, **kargs):
Model.__init__(self, **kargs)
self.__name__ = 'sr_mulher'
        self.__title__ = 'Inscrição e Identificação da Mulher'
self.__model_name__ = __model_name__
self.__list_edit_mode__ = 'edit'
        self.__get_options__ = ['nome']  # also sets the field shown in m2m widgets, regardless of the description on the m2m field
self.__order_by__ = 'sr_mulher.nome'
        # choice field with the health facility structure
self.numero_inscricao = integer_field(view_order = 1, name = 'Nº de Inscrição', size = 40)
self.nome = string_field(view_order = 2, name = 'Nome Completo', size = 70, onlist = True)
self.data_nascimento = date_field(view_order = 3, name = 'Data Nascimento', size=40, args = 'required', onlist = True)
self.escolaridade = combo_field(view_order = 4, name = 'Escolaridade', size = 40, default = '', options = [('analfabeta','Analfabeta'), ('primaria','Primária'), ('secundaria','Secundária'), ('mais','Mais')], onlist = True)
self.telefone = string_field(view_order = 5, name = 'Telefone', size = 40, onlist = True)
self.endereco_familia = text_field(view_order=6, name='Endereço Familia', size=70, args="rows=30", onlist=False, search=False)
self.endereco_actual = text_field(view_order=7, name='Endereço Fixo Actual', size=70, args="rows=30", onlist=False, search=False)
self.observacoes = text_field(view_order=8, name='Observações', size=80, args="rows=30", onlist=False, search=False)
self.estado = combo_field(view_order = 9, name = 'Estado', size = 40, default = 'active', options = [('active','Activo'), ('canceled','Cancelado')], onlist = True)
| IdeaSolutionsOnline/ERP4R | core/objs/sr_mulher.py | Python | mit | 2,051 | 0.040236 |