| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
# -*- coding: utf-8 -*-
from data_utils import get_file
import string
import random
import cPickle
def make_reuters_dataset(path='datasets/temp/reuters21578/', min_samples_per_topic=15):
import os
import re
from preprocessing.text import Tokenizer
wire_topics = []
topic_counts = {}
wire_bodies = []
for fname in os.listdir(path):
if 'sgm' in fname:
s = open(path + fname).read()
tag = '<TOPICS>'
while tag in s:
s = s[s.find(tag)+len(tag):]
topics = s[:s.find('</')]
if topics and not '</D><D>' in topics:
topic = topics.replace('<D>', '').replace('</D>', '')
wire_topics.append(topic)
topic_counts[topic] = topic_counts.get(topic, 0) + 1
else:
continue
bodytag = '<BODY>'
body = s[s.find(bodytag)+len(bodytag):]
body = body[:body.find('</')]
wire_bodies.append(body)
# only keep most common topics
items = topic_counts.items()
items.sort(key = lambda x: x[1])
kept_topics = set()
for x in items:
print x[0] + ': ' + str(x[1])
if x[1] >= min_samples_per_topic:
kept_topics.add(x[0])
print '-'
print 'Kept topics:', len(kept_topics)
# filter wires with rare topics
kept_wires = []
labels = []
topic_indexes = {}
for t, b in zip(wire_topics, wire_bodies):
if t in kept_topics:
if t not in topic_indexes:
topic_index = len(topic_indexes)
topic_indexes[t] = topic_index
else:
topic_index = topic_indexes[t]
labels.append(topic_index)
kept_wires.append(b)
# vectorize wires
tokenizer = Tokenizer()
tokenizer.fit(kept_wires)
X = tokenizer.transform(kept_wires)
print 'Sanity check:'
for w in ["banana", "oil", "chocolate", "the", "dsft"]:
print '...index of', w, ':', tokenizer.word_index.get(w)
dataset = (X, labels)
print '-'
print 'Saving...'
    cPickle.dump(dataset, open('datasets/data/reuters.pkl', 'wb'))
def load_data(path="reuters.pkl", nb_words=100000, maxlen=None, test_split=0.2, seed=113):
path = get_file(path, origin="https://s3.amazonaws.com/text-datasets/reuters.pkl")
f = open(path, 'rb')
X, labels = cPickle.load(f)
f.close()
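    # shuffle X and labels with the same seed so each sequence stays paired with its label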
random.seed(seed)
random.shuffle(X)
random.seed(seed)
random.shuffle(labels)
if maxlen:
new_X = []
new_labels = []
for x, y in zip(X, labels):
if len(x) < maxlen:
new_X.append(x)
new_labels.append(y)
X = new_X
labels = new_labels
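    # map word indices at or above the nb_words cutoff to the out-of-vocabulary index 1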
X = [[1 if w >= nb_words else w for w in x] for x in X]
X_train = X[:int(len(X)*(1-test_split))]
y_train = labels[:int(len(X)*(1-test_split))]
X_test = X[int(len(X)*(1-test_split)):]
y_test = labels[int(len(X)*(1-test_split)):]
return (X_train, y_train), (X_test, y_test)
if __name__ == "__main__":
make_reuters_dataset()
(X_train, y_train), (X_test, y_test) = load_data()
| wavelets/keras | keras/datasets/reuters.py | Python | mit | 3,237 | 0.003089 |
import unittest
import sys
from PySide.QtCore import QObject, SIGNAL, QUrl
from PySide.QtWebKit import *
from PySide.QtNetwork import QNetworkRequest
from helper import adjust_filename, UsesQApplication
class TestWebFrame(UsesQApplication):
def load_finished(self, ok):
self.assert_(ok)
page = self.view.page()
self.assert_(page)
frame = page.mainFrame()
self.assert_(frame)
meta = frame.metaData()
self.assertEqual(meta['description'], ['PySide Test METADATA.'])
self.app.quit()
def testMetaData(self):
self.view = QWebView()
QObject.connect(self.view, SIGNAL('loadFinished(bool)'),
self.load_finished)
url = QUrl.fromLocalFile(adjust_filename('fox.html', __file__))
self.view.setUrl(url)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
| enthought/pyside | tests/QtWebKit/webframe_test.py | Python | lgpl-2.1 | 899 | 0.002225 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2015 Elico Corp (<http://www.elico-corp.com>)
# Authors: Siyuan Gu
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Add multiprice to product form view',
'version': '8.0.1.0.0',
'category': 'Sales',
'depends': ['product_multiprices'],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'support': 'support@elico-corp.com',
'data': ['views/product.xml'],
'installable': True,
'application': False
}
| Elico-Corp/odoo-addons | multiprice_to_product_form/__openerp__.py | Python | agpl-3.0 | 1,385 | 0 |
from flask import Flask, request, jsonify
import random
import re
import sys
app = Flask(__name__)
SPEC = re.compile(r'^(\d+)d(\d+) ?(\w+)?$')
HIDDEN = ('hide', 'hidden', 'invisible', 'ephemeral', 'private')
USAGE = 'USAGE:\n' \
'`/roll [n]d[x] [options]`\n' \
'where:\n' \
' n == number of dice\n' \
' x == number of sides on each die\n' \
'e.g. `/roll 3d6` will roll 3 6-sided dice. ' \
'[options] may be any of (hide|hidden|invisible|ephemeral|private) ' \
'for a private roll.'
def do_roll(spec):
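    """Parse a dice spec like '3d6 hidden' and build a Slack slash-command response dict."""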
match = SPEC.match(spec)
if match is None:
return {
'response_type': 'ephemeral',
'text': 'ERROR: invalid roll command `%s`\n\n%s' % (
spec, USAGE)
}
num = int(match.group(1))
size = int(match.group(2))
flag = match.group(3)
if flag is not None and flag not in HIDDEN:
return {
'response_type': 'ephemeral',
'text': 'ERROR: unrecognized modifier `%s`' % flag
}
vals = []
for i in range(0, num):
vals.append(random.randint(1, size))
data = {
'response_type': 'ephemeral' if flag in HIDDEN else 'in_channel'
}
if num == 1:
data['text'] = str(vals[0])
else:
data['text'] = '%s = %d' % (
' + '.join([str(v) for v in vals]), sum(vals))
return data
@app.route("/", methods=['GET', 'POST'])
def roll():
try:
if request.method == 'POST':
spec = request.form['text']
else:
spec = request.args['spec']
return jsonify(do_roll(spec))
except:
return jsonify({
'response_type': 'ephemeral',
'text': USAGE
})
if __name__ == "__main__":
app.run(debug=True)
| NUKnightLab/slackdice | app.py | Python | mit | 1,779 | 0.003373 |
# -*- coding: UTF-8 -*-
import ldap
class Connection:
def __init__(self, url='ldap://annuaire.math.univ-paris-diderot.fr:389', \
base='ou=users,dc=chevaleret,dc=univ-paris-diderot,dc=fr'):
self.base = base
self.url = url
self.con = ldap.initialize(self.url)
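        # anonymous bind (empty DN and password); the directory must allow unauthenticated searches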
self.con.bind_s('', '')
def __del__(self):
self.con.unbind()
def search(self, kw, field='cn'):
"""
Search someone in the LDAP directory using a keyword
"""
qry = "%s=*%s*" % (field, kw)
return self.con.search_s(self.base, ldap.SCOPE_SUBTREE, qry, None)
| bfontaine/p7ldap | p7ldap/connection.py | Python | mit | 619 | 0.004847 |
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
def can_perform_update(target, check):
return "python_updater" in check and check["python_updater"] is not None
def perform_update(target, check, target_version, log_cb=None):
return check["python_updater"].perform_update(target, check, target_version, log_cb=None)
| chriskoz/OctoPrint | src/octoprint/plugins/softwareupdate/updaters/python_updater.py | Python | agpl-3.0 | 565 | 0.008881 |
import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
# Note: The original ordering of chapters and verses was extremely complex.
# As a result, chapters are the bold headers and subsections are each p tag.
# Case 1: Sections split by numbers (Roman or not) followed by a period, or bracketed. Subsections split by <p> tags
def parsecase1(ptags, c, colltitle, title, author, date, URL):
# ptags contains all <p> tags. c is the cursor object.
chapter = '-1'
verse = 0
for p in ptags:
# make sure it's not a paragraph without the main text
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                         'internal_navigation']:  # these are not part of the main text
continue
except:
pass
passage = ''
text = p.get_text().strip()
# Skip empty paragraphs. and skip the last part with the collection link.
if len(text) <= 0 or text.startswith('Asconius\n'):
continue
chapterb = p.find('b')
if chapterb is not None and text[0].isalpha():
test = chapterb.find(text = True)
if text == test:
chapter = text
verse = 0
continue
passage = text
        verse += 1
if passage.startswith('Asconius'):
continue
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, passage.strip(), URL, 'prose'))
def main():
collURL = 'http://www.thelatinlibrary.com/asconius.html'
collOpen = urllib.request.urlopen(collURL)
collSOUP = BeautifulSoup(collOpen, 'html5lib')
author = collSOUP.title.string.strip()
colltitle = 'QUINTUS ASCONIUS PEDIANUS'
date = 'c. 9 B.C. - c. A.D. 76'
textsURL = [collURL]
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author='Asconius'")
for url in textsURL:
openurl = urllib.request.urlopen(url)
textsoup = BeautifulSoup(openurl, 'html5lib')
try:
title = textsoup.title.string.split(':')[1].strip()
except:
title = textsoup.title.string.strip()
getp = textsoup.find_all('p')
parsecase1(getp, c, colltitle, title, author, date, url)
logger.info("Program runs successfully.")
if __name__ == '__main__':
main()
| oudalab/phyllo | phyllo/extractors/asconiusDB.py | Python | apache-2.0 | 2,970 | 0.005051 |
from pymongo import MongoClient
import os
from bson import json_util
from numpy import unique
client = MongoClient(os.environ.get("MONGOLAB_URI"))
db = client[os.environ.get("MONGOLAB_DB")]
class Organization:
def __init__(self):
self.orgs = self.get_orgs()
self.count = len(self.orgs)
def get_orgs(self):
out = []
for org in db.organizations.find():
out.append(org)
return out
def get_unique_orgs(self):
out = []
for org in self.orgs:
try:
out.append(org["organization_name"].strip())
except:
pass
return unique(out)
class Attorney:
def __init__(self):
self.attorneys = self.get_attorneys()
self.count = len(self.attorneys)
def get_attorneys(self):
out = []
for org in db.attorneys.find():
out.append(org)
return out
def get_unique_orgs(self):
out = []
for attorney in self.attorneys:
try:
out.append(attorney["organization_name"].strip())
except:
pass
return unique(out)
if __name__ == "__main__":
org = Organization()
attorney = Attorney()
print(json_util.dumps(attorney.get_unique_orgs(), indent=2))
| mitzvotech/honorroll | lib/cleanup.py | Python | mit | 1,317 | 0.001519 |
import os, psycopg2, rsa
from urllib.parse import urlparse
class Installer:
def connect(self):
self.url = urlparse(os.environ["DATABASE_URL"])
self.db = psycopg2.connect(database=self.url.path[1:],
user=self.url.username,
password=self.url.password,
host=self.url.hostname,
port=self.url.port)
def create_database(self):
c = self.db.cursor()
c.execute('''CREATE TABLE users (name text PRIMARY KEY,
password text,
friends text ARRAY,
favorites text ARRAY,
blacklist text ARRAY,
dialogs text ARRAY)''')
c.execute('''CREATE TABLE profiles (name text PRIMARY KEY
REFERENCES users(name),
status text,
email text,
birthday bigint,
about text,
image bytea)''')
c.execute('''CREATE TABLE sessions (name text,
pub_key text ARRAY,
ip text UNIQUE,
last_active bigint)''')
c.execute('''CREATE TABLE requests (from_who text,
to_who text,
message text)''')
c.execute('''CREATE TABLE key (pub_key text ARRAY,
priv_key text ARRAY)''')
self.db.commit()
c.close()
def seed_database(self):
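        # generate the server's 2048-bit RSA keypair; accurate=False trades exact key size for speed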
pubkey, privkey = rsa.newkeys(2048, accurate = False)
c = self.db.cursor()
c.execute('''INSERT INTO key VALUES (%s, %s)''',
(list(map(str, pubkey.__getstate__())),
list(map(str, privkey.__getstate__()))))
c.close()
self.db.commit()
self.db.close()
def install(self):
self.connect()
self.create_database()
self.seed_database()
if __name__ == '__main__':
Installer().install()
| MoarCatz/chat-server | installer.py | Python | gpl-3.0 | 2,510 | 0.001195 |
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import OFFICENS
from element import Element
from draw import StyleRefElement
# Autogenerated
def Annotation(**args):
return StyleRefElement(qname = (OFFICENS,'annotation'), **args)
def AutomaticStyles(**args):
return Element(qname = (OFFICENS, 'automatic-styles'), **args)
def BinaryData(**args):
return Element(qname = (OFFICENS,'binary-data'), **args)
def Body(**args):
return Element(qname = (OFFICENS, 'body'), **args)
def ChangeInfo(**args):
return Element(qname = (OFFICENS,'change-info'), **args)
def Chart(**args):
return Element(qname = (OFFICENS,'chart'), **args)
def DdeSource(**args):
return Element(qname = (OFFICENS,'dde-source'), **args)
def Document(**args):
return Element(qname = (OFFICENS,'document'), **args)
def DocumentContent(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-content'), version=version, **args)
def DocumentMeta(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-meta'), version=version, **args)
def DocumentSettings(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-settings'), version=version, **args)
def DocumentStyles(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-styles'), version=version, **args)
def Drawing(**args):
return Element(qname = (OFFICENS,'drawing'), **args)
def EventListeners(**args):
return Element(qname = (OFFICENS,'event-listeners'), **args)
def FontFaceDecls(**args):
return Element(qname = (OFFICENS, 'font-face-decls'), **args)
def Forms(**args):
return Element(qname = (OFFICENS,'forms'), **args)
def Image(**args):
return Element(qname = (OFFICENS,'image'), **args)
def MasterStyles(**args):
return Element(qname = (OFFICENS, 'master-styles'), **args)
def Meta(**args):
return Element(qname = (OFFICENS, 'meta'), **args)
def Presentation(**args):
return Element(qname = (OFFICENS,'presentation'), **args)
def Script(**args):
return Element(qname = (OFFICENS, 'script'), **args)
def Scripts(**args):
return Element(qname = (OFFICENS, 'scripts'), **args)
def Settings(**args):
return Element(qname = (OFFICENS, 'settings'), **args)
def Spreadsheet(**args):
return Element(qname = (OFFICENS, 'spreadsheet'), **args)
def Styles(**args):
return Element(qname = (OFFICENS, 'styles'), **args)
def Text(**args):
return Element(qname = (OFFICENS, 'text'), **args)
# Autogenerated end
| thuydang/ocrfeeder | src/ocrfeeder/odf/office.py | Python | gpl-3.0 | 3,311 | 0.028399 |
from unittest.mock import patch
def mocked_execute(remote_executor, command, *args, **kwargs):
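    # look up the in-memory mock registered for this executor's hostname and run the command there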
from .test_assets import TestAsset
return TestAsset.REMOTE_HOST_MOCKS[remote_executor.hostname].execute(
command
)
class PatchRemoteHostMeta(type):
"""
    Can be used as a metaclass for a TestCase to patch the methods required to mock a RemoteHost.
"""
MOCKED_EXECUTE = mocked_execute
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
patch('remote_execution.remote_execution.SshRemoteExecutor.connect', lambda self: None)(self)
patch('remote_execution.remote_execution.SshRemoteExecutor.close', lambda self: None)(self)
patch('remote_execution.remote_execution.SshRemoteExecutor.is_connected', lambda self: True)(self)
patch(
'remote_execution.remote_execution.SshRemoteExecutor._execute',
PatchRemoteHostMeta.MOCKED_EXECUTE
)(self)
class PatchTrackedRemoteExecutionMeta(PatchRemoteHostMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.executed_commands = set()
def tracked_mocked_execute(remote_host, command, *args, **kwargs):
self.executed_commands.add(command)
return PatchRemoteHostMeta.MOCKED_EXECUTE(remote_host, command)
patch(
'remote_execution.remote_execution.SshRemoteExecutor._execute',
tracked_mocked_execute
)(self)
| jdepoix/goto_cloud | goto_cloud/test_assets/remote_host_patch_metaclass.py | Python | mit | 1,494 | 0.004016 |
from django.db import models
from django.utils import timezone
from user.models import User
from adminsortable.models import SortableMixin
class Topic(SortableMixin):
'''
Model to define the behaviour of a topic
'''
class Meta:
'''
Meta options for topic model
Defines the default ordering
'''
ordering = [ 'order' ]
name = models.CharField(max_length=50, unique=True)
identifier = models.SlugField(max_length=50, unique=True)
description = models.CharField(max_length=255)
order = models.PositiveIntegerField(
default=0,
editable=False,
db_index=True
)
@property
def threads_last_day(self):
'''
        Counts the number of threads in this topic
        created in the last 24 hours
        :return: The number of threads
:rtype: int
'''
last_day = timezone.now() - timezone.timedelta(days=1)
return len(Thread.objects.filter(topic=self, created__gte=last_day))
def __str__(self):
'''
Represents a topic as a string
:return: The name of the topic
:rtype: str
'''
return self.name
class Thread(models.Model):
'''
Model to define the behaviour of a thread
Is related to a topic
'''
class Meta:
'''
Meta options for thread model
Defines the default ordering
'''
ordering = [ '-created' ]
user = models.ForeignKey(User)
topic = models.ForeignKey(Topic, related_name='threads')
title = models.CharField(max_length=50)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
'''
Represents a thread as a string
:return: The title of the thread
:rtype: str
'''
return self.title
class Comment(models.Model):
'''
Model to define the behaviour of a comment
Is related to a user and a thread
'''
class Meta:
'''
Meta options for comment model
Defines the default ordering
'''
ordering = [ 'created' ]
user = models.ForeignKey(User)
thread = models.ForeignKey(Thread, related_name='comments')
content = models.CharField(max_length=140)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
'''
Represents a comment as a string
:return: The user identifier and the thread title
:rtype: str
'''
return 'Comment from %s to thread %s' % (
str(self.user),
str(self.thread)
)
| anehx/anonboard-backend | core/models.py | Python | mit | 2,696 | 0.006677 |
import numpy as np
def sigmoid(x):
"""
Calculate sigmoid
"""
return 1 / (1 + np.exp(-x))
# Network size
N_input = 4
N_hidden = 3
N_output = 2
np.random.seed(42)
# Make some fake data
X = np.random.randn(4)
weights_input_to_hidden = np.random.normal(
0, scale=0.1, size=(N_input, N_hidden))
weights_hidden_to_output = np.random.normal(
0, scale=0.1, size=(N_hidden, N_output))
# Make a forward pass through the network
hidden_layer_in = np.dot(X, weights_input_to_hidden)
hidden_layer_out = sigmoid(hidden_layer_in)
print('Hidden-layer Output:')
print(hidden_layer_out)
output_layer_in = np.dot(hidden_layer_out, weights_hidden_to_output)
output_layer_out = sigmoid(output_layer_in)
print('Output-layer Output:')
print(output_layer_out)
| swirlingsand/deep-learning-foundations | play/multi-layer.py | Python | mit | 771 | 0.001297 |
#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_FIN_FIN_PRODAREAINFO').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates used in processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# first day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
O_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/O_CM_FIN_PRODAREAINFO/*')
O_CM_FIN_PRODAREAINFO.registerTempTable("O_CM_FIN_PRODAREAINFO")
# Task [12] 001-01::
V_STEP = V_STEP + 1
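# load yesterday's full snapshot from the backup (_BK) directory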
F_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/F_CM_FIN_PRODAREAINFO_BK/'+V_DT_LD+'.parquet/*')
F_CM_FIN_PRODAREAINFO.registerTempTable("F_CM_FIN_PRODAREAINFO")
sql = """
SELECT A.PRODCODE AS PRODCODE
,A.PRODZONENO AS PRODZONENO
,A.AGTBRNO AS AGTBRNO
,A.AGTZONENO AS AGTZONENO
,A.AGTZONENA AS AGTZONENA
,A.FEERATE AS FEERATE
,A.ZONENOMINAMT AS ZONENOMINAMT
,A.ZONENOMAXAMT AS ZONENOMAXAMT
,A.AGTAMT AS AGTAMT
,A.AGTSTATE AS AGTSTATE
,A.REGZONENO AS REGZONENO
,A.REGBRNO AS REGBRNO
,A.REGTELLERNO AS REGTELLERNO
,A.REGDATE AS REGDATE
,A.REGTIME AS REGTIME
,A.ENSZONENO AS ENSZONENO
,A.ENSBRNO AS ENSBRNO
,A.ENSTELLERNO AS ENSTELLERNO
,A.ENSDATE AS ENSDATE
,A.ENSTIME AS ENSTIME
,A.NOTE1 AS NOTE1
,A.NOTE2 AS NOTE2
,A.NOTE3 AS NOTE3
,A.NOTE4 AS NOTE4
,A.NOTE5 AS NOTE5
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'FIN' AS ODS_SYS_ID
FROM O_CM_FIN_PRODAREAINFO A -- sales legal-entity scope table
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CM_FIN_PRODAREAINFO_INNTMP1 = sqlContext.sql(sql)
F_CM_FIN_PRODAREAINFO_INNTMP1.registerTempTable("F_CM_FIN_PRODAREAINFO_INNTMP1")
#F_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/F_CM_FIN_PRODAREAINFO/*')
#F_CM_FIN_PRODAREAINFO.registerTempTable("F_CM_FIN_PRODAREAINFO")
sql = """
SELECT DST.PRODCODE --product code: src.PRODCODE
,DST.PRODZONENO --legal entity owning the product: src.PRODZONENO
,DST.AGTBRNO --agency-sales branch code: src.AGTBRNO
,DST.AGTZONENO --agency-sales legal entity: src.AGTZONENO
,DST.AGTZONENA --agency-sales legal-entity name: src.AGTZONENA
,DST.FEERATE --commission rate: src.FEERATE
,DST.ZONENOMINAMT --legal entity's minimum sales quota: src.ZONENOMINAMT
,DST.ZONENOMAXAMT --legal entity's maximum sales quota: src.ZONENOMAXAMT
,DST.AGTAMT --amount already sold: src.AGTAMT
,DST.AGTSTATE --sales agency status: src.AGTSTATE
,DST.REGZONENO --legal entity of the registering branch: src.REGZONENO
,DST.REGBRNO --registering branch: src.REGBRNO
,DST.REGTELLERNO --registering teller: src.REGTELLERNO
,DST.REGDATE --registration date: src.REGDATE
,DST.REGTIME --registration time: src.REGTIME
,DST.ENSZONENO --legal entity of the confirming branch: src.ENSZONENO
,DST.ENSBRNO --confirming branch: src.ENSBRNO
,DST.ENSTELLERNO --confirming teller: src.ENSTELLERNO
,DST.ENSDATE --confirmation date: src.ENSDATE
,DST.ENSTIME --confirmation time: src.ENSTIME
,DST.NOTE1 --reserved 1: src.NOTE1
,DST.NOTE2 --reserved 2: src.NOTE2
,DST.NOTE3 --reserved 3: src.NOTE3
,DST.NOTE4 --reserved 4: src.NOTE4
,DST.NOTE5 --reserved 5: src.NOTE5
,DST.FR_ID --legal entity code: src.FR_ID
,DST.ODS_ST_DATE --system platform date: src.ODS_ST_DATE
,DST.ODS_SYS_ID --system code: src.ODS_SYS_ID
FROM F_CM_FIN_PRODAREAINFO DST
LEFT JOIN F_CM_FIN_PRODAREAINFO_INNTMP1 SRC
ON SRC.PRODCODE = DST.PRODCODE
WHERE SRC.PRODCODE IS NULL """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CM_FIN_PRODAREAINFO_INNTMP2 = sqlContext.sql(sql)
dfn="F_CM_FIN_PRODAREAINFO/"+V_DT+".parquet"
UNION=F_CM_FIN_PRODAREAINFO_INNTMP2.unionAll(F_CM_FIN_PRODAREAINFO_INNTMP1)
F_CM_FIN_PRODAREAINFO_INNTMP1.cache()
F_CM_FIN_PRODAREAINFO_INNTMP2.cache()
nrowsi = F_CM_FIN_PRODAREAINFO_INNTMP1.count()
nrowsa = F_CM_FIN_PRODAREAINFO_INNTMP2.count()
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CM_FIN_PRODAREAINFO/*.parquet")
UNION.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_CM_FIN_PRODAREAINFO_INNTMP1.unpersist()
F_CM_FIN_PRODAREAINFO_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_CM_FIN_PRODAREAINFO lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
ret = os.system("hdfs dfs -mv /"+dbname+"/F_CM_FIN_PRODAREAINFO/"+V_DT_LD+".parquet /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/")
# first delete the current day's data from the backup table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/"+V_DT+".parquet")
# copy a full snapshot of the current day from the main table to the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CM_FIN_PRODAREAINFO/"+V_DT+".parquet /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/"+V_DT+".parquet")
| cysuncn/python | spark/crm/PROC_O_FIN_FIN_PRODAREAINFO.py | Python | gpl-3.0 | 7,144 | 0.012726 |
# Bryan Barrows
# CSC 110 - Winter 17
# fahrenheit.py
# The purpose of this program is to convert a user input temperature from celsius to fahrenheit.
def main():
celsius = eval(input("What is the Celsius temperature? "))
fahrenheit = (9/5) * celsius + 32
print("The temperature is ",fahrenheit," degrees Fahrenheit.")
main()
| bbarrows89/CSC110_Projects | fahrenheit.py | Python | mit | 341 | 0.01173 |
import logging
import urllib
import json
from edge.writer.proxywriter import ProxyWriter
class Writer(ProxyWriter):
def __init__(self, configFilePath):
super(Writer, self).__init__(configFilePath)
def _generateUrl(self, requestHandler):
url = self._configuration.get('solr', 'url')
parameters = {}
parameters['wt'] = 'json'
parameters['omitHeader'] = 'true'
parameters['q'] = '*:*'
try:
parameters['fq'] = 'id:"' + requestHandler.get_argument('id') + '"'
except:
parameters['fl'] = 'id,name,rate,uncertainties,unit,shortenUnit,abbrUnit,updated_at'
try:
if requestHandler.get_argument('latest').lower() == 'true':
parameters['fl'] = 'xLatest,yLatest,unit,abbrUnit,updated_at'
except:
pass
url += '/select?' + urllib.urlencode(parameters)
logging.debug("proxy to url : " + url)
return url
def onResponse(self, response):
if response.error:
self.requestHandler.set_status(404)
self.requestHandler.write(str(response.error))
self.requestHandler.finish()
else:
for name, value in response.headers.iteritems():
logging.debug('header: '+name+':'+value)
self.requestHandler.set_header(name, value)
self.requestHandler.set_header('Access-Control-Allow-Origin', '*')
solrJson = json.loads(response.body)
if len(solrJson['response']['docs']) > 1:
# Need to order indicators accordingly
solrJsonClone = {}
solrJsonClone['response'] = {}
solrJsonClone['response']['start'] = solrJson['response']['start']
solrJsonClone['response']['numFound'] = solrJson['response']['numFound']
solrJsonClone['response']['docs'] = []
indicators = {}
for doc in solrJson['response']['docs']:
indicators[doc['id']] = doc
for indicator in self._configuration.get('solr', 'ordering').split(','):
if indicator in indicators:
solrJsonClone['response']['docs'].append(indicators[indicator])
solrJson = solrJsonClone
for doc in solrJson['response']['docs']:
if 'uncertainties' in doc:
if doc['id'] in self._configuration.get('solr', 'uncertainties').split(','):
doc['uncertainties'] = int(round(doc['uncertainties']))
doc['rate'] = int(round(doc['rate']))
self.requestHandler.write(solrJson)
self.requestHandler.finish()
| dataplumber/edge | src/main/python/plugins/slcp/indicator/Writer.py | Python | apache-2.0 | 2,746 | 0.003277 |
"""
Minimal Django settings for tests of common/lib.
Required in Django 1.9+ due to imports of models in stock Django apps.
"""
import sys
import tempfile
from django.utils.translation import ugettext_lazy as _
from path import Path
# TODO: Remove the rest of the sys.path modification here and in (cms|lms)/envs/common.py
REPO_ROOT = Path(__file__).abspath().dirname().dirname().dirname() # /edx-platform/
sys.path.append(REPO_ROOT / 'common' / 'djangoapps')
sys.path.append(REPO_ROOT / 'lms' / 'djangoapps')
ALL_LANGUAGES = []
BLOCK_STRUCTURES_SETTINGS = dict(
COURSE_PUBLISH_TASK_DELAY=30,
TASK_DEFAULT_RETRY_DELAY=30,
TASK_MAX_RETRIES=5,
)
COURSE_KEY_PATTERN = r'(?P<course_key_string>[^/+]+(/|\+)[^/+]+(/|\+)[^/?]+)'
COURSE_ID_PATTERN = COURSE_KEY_PATTERN.replace('course_key_string', 'course_id')
USAGE_KEY_PATTERN = r'(?P<usage_key_string>(?:i4x://?[^/]+/[^/]+/[^/]+/[^@]+(?:@[^/]+)?)|(?:[^/]+))'
COURSE_MODE_DEFAULTS = {
'bulk_sku': None,
'currency': 'usd',
'description': None,
'expiration_datetime': None,
'min_price': 0,
'name': 'Audit',
'sku': None,
'slug': 'audit',
'suggested_prices': '',
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'default.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
PROCTORING_BACKENDS = {
'DEFAULT': 'mock',
'mock': {},
'mock_proctoring_without_rules': {},
}
FEATURES = {}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'djcelery',
'django_sites_extensions',
'openedx.core.djangoapps.django_comment_common',
'openedx.core.djangoapps.video_config',
'openedx.core.djangoapps.video_pipeline',
'openedx.core.djangoapps.bookmarks.apps.BookmarksConfig',
'edxval',
'lms.djangoapps.courseware',
'lms.djangoapps.instructor_task',
'student',
'openedx.core.djangoapps.site_configuration',
'lms.djangoapps.grades.apps.GradesConfig',
'lms.djangoapps.certificates.apps.CertificatesConfig',
'openedx.core.djangoapps.user_api',
'course_modes.apps.CourseModesConfig',
'lms.djangoapps.verify_student.apps.VerifyStudentConfig',
'openedx.core.djangoapps.dark_lang',
'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig',
'openedx.core.djangoapps.content.block_structure.apps.BlockStructureConfig',
'openedx.core.djangoapps.catalog',
'openedx.core.djangoapps.self_paced',
'openedx.core.djangoapps.schedules.apps.SchedulesConfig',
'openedx.core.djangoapps.theming.apps.ThemingConfig',
'openedx.core.djangoapps.external_user_ids',
'experiments',
'openedx.features.content_type_gating',
'openedx.features.course_duration_limits',
'openedx.features.discounts',
'milestones',
'celery_utils',
'waffle',
'edx_when',
'rest_framework_jwt',
    # Django 1.11 requires that imported models be backed by installed apps.
'completion',
'entitlements',
)
LMS_ROOT_URL = "http://localhost:8000"
MEDIA_ROOT = tempfile.mkdtemp()
RECALCULATE_GRADES_ROUTING_KEY = 'edx.core.default'
POLICY_CHANGE_GRADES_ROUTING_KEY = 'edx.core.default'
POLICY_CHANGE_TASK_RATE_LIMIT = '300/h'
SECRET_KEY = 'insecure-secret-key'
SITE_ID = 1
SITE_NAME = "localhost"
PLATFORM_NAME = _('Your Platform Name Here')
DEFAULT_FROM_EMAIL = 'registration@example.com'
TRACK_MAX_EVENT = 50000
USE_TZ = True
RETIREMENT_SERVICE_WORKER_USERNAME = 'RETIREMENT_SERVICE_USER'
RETIRED_USERNAME_PREFIX = 'retired__user_'
PROCTORING_SETTINGS = {}
# Software Secure request retry settings
# Time in seconds before a retry of the task; should be 60 minutes.
SOFTWARE_SECURE_REQUEST_RETRY_DELAY = 60 * 60
# Maximum of 6 retries before giving up.
SOFTWARE_SECURE_RETRY_MAX_ATTEMPTS = 6
| mitocw/edx-platform | openedx/tests/settings.py | Python | agpl-3.0 | 3,886 | 0.001544 |
from django.shortcuts import render
from django.http import HttpResponse
# Include the `fusioncharts.py` file, which has the functions required to embed the charts in an HTML page
from ..fusioncharts import FusionCharts
# Loading Data from a Static JSON String
# This example shows a Column 2D chart whose data is passed in JSON string format.
# The `chart` method is defined to load chart data from a JSON string.
def chart(request):
# Create an object for the column2d chart using the FusionCharts class constructor
column2d = FusionCharts("column2d", "ex1", '100%', '100%', "chartContainer", "json",
# The chart data is passed as a string to the `dataSource` parameter.
"""{
"chart":
{
"caption": "Countries With Most Oil Reserves [2017-18]",
"subcaption": "In MMbbl = One Million barrels",
"xaxisname": "Country",
"yaxisname": "Reserves (MMbbl)",
"numbersuffix": "K",
"theme": "fusion"
},
"data": [{
"label": "Venezuela",
"value": "290"
}, {
"label": "Saudi",
"value": "260"
}, {
"label": "Canada",
"value": "180"
}, {
"label": "Iran",
"value": "140"
}, {
"label": "Russia",
"value": "115"
}, {
"label": "UAE",
"value": "100"
}, {
"label": "US",
"value": "30"
}, {
"label": "China",
"value": "30"
}]
}""")
    # Return the complete JavaScript and HTML code used to render the chart in the browser.
    return render(request, 'dynamic-resize.html', {'output' : column2d.render(), 'chartTitle': 'Chart Auto-Resize Sample'})
| emundus/v6 | plugins/fabrik_visualization/fusionchart/libs/fusioncharts-suite-xt/integrations/django/samples/fusioncharts/samples/dynamic_chart_resize.py | Python | gpl-2.0 | 1,972 | 0.010649 |
import pytest
from umodbus import conf
from umodbus.client import tcp
@pytest.fixture(scope='module', autouse=True)
def enable_signed_values(request):
""" Use signed values when running tests it this module. """
tmp = conf.SIGNED_VALUES
conf.SIGNED_VALUES = True
def fin():
conf.SIGNED_VALUES = tmp
request.addfinalizer(fin)
@pytest.mark.parametrize('function', [
tcp.read_coils,
tcp.read_discrete_inputs,
])
def test_response_on_single_bit_value_read_requests(sock, function):
""" Validate response of a succesful Read Coils or Read Discrete Inputs
request.
"""
slave_id, starting_address, quantity = (1, 0, 10)
req_adu = function(slave_id, starting_address, quantity)
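    # the fixture slave's data store holds ten alternating bit values starting at address 0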
assert tcp.send_message(req_adu, sock) == [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
@pytest.mark.parametrize('function', [
tcp.read_holding_registers,
tcp.read_input_registers,
])
def test_response_on_multi_bit_value_read_requests(sock, function):
""" Validate response of a succesful Read Holding Registers or Read
Input Registers request.
"""
slave_id, starting_address, quantity = (1, 0, 10)
req_adu = function(slave_id, starting_address, quantity)
assert tcp.send_message(req_adu, sock) ==\
[0, -1, -2, -3, -4, -5, -6, -7, -8, -9]
@pytest.mark.parametrize('function, value', [
(tcp.write_single_coil, 1),
(tcp.write_single_register, -1337),
])
def test_response_single_value_write_request(sock, function, value):
""" Validate responde of succesful Read Single Coil and Read Single
Register request.
"""
slave_id, starting_address, value = (1, 0, value)
req_adu = function(slave_id, starting_address, value)
assert tcp.send_message(req_adu, sock) == value
@pytest.mark.parametrize('function, values', [
(tcp.write_multiple_coils, [1, 1]),
(tcp.write_multiple_registers, [1337, 15]),
])
def test_response_multi_value_write_request(sock, function, values):
""" Validate response of succesful Write Multiple Coils and Write Multiple
Registers request.
Both requests write 2 values, starting address is 0.
"""
slave_id, starting_address = (1, 0)
req_adu = function(slave_id, starting_address, values)
assert tcp.send_message(req_adu, sock) == 2
| AdvancedClimateSystems/python-modbus | tests/system/responses/test_succesful_responses.py | Python | mpl-2.0 | 2,290 | 0 |
from .models import *
from django.contrib import admin
class PingLogAdmin(admin.ModelAdmin):
list_display = ('id','hash_key','url','ip_address','user_agent','time')
admin.site.register(PingLog, PingLogAdmin) | thejeshgn/quest | quest/admin.py | Python | gpl-3.0 | 213 | 0.037559 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
import msrest.serialization
from ._scheduler_management_client_enums import *
class HttpAuthentication(msrest.serialization.Model):
"""HttpAuthentication.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: OAuthAuthentication, BasicAuthentication, ClientCertAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
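    # msrest uses this map to resolve the wire-level "type" discriminator to the concrete subclass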
_subtype_map = {
'type': {'ActiveDirectoryOAuth': 'OAuthAuthentication', 'Basic': 'BasicAuthentication', 'ClientCertificate': 'ClientCertAuthentication'}
}
def __init__(
self,
**kwargs
):
super(HttpAuthentication, self).__init__(**kwargs)
self.type = None # type: Optional[str]
class BasicAuthentication(HttpAuthentication):
"""BasicAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
:param username: Gets or sets the username.
:type username: str
:param password: Gets or sets the password, return value will always be empty.
:type password: str
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
}
def __init__(
self,
*,
username: Optional[str] = None,
password: Optional[str] = None,
**kwargs
):
super(BasicAuthentication, self).__init__(**kwargs)
self.type = 'Basic' # type: str
self.username = username
self.password = password
class ClientCertAuthentication(HttpAuthentication):
"""ClientCertAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
:param password: Gets or sets the certificate password, return value will always be empty.
:type password: str
:param pfx: Gets or sets the pfx certificate. Accepts certification in base64 encoding, return
value will always be empty.
:type pfx: str
:param certificate_thumbprint: Gets or sets the certificate thumbprint.
:type certificate_thumbprint: str
:param certificate_expiration_date: Gets or sets the certificate expiration date.
:type certificate_expiration_date: ~datetime.datetime
:param certificate_subject_name: Gets or sets the certificate subject name.
:type certificate_subject_name: str
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'str'},
'certificate_thumbprint': {'key': 'certificateThumbprint', 'type': 'str'},
'certificate_expiration_date': {'key': 'certificateExpirationDate', 'type': 'iso-8601'},
'certificate_subject_name': {'key': 'certificateSubjectName', 'type': 'str'},
}
def __init__(
self,
*,
password: Optional[str] = None,
pfx: Optional[str] = None,
certificate_thumbprint: Optional[str] = None,
certificate_expiration_date: Optional[datetime.datetime] = None,
certificate_subject_name: Optional[str] = None,
**kwargs
):
super(ClientCertAuthentication, self).__init__(**kwargs)
self.type = 'ClientCertificate' # type: str
self.password = password
self.pfx = pfx
self.certificate_thumbprint = certificate_thumbprint
self.certificate_expiration_date = certificate_expiration_date
self.certificate_subject_name = certificate_subject_name
class HttpRequest(msrest.serialization.Model):
"""HttpRequest.
:param authentication: Gets or sets the authentication method of the request.
:type authentication: ~azure.mgmt.scheduler.models.HttpAuthentication
:param uri: Gets or sets the URI of the request.
:type uri: str
:param method: Gets or sets the method of the request.
:type method: str
:param body: Gets or sets the request body.
:type body: str
:param headers: Gets or sets the headers.
:type headers: dict[str, str]
"""
_attribute_map = {
'authentication': {'key': 'authentication', 'type': 'HttpAuthentication'},
'uri': {'key': 'uri', 'type': 'str'},
'method': {'key': 'method', 'type': 'str'},
'body': {'key': 'body', 'type': 'str'},
'headers': {'key': 'headers', 'type': '{str}'},
}
def __init__(
self,
*,
authentication: Optional["HttpAuthentication"] = None,
uri: Optional[str] = None,
method: Optional[str] = None,
body: Optional[str] = None,
headers: Optional[Dict[str, str]] = None,
**kwargs
):
super(HttpRequest, self).__init__(**kwargs)
self.authentication = authentication
self.uri = uri
self.method = method
self.body = body
self.headers = headers
class JobAction(msrest.serialization.Model):
"""JobAction.
:param type: Gets or sets the job action type. Possible values include: "Http", "Https",
"StorageQueue", "ServiceBusQueue", "ServiceBusTopic".
:type type: str or ~azure.mgmt.scheduler.models.JobActionType
:param request: Gets or sets the http requests.
:type request: ~azure.mgmt.scheduler.models.HttpRequest
:param queue_message: Gets or sets the storage queue message.
:type queue_message: ~azure.mgmt.scheduler.models.StorageQueueMessage
:param service_bus_queue_message: Gets or sets the service bus queue message.
:type service_bus_queue_message: ~azure.mgmt.scheduler.models.ServiceBusQueueMessage
:param service_bus_topic_message: Gets or sets the service bus topic message.
:type service_bus_topic_message: ~azure.mgmt.scheduler.models.ServiceBusTopicMessage
:param retry_policy: Gets or sets the retry policy.
:type retry_policy: ~azure.mgmt.scheduler.models.RetryPolicy
:param error_action: Gets or sets the error action.
:type error_action: ~azure.mgmt.scheduler.models.JobErrorAction
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'request': {'key': 'request', 'type': 'HttpRequest'},
'queue_message': {'key': 'queueMessage', 'type': 'StorageQueueMessage'},
'service_bus_queue_message': {'key': 'serviceBusQueueMessage', 'type': 'ServiceBusQueueMessage'},
'service_bus_topic_message': {'key': 'serviceBusTopicMessage', 'type': 'ServiceBusTopicMessage'},
'retry_policy': {'key': 'retryPolicy', 'type': 'RetryPolicy'},
'error_action': {'key': 'errorAction', 'type': 'JobErrorAction'},
}
def __init__(
self,
*,
type: Optional[Union[str, "JobActionType"]] = None,
request: Optional["HttpRequest"] = None,
queue_message: Optional["StorageQueueMessage"] = None,
service_bus_queue_message: Optional["ServiceBusQueueMessage"] = None,
service_bus_topic_message: Optional["ServiceBusTopicMessage"] = None,
retry_policy: Optional["RetryPolicy"] = None,
error_action: Optional["JobErrorAction"] = None,
**kwargs
):
super(JobAction, self).__init__(**kwargs)
self.type = type
self.request = request
self.queue_message = queue_message
self.service_bus_queue_message = service_bus_queue_message
self.service_bus_topic_message = service_bus_topic_message
self.retry_policy = retry_policy
self.error_action = error_action
class JobCollectionDefinition(msrest.serialization.Model):
"""JobCollectionDefinition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Gets the job collection resource identifier.
:vartype id: str
:ivar type: Gets the job collection resource type.
:vartype type: str
:param name: Gets or sets the job collection resource name.
:type name: str
:param location: Gets or sets the storage account location.
:type location: str
:param tags: A set of tags. Gets or sets the tags.
:type tags: dict[str, str]
:param properties: Gets or sets the job collection properties.
:type properties: ~azure.mgmt.scheduler.models.JobCollectionProperties
"""
_validation = {
'id': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': 'JobCollectionProperties'},
}
def __init__(
self,
*,
name: Optional[str] = None,
location: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional["JobCollectionProperties"] = None,
**kwargs
):
super(JobCollectionDefinition, self).__init__(**kwargs)
self.id = None
self.type = None
self.name = name
self.location = location
self.tags = tags
self.properties = properties
class JobCollectionListResult(msrest.serialization.Model):
"""JobCollectionListResult.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: Gets the job collections.
:vartype value: list[~azure.mgmt.scheduler.models.JobCollectionDefinition]
:param next_link: Gets or sets the URL to get the next set of job collections.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[JobCollectionDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(JobCollectionListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class JobCollectionProperties(msrest.serialization.Model):
"""JobCollectionProperties.
:param sku: Gets or sets the SKU.
:type sku: ~azure.mgmt.scheduler.models.Sku
:param state: Gets or sets the state. Possible values include: "Enabled", "Disabled",
"Suspended", "Deleted".
:type state: str or ~azure.mgmt.scheduler.models.JobCollectionState
:param quota: Gets or sets the job collection quota.
:type quota: ~azure.mgmt.scheduler.models.JobCollectionQuota
"""
_attribute_map = {
'sku': {'key': 'sku', 'type': 'Sku'},
'state': {'key': 'state', 'type': 'str'},
'quota': {'key': 'quota', 'type': 'JobCollectionQuota'},
}
def __init__(
self,
*,
sku: Optional["Sku"] = None,
state: Optional[Union[str, "JobCollectionState"]] = None,
quota: Optional["JobCollectionQuota"] = None,
**kwargs
):
super(JobCollectionProperties, self).__init__(**kwargs)
self.sku = sku
self.state = state
self.quota = quota
class JobCollectionQuota(msrest.serialization.Model):
"""JobCollectionQuota.
:param max_job_count: Gets or set the maximum job count.
:type max_job_count: int
:param max_job_occurrence: Gets or sets the maximum job occurrence.
:type max_job_occurrence: int
:param max_recurrence: Gets or set the maximum recurrence.
:type max_recurrence: ~azure.mgmt.scheduler.models.JobMaxRecurrence
"""
_attribute_map = {
'max_job_count': {'key': 'maxJobCount', 'type': 'int'},
'max_job_occurrence': {'key': 'maxJobOccurrence', 'type': 'int'},
'max_recurrence': {'key': 'maxRecurrence', 'type': 'JobMaxRecurrence'},
}
def __init__(
self,
*,
max_job_count: Optional[int] = None,
max_job_occurrence: Optional[int] = None,
max_recurrence: Optional["JobMaxRecurrence"] = None,
**kwargs
):
super(JobCollectionQuota, self).__init__(**kwargs)
self.max_job_count = max_job_count
self.max_job_occurrence = max_job_occurrence
self.max_recurrence = max_recurrence
class JobDefinition(msrest.serialization.Model):
"""JobDefinition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Gets the job resource identifier.
:vartype id: str
:ivar type: Gets the job resource type.
:vartype type: str
:ivar name: Gets the job resource name.
:vartype name: str
:param properties: Gets or sets the job properties.
:type properties: ~azure.mgmt.scheduler.models.JobProperties
"""
_validation = {
'id': {'readonly': True},
'type': {'readonly': True},
'name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'JobProperties'},
}
def __init__(
self,
*,
properties: Optional["JobProperties"] = None,
**kwargs
):
super(JobDefinition, self).__init__(**kwargs)
self.id = None
self.type = None
self.name = None
self.properties = properties
class JobErrorAction(msrest.serialization.Model):
"""JobErrorAction.
:param type: Gets or sets the job error action type. Possible values include: "Http", "Https",
"StorageQueue", "ServiceBusQueue", "ServiceBusTopic".
:type type: str or ~azure.mgmt.scheduler.models.JobActionType
:param request: Gets or sets the http requests.
:type request: ~azure.mgmt.scheduler.models.HttpRequest
:param queue_message: Gets or sets the storage queue message.
:type queue_message: ~azure.mgmt.scheduler.models.StorageQueueMessage
:param service_bus_queue_message: Gets or sets the service bus queue message.
:type service_bus_queue_message: ~azure.mgmt.scheduler.models.ServiceBusQueueMessage
:param service_bus_topic_message: Gets or sets the service bus topic message.
:type service_bus_topic_message: ~azure.mgmt.scheduler.models.ServiceBusTopicMessage
:param retry_policy: Gets or sets the retry policy.
:type retry_policy: ~azure.mgmt.scheduler.models.RetryPolicy
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'request': {'key': 'request', 'type': 'HttpRequest'},
'queue_message': {'key': 'queueMessage', 'type': 'StorageQueueMessage'},
'service_bus_queue_message': {'key': 'serviceBusQueueMessage', 'type': 'ServiceBusQueueMessage'},
'service_bus_topic_message': {'key': 'serviceBusTopicMessage', 'type': 'ServiceBusTopicMessage'},
'retry_policy': {'key': 'retryPolicy', 'type': 'RetryPolicy'},
}
def __init__(
self,
*,
type: Optional[Union[str, "JobActionType"]] = None,
request: Optional["HttpRequest"] = None,
queue_message: Optional["StorageQueueMessage"] = None,
service_bus_queue_message: Optional["ServiceBusQueueMessage"] = None,
service_bus_topic_message: Optional["ServiceBusTopicMessage"] = None,
retry_policy: Optional["RetryPolicy"] = None,
**kwargs
):
super(JobErrorAction, self).__init__(**kwargs)
self.type = type
self.request = request
self.queue_message = queue_message
self.service_bus_queue_message = service_bus_queue_message
self.service_bus_topic_message = service_bus_topic_message
self.retry_policy = retry_policy
class JobHistoryDefinition(msrest.serialization.Model):
"""JobHistoryDefinition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Gets the job history identifier.
:vartype id: str
:ivar type: Gets the job history resource type.
:vartype type: str
:ivar name: Gets the job history name.
:vartype name: str
:ivar properties: Gets or sets the job history properties.
:vartype properties: ~azure.mgmt.scheduler.models.JobHistoryDefinitionProperties
"""
_validation = {
'id': {'readonly': True},
'type': {'readonly': True},
'name': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'JobHistoryDefinitionProperties'},
}
def __init__(
self,
**kwargs
):
super(JobHistoryDefinition, self).__init__(**kwargs)
self.id = None
self.type = None
self.name = None
self.properties = None
class JobHistoryDefinitionProperties(msrest.serialization.Model):
"""JobHistoryDefinitionProperties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar start_time: Gets the start time for this job.
:vartype start_time: ~datetime.datetime
:ivar end_time: Gets the end time for this job.
:vartype end_time: ~datetime.datetime
:ivar expected_execution_time: Gets the expected execution time for this job.
:vartype expected_execution_time: ~datetime.datetime
:ivar action_name: Gets the job history action name. Possible values include: "MainAction",
"ErrorAction".
:vartype action_name: str or ~azure.mgmt.scheduler.models.JobHistoryActionName
:ivar status: Gets the job history status. Possible values include: "Completed", "Failed",
"Postponed".
:vartype status: str or ~azure.mgmt.scheduler.models.JobExecutionStatus
:ivar message: Gets the message for the job history.
:vartype message: str
:ivar retry_count: Gets the retry count for job.
:vartype retry_count: int
:ivar repeat_count: Gets the repeat count for the job.
:vartype repeat_count: int
"""
_validation = {
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'expected_execution_time': {'readonly': True},
'action_name': {'readonly': True},
'status': {'readonly': True},
'message': {'readonly': True},
'retry_count': {'readonly': True},
'repeat_count': {'readonly': True},
}
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'expected_execution_time': {'key': 'expectedExecutionTime', 'type': 'iso-8601'},
'action_name': {'key': 'actionName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'retry_count': {'key': 'retryCount', 'type': 'int'},
'repeat_count': {'key': 'repeatCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(JobHistoryDefinitionProperties, self).__init__(**kwargs)
self.start_time = None
self.end_time = None
self.expected_execution_time = None
self.action_name = None
self.status = None
self.message = None
self.retry_count = None
self.repeat_count = None
class JobHistoryFilter(msrest.serialization.Model):
"""JobHistoryFilter.
:param status: Gets or sets the job execution status. Possible values include: "Completed",
"Failed", "Postponed".
:type status: str or ~azure.mgmt.scheduler.models.JobExecutionStatus
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "JobExecutionStatus"]] = None,
**kwargs
):
super(JobHistoryFilter, self).__init__(**kwargs)
self.status = status
class JobHistoryListResult(msrest.serialization.Model):
"""JobHistoryListResult.
:param value: Gets or sets the job histories under job.
:type value: list[~azure.mgmt.scheduler.models.JobHistoryDefinition]
:param next_link: Gets or sets the URL to get the next set of job histories.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[JobHistoryDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["JobHistoryDefinition"]] = None,
next_link: Optional[str] = None,
**kwargs
):
super(JobHistoryListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class JobListResult(msrest.serialization.Model):
"""JobListResult.
:param value: Gets or sets all jobs under job collection.
:type value: list[~azure.mgmt.scheduler.models.JobDefinition]
:param next_link: Gets or sets the URL to get the next set of jobs.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[JobDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["JobDefinition"]] = None,
next_link: Optional[str] = None,
**kwargs
):
super(JobListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class JobMaxRecurrence(msrest.serialization.Model):
"""JobMaxRecurrence.
:param frequency: Gets or sets the frequency of recurrence (second, minute, hour, day, week,
month). Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:type frequency: str or ~azure.mgmt.scheduler.models.RecurrenceFrequency
:param interval: Gets or sets the interval between retries.
:type interval: int
"""
_attribute_map = {
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
}
def __init__(
self,
*,
frequency: Optional[Union[str, "RecurrenceFrequency"]] = None,
interval: Optional[int] = None,
**kwargs
):
super(JobMaxRecurrence, self).__init__(**kwargs)
self.frequency = frequency
self.interval = interval
class JobProperties(msrest.serialization.Model):
"""JobProperties.
Variables are only populated by the server, and will be ignored when sending a request.
:param start_time: Gets or sets the job start time.
:type start_time: ~datetime.datetime
:param action: Gets or sets the job action.
:type action: ~azure.mgmt.scheduler.models.JobAction
:param recurrence: Gets or sets the job recurrence.
:type recurrence: ~azure.mgmt.scheduler.models.JobRecurrence
:param state: Gets or set the job state. Possible values include: "Enabled", "Disabled",
"Faulted", "Completed".
:type state: str or ~azure.mgmt.scheduler.models.JobState
:ivar status: Gets the job status.
:vartype status: ~azure.mgmt.scheduler.models.JobStatus
"""
_validation = {
'status': {'readonly': True},
}
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'JobAction'},
'recurrence': {'key': 'recurrence', 'type': 'JobRecurrence'},
'state': {'key': 'state', 'type': 'str'},
'status': {'key': 'status', 'type': 'JobStatus'},
}
def __init__(
self,
*,
start_time: Optional[datetime.datetime] = None,
action: Optional["JobAction"] = None,
recurrence: Optional["JobRecurrence"] = None,
state: Optional[Union[str, "JobState"]] = None,
**kwargs
):
super(JobProperties, self).__init__(**kwargs)
self.start_time = start_time
self.action = action
self.recurrence = recurrence
self.state = state
self.status = None
class JobRecurrence(msrest.serialization.Model):
"""JobRecurrence.
:param frequency: Gets or sets the frequency of recurrence (second, minute, hour, day, week,
month). Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:type frequency: str or ~azure.mgmt.scheduler.models.RecurrenceFrequency
:param interval: Gets or sets the interval between retries.
:type interval: int
:param count: Gets or sets the maximum number of times that the job should run.
:type count: int
:param end_time: Gets or sets the time at which the job will complete.
:type end_time: ~datetime.datetime
:param schedule:
:type schedule: ~azure.mgmt.scheduler.models.JobRecurrenceSchedule
"""
_attribute_map = {
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'schedule': {'key': 'schedule', 'type': 'JobRecurrenceSchedule'},
}
def __init__(
self,
*,
frequency: Optional[Union[str, "RecurrenceFrequency"]] = None,
interval: Optional[int] = None,
count: Optional[int] = None,
end_time: Optional[datetime.datetime] = None,
schedule: Optional["JobRecurrenceSchedule"] = None,
**kwargs
):
super(JobRecurrence, self).__init__(**kwargs)
self.frequency = frequency
self.interval = interval
self.count = count
self.end_time = end_time
self.schedule = schedule
class JobRecurrenceSchedule(msrest.serialization.Model):
"""JobRecurrenceSchedule.
:param week_days: Gets or sets the days of the week that the job should execute on.
:type week_days: list[str or ~azure.mgmt.scheduler.models.DayOfWeek]
:param hours: Gets or sets the hours of the day that the job should execute at.
:type hours: list[int]
:param minutes: Gets or sets the minutes of the hour that the job should execute at.
:type minutes: list[int]
:param month_days: Gets or sets the days of the month that the job should execute on. Must be
between 1 and 31.
:type month_days: list[int]
:param monthly_occurrences: Gets or sets the occurrences of days within a month.
:type monthly_occurrences:
list[~azure.mgmt.scheduler.models.JobRecurrenceScheduleMonthlyOccurrence]
"""
_attribute_map = {
'week_days': {'key': 'weekDays', 'type': '[str]'},
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'month_days': {'key': 'monthDays', 'type': '[int]'},
'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[JobRecurrenceScheduleMonthlyOccurrence]'},
}
def __init__(
self,
*,
week_days: Optional[List[Union[str, "DayOfWeek"]]] = None,
hours: Optional[List[int]] = None,
minutes: Optional[List[int]] = None,
month_days: Optional[List[int]] = None,
monthly_occurrences: Optional[List["JobRecurrenceScheduleMonthlyOccurrence"]] = None,
**kwargs
):
super(JobRecurrenceSchedule, self).__init__(**kwargs)
self.week_days = week_days
self.hours = hours
self.minutes = minutes
self.month_days = month_days
self.monthly_occurrences = monthly_occurrences
class JobRecurrenceScheduleMonthlyOccurrence(msrest.serialization.Model):
"""JobRecurrenceScheduleMonthlyOccurrence.
:param day: Gets or sets the day. Must be one of monday, tuesday, wednesday, thursday, friday,
saturday, sunday. Possible values include: "Monday", "Tuesday", "Wednesday", "Thursday",
"Friday", "Saturday", "Sunday".
:type day: str or ~azure.mgmt.scheduler.models.JobScheduleDay
:param occurrence: Gets or sets the occurrence. Must be between -5 and 5.
:type occurrence: int
"""
_attribute_map = {
'day': {'key': 'day', 'type': 'str'},
'occurrence': {'key': 'Occurrence', 'type': 'int'},
}
def __init__(
self,
*,
day: Optional[Union[str, "JobScheduleDay"]] = None,
occurrence: Optional[int] = None,
**kwargs
):
super(JobRecurrenceScheduleMonthlyOccurrence, self).__init__(**kwargs)
self.day = day
self.occurrence = occurrence
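# --- illustrative sketch (not part of the generated models) ------------------
# The recurrence models above nest as plain keyword arguments; a hedged,
# hypothetical example of a weekly schedule (all values are made up):
def _example_weekly_recurrence():
    schedule = JobRecurrenceSchedule(
        week_days=["Monday", "Friday"],  # DayOfWeek values may be plain strings
        hours=[9],
        minutes=[30],
    )
    # at most 10 occurrences, one run per week on the days/times above
    return JobRecurrence(frequency="Week", interval=1, count=10, schedule=schedule)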
class JobStateFilter(msrest.serialization.Model):
"""JobStateFilter.
:param state: Gets or sets the job state. Possible values include: "Enabled", "Disabled",
"Faulted", "Completed".
:type state: str or ~azure.mgmt.scheduler.models.JobState
"""
_attribute_map = {
'state': {'key': 'state', 'type': 'str'},
}
def __init__(
self,
*,
state: Optional[Union[str, "JobState"]] = None,
**kwargs
):
super(JobStateFilter, self).__init__(**kwargs)
self.state = state
class JobStatus(msrest.serialization.Model):
"""JobStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar execution_count: Gets the number of times this job has executed.
:vartype execution_count: int
:ivar failure_count: Gets the number of times this job has failed.
:vartype failure_count: int
:ivar faulted_count: Gets the number of faulted occurrences (occurrences that were retried and
failed as many times as the retry policy states).
:vartype faulted_count: int
:ivar last_execution_time: Gets the time the last occurrence executed in ISO-8601 format.
Could be empty if job has not run yet.
:vartype last_execution_time: ~datetime.datetime
:ivar next_execution_time: Gets the time of the next occurrence in ISO-8601 format. Could be
empty if the job is completed.
:vartype next_execution_time: ~datetime.datetime
"""
_validation = {
'execution_count': {'readonly': True},
'failure_count': {'readonly': True},
'faulted_count': {'readonly': True},
'last_execution_time': {'readonly': True},
'next_execution_time': {'readonly': True},
}
_attribute_map = {
'execution_count': {'key': 'executionCount', 'type': 'int'},
'failure_count': {'key': 'failureCount', 'type': 'int'},
'faulted_count': {'key': 'faultedCount', 'type': 'int'},
'last_execution_time': {'key': 'lastExecutionTime', 'type': 'iso-8601'},
'next_execution_time': {'key': 'nextExecutionTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(JobStatus, self).__init__(**kwargs)
self.execution_count = None
self.failure_count = None
self.faulted_count = None
self.last_execution_time = None
self.next_execution_time = None
class OAuthAuthentication(HttpAuthentication):
"""OAuthAuthentication.
All required parameters must be populated in order to send to Azure.
    :param type: Required. Gets or sets the HTTP authentication type. Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
:param secret: Gets or sets the secret, return value will always be empty.
:type secret: str
:param tenant: Gets or sets the tenant.
:type tenant: str
:param audience: Gets or sets the audience.
:type audience: str
:param client_id: Gets or sets the client identifier.
:type client_id: str
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'secret': {'key': 'secret', 'type': 'str'},
'tenant': {'key': 'tenant', 'type': 'str'},
'audience': {'key': 'audience', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
*,
secret: Optional[str] = None,
tenant: Optional[str] = None,
audience: Optional[str] = None,
client_id: Optional[str] = None,
**kwargs
):
super(OAuthAuthentication, self).__init__(**kwargs)
self.type = 'ActiveDirectoryOAuth' # type: str
self.secret = secret
self.tenant = tenant
self.audience = audience
self.client_id = client_id
class RetryPolicy(msrest.serialization.Model):
"""RetryPolicy.
:param retry_type: Gets or sets the retry strategy to be used. Possible values include: "None",
"Fixed".
:type retry_type: str or ~azure.mgmt.scheduler.models.RetryType
:param retry_interval: Gets or sets the retry interval between retries, specify duration in ISO
8601 format.
:type retry_interval: ~datetime.timedelta
:param retry_count: Gets or sets the number of times a retry should be attempted.
:type retry_count: int
"""
_attribute_map = {
'retry_type': {'key': 'retryType', 'type': 'str'},
'retry_interval': {'key': 'retryInterval', 'type': 'duration'},
'retry_count': {'key': 'retryCount', 'type': 'int'},
}
def __init__(
self,
*,
retry_type: Optional[Union[str, "RetryType"]] = None,
retry_interval: Optional[datetime.timedelta] = None,
retry_count: Optional[int] = None,
**kwargs
):
super(RetryPolicy, self).__init__(**kwargs)
self.retry_type = retry_type
self.retry_interval = retry_interval
self.retry_count = retry_count
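# --- illustrative sketch (not part of the generated models) ------------------
# retry_interval is an ISO-8601 duration on the wire and a datetime.timedelta
# in Python; a hedged example of a fixed policy (the numbers are assumptions):
def _example_fixed_retry_policy():
    # retry up to 4 times, waiting 30 seconds between attempts
    return RetryPolicy(
        retry_type="Fixed",
        retry_interval=datetime.timedelta(seconds=30),
        retry_count=4,
    )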
class ServiceBusAuthentication(msrest.serialization.Model):
"""ServiceBusAuthentication.
:param sas_key: Gets or sets the SAS key.
:type sas_key: str
:param sas_key_name: Gets or sets the SAS key name.
:type sas_key_name: str
:param type: Gets or sets the authentication type. Possible values include: "NotSpecified",
"SharedAccessKey".
:type type: str or ~azure.mgmt.scheduler.models.ServiceBusAuthenticationType
"""
_attribute_map = {
'sas_key': {'key': 'sasKey', 'type': 'str'},
'sas_key_name': {'key': 'sasKeyName', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
sas_key: Optional[str] = None,
sas_key_name: Optional[str] = None,
type: Optional[Union[str, "ServiceBusAuthenticationType"]] = None,
**kwargs
):
super(ServiceBusAuthentication, self).__init__(**kwargs)
self.sas_key = sas_key
self.sas_key_name = sas_key_name
self.type = type
class ServiceBusBrokeredMessageProperties(msrest.serialization.Model):
"""ServiceBusBrokeredMessageProperties.
:param content_type: Gets or sets the content type.
:type content_type: str
:param correlation_id: Gets or sets the correlation ID.
:type correlation_id: str
:param force_persistence: Gets or sets the force persistence.
:type force_persistence: bool
:param label: Gets or sets the label.
:type label: str
:param message_id: Gets or sets the message ID.
:type message_id: str
:param partition_key: Gets or sets the partition key.
:type partition_key: str
:param reply_to: Gets or sets the reply to.
:type reply_to: str
:param reply_to_session_id: Gets or sets the reply to session ID.
:type reply_to_session_id: str
:param scheduled_enqueue_time_utc: Gets or sets the scheduled enqueue time UTC.
:type scheduled_enqueue_time_utc: ~datetime.datetime
:param session_id: Gets or sets the session ID.
:type session_id: str
:param time_to_live: Gets or sets the time to live.
:type time_to_live: ~datetime.timedelta
:param to: Gets or sets the to.
:type to: str
:param via_partition_key: Gets or sets the via partition key.
:type via_partition_key: str
"""
_attribute_map = {
'content_type': {'key': 'contentType', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'force_persistence': {'key': 'forcePersistence', 'type': 'bool'},
'label': {'key': 'label', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'partition_key': {'key': 'partitionKey', 'type': 'str'},
'reply_to': {'key': 'replyTo', 'type': 'str'},
'reply_to_session_id': {'key': 'replyToSessionId', 'type': 'str'},
'scheduled_enqueue_time_utc': {'key': 'scheduledEnqueueTimeUtc', 'type': 'iso-8601'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'time_to_live': {'key': 'timeToLive', 'type': 'duration'},
'to': {'key': 'to', 'type': 'str'},
'via_partition_key': {'key': 'viaPartitionKey', 'type': 'str'},
}
def __init__(
self,
*,
content_type: Optional[str] = None,
correlation_id: Optional[str] = None,
force_persistence: Optional[bool] = None,
label: Optional[str] = None,
message_id: Optional[str] = None,
partition_key: Optional[str] = None,
reply_to: Optional[str] = None,
reply_to_session_id: Optional[str] = None,
scheduled_enqueue_time_utc: Optional[datetime.datetime] = None,
session_id: Optional[str] = None,
time_to_live: Optional[datetime.timedelta] = None,
to: Optional[str] = None,
via_partition_key: Optional[str] = None,
**kwargs
):
super(ServiceBusBrokeredMessageProperties, self).__init__(**kwargs)
self.content_type = content_type
self.correlation_id = correlation_id
self.force_persistence = force_persistence
self.label = label
self.message_id = message_id
self.partition_key = partition_key
self.reply_to = reply_to
self.reply_to_session_id = reply_to_session_id
self.scheduled_enqueue_time_utc = scheduled_enqueue_time_utc
self.session_id = session_id
self.time_to_live = time_to_live
self.to = to
self.via_partition_key = via_partition_key
class ServiceBusMessage(msrest.serialization.Model):
"""ServiceBusMessage.
:param authentication: Gets or sets the Service Bus authentication.
:type authentication: ~azure.mgmt.scheduler.models.ServiceBusAuthentication
:param brokered_message_properties: Gets or sets the brokered message properties.
:type brokered_message_properties:
~azure.mgmt.scheduler.models.ServiceBusBrokeredMessageProperties
:param custom_message_properties: Gets or sets the custom message properties.
:type custom_message_properties: dict[str, str]
:param message: Gets or sets the message.
:type message: str
:param namespace: Gets or sets the namespace.
:type namespace: str
:param transport_type: Gets or sets the transport type. Possible values include:
"NotSpecified", "NetMessaging", "AMQP".
:type transport_type: str or ~azure.mgmt.scheduler.models.ServiceBusTransportType
"""
_attribute_map = {
'authentication': {'key': 'authentication', 'type': 'ServiceBusAuthentication'},
'brokered_message_properties': {'key': 'brokeredMessageProperties', 'type': 'ServiceBusBrokeredMessageProperties'},
'custom_message_properties': {'key': 'customMessageProperties', 'type': '{str}'},
'message': {'key': 'message', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'transport_type': {'key': 'transportType', 'type': 'str'},
}
def __init__(
self,
*,
authentication: Optional["ServiceBusAuthentication"] = None,
brokered_message_properties: Optional["ServiceBusBrokeredMessageProperties"] = None,
custom_message_properties: Optional[Dict[str, str]] = None,
message: Optional[str] = None,
namespace: Optional[str] = None,
transport_type: Optional[Union[str, "ServiceBusTransportType"]] = None,
**kwargs
):
super(ServiceBusMessage, self).__init__(**kwargs)
self.authentication = authentication
self.brokered_message_properties = brokered_message_properties
self.custom_message_properties = custom_message_properties
self.message = message
self.namespace = namespace
self.transport_type = transport_type
class ServiceBusQueueMessage(ServiceBusMessage):
"""ServiceBusQueueMessage.
:param authentication: Gets or sets the Service Bus authentication.
:type authentication: ~azure.mgmt.scheduler.models.ServiceBusAuthentication
:param brokered_message_properties: Gets or sets the brokered message properties.
:type brokered_message_properties:
~azure.mgmt.scheduler.models.ServiceBusBrokeredMessageProperties
:param custom_message_properties: Gets or sets the custom message properties.
:type custom_message_properties: dict[str, str]
:param message: Gets or sets the message.
:type message: str
:param namespace: Gets or sets the namespace.
:type namespace: str
:param transport_type: Gets or sets the transport type. Possible values include:
"NotSpecified", "NetMessaging", "AMQP".
:type transport_type: str or ~azure.mgmt.scheduler.models.ServiceBusTransportType
:param queue_name: Gets or sets the queue name.
:type queue_name: str
"""
_attribute_map = {
'authentication': {'key': 'authentication', 'type': 'ServiceBusAuthentication'},
'brokered_message_properties': {'key': 'brokeredMessageProperties', 'type': 'ServiceBusBrokeredMessageProperties'},
'custom_message_properties': {'key': 'customMessageProperties', 'type': '{str}'},
'message': {'key': 'message', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'transport_type': {'key': 'transportType', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
}
def __init__(
self,
*,
authentication: Optional["ServiceBusAuthentication"] = None,
brokered_message_properties: Optional["ServiceBusBrokeredMessageProperties"] = None,
custom_message_properties: Optional[Dict[str, str]] = None,
message: Optional[str] = None,
namespace: Optional[str] = None,
transport_type: Optional[Union[str, "ServiceBusTransportType"]] = None,
queue_name: Optional[str] = None,
**kwargs
):
super(ServiceBusQueueMessage, self).__init__(authentication=authentication, brokered_message_properties=brokered_message_properties, custom_message_properties=custom_message_properties, message=message, namespace=namespace, transport_type=transport_type, **kwargs)
self.queue_name = queue_name
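# --- illustrative sketch (not part of the generated models) ------------------
# Every model inherits msrest.serialization.Model, so serialize() renames the
# Python attributes to the wire keys from _attribute_map (queue_name ->
# 'queueName', and so on); the namespace/queue names below are hypothetical:
def _example_queue_message_wire_format():
    msg = ServiceBusQueueMessage(
        namespace="my-namespace",
        queue_name="jobs",
        message="hello",
        authentication=ServiceBusAuthentication(
            sas_key_name="RootManageSharedAccessKey",
            sas_key="<redacted>",
            type="SharedAccessKey",
        ),
    )
    return msg.serialize()  # plain dict, ready to be sent as JSON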
class ServiceBusTopicMessage(ServiceBusMessage):
"""ServiceBusTopicMessage.
:param authentication: Gets or sets the Service Bus authentication.
:type authentication: ~azure.mgmt.scheduler.models.ServiceBusAuthentication
:param brokered_message_properties: Gets or sets the brokered message properties.
:type brokered_message_properties:
~azure.mgmt.scheduler.models.ServiceBusBrokeredMessageProperties
:param custom_message_properties: Gets or sets the custom message properties.
:type custom_message_properties: dict[str, str]
:param message: Gets or sets the message.
:type message: str
:param namespace: Gets or sets the namespace.
:type namespace: str
:param transport_type: Gets or sets the transport type. Possible values include:
"NotSpecified", "NetMessaging", "AMQP".
:type transport_type: str or ~azure.mgmt.scheduler.models.ServiceBusTransportType
:param topic_path: Gets or sets the topic path.
:type topic_path: str
"""
_attribute_map = {
'authentication': {'key': 'authentication', 'type': 'ServiceBusAuthentication'},
'brokered_message_properties': {'key': 'brokeredMessageProperties', 'type': 'ServiceBusBrokeredMessageProperties'},
'custom_message_properties': {'key': 'customMessageProperties', 'type': '{str}'},
'message': {'key': 'message', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'transport_type': {'key': 'transportType', 'type': 'str'},
'topic_path': {'key': 'topicPath', 'type': 'str'},
}
def __init__(
self,
*,
authentication: Optional["ServiceBusAuthentication"] = None,
brokered_message_properties: Optional["ServiceBusBrokeredMessageProperties"] = None,
custom_message_properties: Optional[Dict[str, str]] = None,
message: Optional[str] = None,
namespace: Optional[str] = None,
transport_type: Optional[Union[str, "ServiceBusTransportType"]] = None,
topic_path: Optional[str] = None,
**kwargs
):
super(ServiceBusTopicMessage, self).__init__(authentication=authentication, brokered_message_properties=brokered_message_properties, custom_message_properties=custom_message_properties, message=message, namespace=namespace, transport_type=transport_type, **kwargs)
self.topic_path = topic_path
class Sku(msrest.serialization.Model):
"""Sku.
:param name: Gets or set the SKU. Possible values include: "Standard", "Free", "P10Premium",
"P20Premium".
:type name: str or ~azure.mgmt.scheduler.models.SkuDefinition
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[Union[str, "SkuDefinition"]] = None,
**kwargs
):
super(Sku, self).__init__(**kwargs)
self.name = name
class StorageQueueMessage(msrest.serialization.Model):
"""StorageQueueMessage.
:param storage_account: Gets or sets the storage account name.
:type storage_account: str
:param queue_name: Gets or sets the queue name.
:type queue_name: str
:param sas_token: Gets or sets the SAS key.
:type sas_token: str
:param message: Gets or sets the message.
:type message: str
"""
_attribute_map = {
'storage_account': {'key': 'storageAccount', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
storage_account: Optional[str] = None,
queue_name: Optional[str] = None,
sas_token: Optional[str] = None,
message: Optional[str] = None,
**kwargs
):
super(StorageQueueMessage, self).__init__(**kwargs)
self.storage_account = storage_account
self.queue_name = queue_name
self.sas_token = sas_token
self.message = message
| Azure/azure-sdk-for-python | sdk/scheduler/azure-mgmt-scheduler/azure/mgmt/scheduler/models/_models_py3.py | Python | mit | 48,388 | 0.002232 |
from invoke import task
@task
def runserver(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py runserver --settings=mykonosbiennale.offline_settings")
def export_artists(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py export_artists - -settings = mykonosbiennale.offline_settings")
def export_filmfestival(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py export_filmfestival - -settings = mykonosbiennale.offline_settings")
def staticsitegen(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py staticsitegen --settings=mykonosbiennale.offline_settings")
| mykonosbiennale/mykonosbiennale.github.io | tasks.py | Python | apache-2.0 | 552 | 0.018116 |
#!/usr/bin/env python2
## PYTHON 2!!!!!!!!!!!!!!!!!!!
# -*- coding: utf-8 -*-
#==============================================================================#
import os
import sys
import re
import StringIO
import shutil
# import tkFileDialog
import HTMLParser
import base64
import Tkinter
import ttk
#------------------------------------------------------------------------------#
MODULES_DIR = 'modules'
WH_DIR = 'wh'
INFO_DIR = 'info'
M_DIR = 'M'
N_DIR = 'N'
G_DIR = 'G'
OST_DIR = 'OST'
################################################################################
# AUTO DIR MAKE
################################################################################
def make_dirs(main_dir):
def check(s):
s2 = s
FORBIDDEN_CHARS = ('?', '"', '`')
for char in FORBIDDEN_CHARS:
s2 = s2.replace(char, '')
return s2
def check_n_make(fullpath):
dir_fullpath = check(fullpath)
return _make_dir(dir_fullpath)
def _make_dir(fullpath):
if not os.path.exists(fullpath):
os.mkdir(fullpath)
return True
else:
# dir exists
return False
def prepare(main_dir):
main = os.path.normpath(main_dir)
wh_path = os.path.join(main, WH_DIR)
wh_dirname_ = os.path.join(*wh_path.split(os.path.sep)[-2:])
if _make_dir(wh_path):
print 'OK:', wh_dirname_
else:
print 'EXISTS:', wh_dirname_
return None
info_path = os.path.join(wh_path, INFO_DIR)
_make_dir(info_path)
for item in os.listdir(main):
if item != 'wh':
fullpath = os.path.join(main, item)
shutil.move(fullpath, info_path)
prepare(main_dir)
main_dir = os.path.normpath(main_dir)
info_dir = os.path.join(main_dir, WH_DIR, INFO_DIR)
titles = process_info_dir(info_dir)
TITLE_PATTERN = '%s'
YEAR_PATTERN = '(%s)'
TITLE_YEAR_PATTERN = '%s (%s)'
for (title, year) in titles:
if title != None:
if year != None:
if YEAR_PATTERN % (year) in title:
dir_name = TITLE_PATTERN % (title)
else:
dir_name = TITLE_YEAR_PATTERN % (title, year)
else:
dir_name = TITLE_PATTERN % (title)
main_path = os.path.join(main_dir, dir_name)
wh_path = os.path.join(main_dir, WH_DIR, dir_name)
main_path_dirname = os.path.join(*main_path.split(os.path.sep)[-2:])
if check_n_make(main_path):
print 'OK:', main_path_dirname
else:
print 'EXISTS:', main_path_dirname
wh_path_dirname = os.path.join(*wh_path.split(os.path.sep)[-3:])
if check_n_make(wh_path):
print 'OK:', wh_path_dirname
else:
print 'EXISTS:', wh_path_dirname
else:
# cannot create dir (unknown title)
pass
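# resulting layout (illustrative), assuming the info dir held an AniDB page
# for "Angel Beats! (2010)":
#   <main_dir>/
#       Angel Beats! (2010)/
#       wh/
#           info/                  <- original files, moved here by prepare()
#           Angel Beats! (2010)/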
def process_info_dir(info_dir):
titles = []
def is_ignore(filename):
IGNORE = ('Relations',)
for item in IGNORE:
if item in filename:
return True
return False
# if os.path.exists(info_dir) and os.path.isdir(info_dir):
for a_certain_file in os.listdir(info_dir):
fullpath = os.path.join(info_dir, a_certain_file)
if os.path.isfile(fullpath) and not is_ignore(a_certain_file):
# print a_certain_file
parser = AniDBFileParser()
with open(fullpath) as f:
text = ''.join(f.readlines())
parser.feed(text)
(title, year) = parser.parse()
# (title, year) = parse_anidb_file(fullpath)
titles.append((title, year))
return titles
def make_dirs2(path):
parser = AniDBFileParser()
print path
with open(path) as f:
text = ''.join(f.readlines())
parser.feed(text)
print parser.parse()
################################################################################
# PARSERS
################################################################################
class AniDBFileParser(HTMLParser.HTMLParser):
__year = None
__name = None
__is_header = None
__is_table = None
__is_year_row = None
__is_year_col = None
__TITLE_REGEXP = re.compile(r'Anime:\s(.+)')
__YEAR_REGEXP = re.compile(r'(\d{4})')
def __init__(self):
self.__year = None
self.__name = None
self.__is_header = False
self.__is_table = False
self.__is_year_row = False
self.__is_year_col = False
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
#data = data.replace('</tr><tr', '</tr> <tr')
try:
data = str(data)
HTMLParser.HTMLParser.feed(self, data)
except UnicodeDecodeError:
pass
## piss off
def handle_starttag(self, tag, attrs):
if tag == 'h1':
attrs = Dict(attrs)
if attrs['class'] == 'anime':
self.__is_header = True
if tag == 'table':
self.__is_table = True
if self.__is_table:
if tag == 'tr':
attrs = Dict(attrs)
if (
## <tr class="year">
## <th class="field">Year</th>
## <td class="value">18.12.2009</td>
## </tr>
attrs['class'] == 'year' or
## <tr class="g_odd year">
## <th class="field">Year</th>
## <td class="value">02.07.2013 till 17.09.2013</td>
## </tr>
attrs['class'] == 'g_odd year'
):
self.__is_year_row = True
if self.__is_year_row:
if tag == 'td':
attrs = Dict(attrs)
if attrs['class'] == 'value':
self.__is_year_col = True
def handle_endtag(self, tag):
if tag == 'h1':
if self.__is_header:
self.__is_header = False
if tag == 'table':
self.__is_table = False
if self.__is_table:
if tag == 'tr':
self.__is_year_row = False
if self.__is_year_row:
if tag == 'td':
self.__is_year_col = False
def handle_data(self, data):
if self.__is_header:
data = str(data)
if re.search(self.__TITLE_REGEXP, data) is not None:
title = re.search(self.__TITLE_REGEXP, data).group(1)
self.__name = str(title)
if self.__is_table:
if self.__is_year_row:
if self.__is_year_col:
print 'YEAR DATA:', data
if re.search(self.__YEAR_REGEXP, data) is not None:
year = str(re.search(self.__YEAR_REGEXP, data).group(1))
self.__year = year
def parse(self):
return (self.__name, self.__year)
################################################################################
# FUNCTIONS
################################################################################
# unpacking nested lists
# lst = [1,[2,3,[4,5,6]],7,8,9] to
# lst = [1,2,3,4,5,6,7,8,9]
def unpack_list(lst):
all_items = []
for item in lst:
if isinstance(item, list) or isinstance(item, tuple):
for i in unpack_list(item):
all_items.append(i)
else:
all_items.append(item)
return all_items
################################################################################
# RES COPY
################################################################################
class ResCopy():
stored_dir = None
titles_dir = None
main_dirs = None
def __init__(self):
self.stored_dir = ''
self.titles_dir = ''
self.main_dirs = []
def set_dirs(self, store, titles):
self.stored_dir = store
self.titles_dir = titles
def add(self, dir_):
self.main_dirs.append(dir_)
def search_and_res(self, directory, title_dir):
# print os.path.join(directory, title_dir)
for item in os.listdir(os.path.join(directory, title_dir)):
# print item
if item == WH_DIR:
input_dir = os.path.join(directory, title_dir, WH_DIR)
saved_dir = os.path.join(self.stored_dir, title_dir)
if not os.path.exists(saved_dir):
os.mkdir(saved_dir)
saved_dir = os.path.join(saved_dir, WH_DIR)
shutil.copytree(input_dir, saved_dir)
# print 'copy: '
# print input_dir
# print 'to'
# print saved_dir
return True
else:
# folder exists
pass
return False
def process_dir(self, directory, titles_filepath):
titles = list()
for title_dir in os.listdir(directory):
# print title_dir
result = self.search_and_res(directory, title_dir)
# print result
if result:
print title_dir
titles.append(title_dir)
# if os.path.exists(titles_filepath):
with open(titles_filepath, 'w') as f:
for item in titles:
f.write('%s %s' % (item, os.linesep))
def start(self):
for main_dir in self.main_dirs:
titles_filename = '%s.txt' % (main_dir.replace(os.path.sep, '_'))
titles_filepath = os.path.join(self.titles_dir, titles_filename)
self.process_dir(os.path.abspath(main_dir), titles_filepath)
################################################################################
# FUNCTION
################################################################################
def res_copy(stored_dir, titles_dir):
    # NOTE: the parameters are currently shadowed by the hard-coded paths below
    stored_dir = 'GAMES/wh/wh_ehd7_f/a/'
# stored_dir = 'GAMES/wh/wh_ehd7_f/m/'
titles_dir = 'GAMES/wh/wh_ehd7_f/titles/'
main_dirs = list()
# main_dirs.append('/media/Index/a')
# main_dirs.append('/media/Reserve/a')
# main_dirs.append('/media/temp')
# main_dirs.append('/media/Index1/a')
# main_dirs.append('/media/temp1')
# main_dirs.append('/media/temp2')
#
# main_dirs.append('/media/Index/m')
#
#
def search_and_res(directory, title_dir):
# print os.path.join(directory, title_dir)
for item in os.listdir(os.path.join(directory, title_dir)):
# print item
if item == WH_DIR:
input_dir = os.path.join(directory, title_dir, WH_DIR)
saved_dir = os.path.join(stored_dir, title_dir)
if not os.path.exists(saved_dir):
os.mkdir(saved_dir)
saved_dir = os.path.join(saved_dir, WH_DIR)
shutil.copytree(input_dir, saved_dir)
# print 'copy: '
# print input_dir
# print 'to'
# print saved_dir
return True
else:
# folder exists
pass
return False
def process_dir(directory, titles_filepath):
titles = list()
for title_dir in os.listdir(directory):
# print title_dir
result = search_and_res(directory, title_dir)
# print result
if result:
print title_dir
titles.append(title_dir)
# if os.path.exists(titles_filepath):
with open(titles_filepath, 'w') as f:
for item in titles:
f.write('%s %s' % (item, os.linesep))
for main_dir in main_dirs:
titles_filename = '%s.txt' % (main_dir.replace(os.path.sep, '_'))
titles_filepath = os.path.join(titles_dir, titles_filename)
process_dir(os.path.abspath(main_dir), titles_filepath)
def req_process():
    #make_dirs('/media/2AA92E0025E5B1FF/cop/')
    path = '/media/2AA92E0025E5B1FF/Death Note/'
    #path = os.path.abspath(os.path.join(os.path.curdir, 'TEST', 'ab'))
    print path
    make_dirs(path)
def test0():
file_ = 'AniDB.net Anime - Angel Beats! .htm'
filepath = path = os.path.abspath(os.path.join(os.path.curdir,
'TEST',
'ab',
file_))
with open(filepath) as f:
text = ''.join(f.readlines())
parser = AniDBFileParser()
parser.feed(text)
################################################################################
# LIST PARSERS
################################################################################
class AniTitle(object):
__ani_id = None
__ani_name = None
__ani_link = None
__char_id = None
__char_name = None
__char_link = None
__type = None
# int
__eps = None
__s_eps = None
# str
__year = None
__year_start = None
__year_end = None
__date_start = None
__date_end = None
# bool
__completed = None
# #
__empty = None
def __init__(self):
self.__ani_id = None
self.__ani_name = None
self.__ani_link = None
self.__char_id = None
self.__char_name = None
self.__char_link = None
self.__type = None
self.__eps = None
self.__s_eps = None
self.__year = None
self.__year_start = None
self.__year_end = None
self.__date_start = None
self.__date_end = None
self.__completed = False
self.__empty = True
@property
def ani_id(self):
return self.__ani_id
@ani_id.setter
def ani_id(self, value):
self.__ani_id = value
self.__empty = False
@property
def ani_name(self):
return self.__ani_name
@ani_name.setter
def ani_name(self, value):
self.__ani_name = value
self.__empty = False
@property
def ani_link(self):
return self.__ani_link
@ani_link.setter
def ani_link(self, value):
self.__ani_link = value
self.__empty = False
@property
def char_id(self):
return self.__char_id
@char_id.setter
def char_id(self, value):
self.__char_id = value
self.__empty = False
@property
def char_name(self):
return self.__char_name
@char_name.setter
def char_name(self, value):
self.__char_name = value
self.__empty = False
@property
def char_link(self):
return self.__char_link
@char_link.setter
def char_link(self, value):
self.__char_link = value
self.__empty = False
@property
def type(self):
return self.__type
@type.setter
def type(self, value):
self.__type = value
self.__empty = False
@property
def eps(self):
return self.__eps
@eps.setter
def eps(self, value):
self.__eps = value
self.__empty = False
@property
def s_eps(self):
return self.__s_eps
@s_eps.setter
def s_eps(self, value):
self.__s_eps = value
self.__empty = False
@property
def year(self):
return self.__year
@year.setter
def year(self, value):
self.__year = value
self.__empty = False
@property
def year_start(self):
return self.__year_start
@year_start.setter
def year_start(self, value):
self.__year_start = value
self.__empty = False
@property
def year_end(self):
return self.__year_end
@year_end.setter
def year_end(self, value):
self.__year_end = value
self.__empty = False
@property
def date_start(self):
return self.__date_start
@date_start.setter
def date_start(self, value):
self.__date_start = value
self.__empty = False
@property
def date_end(self):
return self.__date_end
@date_end.setter
def date_end(self, value):
self.__date_end = value
self.__empty = False
@property
def completed(self):
return self.__completed
@completed.setter
def completed(self, value):
self.__completed = value
self.__empty = False
@property
def empty(self):
return self.__empty
class keys(object):
ANI_NAME = 'ani_name'
ANI_ID = 'ani_id'
ANI_LINK = 'ani_link'
CHAR_NAME = 'char_name'
CHAR_ID = 'char_id'
CHAR_LINK = 'char_link'
TYPE = 'type'
EPS = 'eps'
S_EPS = 's_eps'
YEAR = 'year'
YEAR_START = 'year_start'
YEAR_FINISH = 'year_finish'
DATE_START = 'date_start'
DATE_FINISH = 'date_finish'
COMPLETED = 'completed'
# list keys
LIST_NAME = 'list_name'
LIST_TYPE = 'list_type'
LIST_PATH = 'list_path'
# filetypes
FTYPE_PERSON = 'person'
FTYPE_COMPANY = 'company'
FTYPE_MYLIST = 'mylist'
def pass_year(title, date):
# '1995 - 1996'
# 'XXXX - XXXX'
date_regexp = re.compile(r'\s{0,1}-{0,1}\s{0,1}(.{4})\s{0,1}-{0,1}\s{0,1}')
date_lst = re.findall(date_regexp, str(date))
if len(date_lst) == 1:
finish = date_lst[0]
elif len(date_lst) == 2:
finish = date_lst[1]
else:
raise ValueError('input data error')
year, start = date_lst[0], date_lst[0]
title.year = year
title.year_start = start
title.year_end = finish
return (year, start, finish)
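# behaviour sketch for pass_year, with year strings as the lists render them:
def _example_pass_year():
    t = AniTitle()
    assert pass_year(t, '1995 - 1996') == ('1995', '1995', '1996')
    assert pass_year(t, '2010') == ('2010', '2010', '2010')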
def conv(inp_str):
try:
return str(inp_str)
except UnicodeEncodeError:
try:
return ''.join(chr(char) for char in inp_str)
except TypeError:
# return inp_str
return None
class Bool(object):
'''
Mutable boolean class
'''
__value = None
def __init__(self, val=False):
self.__value = bool(val)
self.__bool__ = self.__nonzero__
def __nonzero__(self):
return self.__value
def __str__(self):
return str(self.__value)
def __repr__(self):
return str(self.__value)
'''
instead of assignment '=' we use addition '+'
'''
def __add__(self, other):
self.__value = bool(other)
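# usage sketch: Bool is mutable on purpose, so several containers can share one
# flag and flip it in place ('+' stands in for assignment, see __add__ above):
def _example_bool():
    flag = Bool()
    flag + True
    return bool(flag)  # -> True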
class Dict():
'''
dictionary class who return None if has no key
(do not raise exceptions)
'''
__store = None
def __init__(self, input_lst):
self.__store = dict()
for (key, value) in input_lst:
self.__store[conv(key)] = conv(value)
def __getitem__(self, key):
if self.__store.has_key(key):
return self.__store[key]
else:
return None
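# usage sketch: Dict swallows missing keys instead of raising KeyError, which
# keeps the attrs lookups in the parsers below guard-free:
def _example_dict():
    attrs = Dict([('class', 'year'), ('id', 'charid_0')])
    return attrs['class'], attrs['missing']  # ('year', None)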
class ListType(object):
@property
def UNKNOWN(self):
return 0
@property
def PERSON(self):
return 1
@property
def COMPANY(self):
return 2
@property
def MYLIST(self):
return 3
listtype = ListType()
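# listtype exposes read-only constants: only getters are defined, so e.g.
# 'listtype.PERSON = 5' raises AttributeError (see the experiment in test6)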
class AniList(object):
__type = None
__name = None
__path = None
__list = None
def __init__(self):
self.__type = listtype.UNKNOWN
self.__name = None
self.__path = None
self.__list = []
@property
def type(self):
return self.__type
@type.setter
def type(self, value):
self.__type = value
@property
def name(self):
return self.__name
@name.setter
def name(self, value):
self.__name = value
@property
def path(self):
return self.__path
@path.setter
def path(self, value):
self.__path = value
@property
def lst(self):
return self.__list
@lst.setter
def lst(self, value):
        self.__list = value
class StopPleaseException(Exception):
def __init__(self):
pass
#
# def __init__(self, message, Errors):
# Exception.__init__(self, message)
# self.Errors = Errors
class PersonParser(HTMLParser.HTMLParser):
is_table = False
is_row = False
is_col = False
CHAR_LINK = 'http://anidb.net/perl-bin/animedb.pl\?show=character&charid='
CHAR_LINK_PATTERN = re.compile(CHAR_LINK + '(\d+)')
ANI_LINK = 'http://anidb.net/perl-bin/animedb.pl\?show=anime&aid='
ANI_LINK_PATTERN = re.compile(ANI_LINK + '(\d+)')
titles = []
charid_zero = False
is_char = False
is_char_link = False
is_ani = False
is_ani_link = False
is_type = False
is_eps = False
is_year = False
__char_link = ''
__char_id = ''
__char_name = ''
__ani_link = ''
__ani_id = ''
__ani_name = ''
__type = ''
__year = ''
__eps = ''
def is_data_empty(self):
_len = 0
for item in (self.__ani_id, self.__ani_link, self.__ani_name,
self.__char_name, self.__type, self.__year, self.__eps):
if len(item) == 0:
return True
return False
def __init__(self):
self.titles = []
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
        '''
        stupid HTMLParser understands the following:
        <tr>spam spam</tr><tr>more spam</tr>
        as ONE row; the next markup works fine:
        <tr>spam spam</tr> <tr>more spam</tr>
                          ^
                          fucking whitespace
        html syntax fix (whitespace between <tr> tags)
        '''
# HTMLParser.HTMLParser.reset(self)
self.reset()
data = data.replace('</tr><tr', '</tr> <tr')
HTMLParser.HTMLParser.feed(self, data)
def handle_starttag(self, tag, attrs):
attrs = Dict(attrs)
if tag == 'table':
if (attrs['id'] == 'characterlist' and
attrs['class'] == 'characterlist'):
self.is_table = True
if self.is_table:
if tag == 'tr':
self.is_row = True
if attrs['id'] == 'charid_0':
self.charid_zero = True
if self.is_row:
if tag == 'td':
self.is_col = True
if self.is_col:
if attrs['class'] == 'name':
if attrs['rowspan'] != None:
self.is_char = True
else:
self.is_ani = True
if attrs['class'] == 'type':
self.is_type = True
if attrs['class'] == 'eps':
self.is_eps = True
if attrs['class'] == 'year':
self.is_year = True
if self.is_char:
if tag == 'a':
link = attrs['href']
# print link
if self.CHAR_LINK_PATTERN.match(link):
self.__char_id = self.CHAR_LINK_PATTERN.findall(link)[0]
self.__char_link = link
self.is_char_link = True
if self.is_ani:
if tag == 'a':
link = attrs['href']
if self.ANI_LINK_PATTERN.match(link):
self.__ani_id = self.ANI_LINK_PATTERN.findall(link)[0]
self.__ani_link = link
self.is_ani_link = True
def handle_endtag(self, tag):
if tag == 'table':
self.is_table = False
if self.is_table:
if tag == 'tr':
self.is_row = False
if self.is_row:
if tag == 'td':
self.is_col = False
if self.is_char_link:
if tag == 'a':
self.is_char_link = False
if self.is_ani_link:
if tag == 'a':
# self.__ani_id = ''
self.is_ani_link = False
def handle_data(self, data):
if self.is_table:
if self.is_row:
# print 'row'
if self.is_col:
if self.is_char:
if self.charid_zero:
self.__char_name = data.strip()
self.charid_zero = False
self.is_char = False
if self.is_char_link:
self.__char_name = data
self.is_char = False
if self.is_ani:
if self.is_ani_link:
self.__ani_name = data
self.is_ani = False
if self.is_type:
self.__type = str(data)
self.is_type = False
if self.is_eps:
self.__eps = str(data)
self.is_eps = False
if self.is_year:
self.__year = str(data)
self.is_year = False
else:
# row ends
if not self.is_data_empty():
title = AniTitle()
title.ani_id = self.__ani_id
title.ani_name = self.__ani_name
title.ani_link = self.__ani_link
title.char_id = self.__char_id
title.char_name = self.__char_name
title.char_link = self.__char_link
title.type = self.__type
title.eps = self.__eps
pass_year(title, self.__year)
self.titles.append(title)
    # NOTE: no 'titles' property here on purpose -- a property with the same
    # name would shadow the instance attribute set in __init__ (raising
    # AttributeError on assignment) and 'return self.titles' would recurse
class CompanyParser(HTMLParser.HTMLParser):
is_table = False
is_row = False
is_col = False
is_name = None
is_type = None
is_eps = None
is_year = None
is_credit = None
ani_id = None
ani_name = None
ani_link = None
type = None
eps = None
year = None
credit = None
ANI_LINK = 'http://anidb.net/perl-bin/animedb.pl\?show=anime&aid='
ANI_LINK_PATTERN = re.compile(ANI_LINK + '(\d+)')
# list
__animes = None
def __init__(self):
self.__animes = []
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
        '''
        stupid HTMLParser understands the following:
        <tr>spam spam</tr><tr>more spam</tr>
        as ONE row; the next markup works fine:
        <tr>spam spam</tr> <tr>more spam</tr>
                          ^
                          fucking whitespace
        html syntax fix (whitespace between <tr> tags)
        '''
        self.reset()
data = data.replace('</tr><tr', '</tr> <tr')
# data = data.replace('</td><td', '</td> <td')
HTMLParser.HTMLParser.feed(self, data)
def handle_starttag(self, tag, attrs):
attrs = Dict(attrs)
if tag == 'table':
if (attrs['id'] == 'stafflist' and
attrs['class'] == 'stafflist'
):
self.is_table = True
if self.is_table:
if tag == 'tr':
self.is_row = True
if self.is_row:
if tag == 'td':
self.is_col = True
if self.is_col:
if attrs['class'] == 'name':
self.is_name = True
if self.is_name:
if tag == 'a':
link = attrs['href']
if self.ANI_LINK_PATTERN.match(link):
self.ani_id = self.ANI_LINK_PATTERN.findall(link)[0]
self.ani_link = link
#print self.ani_link, self.ani_id
# pass_link()
if attrs['class'] == 'type':
self.is_type = True
if attrs['class'] == 'eps':
self.is_eps = True
if attrs['class'] == 'year':
self.is_year = True
if attrs['class'] == 'credit':
self.is_credit = True
def handle_endtag(self, tag):
if tag == 'table':
self.is_table = False
if self.is_table:
if tag == 'tr':
self.is_row = False
if self.is_row:
if tag == 'td':
self.is_col = False
if self.is_col:
if tag == 'td':
self.is_col = False
if self.is_name:
if tag == 'td':
self.is_name = False
if self.is_type:
if tag == 'td':
self.is_type = False
if self.is_eps:
if tag == 'td':
self.is_eps = False
if self.is_year:
if tag == 'td':
self.is_year = False
if self.is_credit:
if tag == 'td':
self.is_credit = False
def handle_data(self, data):
if self.is_table:
if self.is_row:
if self.is_col:
if self.is_name:
self.ani_name = data
if self.is_type:
self.type = data
if self.is_eps:
self.eps = data
# print 'eps:', data
if self.is_year:
self.year = data
# print 'year:', data
if self.is_credit:
self.credit = data
else:
title = AniTitle()
title.ani_name = self.ani_name
title.ani_id = self.ani_id
#print self.ani_id
title.ani_link = self.ani_link
pass_year(title, str(self.year))
title.type = self.type
self.__animes.append(title)
@property
def titles(self):
return self.__animes
class MALParser(HTMLParser.HTMLParser):
def handle_starttag(self, tag, data):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, data):
pass
class MylistParser(HTMLParser.HTMLParser):
is_animes = False
is_ani = False
is_titles = False
is_title = False
# TODO: rewrite using self.ani_id self.type and so on
__animes = []
ani = None
def process_cdata2(self, text):
# very slow
cdata_regexp = re.compile(r'<!\[CDATA\[?(.+?)\]\]>')
while re.search(cdata_regexp, text) is not None:
# print s
found_cdata = re.search(cdata_regexp, text).group()
name = re.search(cdata_regexp, text).group(1).strip()
name_coded = base64.b64encode(name)
text = text.replace(found_cdata, name_coded)
return text
def process_cdata(self, text):
# hayaku !!!!!!
CDATA_BEGIN = '<![CDATA['
CDATA_END = ']]>'
newdata = []
for line in text.split(CDATA_BEGIN):
items = line.split(CDATA_END)
if len(items) > 1:
items[0] = base64.b64encode(items[0].strip())
for item in items:
newdata.append(item)
return ''.join(newdata)
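    # illustrative transform performed by process_cdata:
    #   'a <![CDATA[ Angel Beats! ]]> b'  ->  'a QW5nZWwgQmVhdHMh b'
    # the base64 text cannot confuse HTMLParser and is decoded back in
    # handle_data when the main title is stored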
def __init__(self):
self.is_animes = False
self.is_ani = False
self.is_titles = False
self.is_title = False
self.__animes = []
self.ani = AniTitle()
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
        '''
        rip every CDATA section out of the markup and
        base64-encode it (see process_cdata)
        '''
# data = data.replace('<![CDATA[', '"').replace(']]>', '"')
# data = self.process_cdata(data)
data = self.process_cdata(data)
HTMLParser.HTMLParser.feed(self, data)
def handle_starttag(self, tag, attrs):
if tag == 'animes':
self.is_animes = True
if self.is_animes:
attrs = Dict(attrs)
if tag == 'anime':
self.is_ani = True
self.ani.ani_id = str(attrs['id'])
self.ani.type = str(attrs['type'])
pass_year(self.ani, str(attrs['year']))
if tag == 'status':
if str(attrs['watched']) == '1':
self.ani.completed = True
else:
self.ani.completed = False
if tag == 'neps':
self.ani.eps = attrs['cnt']
if tag == 'seps':
self.ani.s_eps = attrs['cnt']
if tag == 'titles':
self.is_titles = True
if self.is_titles:
if tag == 'title':
if attrs['type'] == 'main':
# print 'is title starttag'
self.is_title = True
if tag == 'dates':
self.ani_date_start = attrs['start']
self.ani.date_start = attrs['start']
self.ani_date_finish = attrs['end']
self.ani.date_end = attrs['end']
def handle_endtag(self, tag):
if tag == 'animes':
self.is_animes = False
if self.is_animes:
if tag == 'anime':
self.is_ani = False
if tag == 'titles':
self.is_titles = False
if self.is_titles:
if tag == 'title':
# print 'is title endtag'
self.is_title = False
def handle_data(self, data):
if self.is_animes:
if self.is_ani:
if self.is_titles:
# print 'is titles'
if self.is_title:
# self.ani_title = data
# print 'is title'
# print data
self.ani.ani_name = base64.b64decode(data)
pass
# self.ani[keys.ANI_NAME] = data
else:
if not self.ani.empty:
self.__animes.append(self.ani)
self.ani = AniTitle()
# print self.ani_id, self.ani_type, self.ani_year
@property
def titles(self):
return self.__animes
class TableParser(HTMLParser.HTMLParser):
is_table = False
is_row = False
is_col = False
rowdata = []
tabledata = []
def handle_starttag(self, tag, attrs):
attrs = Dict(attrs)
if tag == 'table':
if (attrs['id'] == 'stafflist' and
attrs['class'] == 'stafflist'
):
self.is_table = True
if self.is_table:
if tag == 'tr':
self.is_row = True
if self.is_row:
if tag == 'td':
self.is_col = True
def handle_endtag(self, tag):
if tag == 'table':
self.is_table = False
if self.is_table:
if tag == 'tr':
self.is_row = False
if self.is_row:
if tag == 'td':
self.is_col = False
def handle_data(self, data):
if self.is_table:
if self.is_row:
if self.is_col:
self.rowdata.append(data)
else:
self.tabledata.append(self.rowdata)
self.rowdata = []
pass
def get_titles(self):
return self.tabledata
class AniDBMylistTypeParser(HTMLParser.HTMLParser):
is_root = Bool()
is_custom = Bool()
is_userinfo = Bool()
is_cats = Bool()
is_animes = Bool()
is_ani = Bool()
is_status = Bool()
is_neps = Bool()
is_seps = Bool()
is_titles = Bool()
is_title = Bool()
#
is_tags = Bool()
is_state = Bool()
is_size = Bool()
is_rating = Bool()
is_reviews = Bool()
#
is_dates = Bool()
#
conditions = None
def __init__(self):
self.conditions = (
self.is_root, self.is_custom, self.is_userinfo, self.is_cats,
self.is_animes, self.is_ani, self.is_status, self.is_neps,
self.is_seps, self.is_titles, self.is_title, self.is_tags,
self.is_state, self.is_size, self.is_rating, self.is_reviews,
self.is_dates)
        # reset every shared flag -- the Bool instances would otherwise carry
        # a 'memory effect' over from a previous parse
for item in self.conditions:
item + False
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
try:
HTMLParser.HTMLParser.feed(self, data)
except StopPleaseException:
pass
def handle_starttag(self, tag, attrs):
attrs = Dict(attrs)
if tag == 'root':
self.is_root + True
# print 'root'
if self.is_root:
if tag == 'custom':
self.is_custom + True
# print 'custom'
if self.is_custom:
if tag == 'userinfo':
self.is_userinfo + True
# print 'userinfo'
if tag == 'cats':
self.is_cats + True
# print 'cats'
if tag == 'animes':
self.is_animes + True
# print 'animes'
if self.is_animes:
if tag == 'anime':
self.is_ani + True
# print 'anime'
if self.is_ani:
if tag == 'status':
self.is_status + True
# print 'status'
if tag == 'neps':
self.is_neps + True
# print 'neps'
if tag == 'seps':
self.is_seps + True
# print 'seps'
if tag == 'titles':
self.is_titles + True
# print 'titles'
if self.is_titles:
if tag == 'title':
self.is_title + True
# print 'title'
if tag == 'tags':
self.is_tags + True
# print 'tags'
if tag == 'state':
self.is_state + True
# print 'state'
if tag == 'size':
self.is_size + True
# print 'size'
if tag == 'rating':
self.is_rating + True
# print 'rating'
if tag == 'reviews':
self.is_reviews + True
# print 'reviews'
if tag == 'dates':
self.is_dates + True
# print 'dates'
if reduce(lambda res, x : res and x, self.conditions, True):
raise StopPleaseException()
def handle_endtag(self, tag):
pass
def handle_data(self, data):
pass
@property
def type(self):
lst = AniList()
if reduce(lambda res, x : res and x, self.conditions, True):
print 'MUYLIST PARSER LOL=====================>>'
lst.type = listtype.MYLIST
lst.name = 'mylist'
## info = {}
## info[keys.LIST_TYPE] = keys.FTYPE_MYLIST
## info[keys.LIST_NAME] = 'mylist'
## return info
else:
#return None
lst.type = listtype.UNKNOWN
return lst
class AniDBTitleTypeParser(HTMLParser.HTMLParser):
pass
class AniDBListTypeParser(HTMLParser.HTMLParser):
is_header = False
found = False
is_person = False
is_company = False
namae = ''
person_regexp = re.compile(r'Person:\s(.+)')
company_regexp = re.compile(r'Company:\s(.+)')
def handle_starttag(self, tag, attrs):
if tag == 'table':
attrs = Dict(attrs)
if (attrs['id'] == 'stafflist' and
attrs['class'] == 'stafflist' and
not self.is_person
):
self.is_company = True
elif (attrs['id'] == 'characterlist' and
attrs['class'] == 'characterlist' and
not self.is_company
):
self.is_person = True
if tag == 'h1':
self.is_header = True
def handle_endtag(self, tag):
if tag == 'h1':
self.is_header = False
def handle_data(self, data):
if self.is_header:
creator_str = data
if re.search(self.person_regexp, creator_str) is not None:
self.namae = re.search(self.person_regexp,
creator_str).group(1).strip()
if re.search(self.company_regexp, creator_str) is not None:
self.namae = re.search(self.company_regexp,
creator_str).group(1).strip()
def get_type(self):
lst = AniList()
if self.is_person or self.is_company:
lst.name = str(self.namae)
## info = {keys.LIST_NAME : str(self.namae)}
if self.is_person:
lst.type = listtype.PERSON
## info[keys.LIST_TYPE] = keys.FTYPE_PERSON
if self.is_company:
lst.type = listtype.COMPANY
## info[keys.LIST_TYPE] = keys.FTYPE_COMPANY
# return info
else:
lst.type = listtype.UNKNOWN
# return None
return lst
################################################################################
# PARSER TESTING
################################################################################
def test():
import os
curdir = os.path.abspath(os.path.curdir)
file_ = ' AniDB.net Person - Hanazawa Kana .html'
file_path = os.path.join(curdir, 'lists', file_)
# file_path = '/home/alex/table3.html'
with open(file_path) as f:
html_text = ''.join(f.readlines()) # .replace('</tr><tr', '</tr> <tr')
table_parser = PersonParser()
table_parser.feed(html_text.decode('utf-8'))
titles = table_parser.titles
# print table_parser.get_tables()
# print len(table_parser.get_tables())
for t in titles:
print t.ani_id, t.year, '\t' , t.ani_name
print len(titles)
# for t in titles:
# print len(t), t
# print 'row count:', table_parser.ROW_COUNT
def test1():
import os
import xml.etree.ElementTree as et
from xml.etree.ElementTree import XMLParser as fuckyouall
from xml.etree.ElementTree import ParseError as fuckit
curdir = os.path.abspath(os.path.curdir)
file_ = ' AniDB.net Person - Hanazawa Kana .html'
file_path = os.path.join(curdir, 'lists', file_)
with open(file_path) as f:
tree = et.fromstringlist(f.readlines(), fuckyouall(html=True))
    root = tree  # et.fromstringlist() already returns the root element
for table in root.findall('table'):
print 'table'
def test3():
import os
curdir = os.path.abspath(os.path.curdir)
file_ = 'mylist.xml'
file_path = os.path.join(curdir, 'lists', file_)
with open(file_path) as f:
file_text = ''.join(f.readlines())
parser = MylistParser()
parser.feed(file_text)
    animes = parser.titles
for t in animes[0:50]:
print t
def test4():
text = 'spam spam spam <![CDATA[ Angel Beats! ]]> spam spam <![CDATA[ Another ]]> spam '
mylistParser = MylistParser()
print mylistParser.process_cdata2(text)
print base64.b64decode(mylistParser.process_cdata2(text))
def test5():
import os
# file_ = ' AniDB.net Person - Hanazawa Kana .html'
file_ = ' AniDB.net Company - P.A. Works .html'
file_ = ' AniDB.net Person - Maeda Jun .html'
file_ = 'mylist.xml'
file_path = os.path.join(os.path.abspath(os.path.curdir), 'lists', file_)
with open(file_path) as f:
txt = ''.join(f.readlines())
aniParser = AniDBListTypeParser()
aniParser.feed(txt)
print 'aniParser:', aniParser.get_type()
    if aniParser.get_type().type == listtype.UNKNOWN:
        mylistTypeParser = AniDBMylistTypeParser()
        mylistTypeParser.feed(txt)
        print 'mylistTypeParser: ', mylistTypeParser.type
def test6():
print 'lol'
import os
# file_ = ' AniDB.net Person - Hanazawa Kana .html'
file_ = ' AniDB.net Company - P.A. Works .html'
# file_ = ' AniDB.net Person - Maeda Jun .html'
# file_ = 'mylist.xml'
file_path = os.path.join(os.path.abspath(os.path.curdir), 'lists', file_)
with open(file_path) as f:
txt = ''.join(f.readlines())
parser = CompanyParser()
parser.feed(txt)
    animes = parser.titles
for t in animes:
# print t.year, t.ani_name
print t.year, t.ani_name
print listtype.PERSON
# listtype.PERSON = 5
print listtype.PERSON
import collections
Const = collections.namedtuple('Const', ['UNKNOWN', 'PERSON'])
const = Const(0, 1)
# print const.UNKNOWN
# const.UNKNOWN = 5
def test7():
import os
curdir = os.path.abspath(os.path.curdir)
file_ = ' AniDB.net Company - Visual Art`s Key .html'
#file_ = ' AniDB.net Company - Shaft .html'
file_path = os.path.join(curdir, 'lists', file_)
with open(file_path) as f:
file_text = ''.join(f.readlines())
parser = CompanyParser()
parser.feed(file_text)
animes = parser.titles
for t in animes[0:50]:
print t.ani_id, t.ani_name
################################################################################
# LIST FUNCTIONS
################################################################################
def list_mylist(list_file, completed=False):
with open(list_file) as f:
fh = StringIO.StringIO()
fh.write(f.read())
titles = list_mylist4(fh, completed)
return titles
def list_mylist4(xml_fh, completed=False):
xml_text = xml_fh.getvalue()
mylistParser = MylistParser()
mylistParser.feed(xml_text)
titles = mylistParser.titles
return titles
def list_company(list_file):
# titles = None
with open(list_file) as f:
fh = StringIO.StringIO()
fh.write(f.read())
titles = list_company4(fh)
return titles
def list_company4(html_fh):
html_text = html_fh.getvalue()
company_parser = CompanyParser()
company_parser.feed(html_text.decode('utf-8'))
titles = company_parser.titles
print 'list_company', len(titles)
return titles
def list_person(list_file):
# titles = None
with open(list_file) as f:
fh = StringIO.StringIO()
fh.write(f.read())
titles = list_person4(fh)
return titles
def list_person4(html_fh):
html_text = html_fh.getvalue()
table_parser = PersonParser()
table_parser.feed(html_text.decode('utf-8'))
titles = table_parser.titles
print 'list_person', len(titles)
return titles
def list_check(list_file):
with open(list_file) as f:
fh = StringIO.StringIO()
fh.write(f.read())
info = list_check4(fh)
return info
def list_check4(list_fh):
file_text = list_fh.getvalue()
aniParser = AniDBListTypeParser()
aniParser.feed(file_text)
lst = aniParser.get_type()
if lst.type == listtype.UNKNOWN:
lst = AniList()
print 'MUYLIST LISTCHECK LOL=====================>>'
mylistTypeParser = AniDBMylistTypeParser()
mylistTypeParser.feed(file_text)
lst = mylistTypeParser.type
return lst
def list_check44(list_fh):
    file_text = list_fh.getvalue()
    aniParser = AniDBListTypeParser()
    aniParser.feed(file_text)
    info = aniParser.get_type()
    # get_type()/type always return an AniList, never None, so check .type
    if info.type != listtype.UNKNOWN:
        return info
    mylistTypeParser = AniDBMylistTypeParser()
    mylistTypeParser.feed(file_text)
    info = mylistTypeParser.type
    if info.type != listtype.UNKNOWN:
        return info
    return None
def list_file3(file_path):
    with open(file_path) as f:
        fh = StringIO.StringIO()
        fh.write(f.read())
    file_status = list_check4(fh)
    if file_status.type == listtype.COMPANY:
        lst = list_company4(fh)
        return lst
    elif file_status.type == listtype.PERSON:
        lst = list_person4(fh)
        return lst
    elif file_status.type == listtype.MYLIST:
        lst = list_mylist4(fh)
        return lst
    else:
        return None
################################################################################
# TESTING
################################################################################
def compare():
main = os.path.join(os.curdir, 'lists')
paths = {
'jcs' : os.path.join(main, ' AniDB.net Company - J.C.Staff .html'),
'xbc' : os.path.join(main, ' AniDB.net Company - Xebec .html'),
'sht' : os.path.join(main, ' AniDB.net Company - Shaft .html'),
'mj' : os.path.join(main, ' AniDB.net Company - Xebec .html'),
'hk' : os.path.join(main, ' AniDB.net Person - Hanazawa Kana .html'),
'kh' : os.path.join(main, ' AniDB.net Person - Kamiya Hiroshi .html'),
'iu' : os.path.join(main, ' AniDB.net Person - Iguchi Yuka .html'),
'sh' : os.path.join(main, ' AniDB.net Person - Sakurai Harumi .html'),
'kr' : os.path.join(main, ' AniDB.net Person - Kugimiya Rie .html'),
'hs' : os.path.join(main, ' AniDB.net Person - Hino Satoshi .html'),
'on' : os.path.join(main, ' AniDB.net Person - Okamoto Nobuhiko .html'),
'mylist' : os.path.join(main, 'mylist.xml')
}
print list_check('/home/alex/prog/lp2/lists/listcompare.pyc').type
# hk = list_person(paths['hk'])
#mylist = list_mylist(paths['mylist'])
# print_list(hk[0:25])
#print_list(mylist[0:25])
# inter1 = list_inter([hk, mylist])
# inter2 = list_inter([mylist, hk])
# l1 = [{keys.ANI_ID : str(i)} for i in range(5)]
# l2 = [{keys.ANI_ID : str(i)} for i in range(10)]
# inter1 = list_inter([l1, l2])
# inter2 = list_inter([l2, l1])
# print inter1
# print inter2
# print len(hk)
# print len(mylist)
# print len(inter1)
# print len(inter2)
# hk2 = list_person44(paths['hk'])
# print len(hk1), len(hk2)
# print_list(hk1)
# lst = hk1
# for i in lst:
# print i
# print i[keys.YEAR], '%s' % (i[keys.ANI_NAME])
# mylist = list_mylist(os.path.join(main, 'mylist.xml'), completed = False)
# import timeit
# times = 1
# f1_t = timeit.Timer('func()', 'from __main__ import f1 as func').timeit(times)
# f2_t = timeit.Timer('func()', 'from __main__ import f2 as func').timeit(times)
# print 'f1 time: ', f1_t
# print 'f2 time: ', f2_t
# # times = 5
# # f1_t = Timer('func()', 'from __main__ import f1 as func').timeit(times)
# # print 'f1:', f1_t
################################################################################
# COMPARE FUNCTIONS
################################################################################
def list_inter(lists):
if len(lists) < 2:
raise ValueError('must be >1 lists')
lst = lists[0]
store = dict(
(item.ani_id, item) for item in lst
)
inter = set([i.ani_id for i in lst])
for lst in lists[1:]:
inter = inter.intersection(set([item.ani_id for item in lst]))
for item in lst:
store[item.ani_id] = item
output = []
for item in inter:
output.append(store[item])
return output
def list_diff(lists):
# titles are in first list, but not in second
if len(lists) != 2:
raise ValueError('must be 2 lists')
lst1_set = set([item.ani_id for item in lists[0]])
lst2_set = set([item.ani_id for item in lists[1]])
differ = lst1_set.difference(lst2_set)
output = []
for item in lists[0]:
if item.ani_id in differ:
output.append(item)
return output
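# Minimal, hedged sketch of list_inter()/list_diff(). The Title namedtuple is
# a hypothetical stand-in for the parser output; the helpers only rely on the
# ani_id attribute (year/ani_name are just for display).
def _example_compare():
    import collections
    Title = collections.namedtuple('Title', ['ani_id', 'ani_name', 'year'])
    a = [Title('1', 'Clannad', 2007), Title('2', 'Kanon', 2006)]
    b = [Title('2', 'Kanon', 2006), Title('3', 'Air', 2005)]
    print [t.ani_id for t in list_inter([a, b])]  # ['2'] - present in both
    print [t.ani_id for t in list_diff([a, b])]   # ['1'] - in a but not in b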
def print_list(lst, uniq=True, field='year'):
count = 0
uniq_list = []
for item in sorted(lst, key=lambda i : i.year):
conditions = []
data = (item.year, item.ani_name)
if uniq:
if data not in uniq_list:
uniq_list.append(data)
# conditions.append(1)
conditions.append(True)
else:
# conditions.append(0)
conditions.append(False)
if not False in conditions:
# if sum(conditions) == len(conditions):
# if reduce(lambda res, x: res and x, conditions, True):
print '%s %s' % (data[0], data[1])
# print item
# break
count += 1
print
print 'count:', count
################################################################################
# A CERTAIN PROGRAM
################################################################################
class ACertainView:
root = None
tabs = {}
listCompare = None
def __init__(self):
self.create_ui()
self.listCompare = ListCompareView(root=self.root,
main_frame=self.tabs['list_compare'])
def create_ui(self):
self.root = Tkinter.Tk()
root = self.root
root.title('A Certain Program')
w = 600
h = 500
self.root.geometry('%sx%s+0+0' % (w, h))
notebook = ttk.Notebook(self.root)
notebook.pack(fill='both', expand=True)
TABS = (
('list_compare', 'List Compare'),
('auto_dir_make', 'AutoDirMake'),
('res_copy', 'ResCopy'),
('wh_scan', 'WH Scan'),
('tenshi_ost', 'Tenshi OST'),
)
for name, title in TABS:
page = ttk.Frame(self.root)
notebook.add(page, text=title)
self.tabs[name] = page
def close(self):
print 'close'
self.root.destroy()
self.root.quit()
class ACertainModel:
view = None
listCompare = None
##
##
def __init__(self, view):
self.view = view
self.listCompare = ListCompareModel(view.listCompare)
class ACertainController:
view = None
model = None
listCompare = None
def __init__(self):
self.view = ACertainView()
self.model = ACertainModel(self.view)
view = self.view.listCompare
model = self.model.listCompare
self.listCompare = ListCompareController(view, model)
self.view.root.protocol('WM_DELETE_WINDOW', self.close_handler)
self.view.root.mainloop()
def close_handler(self):
self.view.close()
class ACertainApp:
controller = None
def __init__(self):
self.controller = ACertainController()
################################################################################
# LIST COMPARE PROGRAM
################################################################################
class ListCompareView:
# main window
root = None
listboxes = {}
buttons = {}
radiobuttons = {}
textlabels = {}
modes = {}
def close(self):
self.root.destroy()
self.root.quit()
def __init__(self, root=None, main_frame=None):
self.createWidgets(root, main_frame)
def createWidgets(self, root, main_frame):
if root == None:
# standalone
self.root = Tkinter.Tk()
self.root.title('A Certain Title')
w = 600
h = 500
self.root.geometry('%sx%s+0+0' % (w, h))
else:
# inside
self.root = root
self.modes['result_sort'] = Tkinter.StringVar()
self.modes['result_sort'].set('year')
self.modes['list_compare'] = Tkinter.StringVar()
self.modes['list_compare'].set('intersect')
self.textlabels['result_stat'] = Tkinter.StringVar()
self.textlabels['result_stat'].set('count: 0')
self.textlabels['awailable_stat'] = Tkinter.StringVar()
        self.textlabels['awailable_stat'].set('0 lists available')
self.mk_main_frame(main_frame)
def mk_main_frame(self, main):
if main == None:
# standalone
main_frame = Tkinter.Frame(master=self.root, bg='black',bd=3)
main_frame.pack(fill='both', expand=True)
else:
# inside
main_frame = main
self.mk_result_frame(main_frame)
self.mk_additional_frame(main_frame)
def mk_result_frame(self, main_frame):
result_frame = Tkinter.Frame(master=main_frame, bg='red', bd=3)
result_frame.pack(side='left', fill='both', expand=True)
Tkinter.Label(result_frame, text='result list').pack(fill='both',
side='top')
self.mk_statistic_frame(result_frame)
self.listboxes['result'] = Tkinter.Listbox(result_frame,
selectmode=Tkinter.EXTENDED)
result_listbox = self.listboxes['result']
result_listbox.pack(side='left', fill='both', expand=True)
res_scrollBar = Tkinter.Scrollbar(result_frame)
res_scrollBar.pack(side='right', fill='y', expand=False)
res_scrollBar['command'] = result_listbox.yview
result_listbox['yscrollcommand'] = res_scrollBar.set
def mk_statistic_frame(self, result_frame):
statistic_frame = Tkinter.Frame(result_frame, bg='blue', bd=3)
statistic_frame.pack(side='bottom', fill='both', expand=False)
sort_label = Tkinter.Label(statistic_frame, text='sort:')
sort_label.pack(side='left', fill='both')
res_sort_mode = self.modes['result_sort']
RADIO = (
('name', 'name'),
('year', 'year')
)
radio_opt = {'side' : 'left', 'fill' : 'none'}
for title, value_ in RADIO:
self.radiobuttons[title] = Tkinter.Radiobutton(statistic_frame,
text=title,
variable=res_sort_mode,
value=value_,
anchor='w')
self.radiobuttons[title].pack(**radio_opt)
exp_label = Tkinter.Label(statistic_frame, text='EXPAND')
exp_label.pack(side='left', fill='both', expand=True)
stat_label = Tkinter.Label(statistic_frame,
textvariable=self.textlabels['result_stat'])
stat_label.pack(side='left', fill='both')
def mk_additional_frame(self, main_frame):
additinal_frame = Tkinter.Frame(master=main_frame,
bg='blue',
bd=3)
additinal_frame.pack(side='right', fill='both', expand=False)
self.mk_selected_frame(additinal_frame)
self.mk_awailable_frame(additinal_frame)
def mk_selected_frame(self, additinal_frame):
selected_frame = Tkinter.Frame(additinal_frame, bg='green',
bd=3)
selected_frame.pack(side='top', fill='both', expand=True)
selectedLabel = Tkinter.Label(selected_frame, text='selected lists')
selectedLabel.pack(fill='both')
self.listboxes['selected'] = Tkinter.Listbox(selected_frame,
selectmode=Tkinter.EXTENDED)
sel_listbox = self.listboxes['selected']
sel_listbox.pack(side='left', fill='both', expand=True)
sel_scrollBar = Tkinter.Scrollbar(selected_frame)
sel_scrollBar.pack(side='left', fill='y', expand=False)
sel_scrollBar['command'] = sel_listbox.yview
sel_listbox['yscrollcommand'] = sel_scrollBar.set
# # style = ttk.Style()
# # style.map('C.TButton',
# # foreground=[('pressed','red'),('active','blue')],
# # background=[('pressed','!disabled','black'),('active','white')]
# # )
BUTTONS = (
('UP', 'up'),
('DOWN', 'down'),
('LIST', 'list'),
)
# third button will be expand
exp = 3
for title, name in BUTTONS:
exp -= 1
self.buttons[name] = Tkinter.Button(selected_frame, text=title)
self.buttons[name].pack(side='top', fill='both',
expand=not bool(exp))
mode_label = Tkinter.Label(selected_frame, text='mode:', anchor='n')
mode_label.pack(side='top', fill='x')
compare_mode = self.modes['list_compare']
RADIO = (
('intersect', 'intersect'),
('differ', 'differ'),
('union', 'union')
)
radio_opt = {'side' : 'top', 'fill' : 'x'}
for title, value_ in RADIO:
Tkinter.Radiobutton(selected_frame, text=title,
variable=compare_mode, value=value_,
anchor='w').pack(**radio_opt)
def mk_awailable_frame(self, additinal_frame):
awailable_frame = Tkinter.Frame(additinal_frame, bg='red', bd=3)
awailable_frame.pack(side='top', fill='both', expand=True)
self.mk_aw_buttons_frame(awailable_frame)
        aw_label = Tkinter.Label(awailable_frame, text='available lists')
aw_label.pack(side='top',fill='both')
stat_label = Tkinter.Label(awailable_frame,
textvariable=self.textlabels['awailable_stat'])
stat_label.pack(side='bottom', fill='both')
self.listboxes['awailable'] = Tkinter.Listbox(awailable_frame,
selectmode=Tkinter.EXTENDED)
aw_listbox = self.listboxes['awailable']
aw_listbox.pack(side='left', fill='both', expand=True)
aw_scrollBar = Tkinter.Scrollbar(awailable_frame)
aw_scrollBar.pack(side='right', fill='y', expand=False)
aw_scrollBar['command'] = aw_listbox.yview
aw_listbox['yscrollcommand'] = aw_scrollBar.set
def mk_aw_buttons_frame(self, awailable_frame):
aw_buttons_frame = Tkinter.Frame(awailable_frame,
bg='yellow',
bd=3)
aw_buttons_frame.pack(side='top', fill='x', expand=False)
BUTTONS = (
('ADD', 'add'),
('DEL', 'del'),
('RELOAD', 'reload')
)
button_opt = {'side' : 'left', 'fill' : 'x', 'expand' : True}
for title, name in BUTTONS:
self.buttons[name] = Tkinter.Button(aw_buttons_frame, text=title)
self.buttons[name].pack(**button_opt)
class ListCompareModel:
view = None
lists = {}
def __init__(self, view_):
self.view = view_
self.lists['result'] = []
self.lists['selected'] = []
self.lists['awailable'] = []
def upSelected(self):
print 'UP'
selected = self.lists['selected']
listbox = self.view.listboxes['selected']
indexes = map(int, listbox.curselection())
print 'old', indexes
new = [i for i in indexes]
for i in indexes:
if i != indexes.index(i):
selected[i], selected[i-1] = selected[i-1], selected[i]
new[indexes.index(i)] -= 1
        print 'new', new
self.displaySelected()
def downSelected(self):
print 'DOWN'
selected = self.lists['selected']
listbox = self.view.listboxes['selected']
indexes = sorted(map(int, listbox.curselection()), reverse=True)
print 'old', indexes
new = [i for i in indexes]
for i in indexes:
if i != listbox.size() - indexes.index(i) - 1:
selected[i], selected[i+1] = selected[i+1], selected[i]
new[indexes.index(i)] += 1
print 'new', new
self.displaySelected()
def listCompare(self):
print 'LIST COMPARE'
selected = self.lists['selected']
result = self.lists['result']
lists = []
for item in selected:
if item.type != listtype.UNKNOWN:
if item.type == listtype.PERSON:
lst = list_person(item.path)
elif item.type == listtype.COMPANY:
lst = list_company(item.path)
elif item.type == listtype.MYLIST:
lst = list_mylist(item.path)
lists.append(lst)
res = []
compareMode = self.view.modes['list_compare'].get()
if compareMode == 'intersect':
res = list_inter(lists)
elif compareMode == 'differ':
res = list_diff(lists[0:2])
elif compareMode == 'union':
for lst in lists:
for item in lst:
res.append(item)
print 'LENGTH', len(res)
result[:] = res
self.uniqResult()
self.sortResult()
self.displayResult()
def addList(self):
print 'ADD'
listbox = self.view.listboxes['awailable']
indexes = map(int, listbox.curselection())
indexes.sort(reverse=True)
awailable = self.lists['awailable']
selected = self.lists['selected']
for i in indexes:
selected.append(awailable.pop(i))
self.sortAwailable()
self.displayAwailable()
self.displaySelected()
def delList(self):
        print 'DEL'
listbox = self.view.listboxes['selected']
indexes = map(int, listbox.curselection())
indexes.sort(reverse=True)
awailable = self.lists['awailable']
selected = self.lists['selected']
for i in indexes:
awailable.append(selected.pop(i))
self.sortAwailable()
self.displayAwailable()
self.displaySelected()
def reloadLists(self):
print 'RELOAD'
LISTS_DIR = 'lists'
curdir = os.path.abspath(os.path.curdir)
LISTS_PATH = os.path.normpath(os.path.join(curdir, LISTS_DIR))
print LISTS_PATH
awailable = self.lists['awailable']
awailable[:] = []
for list_file in os.listdir(LISTS_PATH):
list_file_fullpath = os.path.join(LISTS_PATH, list_file)
lst = list_check(list_file_fullpath)
if lst.type != listtype.UNKNOWN:
lst.path = list_file_fullpath
print lst.type, lst.name
awailable.append(lst)
self.sortAwailable()
self.displayAwailable()
def sortAwailable(self):
lst = self.lists['awailable']
lst.sort(key=lambda item : (item.type, item.name))
def displayAwailable(self):
listbox = self.view.listboxes['awailable']
textlabel = self.view.textlabels['awailable_stat']
awailable = self.lists['awailable']
listbox.delete(0, Tkinter.END)
for item in awailable:
listbox.insert(Tkinter.END, '%s' % (item.name))
listbox.update()
        textlabel.set('%d lists available' % (len(awailable)))
def displaySelected(self):
listbox = self.view.listboxes['selected']
selected = self.lists['selected']
listbox.delete(0, Tkinter.END)
for item in selected:
listbox.insert(Tkinter.END, '%s' % (item.name))
listbox.update()
#self.sortResult()
def sortResult(self):
mode = self.view.modes['result_sort'].get()
print mode
if mode == 'year':
function = lambda item : (item.year, item.ani_name)
elif mode == 'name':
function = lambda item : (item.ani_name, item.year)
self.lists['result'].sort(key=function)
def uniqResult(self):
result = self.lists['result']
uniq_dict = dict(
(item.ani_id, item) for item in result
)
result[:] = uniq_dict.values()
def displayResult(self):
listbox = self.view.listboxes['result']
result = self.lists['result']
textlabel = self.view.textlabels['result_stat']
listbox.delete(0, Tkinter.END)
for item in result:
listbox.insert(Tkinter.END, '%s %s' % (item.year, item.ani_name))
textlabel.set('count: %i' % len(result))
listbox.update()
def resultSortChange(self, type_):
self.view.modes['result_sort'].set(type_)
self.sortResult()
self.displayResult()
class ListCompareController:
model = None
view = None
def __init__(self, view=None, model=None):
if view == None:
self.view = ListCompareView()
else:
self.view = view
if model == None:
self.model = ListCompareModel(self.view)
else:
self.model = model
self.bind_handlers()
if view == None:
self.view.root.protocol('WM_DELETE_WINDOW', self.close_handler)
self.view.root.mainloop()
def bind_handlers(self):
up_button = self.view.buttons['up']
up_button.bind("<Button-1>", self.up_handler)
down_button = self.view.buttons['down']
down_button.bind("<Button-1>", self.down_handler)
list_button = self.view.buttons['list']
list_button.bind("<Button-1>", self.list_handler)
add_button = self.view.buttons['add']
add_button.bind("<Button-1>", self.add_handler)
del_button = self.view.buttons['del']
del_button.bind("<Button-1>", self.del_handler)
reload_button = self.view.buttons['reload']
reload_button.bind("<Button-1>", self.reload_handler)
# print self.view.radiobuttons
sort_name_radiobutton = self.view.radiobuttons['name']
sort_name_radiobutton.bind("<Button-1>", self.result_sort_handler_name)
sort_year_radiobutton = self.view.radiobuttons['year']
sort_year_radiobutton.bind("<Button-1>", self.result_sort_handler_year)
def up_handler(self, event):
print 'up handler'
self.model.upSelected()
def down_handler(self, event):
print 'down handler'
self.model.downSelected()
def list_handler(self, event):
print 'list handler'
self.model.listCompare()
def add_handler(self, event):
print 'add handler'
self.model.addList()
def del_handler(self, event):
print 'del handler'
self.model.delList()
def reload_handler(self, event):
print 'reload handler'
self.model.reloadLists()
def result_sort_handler_name(self, event):
print 'result sort handler name'
self.model.resultSortChange('name')
def result_sort_handler_year(self, event):
print 'result sort handler year'
self.model.resultSortChange('year')
def close_handler(self):
print 'close'
self.view.close()
class ListCompareApp:
controller = None
def __init__(self):
self.controller = ListCompareController()
################################################################################
# OTHER/OLD
################################################################################
def import_modules():
modules_path = os.path.join(os.path.curdir, MODULES_DIR)
if not (modules_path in sys.path):
sys.path.append(modules_path)
def list_parse():
try:
from lxml import etree as etree
except ImportError:
import xml.etree.ElementTree as etree
import gzip
params = ['series_animedb_id', 'series_title', 'series_type', 'series_episodes',
'my_id', 'my_watched_episodes', 'my_start_date', 'my_finish_date',
'my_fansub_group', 'my_rated', 'my_score', 'my_dvd', 'my_storage',
'my_status', 'my_comments', 'my_times_watched', 'my_rewatch_value',
'my_downloaded_eps', 'my_tags', 'my_rewatching', 'my_rewatching_ep',
'update_on_import']
status = {'p' : 'Plan to Watch',
'c' : 'Completed',
'w' : 'Watching',
'name' : 'my_status'
}
ALL = 1
with gzip.open('zip/animelist_1391893533_-_3199957.xml.gz', 'r') as f:
# tree = etree.parse(f)
# root = tree.getroot()
root = etree.fromstringlist(f)
# print(len(titles))
count = 0
for title in root.findall('anime'):
if (title.find(status['name']).text == status['c'] or ALL):
name = title.find('series_title').text
print(name)
count += 1
print()
print('Count: ', count)
def parse_info2222():
try:
# python2
import urllib2
except ImportError:
# python3
import urllib.request as urllib2
import sys
try:
import mechanize
except ImportError:
modules_path = os.path.join(os.path.curdir, MODULES_DIR)
if not (modules_path in sys.path):
sys.path.append(modules_path)
print(sys.path)
try:
import mechanize
except ImportError:
# lolwut?
# modules dir corrupt?
# restore from backup
pass
# #LOAD_DIR = ''
anidb_link = 'http://anidb.net/perl-bin/animedb.pl?show=anime&aid=6556'
anidb_link = 'http://ru.wikipedia.org'
br = mechanize.Browser()
br.set_handle_robots(False)
# headers = [('User-Agent','Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:16.0) Gecko/20120815 Firefox/16.0')]
# br.addheaders = headers
page = br.open(anidb_link)
request = br.request
print request.header_items()
print br.title()
# print 'links count: ', len(br.links())
# for link in br.links():
# print link.text
# # page = urllib2.urlopen(anidb_link)
# # page_content = page.read()
# # print(page_content)
# #
# #
# # page_name = '1.html'
# # save_path = os.path.join(LOAD_DIR, page_name)
# #
# # with open(save_path, 'w') as f:
# # f.write(page_content)
# # print('done')
################################################################################
if __name__ == '__main__':
#test7()
#ListCompareApp()
ACertainApp()
# import_modules()
#compare()
# prog = ListCompareApp()
# path = '/media/Локальный диск/GAMES/unsortd/_r/'
# dir_ = 'Aura: Maryuuinkouga Saigo no Tatakai'
# path = os.path.join(os.path.normpath(path), dir_)
# make_dirs(path)
pass
| sora7/listparse | OLD/lp2/listparse.py | Python | gpl-2.0 | 77,022 | 0.006116 |
"""
Comsystem command module.
Comm commands are OOC commands and intended to be made available to
the Player at all times (they go into the PlayerCmdSet). So we
make sure to homogenize self.caller to always be the player object
for easy handling.
"""
from django.conf import settings
from src.comms.models import Channel, Msg, PlayerChannelConnection, ExternalChannelConnection
from src.comms import irc, imc2, rss
from src.comms.channelhandler import CHANNELHANDLER
from src.utils import create, utils, prettytable
from src.commands.default.muxcommand import MuxCommand, MuxPlayerCommand
# limit symbol import for API
__all__ = ("CmdAddCom", "CmdDelCom", "CmdAllCom",
"CmdChannels", "CmdCdestroy", "CmdCBoot", "CmdCemit",
"CmdCWho", "CmdChannelCreate", "CmdCset", "CmdCdesc",
"CmdPage", "CmdIRC2Chan", "CmdIMC2Chan", "CmdIMCInfo",
"CmdIMCTell", "CmdRSS2Chan")
def find_channel(caller, channelname, silent=False, noaliases=False):
"""
Helper function for searching for a single channel with
some error handling.
"""
channels = Channel.objects.channel_search(channelname)
if not channels:
if not noaliases:
channels = [chan for chan in Channel.objects.all() if channelname in chan.aliases]
if channels:
return channels[0]
if not silent:
caller.msg("Channel '%s' not found." % channelname)
return None
elif len(channels) > 1:
matches = ", ".join(["%s(%s)" % (chan.key, chan.id) for chan in channels])
if not silent:
caller.msg("Multiple channels match (be more specific): \n%s" % matches)
return None
return channels[0]
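# Hedged usage sketch for find_channel(); the comm commands below all follow
# this pattern ("public" is only an illustrative channel name):
def _example_find_channel(caller):
    channel = find_channel(caller, "public")
    if not channel:
        # find_channel has already messaged the caller about the failure
        return
    channel.msg("[%s] hello" % channel.key)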
class CmdAddCom(MuxPlayerCommand):
"""
addcom - subscribe to a channel with optional alias
Usage:
addcom [alias=] <channel>
Joins a given channel. If alias is given, this will allow you to
refer to the channel by this alias rather than the full channel
name. Subsequent calls of this command can be used to add multiple
aliases to an already joined channel.
"""
key = "addcom"
aliases = ["aliaschan","chanalias"]
help_category = "Comms"
locks = "cmd:not pperm(channel_banned)"
def func(self):
"Implement the command"
caller = self.caller
args = self.args
player = caller
if not args:
self.msg("Usage: addcom [alias =] channelname.")
return
if self.rhs:
# rhs holds the channelname
channelname = self.rhs
alias = self.lhs
else:
channelname = self.args
alias = None
channel = find_channel(caller, channelname)
if not channel:
# we use the custom search method to handle errors.
return
# check permissions
if not channel.access(player, 'listen'):
self.msg("%s: You are not allowed to listen to this channel." % channel.key)
return
string = ""
if not channel.has_connection(player):
# we want to connect as well.
if not channel.connect_to(player):
                # connect_to() returns False when the player may not join
self.msg("%s: You are not allowed to join this channel." % channel.key)
return
else:
string += "You now listen to the channel %s. " % channel.key
else:
string += "You are already connected to channel %s." % channel.key
if alias:
# create a nick and add it to the caller.
caller.nicks.add(alias, channel.key, nick_type="channel")
string += " You can now refer to the channel %s with the alias '%s'."
self.msg(string % (channel.key, alias))
else:
string += " No alias added."
self.msg(string)
class CmdDelCom(MuxPlayerCommand):
"""
delcom - unsubscribe from channel or remove channel alias
Usage:
delcom <alias or channel>
If the full channel name is given, unsubscribe from the
channel. If an alias is given, remove the alias but don't
unsubscribe.
"""
key = "delcom"
aliases = ["delaliaschan, delchanalias"]
help_category = "Comms"
locks = "cmd:not perm(channel_banned)"
def func(self):
"Implementing the command. "
caller = self.caller
player = caller
if not self.args:
self.msg("Usage: delcom <alias or channel>")
return
ostring = self.args.lower()
channel = find_channel(caller, ostring, silent=True, noaliases=True)
if channel:
# we have given a channel name - unsubscribe
if not channel.has_connection(player):
self.msg("You are not listening to that channel.")
return
chkey = channel.key.lower()
# find all nicks linked to this channel and delete them
for nick in [nick for nick in caller.nicks.get(nick_type="channel")
if nick.db_real.lower() == chkey]:
nick.delete()
channel.disconnect_from(player)
self.msg("You stop listening to channel '%s'. Eventual aliases were removed." % channel.key)
return
else:
# we are removing a channel nick
channame = caller.nicks.get(ostring, nick_type="channel")
channel = find_channel(caller, channame, silent=True)
if not channel:
self.msg("No channel with alias '%s' was found." % ostring)
else:
if caller.nicks.has(ostring, nick_type="channel"):
caller.nicks.delete(ostring, nick_type="channel")
self.msg("Your alias '%s' for channel %s was cleared." % (ostring, channel.key))
else:
self.msg("You had no such alias defined for this channel.")
class CmdAllCom(MuxPlayerCommand):
"""
allcom - operate on all channels
Usage:
allcom [on | off | who | destroy]
Allows the user to universally turn off or on all channels they are on,
as well as perform a 'who' for all channels they are on. Destroy deletes
all channels that you control.
Without argument, works like comlist.
"""
key = "allcom"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Runs the function"
caller = self.caller
args = self.args
if not args:
caller.execute_cmd("@channels")
self.msg("(Usage: allcom on | off | who | destroy)")
return
if args == "on":
# get names of all channels available to listen to and activate them all
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'listen')]
for channel in channels:
caller.execute_cmd("addcom %s" % channel.key)
elif args == "off":
#get names all subscribed channels and disconnect from them all
channels = [conn.channel for conn in PlayerChannelConnection.objects.get_all_player_connections(caller)]
for channel in channels:
caller.execute_cmd("delcom %s" % channel.key)
elif args == "destroy":
# destroy all channels you control
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'control')]
for channel in channels:
caller.execute_cmd("@cdestroy %s" % channel.key)
elif args == "who":
# run a who, listing the subscribers on visible channels.
string = "\n{CChannel subscriptions{n"
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'listen')]
if not channels:
string += "No channels."
for channel in channels:
string += "\n{w%s:{n\n" % channel.key
conns = PlayerChannelConnection.objects.get_all_connections(channel)
if conns:
string += " " + ", ".join([conn.player.key for conn in conns])
else:
string += " <None>"
self.msg(string.strip())
else:
# wrong input
self.msg("Usage: allcom on | off | who | clear")
class CmdChannels(MuxPlayerCommand):
"""
@clist
Usage:
@channels
@clist
comlist
    Lists all channels available to you, whether you listen to them or not.
    Use 'comlist' to only view your current channel subscriptions.
Use addcom/delcom to join and leave channels
"""
key = "@channels"
aliases = ["@clist", "channels", "comlist", "chanlist", "channellist", "all channels"]
help_category = "Comms"
locks = "cmd: not pperm(channel_banned)"
def func(self):
"Implement function"
caller = self.caller
# all channels we have available to listen to
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'listen')]
if not channels:
self.msg("No channels available.")
return
# all channel we are already subscribed to
subs = [conn.channel for conn in PlayerChannelConnection.objects.get_all_player_connections(caller)]
if self.cmdstring == "comlist":
# just display the subscribed channels with no extra info
comtable = prettytable.PrettyTable(["{wchannel","{wmy aliases", "{wdescription"])
for chan in subs:
clower = chan.key.lower()
nicks = [nick for nick in caller.nicks.get(nick_type="channel")]
comtable.add_row(["%s%s" % (chan.key, chan.aliases and "(%s)" % ",".join(chan.aliases) or ""),
"%s".join(nick.db_nick for nick in nicks if nick.db_real.lower()==clower()),
chan.desc])
caller.msg("\n{wChannel subscriptions{n (use {w@channels{n to list all, {waddcom{n/{wdelcom{n to sub/unsub):{n\n%s" % comtable)
else:
# full listing (of channels caller is able to listen to)
comtable = prettytable.PrettyTable(["{wsub","{wchannel","{wmy aliases","{wlocks","{wdescription"])
for chan in channels:
                clower = chan.key.lower()
                nicks = [nick for nick in caller.nicks.get(nick_type="channel")]
comtable.add_row([chan in subs and "{gYes{n" or "{rNo{n",
"%s%s" % (chan.key, chan.aliases and "(%s)" % ",".join(chan.aliases) or ""),
"%s".join(nick.db_nick for nick in nicks if nick.db_real.lower()==clower()),
chan.locks,
chan.desc])
caller.msg("\n{wAvailable channels{n (use {wcomlist{n,{waddcom{n and {wdelcom{n to manage subscriptions):\n%s" % comtable)
class CmdCdestroy(MuxPlayerCommand):
"""
@cdestroy
Usage:
@cdestroy <channel>
Destroys a channel that you control.
"""
key = "@cdestroy"
help_category = "Comms"
locks = "cmd: not pperm(channel_banned)"
def func(self):
"Destroy objects cleanly."
caller = self.caller
if not self.args:
self.msg("Usage: @cdestroy <channelname>")
return
channel = find_channel(caller, self.args)
if not channel:
self.msg("Could not find channel %s." % self.args)
return
if not channel.access(caller, 'control'):
self.msg("You are not allowed to do that.")
return
message = "%s is being destroyed. Make sure to change your aliases." % channel
msgobj = create.create_message(caller, message, channel)
channel.msg(msgobj)
channel.delete()
CHANNELHANDLER.update()
self.msg("%s was destroyed." % channel)
class CmdCBoot(MuxPlayerCommand):
"""
@cboot
Usage:
@cboot[/quiet] <channel> = <player> [:reason]
Switches:
quiet - don't notify the channel
Kicks a player or object from a channel you control.
"""
key = "@cboot"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"implement the function"
if not self.args or not self.rhs:
string = "Usage: @cboot[/quiet] <channel> = <player> [:reason]"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
reason = ""
if ":" in self.rhs:
playername, reason = self.rhs.rsplit(":", 1)
searchstring = playername.lstrip('*')
else:
searchstring = self.rhs.lstrip('*')
player = self.search(searchstring, player=True)
if not player:
return
if reason:
reason = " (reason: %s)" % reason
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
if not PlayerChannelConnection.objects.has_connection(player, channel):
string = "Player %s is not connected to channel %s." % (player.key, channel.key)
self.msg(string)
return
if not "quiet" in self.switches:
string = "%s boots %s from channel.%s" % (self.caller, player.key, reason)
channel.msg(string)
# find all player's nicks linked to this channel and delete them
for nick in [nick for nick in player.character.nicks.get(nick_type="channel")
                     if nick.db_real.lower() == channel.key.lower()]:
nick.delete()
# disconnect player
channel.disconnect_from(player)
CHANNELHANDLER.update()
class CmdCemit(MuxPlayerCommand):
"""
@cemit - send a message to channel
Usage:
@cemit[/switches] <channel> = <message>
Switches:
noheader - don't show the [channel] header before the message
sendername - attach the sender's name before the message
quiet - don't echo the message back to sender
Allows the user to broadcast a message over a channel as long as
they control it. It does not show the user's name unless they
provide the /sendername switch.
"""
key = "@cemit"
aliases = ["@cmsg"]
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Implement function"
if not self.args or not self.rhs:
string = "Usage: @cemit[/switches] <channel> = <message>"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
message = self.rhs
if "sendername" in self.switches:
message = "%s: %s" % (self.key, message)
if not "noheader" in self.switches:
message = "[%s] %s" % (channel.key, message)
channel.msg(message)
if not "quiet" in self.switches:
string = "Sent to channel %s: %s" % (channel.key, message)
self.msg(string)
class CmdCWho(MuxPlayerCommand):
"""
@cwho
Usage:
@cwho <channel>
List who is connected to a given channel you have access to.
"""
key = "@cwho"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"implement function"
if not self.args:
string = "Usage: @cwho <channel>"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not channel.access(self.caller, "listen"):
string = "You can't access this channel."
self.msg(string)
return
string = "\n{CChannel subscriptions{n"
string += "\n{w%s:{n\n" % channel.key
conns = PlayerChannelConnection.objects.get_all_connections(channel)
if conns:
string += " " + ", ".join([conn.player.key for conn in conns])
else:
string += " <None>"
self.msg(string.strip())
class CmdChannelCreate(MuxPlayerCommand):
"""
@ccreate
channelcreate
Usage:
@ccreate <new channel>[;alias;alias...] = description
Creates a new channel owned by you.
"""
key = "@ccreate"
aliases = "channelcreate"
locks = "cmd:not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Implement the command"
caller = self.caller
if not self.args:
self.msg("Usage @ccreate <channelname>[;alias;alias..] = description")
return
description = ""
if self.rhs:
description = self.rhs
lhs = self.lhs
channame = lhs
aliases = None
if ';' in lhs:
channame, aliases = [part.strip().lower()
for part in lhs.split(';', 1) if part.strip()]
aliases = [alias.strip().lower()
for alias in aliases.split(';') if alias.strip()]
channel = Channel.objects.channel_search(channame)
if channel:
self.msg("A channel with that name already exists.")
return
# Create and set the channel up
lockstring = "send:all();listen:all();control:id(%s)" % caller.id
new_chan = create.create_channel(channame, aliases, description, locks=lockstring)
new_chan.connect_to(caller)
self.msg("Created channel %s and connected to it." % new_chan.key)
class CmdCset(MuxPlayerCommand):
"""
@cset - changes channel access restrictions
Usage:
@cset <channel> [= <lockstring>]
Changes the lock access restrictions of a channel. If no
lockstring was given, view the current lock definitions.
"""
key = "@cset"
locks = "cmd:not pperm(channel_banned)"
aliases = ["@cclock"]
help_category = "Comms"
def func(self):
"run the function"
if not self.args:
string = "Usage: @cset channel [= lockstring]"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not self.rhs:
# no =, so just view the current locks
string = "Current locks on %s:" % channel.key
string = "%s\n %s" % (string, channel.locks)
self.msg(string)
return
# we want to add/change a lock.
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
# Try to add the lock
channel.locks.add(self.rhs)
string = "Lock(s) applied. "
string += "Current locks on %s:" % channel.key
string = "%s\n %s" % (string, channel.locks)
self.msg(string)
class CmdCdesc(MuxPlayerCommand):
"""
@cdesc - set channel description
Usage:
@cdesc <channel> = <description>
Changes the description of the channel as shown in
channel lists.
"""
key = "@cdesc"
locks = "cmd:not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Implement command"
caller = self.caller
if not self.rhs:
self.msg("Usage: @cdesc <channel> = <description>")
return
channel = find_channel(caller, self.lhs)
if not channel:
self.msg("Channel '%s' not found." % self.lhs)
return
#check permissions
if not channel.access(caller, 'control'):
self.msg("You cannot admin this channel.")
return
# set the description
channel.desc = self.rhs
channel.save()
self.msg("Description of channel '%s' set to '%s'." % (channel.key, self.rhs))
class CmdPage(MuxPlayerCommand):
"""
page - send private message
Usage:
page[/switches] [<player>,<player>,... = <message>]
tell ''
page <number>
Switch:
last - shows who you last messaged
list - show your last <number> of tells/pages (default)
Send a message to target user (if online). If no
argument is given, you will get a list of your latest messages.
"""
key = "page"
aliases = ['tell']
locks = "cmd:not pperm(page_banned)"
help_category = "Comms"
def func(self):
"Implement function using the Msg methods"
# this is a MuxPlayerCommand, which means caller will be a Player.
caller = self.caller
# get the messages we've sent (not to channels)
pages_we_sent = Msg.objects.get_messages_by_sender(caller, exclude_channel_messages=True)
# get last messages we've got
pages_we_got = Msg.objects.get_messages_by_receiver(caller)
if 'last' in self.switches:
if pages_we_sent:
recv = ",".join(obj.key for obj in pages_we_sent[-1].receivers)
self.msg("You last paged {c%s{n:%s" % (recv, pages_we_sent[-1].message))
return
else:
self.msg("You haven't paged anyone yet.")
return
if not self.args or not self.rhs:
pages = pages_we_sent + pages_we_got
pages.sort(lambda x, y: cmp(x.date_sent, y.date_sent))
number = 5
if self.args:
try:
number = int(self.args)
except ValueError:
self.msg("Usage: tell [<player> = msg]")
return
if len(pages) > number:
lastpages = pages[-number:]
else:
lastpages = pages
lastpages = "\n ".join("{w%s{n {c%s{n to {c%s{n: %s" % (utils.datetime_format(page.date_sent),
",".join(obj.key for obj in page.senders),
"{n,{c ".join([obj.name for obj in page.receivers]),
page.message)
for page in lastpages)
if lastpages:
string = "Your latest pages:\n %s" % lastpages
else:
string = "You haven't paged anyone yet."
self.msg(string)
return
# We are sending. Build a list of targets
if not self.lhs:
# If there are no targets, then set the targets
# to the last person we paged.
if pages_we_sent:
receivers = pages_we_sent[-1].receivers
else:
self.msg("Who do you want to page?")
return
else:
receivers = self.lhslist
recobjs = []
for receiver in set(receivers):
if isinstance(receiver, basestring):
pobj = caller.search(receiver)
elif hasattr(receiver, 'character'):
pobj = receiver.character
else:
self.msg("Who do you want to page?")
return
if pobj:
recobjs.append(pobj)
if not recobjs:
self.msg("Noone found to page.")
return
header = "{wPlayer{n {c%s{n {wpages:{n" % caller.key
message = self.rhs
# if message begins with a :, we assume it is a 'page-pose'
if message.startswith(":"):
message = "%s %s" % (caller.key, message.strip(':').strip())
# create the persistent message object
create.create_message(caller, message,
receivers=recobjs)
# tell the players they got a message.
received = []
rstrings = []
for pobj in recobjs:
if not pobj.access(caller, 'msg'):
rstrings.append("You are not allowed to page %s." % pobj)
continue
pobj.msg("%s %s" % (header, message))
if hasattr(pobj, 'has_player') and not pobj.has_player:
received.append("{C%s{n" % pobj.name)
rstrings.append("%s is offline. They will see your message if they list their pages later." % received[-1])
else:
received.append("{c%s{n" % pobj.name)
if rstrings:
self.msg(rstrings = "\n".join(rstrings))
self.msg("You paged %s with: '%s'." % (", ".join(received), message))
class CmdIRC2Chan(MuxCommand):
"""
@irc2chan - link evennia channel to an IRC channel
Usage:
@irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>
Switches:
/disconnect - this will delete the bot and remove the irc connection to the channel.
/remove - "
/list - show all irc<->evennia mappings
Example:
@irc2chan myircchan = irc.dalnet.net 6667 myevennia-channel evennia-bot
This creates an IRC bot that connects to a given IRC network and channel. It will
relay everything said in the evennia channel to the IRC channel and vice versa. The
    bot will automatically connect at server start, so this command need only be given once.
The /disconnect switch will permanently delete the bot. To only temporarily deactivate it,
use the @services command instead.
"""
key = "@irc2chan"
locks = "cmd:serversetting(IRC_ENABLED) and pperm(Immortals)"
help_category = "Comms"
def func(self):
"Setup the irc-channel mapping"
if not settings.IRC_ENABLED:
string = """IRC is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
if 'list' in self.switches:
# show all connections
connections = ExternalChannelConnection.objects.filter(db_external_key__startswith='irc_')
if connections:
table = prettytable.PrettyTable(["Evennia channel", "IRC channel"])
for conn in connections:
table.add_row([conn.channel.key, " ".join(conn.external_config.split('|'))])
string = "{wIRC connections:{n\n%s" % table
self.msg(string)
else:
self.msg("No connections found.")
return
if not self.args or not self.rhs:
string = "Usage: @irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>"
self.msg(string)
return
channel = self.lhs
self.rhs = self.rhs.replace('#', ' ') # to avoid Python comment issues
try:
irc_network, irc_port, irc_channel, irc_botname = [part.strip() for part in self.rhs.split(None, 3)]
irc_channel = "#%s" % irc_channel
except Exception:
string = "IRC bot definition '%s' is not valid." % self.rhs
self.msg(string)
return
if 'disconnect' in self.switches or 'remove' in self.switches or 'delete' in self.switches:
chanmatch = find_channel(self.caller, channel, silent=True)
if chanmatch:
channel = chanmatch.key
ok = irc.delete_connection(channel, irc_network, irc_port, irc_channel, irc_botname)
if not ok:
self.msg("IRC connection/bot could not be removed, does it exist?")
else:
self.msg("IRC connection destroyed.")
return
channel = find_channel(self.caller, channel)
if not channel:
return
ok = irc.create_connection(channel, irc_network, irc_port, irc_channel, irc_botname)
if not ok:
self.msg("This IRC connection already exists.")
return
self.msg("Connection created. Starting IRC bot.")
class CmdIMC2Chan(MuxCommand):
"""
imc2chan - link an evennia channel to imc2
Usage:
@imc2chan[/switches] <evennia_channel> = <imc2_channel>
Switches:
      /disconnect - this clears the imc2 connection to the channel.
/remove - "
/list - show all imc2<->evennia mappings
Example:
@imc2chan myimcchan = ievennia
Connect an existing evennia channel to a channel on an IMC2
network. The network contact information is defined in settings and
should already be accessed at this point. Use @imcchanlist to see
available IMC channels.
"""
key = "@imc2chan"
locks = "cmd:serversetting(IMC2_ENABLED) and pperm(Immortals)"
help_category = "Comms"
def func(self):
"Setup the imc-channel mapping"
if not settings.IMC2_ENABLED:
string = """IMC is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
if 'list' in self.switches:
# show all connections
connections = ExternalChannelConnection.objects.filter(db_external_key__startswith='imc2_')
if connections:
table = prettytable.PrettyTable(["Evennia channel", "IMC channel"])
for conn in connections:
table.add_row([conn.channel.key, conn.external_config])
string = "{wIMC connections:{n\n%s" % table
self.msg(string)
else:
self.msg("No connections found.")
return
if not self.args or not self.rhs:
string = "Usage: @imc2chan[/switches] <evennia_channel> = <imc2_channel>"
self.msg(string)
return
channel = self.lhs
imc2_channel = self.rhs
if 'disconnect' in self.switches or 'remove' in self.switches or 'delete' in self.switches:
# we don't search for channels before this since we want to clear the link
# also if the channel no longer exists.
ok = imc2.delete_connection(channel, imc2_channel)
if not ok:
self.msg("IMC2 connection could not be removed, does it exist?")
else:
self.msg("IMC2 connection destroyed.")
return
# actually get the channel object
channel = find_channel(self.caller, channel)
if not channel:
return
ok = imc2.create_connection(channel, imc2_channel)
if not ok:
self.msg("The connection %s <-> %s already exists." % (channel.key, imc2_channel))
return
self.msg("Created connection channel %s <-> IMC channel %s." % (channel.key, imc2_channel))
class CmdIMCInfo(MuxCommand):
"""
imcinfo - package of imc info commands
Usage:
@imcinfo[/switches]
@imcchanlist - list imc2 channels
@imclist - list connected muds
@imcwhois <playername> - whois info about a remote player
Switches for @imcinfo:
channels - as @imcchanlist (default)
games or muds - as @imclist
whois - as @imcwhois (requires an additional argument)
update - force an update of all lists
Shows lists of games or channels on the IMC2 network.
"""
key = "@imcinfo"
aliases = ["@imcchanlist", "@imclist", "@imcwhois"]
locks = "cmd: serversetting(IMC2_ENABLED) and pperm(Wizards)"
help_category = "Comms"
def func(self):
"Run the command"
if not settings.IMC2_ENABLED:
string = """IMC is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
if "update" in self.switches:
# update the lists
import time
from src.comms.imc2lib import imc2_packets as pck
from src.comms.imc2 import IMC2_MUDLIST, IMC2_CHANLIST, IMC2_CLIENT
# update connected muds
IMC2_CLIENT.send_packet(pck.IMC2PacketKeepAliveRequest())
# prune inactive muds
for name, mudinfo in IMC2_MUDLIST.mud_list.items():
if time.time() - mudinfo.last_updated > 3599:
del IMC2_MUDLIST.mud_list[name]
# update channel list
IMC2_CLIENT.send_packet(pck.IMC2PacketIceRefresh())
self.msg("IMC2 lists were re-synced.")
elif "games" in self.switches or "muds" in self.switches or self.cmdstring == "@imclist":
# list muds
from src.comms.imc2 import IMC2_MUDLIST
muds = IMC2_MUDLIST.get_mud_list()
networks = set(mud.networkname for mud in muds)
string = ""
nmuds = 0
for network in networks:
table = prettytable.PrettyTable(["Name", "Url", "Host", "Port"])
for mud in (mud for mud in muds if mud.networkname == network):
nmuds += 1
table.add_row([mud.name, mud.url, mud.host, mud.port])
string += "\n{wMuds registered on %s:{n\n%s" % (network, table)
string += "\n %i Muds found." % nmuds
self.msg(string)
elif "whois" in self.switches or self.cmdstring == "@imcwhois":
# find out about a player
if not self.args:
self.msg("Usage: @imcwhois <playername>")
return
from src.comms.imc2 import IMC2_CLIENT
self.msg("Sending IMC whois request. If you receive no response, no matches were found.")
IMC2_CLIENT.msg_imc2(None, from_obj=self.caller, packet_type="imcwhois", data={"target":self.args})
elif not self.switches or "channels" in self.switches or self.cmdstring == "@imcchanlist":
# show channels
from src.comms.imc2 import IMC2_CHANLIST, IMC2_CLIENT
channels = IMC2_CHANLIST.get_channel_list()
string = ""
nchans = 0
table = prettytable.PrettyTable(["Full name", "Name", "Owner", "Perm", "Policy"])
for chan in channels:
nchans += 1
table.add_row([chan.name, chan.localname, chan.owner, chan.level, chan.policy])
string += "\n{wChannels on %s:{n\n%s" % (IMC2_CLIENT.factory.network, table)
string += "\n%i Channels found." % nchans
self.msg(string)
else:
# no valid inputs
string = "Usage: imcinfo|imcchanlist|imclist"
self.msg(string)
# unclear if this is working ...
class CmdIMCTell(MuxCommand):
"""
imctell - send a page to a remote IMC player
Usage:
imctell User@MUD = <msg>
imcpage "
Sends a page to a user on a remote MUD, connected
over IMC2.
"""
key = "imctell"
aliases = ["imcpage", "imc2tell", "imc2page"]
locks = "cmd: serversetting(IMC2_ENABLED)"
help_category = "Comms"
def func(self):
"Send tell across IMC"
if not settings.IMC2_ENABLED:
string = """IMC is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
from src.comms.imc2 import IMC2_CLIENT
if not self.args or not '@' in self.lhs or not self.rhs:
string = "Usage: imctell User@Mud = <msg>"
self.msg(string)
return
target, destination = self.lhs.split("@", 1)
message = self.rhs.strip()
data = {"target":target, "destination":destination}
# send to imc2
IMC2_CLIENT.msg_imc2(message, from_obj=self.caller, packet_type="imctell", data=data)
self.msg("You paged {c%s@%s{n (over IMC): '%s'." % (target, destination, message))
# RSS connection
class CmdRSS2Chan(MuxCommand):
"""
@rss2chan - link evennia channel to an RSS feed
Usage:
@rss2chan[/switches] <evennia_channel> = <rss_url>
Switches:
/disconnect - this will stop the feed and remove the connection to the channel.
/remove - "
/list - show all rss->evennia mappings
Example:
@rss2chan rsschan = http://code.google.com/feeds/p/evennia/updates/basic
This creates an RSS reader that connects to a given RSS feed url. Updates will be
echoed as a title and news link to the given channel. The rate of updating is set
with the RSS_UPDATE_INTERVAL variable in settings (default is every 10 minutes).
When disconnecting you need to supply both the channel and url again so as to identify
the connection uniquely.
"""
key = "@rss2chan"
locks = "cmd:serversetting(RSS_ENABLED) and pperm(Immortals)"
help_category = "Comms"
def func(self):
"Setup the rss-channel mapping"
if not settings.RSS_ENABLED:
string = """RSS is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
if 'list' in self.switches:
# show all connections
connections = ExternalChannelConnection.objects.filter(db_external_key__startswith='rss_')
if connections:
table = prettytable.PrettyTable(["Evennia channel", "RSS url"])
for conn in connections:
table.add_row([conn.channel.key, conn.external_config.split('|')[0]])
string = "{wConnections to RSS:{n\n%s" % table
self.msg(string)
else:
self.msg("No connections found.")
return
if not self.args or not self.rhs:
string = "Usage: @rss2chan[/switches] <evennia_channel> = <rss url>"
self.msg(string)
return
channel = self.lhs
url = self.rhs
if 'disconnect' in self.switches or 'remove' in self.switches or 'delete' in self.switches:
chanmatch = find_channel(self.caller, channel, silent=True)
if chanmatch:
channel = chanmatch.key
ok = rss.delete_connection(channel, url)
if not ok:
self.msg("RSS connection/reader could not be removed, does it exist?")
else:
self.msg("RSS connection destroyed.")
return
channel = find_channel(self.caller, channel)
if not channel:
return
interval = settings.RSS_UPDATE_INTERVAL
if not interval:
interval = 10*60
ok = rss.create_connection(channel, url, interval)
if not ok:
self.msg("This RSS connection already exists.")
return
self.msg("Connection created. Starting RSS reader.")
| TaliesinSkye/evennia | src/commands/default/comms.py | Python | bsd-3-clause | 38,986 | 0.003283 |
#!/usr/bin/env python2
#####
#
# PyGlow
#
#####
#
# Python module to control Pimoronis PiGlow
# [http://shop.pimoroni.com/products/piglow]
#
# * bin_clock.py - binary clock by Jiri Tyr
#
#####
from __future__ import print_function
from datetime import datetime
from PyGlow import PyGlow, ARM_LED_LIST, BOTH
from sys import stdout
from time import sleep
def int2bin(num):
return int('{0:b}'.format(num))
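# Quick, hedged sanity check for int2bin(): the result is an int whose
# decimal digits spell the binary form, which is what the zero-padded
# %0.5d/%0.6d formatting in print_time() below relies on.
def _example_int2bin():
    assert int2bin(23) == 10111   # hours arm needs at most 5 digits
    assert int2bin(59) == 111011  # minute/second arms need at most 6
    assert int2bin(0) == 0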
def print_time(pg):
now = datetime.now()
cur_time = [now.hour, now.minute, now.second]
bin_time = tuple(list(map(int2bin, cur_time)) + cur_time)
stdout.write(' %0.5d | %0.6d | %0.6d (%0.2d:%0.2d:%0.2d)\r' % bin_time)
stdout.flush()
lst = []
for arm_index, arm_bin in enumerate(bin_time[0:3]):
for led_index, c in enumerate("%0.6d" % arm_bin):
if c == '1':
lst.append(ARM_LED_LIST[arm_index][led_index])
pg.set_leds(lst).update_leds()
def main():
print(' %5s | %6s | %6s' % ('Hour', 'Minute', 'Second'))
pg = PyGlow(brightness=150, pulse=True, speed=1000, pulse_dir=BOTH)
try:
while True:
print_time(pg)
except KeyboardInterrupt:
print('')
pg.all(0)
if __name__ == '__main__':
main()
| bjornt/PyGlow | examples/bin_clock.py | Python | mit | 1,233 | 0 |
# encoding: UTF-8
from vnpy.trader import vtConstant
from korbitGateway import korbitGateway
gatewayClass = korbitGateway
gatewayName = 'KORBIT'
gatewayDisplayName = u'KORBIT'
gatewayType = vtConstant.GATEWAYTYPE_BTC
gatewayQryEnabled = True
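# Hedged usage sketch (illustrative only; the exact MainEngine API and import
# path may differ between vnpy versions). Gateway packages like this one are
# registered on the main engine as a module, which then reads the module
# attributes defined above:
#
#     from vnpy.trader.vtEngine import MainEngine
#     from vnpy.trader.gateway import korbitGateway
#     me = MainEngine()
#     me.addGateway(korbitGateway)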
| wisfern/vnpy | beta/gateway/korbitGateway/__init__.py | Python | mit | 245 | 0.004082 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2015 Intel Corporation.
# Copyright 2015 Isaku Yamahata <isaku.yamahata at intel com>
# <isaku.yamahata at gmail com>
# All Rights Reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Isaku Yamahata, Intel Corporation.
# shamelessly many codes are stolen from gbp simplechain_driver.py
import time
import yaml
from heatclient import client as heat_client
from heatclient import exc as heatException
from keystoneclient.v2_0 import client as ks_client
from oslo_config import cfg
from tacker.common import log
from tacker.openstack.common import jsonutils
from tacker.openstack.common import log as logging
from tacker.vm.drivers import abstract_driver
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
OPTS = [
cfg.StrOpt('heat_uri',
default='http://localhost:8004/v1',
help=_("Heat server address to create services "
"specified in the service chain.")),
cfg.IntOpt('stack_retries',
default=10,
help=_("Number of attempts to retry for stack deletion")),
cfg.IntOpt('stack_retry_wait',
default=5,
help=_("Wait time between two successive stack delete "
"retries")),
]
CONF.register_opts(OPTS, group='servicevm_heat')
STACK_RETRIES = cfg.CONF.servicevm_heat.stack_retries
STACK_RETRY_WAIT = cfg.CONF.servicevm_heat.stack_retry_wait
HEAT_TEMPLATE_BASE = """
heat_template_version: 2013-05-23
"""
class DeviceHeat(abstract_driver.DeviceAbstractDriver):
"""Heat driver of hosting device."""
def __init__(self):
super(DeviceHeat, self).__init__()
def get_type(self):
return 'heat'
def get_name(self):
return 'heat'
def get_description(self):
return 'Heat infra driver'
@log.log
def create_device_template_pre(self, plugin, context, device_template):
device_template_dict = device_template['device_template']
vnfd_yaml = device_template_dict['attributes'].get('vnfd')
if vnfd_yaml is None:
return
vnfd_dict = yaml.load(vnfd_yaml)
KEY_LIST = (('name', 'template_name'), ('description', 'description'))
device_template_dict.update(
dict((key, vnfd_dict[vnfd_key]) for (key, vnfd_key) in KEY_LIST
                 if (not device_template_dict.get(key) and
vnfd_key in vnfd_dict)))
service_types = vnfd_dict.get('service_properties', {}).get('type', [])
if service_types:
device_template_dict.setdefault('service_types', []).extend(
[{'service_type': service_type}
for service_type in service_types])
for vdu in vnfd_dict.get('vdus', {}).values():
mgmt_driver = vdu.get('mgmt_driver')
if mgmt_driver:
device_template_dict['mgmt_driver'] = mgmt_driver
LOG.debug(_('device_template %s'), device_template)
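    # Hedged sketch of a minimal vnfd document the hook above understands
    # (field names taken from the lookups in this file; values illustrative):
    #
    #   template_name: sample-vnfd
    #   description: example template
    #   service_properties:
    #     type: [firewall]
    #   vdus:
    #     vdu1:
    #       vm_image: cirros
    #       instance_type: m1.tiny
    #       mgmt_driver: noop
    #       network_interfaces:
    #         mgmt:
    #           network: mgmt-net
    #           management: true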
@log.log
def create(self, plugin, context, device):
LOG.debug(_('device %s'), device)
heatclient_ = HeatClient(context)
attributes = device['device_template']['attributes'].copy()
vnfd_yaml = attributes.pop('vnfd', None)
fields = dict((key, attributes.pop(key)) for key
in ('stack_name', 'template_url', 'template')
if key in attributes)
for key in ('files', 'parameters'):
if key in attributes:
fields[key] = jsonutils.loads(attributes.pop(key))
# overwrite parameters with given dev_attrs for device creation
dev_attrs = device['attributes'].copy()
config_yaml = dev_attrs.pop('config', None)
fields.update(dict((key, dev_attrs.pop(key)) for key
in ('stack_name', 'template_url', 'template')
if key in dev_attrs))
for key in ('files', 'parameters'):
if key in dev_attrs:
fields.setdefault(key, {}).update(
jsonutils.loads(dev_attrs.pop(key)))
LOG.debug('vnfd_yaml %s', vnfd_yaml)
if vnfd_yaml is not None:
assert 'template' not in fields
assert 'template_url' not in fields
template_dict = yaml.load(HEAT_TEMPLATE_BASE)
outputs_dict = {}
template_dict['outputs'] = outputs_dict
vnfd_dict = yaml.load(vnfd_yaml)
LOG.debug('vnfd_dict %s', vnfd_dict)
KEY_LIST = (('description', 'description'),
)
for (key, vnfd_key) in KEY_LIST:
if vnfd_key in vnfd_dict:
template_dict[key] = vnfd_dict[vnfd_key]
for vdu_id, vdu_dict in vnfd_dict.get('vdus', {}).items():
template_dict.setdefault('resources', {})[vdu_id] = {
"type": "OS::Nova::Server"
}
resource_dict = template_dict['resources'][vdu_id]
KEY_LIST = (('image', 'vm_image'),
('flavor', 'instance_type'))
resource_dict['properties'] = {}
properties = resource_dict['properties']
for (key, vdu_key) in KEY_LIST:
properties[key] = vdu_dict[vdu_key]
if 'network_interfaces' in vdu_dict:
# properties['networks'] = (
# vdu_dict['network_interfaces'].values())
networks_list = []
properties['networks'] = networks_list
for network_param in vdu_dict[
'network_interfaces'].values():
if network_param.pop('management', False):
mgmt_port = 'mgmt_port-%s' % vdu_id
mgmt_port_dict = {
'type': 'OS::Neutron::Port',
'properties': {
'port_security_enabled': False,
}
}
mgmt_port_dict['properties'].update(network_param)
template_dict['resources'][
mgmt_port] = mgmt_port_dict
network_param = {
'port': {'get_resource': mgmt_port}
}
mgmt_ip = 'mgmt_ip-%s' % vdu_id
outputs_dict[mgmt_ip] = {
'description': 'management ip address',
'value': {
'get_attr': [mgmt_port, 'fixed_ips',
0, 'ip_address']
}
}
networks_list.append(network_param)
if ('placement_policy' in vdu_dict and
'availability_zone' in vdu_dict['placement_policy']):
properties['availability_zone'] = vdu_dict[
'placement_policy']['availability_zone']
if 'config' in vdu_dict:
properties['config_drive'] = True
metadata = properties.setdefault('metadata', {})
metadata.update(vdu_dict['config'])
for key, value in metadata.items():
metadata[key] = value[:255]
# monitoring_policy = vdu_dict.get('monitoring_policy', None)
# failure_policy = vdu_dict.get('failure_policy', None)
# to pass necessary parameters to plugin upwards.
for key in ('monitoring_policy', 'failure_policy',
'service_type'):
if key in vdu_dict:
device.setdefault(
'attributes', {})[key] = vdu_dict[key]
if config_yaml is not None:
config_dict = yaml.load(config_yaml)
resources = template_dict.setdefault('resources', {})
for vdu_id, vdu_dict in config_dict.get('vdus', {}).items():
if vdu_id not in resources:
continue
config = vdu_dict.get('config', None)
if not config:
continue
properties = resources[vdu_id].setdefault('properties', {})
properties['config_drive'] = True
metadata = properties.setdefault('metadata', {})
metadata.update(config)
for key, value in metadata.items():
metadata[key] = value[:255]
heat_template_yaml = yaml.dump(template_dict)
fields['template'] = heat_template_yaml
if not device['attributes'].get('heat_template'):
device['attributes']['heat_template'] = heat_template_yaml
if 'stack_name' not in fields:
name = (__name__ + '_' + self.__class__.__name__ + '-' +
device['id'])
if device['attributes'].get('failure_count'):
name += ('-%s') % str(device['attributes']['failure_count'])
fields['stack_name'] = name
# service context is ignored
LOG.debug(_('service_context: %s'), device.get('service_context', []))
LOG.debug(_('fields: %s'), fields)
LOG.debug(_('template: %s'), fields['template'])
stack = heatclient_.create(fields)
return stack['stack']['id']
def create_wait(self, plugin, context, device_dict, device_id):
heatclient_ = HeatClient(context)
stack = heatclient_.get(device_id)
status = stack.stack_status
stack_retries = STACK_RETRIES
while status == 'CREATE_IN_PROGRESS' and stack_retries > 0:
time.sleep(STACK_RETRY_WAIT)
try:
stack = heatclient_.get(device_id)
except Exception:
LOG.exception(_("Device Instance cleanup may not have "
"happened because Heat API request failed "
"while waiting for the stack %(stack)s to be "
"deleted"), {'stack': device_id})
break
status = stack.stack_status
LOG.debug(_('status: %s'), status)
stack_retries = stack_retries - 1
LOG.debug(_('stack status: %(stack)s %(status)s'),
{'stack': stack, 'status': status})
if stack_retries == 0:
LOG.warn(_("Resource creation is"
" not completed within %(wait)s seconds as "
"creation of Stack %(stack)s is not completed"),
{'wait': (STACK_RETRIES * STACK_RETRY_WAIT),
'stack': device_id})
if status != 'CREATE_COMPLETE':
        raise RuntimeError(_("creation of server %s failed") % device_id)
outputs = stack.outputs
LOG.debug(_('outputs %s'), outputs)
PREFIX = 'mgmt_ip-'
mgmt_ips = dict((output['output_key'][len(PREFIX):],
output['output_value'])
for output in outputs
if output.get('output_key', '').startswith(PREFIX))
if mgmt_ips:
device_dict['mgmt_url'] = jsonutils.dumps(mgmt_ips)
@log.log
def update(self, plugin, context, device_id, device_dict, device):
# checking if the stack exists at the moment
heatclient_ = HeatClient(context)
heatclient_.get(device_id)
# update config attribute
config_yaml = device_dict.get('attributes', {}).get('config', '')
update_yaml = device['device'].get('attributes', {}).get('config', '')
LOG.debug('yaml orig %(orig)s update %(update)s',
{'orig': config_yaml, 'update': update_yaml})
config_dict = yaml.load(config_yaml) or {}
update_dict = yaml.load(update_yaml)
if not update_dict:
return
@log.log
def deep_update(orig_dict, new_dict):
for key, value in new_dict.items():
if isinstance(value, dict):
if key in orig_dict and isinstance(orig_dict[key], dict):
deep_update(orig_dict[key], value)
continue
orig_dict[key] = value
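        # Hedged illustration (hypothetical values): merging
        #   deep_update({'vdu1': {'config': {'a': 1}}},
        #               {'vdu1': {'config': {'b': 2}}})
        # leaves {'vdu1': {'config': {'a': 1, 'b': 2}}} in the first dict,
        # i.e. nested keys are merged instead of overwritten wholesale.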
LOG.debug('dict orig %(orig)s update %(update)s',
{'orig': config_dict, 'update': update_dict})
deep_update(config_dict, update_dict)
LOG.debug('dict new %(new)s update %(update)s',
{'new': config_dict, 'update': update_dict})
new_yaml = yaml.dump(config_dict)
device_dict.setdefault('attributes', {})['config'] = new_yaml
def update_wait(self, plugin, context, device_id):
# do nothing but checking if the stack exists at the moment
heatclient_ = HeatClient(context)
heatclient_.get(device_id)
def delete(self, plugin, context, device_id):
heatclient_ = HeatClient(context)
heatclient_.delete(device_id)
@log.log
def delete_wait(self, plugin, context, device_id):
heatclient_ = HeatClient(context)
stack = heatclient_.get(device_id)
        status = stack.stack_status
stack_retries = STACK_RETRIES
while (status == 'DELETE_IN_PROGRESS' and stack_retries > 0):
time.sleep(STACK_RETRY_WAIT)
try:
stack = heatclient_.get(device_id)
except heatException.HTTPNotFound:
return
except Exception:
LOG.exception(_("Device Instance cleanup may not have "
"happened because Heat API request failed "
"while waiting for the stack %(stack)s to be "
"deleted"), {'stack': device_id})
break
            status = stack.stack_status
stack_retries = stack_retries - 1
if stack_retries == 0:
LOG.warn(_("Resource cleanup for device is"
" not completed within %(wait)s seconds as "
"deletion of Stack %(stack)s is not completed"),
{'wait': (STACK_RETRIES * STACK_RETRY_WAIT),
'stack': device_id})
if status != 'DELETE_COMPLETE':
        LOG.warn(_("device (%(device_id)s) deletion is not completed. "
"%(stack_status)s"),
{'device_id': device_id, 'stack_status': status})
@log.log
def attach_interface(self, plugin, context, device_id, port_id):
raise NotImplementedError()
@log.log
def dettach_interface(self, plugin, context, device_id, port_id):
raise NotImplementedError()
class HeatClient:
def __init__(self, context, password=None):
# context, password are unused
auth_url = CONF.keystone_authtoken.auth_uri + '/v2.0'
authtoken = CONF.keystone_authtoken
kc = ks_client.Client(
tenant_name=authtoken.project_name,
username=authtoken.username,
password=authtoken.password,
auth_url=auth_url)
token = kc.service_catalog.get_token()
api_version = "1"
endpoint = "%s/%s" % (cfg.CONF.servicevm_heat.heat_uri,
token['tenant_id'])
kwargs = {
'token': token['id'],
'tenant_name': authtoken.project_name,
'username': authtoken.username,
}
self.client = heat_client.Client(api_version, endpoint, **kwargs)
self.stacks = self.client.stacks
def create(self, fields):
fields = fields.copy()
fields.update({
'timeout_mins': 10,
'disable_rollback': True})
if 'password' in fields.get('template', {}):
fields['password'] = fields['template']['password']
return self.stacks.create(**fields)
def delete(self, stack_id):
try:
self.stacks.delete(stack_id)
except heatException.HTTPNotFound:
LOG.warn(_("Stack %(stack)s created by service chain driver is "
"not found at cleanup"), {'stack': stack_id})
def get(self, stack_id):
return self.stacks.get(stack_id)
| SripriyaSeetharam/tacker | tacker/vm/drivers/heat/heat.py | Python | apache-2.0 | 17,136 | 0.000117 |
import os
import unittest
import mock
from pulp.server.db import connection
class PulpWebservicesTests(unittest.TestCase):
"""
    Base class for tests of webservice controllers. This base works around the
    authentication checks for each method.
"""
def setUp(self):
connection.initialize()
self.patch1 = mock.patch('pulp.server.webservices.controllers.decorators.'
'check_preauthenticated')
self.patch2 = mock.patch('pulp.server.webservices.controllers.decorators.'
'is_consumer_authorized')
self.patch3 = mock.patch('pulp.server.webservices.http.resource_path')
self.patch4 = mock.patch('pulp.server.webservices.http.header')
self.patch5 = mock.patch('web.webapi.HTTPError')
self.patch6 = mock.patch('pulp.server.managers.factory.principal_manager')
self.patch7 = mock.patch('pulp.server.managers.factory.user_query_manager')
self.patch8 = mock.patch('pulp.server.webservices.http.uri_path')
self.mock_check_pre_auth = self.patch1.start()
self.mock_check_pre_auth.return_value = 'ws-user'
self.mock_check_auth = self.patch2.start()
self.mock_check_auth.return_value = True
self.mock_http_resource_path = self.patch3.start()
self.patch4.start()
self.patch5.start()
self.patch6.start()
self.mock_user_query_manager = self.patch7.start()
self.mock_user_query_manager.return_value.is_superuser.return_value = False
self.mock_user_query_manager.return_value.is_authorized.return_value = True
self.mock_uri_path = self.patch8.start()
self.mock_uri_path.return_value = "/mock/"
def tearDown(self):
self.patch1.stop()
self.patch2.stop()
self.patch3.stop()
self.patch4.stop()
self.patch5.stop()
self.patch6.stop()
self.patch7.stop()
self.patch8.stop()
def validate_auth(self, operation):
"""
validate that a validation check was performed for a given operation
:param operation: the operation to validate
"""
self.mock_user_query_manager.return_value.is_authorized.assert_called_once_with(mock.ANY, mock.ANY, operation)
def get_mock_uri_path(self, *args):
"""
        :param args: the path segments of the object to get the uri for
        :type args: str
"""
return os.path.join('/mock', *args) + '/' | beav/pulp | devel/pulp/devel/unit/server/base.py | Python | gpl-2.0 | 2,511 | 0.003584 |
import Cookie
import os
from django.conf import settings
from common.tests import ViewTestCase
from common import api
from common import clean
from common import util
class JoinTest(ViewTestCase):
def setUp(self):
super(JoinTest, self).setUp()
self.form_data = {'nick': 'johndoe',
'first_name': 'John',
'last_name': 'Doe',
'email': 'johndoe@google.com',
'password': 'good*password',
'confirm': 'good*password',
'hide': '1',
#'invite': ''
}
def tearDown(self):
self.form_data = None
def assert_join_validation_error(self, response, content):
self.assertContains(response, content)
self.assertTemplateUsed(response, 'join.html')
self.assertTemplateUsed(response, 'form_error.html')
def test_join_page(self):
r = self.client.get('/join')
self.assertContains(r, 'SIGN UP')
self.assertTemplateUsed(r, 'join.html')
def test_join_with_valid_data(self):
r = self.client.post('/join', self.form_data)
r = self.assertRedirectsPrefix(r, '/welcome')
def test_join_with_invalid_email(self):
self.form_data['email'] = 'invalid'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'supply a valid email address')
def test_join_with_used_email(self):
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already associated')
def test_join_with_deleted_email(self):
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already associated')
api.actor_remove(api.ROOT, 'popular@example.com')
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
r = self.assertRedirectsPrefix(r, '/welcome')
def test_join_with_invalid_nick(self):
self.form_data['nick'] = 'a'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'Invalid nick')
def test_join_with_reserved_nick(self):
self.form_data['nick'] = 'popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
def test_join_with_banned_nick(self):
self.form_data['nick'] = 'json'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'not allowed')
def test_join_with_used_nick(self):
self.form_data['nick'] = 'popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
def test_join_with_used_nick_case_insensitive(self):
self.form_data['nick'] = 'Popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
class WelcomeTest(ViewTestCase):
def setUp(self):
super(WelcomeTest, self).setUp()
self.login('girlfriend')
def tearDown(self):
self.logout()
def test_photo_view(self):
r = self.client.get('/welcome/1')
self.assertContains(r, 'Your photo')
self.assertTemplateUsed(r, 'welcome_photo.html')
def test_photo_upload(self):
nick = 'popular'
nick = clean.nick(nick)
old_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
self.login(nick)
f = open('testdata/test_avatar.jpg')
r = self.client.post('/welcome/1',
{
'imgfile': f,
'_nonce' :
util.create_nonce('popular', 'change_photo'),
})
r = self.assertRedirectsPrefix(r, '/welcome/1?')
new_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
self.assertNotEquals(old_avatar, new_avatar)
self.assertContains(r, 'Avatar uploaded')
self.assertTemplateUsed(r, 'welcome_photo.html')
self.assertTemplateUsed(r, 'flash.html')
def test_mobile_activation_view(self):
r = self.client.get('/welcome/2')
self.assertContains(r, 'SIGN IN')
self.assertTemplateUsed(r, 'welcome_mobile.html')
def test_contacts_view(self):
r = self.client.get('/welcome/3')
self.assertContains(r, 'Find some friends')
self.assertTemplateUsed(r, 'welcome_contacts.html')
def test_done_view(self):
r = self.client.get('/welcome/done')
self.assertContains(r, 'Congratulations!')
self.assertTemplateUsed(r, 'welcome_done.html')
| tallstreet/jaikuenginepatch | join/tests.py | Python | apache-2.0 | 4,711 | 0.005731 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RegenerateCredentialParameters(Model):
"""The parameters used to regenerate the login credential.
:param name: Specifies name of the password which should be regenerated --
password or password2. Possible values include: 'password', 'password2'
:type name: str or :class:`PasswordName
<azure.mgmt.containerregistry.v2017_06_01_preview.models.PasswordName>`
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'PasswordName'},
}
def __init__(self, name):
self.name = name
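

# Hedged usage sketch (not part of the generated model): the instance is
# normally passed to the container-registry client's regenerate-credential
# operation; 'password2' is one of the documented PasswordName values.
#
#     params = RegenerateCredentialParameters(name='password2')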
| SUSE/azure-sdk-for-python | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2017_06_01_preview/models/regenerate_credential_parameters.py | Python | mit | 1,115 | 0 |
from os.path import dirname
import numpy as np
from ..os import open_file, exists_isdir, makedirs
from ..log import get_logger
logger = get_logger()
def read_or_write(data_f, fallback=None):
"""Loads the data file if it exists. Otherwise, if fallback is provided,
call fallback and save its return to disk.
Args:
data_f (str): Path to the data file, whose extension will be used for
deciding how to load the data.
fallback (function, optional): Fallback function used if data file
doesn't exist. Its return will be saved to ``data_f`` for future
loadings. It should not take arguments, but if yours requires taking
arguments, just wrap yours with::
fallback=lambda: your_fancy_func(var0, var1)
Returns:
Data loaded if ``data_f`` exists; otherwise, ``fallback``'s return
(``None`` if no fallback).
Writes
- Return by the fallback, if provided.
"""
# Decide data file type
ext = data_f.split('.')[-1].lower()
def load_func(path):
with open_file(path, 'rb') as h:
data = np.load(h)
return data
def save_func(data, path):
if ext == 'npy':
save = np.save
elif ext == 'npz':
save = np.savez
else:
raise NotImplementedError(ext)
with open_file(path, 'wb') as h:
save(h, data)
# Load or call fallback
if exists_isdir(data_f)[0]:
data = load_func(data_f)
msg = "Loaded: "
else:
msg = "File doesn't exist "
if fallback is None:
data = None
msg += "(fallback not provided): "
else:
data = fallback()
out_dir = dirname(data_f)
makedirs(out_dir)
save_func(data, data_f)
msg += "(fallback provided); fallback return now saved to: "
msg += data_f
logger.info(msg)
return data
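

# Hedged usage sketch (path and fallback are made up for illustration):
#
#     arr = read_or_write('/tmp/eigvals.npy',
#                         fallback=lambda: np.linalg.eigvals(np.eye(4)))
#
# The first call computes the array and caches it at the given path; later
# calls simply load the cached file.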
| google/nerfactor | third_party/xiuminglib/xiuminglib/io/np.py | Python | apache-2.0 | 1,973 | 0.000507 |
# a singleton metaclass to avoid re-loading heavyweight classes such as
# sequana.taxonomy.Taxonomy more than once
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
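

# Hedged usage sketch (not part of the original module): any class created
# with ``metaclass=Singleton`` hands back the same instance on every call.
if __name__ == "__main__":
    class Taxonomy(metaclass=Singleton):  # hypothetical stand-in class
        def __init__(self):
            self.loaded = True

    assert Taxonomy() is Taxonomy()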
| sequana/sequana | sequana/utils/singleton.py | Python | bsd-3-clause | 329 | 0.015198 |
#
# tracker_icons.py
#
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import os
from HTMLParser import HTMLParser, HTMLParseError
from urlparse import urljoin, urlparse
from tempfile import mkstemp
from twisted.internet import defer, threads
from twisted.web.error import PageRedirect
try:
from twisted.web.resource import NoResource, ForbiddenResource
except ImportError:
# twisted 8
from twisted.web.error import NoResource, ForbiddenResource
from deluge.component import Component
from deluge.configmanager import get_config_dir
from deluge.httpdownloader import download_file
from deluge.decorators import proxy
from deluge.log import LOG as log
try:
import PIL.Image as Image
import deluge.ui.Win32IconImagePlugin
except ImportError:
PIL_INSTALLED = False
else:
PIL_INSTALLED = True
class TrackerIcon(object):
"""
Represents a tracker's icon
"""
def __init__(self, filename):
"""
Initialises a new TrackerIcon object
:param filename: the filename of the icon
:type filename: string
"""
self.filename = os.path.abspath(filename)
self.mimetype = extension_to_mimetype(self.filename.rpartition('.')[2])
self.data = None
self.icon_cache = None
def __eq__(self, other):
"""
Compares this TrackerIcon with another to determine if they're equal
:param other: the TrackerIcon to compare to
:type other: TrackerIcon
:returns: whether or not they're equal
:rtype: boolean
"""
return os.path.samefile(self.filename, other.filename) or \
self.get_mimetype() == other.get_mimetype() and \
self.get_data() == other.get_data()
def get_mimetype(self):
"""
Returns the mimetype of this TrackerIcon's image
:returns: the mimetype of the image
:rtype: string
"""
return self.mimetype
def get_data(self):
"""
Returns the TrackerIcon's image data as a string
:returns: the image data
:rtype: string
"""
if not self.data:
f = open(self.filename, "rb")
self.data = f.read()
f.close()
return self.data
def get_filename(self, full=True):
"""
Returns the TrackerIcon image's filename
:param full: an (optional) arg to indicate whether or not to
return the full path
:type full: boolean
:returns: the path of the TrackerIcon's image
:rtype: string
"""
return self.filename if full else os.path.basename(self.filename)
def set_cached_icon(self, data):
"""
Set the cached icon data.
"""
self.icon_cache = data
def get_cached_icon(self):
"""
Returns the cached icon data.
"""
return self.icon_cache
class TrackerIcons(Component):
"""
A TrackerIcon factory class
"""
def __init__(self, icon_dir=None, no_icon=None):
"""
Initialises a new TrackerIcons object
:param icon_dir: the (optional) directory of where to store the icons
:type icon_dir: string
:param no_icon: the (optional) path name of the icon to show when no icon
can be fetched
:type no_icon: string
"""
Component.__init__(self, "TrackerIcons")
if not icon_dir:
icon_dir = get_config_dir("icons")
self.dir = icon_dir
if not os.path.isdir(self.dir):
os.makedirs(self.dir)
self.icons = {}
for icon in os.listdir(self.dir):
if icon != no_icon:
host = icon_name_to_host(icon)
try:
self.icons[host] = TrackerIcon(os.path.join(self.dir, icon))
except KeyError:
log.warning("invalid icon %s", icon)
if no_icon:
self.icons[None] = TrackerIcon(no_icon)
else:
self.icons[None] = None
self.icons[''] = self.icons[None]
self.pending = {}
self.redirects = {}
def get(self, host):
"""
Returns a TrackerIcon for the given tracker's host
:param host: the host to obtain the TrackerIcon for
:type host: string
:returns: a Deferred which fires with the TrackerIcon for the given host
:rtype: Deferred
"""
host = host.lower()
if host in self.icons:
# We already have it, so let's return it
d = defer.succeed(self.icons[host])
elif host in self.pending:
# We're in the middle of getting it
# Add ourselves to the waiting list
d = defer.Deferred()
self.pending[host].append(d)
else:
# We need to fetch it
self.pending[host] = []
# Start callback chain
d = self.download_page(host)
d.addCallbacks(self.on_download_page_complete, self.on_download_page_fail,
errbackArgs=(host,))
d.addCallback(self.parse_html_page)
d.addCallbacks(self.on_parse_complete, self.on_parse_fail,
callbackArgs=(host,))
d.addCallback(self.download_icon, host)
d.addCallbacks(self.on_download_icon_complete, self.on_download_icon_fail,
callbackArgs=(host,), errbackArgs=(host,))
if PIL_INSTALLED:
d.addCallback(self.resize_icon)
d.addCallback(self.store_icon, host)
return d
def download_page(self, host, url=None):
"""
Downloads a tracker host's page
If no url is provided, it bases the url on the host
:param host: the tracker host
:type host: string
:param url: the (optional) url of the host
:type url: string
:returns: the filename of the tracker host's page
:rtype: Deferred
"""
if not url:
url = self.host_to_url(host)
log.debug("Downloading %s %s", host, url)
return download_file(url, mkstemp()[1], force_filename=True)
def on_download_page_complete(self, page):
"""
Runs any download clean up functions
:param page: the page that finished downloading
:type page: string
:returns: the page that finished downloading
:rtype: string
"""
log.debug("Finished downloading %s", page)
return page
def on_download_page_fail(self, f, host):
"""
Recovers from download error
        :param f: the failure that occurred
:type f: Failure
:param host: the name of the host whose page failed to download
:type host: string
:returns: a Deferred if recovery was possible
else the original failure
:rtype: Deferred or Failure
"""
error_msg = f.getErrorMessage()
log.debug("Error downloading page: %s", error_msg)
d = f
if f.check(PageRedirect):
# Handle redirect errors
location = urljoin(self.host_to_url(host), error_msg.split(" to ")[1])
self.redirects[host] = url_to_host(location)
d = self.download_page(host, url=location)
d.addCallbacks(self.on_download_page_complete, self.on_download_page_fail,
errbackArgs=(host,))
return d
@proxy(threads.deferToThread)
def parse_html_page(self, page):
"""
Parses the html page for favicons
:param page: the page to parse
:type page: string
:returns: a Deferred which callbacks a list of available favicons (url, type)
:rtype: Deferred
"""
f = open(page, "r")
parser = FaviconParser()
for line in f:
parser.feed(line)
if parser.left_head:
break
parser.close()
f.close()
try:
os.remove(page)
except Exception, e:
log.warning("Couldn't remove temp file: %s", e)
return parser.get_icons()
def on_parse_complete(self, icons, host):
"""
Runs any parse clean up functions
:param icons: the icons that were extracted from the page
:type icons: list
:param host: the host the icons are for
:type host: string
:returns: the icons that were extracted from the page
:rtype: list
"""
log.debug("Parse Complete, got icons for %s: %s", host, icons)
url = self.host_to_url(host)
icons = [(urljoin(url, icon), mimetype) for icon, mimetype in icons]
log.debug("Icon urls from %s: %s", host, icons)
return icons
def on_parse_fail(self, f):
"""
Recovers from a parse error
        :param f: the failure that occurred
:type f: Failure
:returns: a Deferred if recovery was possible
else the original failure
:rtype: Deferred or Failure
"""
log.debug("Error parsing page: %s", f.getErrorMessage())
return f
def download_icon(self, icons, host):
"""
Downloads the first available icon from icons
:param icons: a list of icons
:type icons: list
:param host: the tracker's host name
:type host: string
:returns: a Deferred which fires with the downloaded icon's filename
:rtype: Deferred
"""
if len(icons) == 0:
raise NoIconsError, "empty icons list"
(url, mimetype) = icons.pop(0)
d = download_file(url, os.path.join(self.dir, host_to_icon_name(host, mimetype)),
force_filename=True)
d.addCallback(self.check_icon_is_valid)
if icons:
d.addErrback(self.on_download_icon_fail, host, icons)
return d
@proxy(threads.deferToThread)
def check_icon_is_valid(self, icon_name):
"""
Performs a sanity check on icon_name
:param icon_name: the name of the icon to check
:type icon_name: string
:returns: the name of the validated icon
:rtype: string
:raises: InvalidIconError
"""
if PIL_INSTALLED:
try:
Image.open(icon_name)
except IOError, e:
raise InvalidIconError(e)
else:
if os.stat(icon_name).st_size == 0L:
raise InvalidIconError, "empty icon"
return icon_name
def on_download_icon_complete(self, icon_name, host):
"""
Runs any download cleanup functions
:param icon_name: the filename of the icon that finished downloading
:type icon_name: string
:param host: the host the icon completed to download for
:type host: string
:returns: the icon that finished downloading
:rtype: TrackerIcon
"""
log.debug("Successfully downloaded from %s: %s", host, icon_name)
icon = TrackerIcon(icon_name)
return icon
def on_download_icon_fail(self, f, host, icons=[]):
"""
Recovers from a download error
        :param f: the failure that occurred
:type f: Failure
:param host: the host the icon failed to download for
:type host: string
:param icons: the (optional) list of remaining icons
:type icons: list
:returns: a Deferred if recovery was possible
else the original failure
:rtype: Deferred or Failure
"""
error_msg = f.getErrorMessage()
log.debug("Error downloading icon from %s: %s", host, error_msg)
d = f
if f.check(PageRedirect):
# Handle redirect errors
location = urljoin(self.host_to_url(host), error_msg.split(" to ")[1])
d = self.download_icon([(location, extension_to_mimetype(location.rpartition('.')[2]))] + icons, host)
if not icons:
d.addCallbacks(self.on_download_icon_complete, self.on_download_icon_fail,
callbackArgs=(host,), errbackArgs=(host,))
elif f.check(NoResource, ForbiddenResource) and icons:
d = self.download_icon(icons, host)
elif f.check(NoIconsError, HTMLParseError):
# No icons, try favicon.ico as an act of desperation
d = self.download_icon([(urljoin(self.host_to_url(host), "favicon.ico"), extension_to_mimetype("ico"))], host)
d.addCallbacks(self.on_download_icon_complete, self.on_download_icon_fail,
callbackArgs=(host,), errbackArgs=(host,))
else:
# No icons :(
# Return the None Icon
d = self.icons[None]
return d
@proxy(threads.deferToThread)
def resize_icon(self, icon):
"""
Resizes the given icon to be 16x16 pixels
:param icon: the icon to resize
:type icon: TrackerIcon
:returns: the resized icon
:rtype: TrackerIcon
"""
if icon:
filename = icon.get_filename()
img = Image.open(filename)
if img.size > (16, 16):
new_filename = filename.rpartition('.')[0]+".png"
img = img.resize((16, 16), Image.ANTIALIAS)
img.save(new_filename)
if new_filename != filename:
os.remove(filename)
icon = TrackerIcon(new_filename)
return icon
def store_icon(self, icon, host):
"""
Stores the icon for the given host
Callbacks any pending deferreds waiting on this icon
:param icon: the icon to store
:type icon: TrackerIcon or None
:param host: the host to store it for
:type host: string
:returns: the stored icon
:rtype: TrackerIcon or None
"""
self.icons[host] = icon
for d in self.pending[host]:
d.callback(icon)
del self.pending[host]
return icon
def host_to_url(self, host):
"""
Given a host, returns the URL to fetch
:param host: the tracker host
:type host: string
:returns: the url of the tracker
:rtype: string
"""
if host in self.redirects:
host = self.redirects[host]
return "http://%s/" % host
################################ HELPER CLASSES ###############################
class FaviconParser(HTMLParser):
"""
A HTMLParser which extracts favicons from a HTML page
"""
def __init__(self):
self.icons = []
self.left_head = False
HTMLParser.__init__(self)
def handle_starttag(self, tag, attrs):
if tag == "link" and ("rel", "icon") in attrs or ("rel", "shortcut icon") in attrs:
href = None
type = None
for attr, value in attrs:
if attr == "href":
href = value
elif attr == "type":
type = value
if href:
try:
mimetype = extension_to_mimetype(href.rpartition('.')[2])
except KeyError:
pass
else:
type = mimetype
if type:
self.icons.append((href, type))
def handle_endtag(self, tag):
if tag == "head":
self.left_head = True
def get_icons(self):
"""
Returns a list of favicons extracted from the HTML page
:returns: a list of favicons
:rtype: list
"""
return self.icons
############################### HELPER FUNCTIONS ##############################
def url_to_host(url):
"""
Given a URL, returns the host it belongs to
:param url: the URL in question
:type url: string
:returns: the host of the given URL
:rtype:string
"""
return urlparse(url).hostname
def host_to_icon_name(host, mimetype):
"""
Given a host, returns the appropriate icon name
:param host: the host in question
:type host: string
:param mimetype: the mimetype of the icon
:type mimetype: string
:returns: the icon's filename
:rtype: string
"""
return host+'.'+mimetype_to_extension(mimetype)
def icon_name_to_host(icon):
"""
Given a host's icon name, returns the host name
:param icon: the icon name
:type icon: string
:returns: the host name
:rtype: string
"""
return icon.rpartition('.')[0]
MIME_MAP = {
"image/gif" : "gif",
"image/jpeg" : "jpg",
"image/png" : "png",
"image/vnd.microsoft.icon" : "ico",
"image/x-icon" : "ico",
"gif" : "image/gif",
"jpg" : "image/jpeg",
"jpeg" : "image/jpeg",
"png" : "image/png",
"ico" : "image/vnd.microsoft.icon",
}
def mimetype_to_extension(mimetype):
"""
Given a mimetype, returns the appropriate filename extension
:param mimetype: the mimetype
:type mimetype: string
:returns: the filename extension for the given mimetype
:rtype: string
:raises KeyError: if given an invalid mimetype
"""
return MIME_MAP[mimetype.lower()]
def extension_to_mimetype(extension):
"""
Given a filename extension, returns the appropriate mimetype
:param extension: the filename extension
:type extension: string
:returns: the mimetype for the given filename extension
:rtype: string
:raises KeyError: if given an invalid filename extension
"""
return MIME_MAP[extension.lower()]
################################## EXCEPTIONS #################################
class NoIconsError(Exception):
pass
class InvalidIconError(Exception):
pass
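

if __name__ == "__main__":
    # Hedged usage sketch: feed a tiny hand-written page to FaviconParser and
    # print the (href, mimetype) tuples it collects.
    parser = FaviconParser()
    parser.feed('<head><link rel="icon" type="image/png" '
                'href="/favicon.png"></head>')
    parser.close()
    print parser.get_icons()  # -> [('/favicon.png', 'image/png')]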
| vguerci/Deluge.app | deluge/ui/tracker_icons.py | Python | gpl-3.0 | 19,201 | 0.002187 |
import hashlib
from kuma.core.utils import get_ip
def get_unique(content_type, object_pk, request=None, ip=None, user_agent=None, user=None):
"""Extract a set of unique identifiers from the request.
This set will be made up of one of the following combinations, depending
on what's available:
* user, None, None, unique_MD5_hash
* None, ip, user_agent, unique_MD5_hash
"""
if request:
if request.user.is_authenticated():
user = request.user
ip = user_agent = None
else:
user = None
ip = get_ip(request)
user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]
# HACK: Build a hash of the fields that should be unique, let MySQL
# chew on that for a unique index. Note that any changes to this algo
# will create all new unique hashes that don't match any existing ones.
hash_text = "\n".join(unicode(x).encode('utf8') for x in (
content_type.pk, object_pk, ip, user_agent,
(user and user.pk or 'None')
))
unique_hash = hashlib.md5(hash_text).hexdigest()
return (user, ip, user_agent, unique_hash)
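

if __name__ == '__main__':
    # Hedged usage sketch: ``FakeContentType`` is a stand-in defined here only
    # for illustration; real callers pass a Django ContentType instance.
    class FakeContentType(object):
        pk = 12

    print get_unique(FakeContentType(), object_pk=34,
                     ip='127.0.0.1', user_agent='test-agent')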
| davidyezsetz/kuma | kuma/contentflagging/utils.py | Python | mpl-2.0 | 1,154 | 0.000867 |
#!/usr/bin/env python3
import asyncio
import os
from datetime import datetime
import aiohttp
from aiohttp import web
from raven import Client
from restaurants import (FormattedMenus, SafeRestaurant, OtherRestaurant,
AvalonRestaurant, TOTORestaurant, TOTOCantinaRestaurant,
CasaInkaRestaurant, OlivaRestaurant, CityCantinaRosumRestaurant)
from slack import Channel
# SLACK_HOOK = 'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX'
SLACK_HOOK = os.environ.get('SLACK_HOOK', None)
SLACK_CHANNEL = os.environ.get('SLACK_CHANNEL', None)
SECRET_KEY = os.environ.get('SECRET_KEY', None)
DEBUG = os.environ.get('DEBUG', '').lower() in ('1', 'true', 'yes')
def is_work_day():
return datetime.today().weekday() in range(0, 5)
def should_send_to_slack(secret_key):
return SLACK_HOOK and secret_key == SECRET_KEY
async def retrieve_menus(session):
futures = [
SafeRestaurant(TOTORestaurant(session)).retrieve_menu(),
SafeRestaurant(TOTOCantinaRestaurant(session)).retrieve_menu(),
SafeRestaurant(AvalonRestaurant(session)).retrieve_menu(),
SafeRestaurant(OlivaRestaurant(session)).retrieve_menu(),
SafeRestaurant(CasaInkaRestaurant(session)).retrieve_menu(),
SafeRestaurant(CityCantinaRosumRestaurant(session)).retrieve_menu(),
]
# Add list of other restaurants first, will be in header.
menus = [await SafeRestaurant(OtherRestaurant()).retrieve_menu()]
for future in asyncio.as_completed(futures):
menus.append(await future)
return menus
async def index(request):
if is_work_day():
async with aiohttp.ClientSession() as session:
menus = FormattedMenus(await retrieve_menus(session))
secret_key = request.match_info.get('secret_key')
if should_send_to_slack(secret_key):
await Channel(SLACK_HOOK, session).send(menus)
return web.Response(text=str(menus))
return web.Response(text='Come on Monday-Friday')
sentry_client = Client() # credentials is taken from environment variable SENTRY_DSN
app = web.Application(debug=DEBUG)
app.router.add_get('/', index)
app.router.add_get('/{secret_key}', index)
if __name__ == '__main__':
web.run_app(app, host='localhost', port=5000)
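    # Hedged usage sketch: with the server running you could fetch today's
    # menus with e.g.
    #   curl http://localhost:5000/              (plain text only)
    #   curl http://localhost:5000/$SECRET_KEY   (also posts to Slack)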
| fadawar/infinit-lunch | main.py | Python | gpl-3.0 | 2,316 | 0.001727 |
# coding: utf-8
# %save in ipython is wonderful. Load with ipython -i to continue.
from framework import pclases
a = pclases.Obra.selectBy(nombre = "SU ALMACEN")[0]
len(a.clientes)
for c in a.clientes:
print c.nombre
vacidas = [o for o in pclases.Obra.select() if not o.clientes]
len(vacidas)
for o in vacidas:
if o.presupuestos:
for f in o.presupuestos:
o.addCliente(f.cliente)
else:
o.destroySelf()
vacidas = [o for o in pclases.Obra.select() if not o.clientes]
len(vacidas)
for c in a.clientes:
nueva_obra = a.clone(nombre = c.nombre)
nueva_obra.direccion = c.direccion
nueva_obra.cp = c.cp
nueva_obra.ciudad = c.ciudad
nueva_obra.provincia = c.provincia
nueva_obra.pais = c.pais
nueva_obra.observaciones = "[admin] splitted from SU ALMACEN. (8/9/2014)."
nueva_obra.addCliente(c)
if len(c.obras) == 1:
nueva_obra.generica = True
sus = pclases.Cliente.get(1589)
osus = sus.get_obra_generica()
osus.nombre
contactos_sus = [c for c in a.contactos if "sustraia" in c.correoe]
len(contactos_sus)
for c in contactos_sus:
c.removeObra(a)
c.addObra(osus)
c.sync()
ref = pclases.Cliente.select(pclases.Cliente.q.nombre.contains("REFRESCO I"))[0]
oref = ref.get_obra_generica()
for c in a.contactos:
c.removeObra(a)
c.addObra(oref)
c.sync()
len(a.presupuestos)
len(a.facturasVenta)
len(a.pedidosVenta)
| pacoqueen/ginn | extra/scripts/ipython_session_obra_su_almacen.py | Python | gpl-2.0 | 1,431 | 0.006289 |
import numpy as np
from procgraph import Block
from contracts import contract
class DepthBuffer(Block):
Block.alias('depth_buffer')
Block.input('rgba')
Block.output('rgba')
Block.output('line')
Block.output('depth')
def init(self):
self.depth = None
def update(self):
rgba = self.input.rgba
if self.depth is None:
H, W = rgba.shape[0:2]
self.depth = np.zeros((H, W))
self.depth.fill(0)
d = get_depth(rgba)
mask = rgba[:, :, 3] > 0
closer = np.logical_and(self.depth < d, mask)
farther = np.logical_not(closer)
self.depth[closer] = d
rgba = rgba.copy()
rgba[farther, 3] = 0
        with_line = rgba[:, :, 0:3].copy()
        # get_depth() returns a float; round to an integer row index before
        # drawing the marker line.
        row = int(round(d))
        with_line[row, :, 0] = 255
        with_line[row, :, 1] = 55
depth = self.depth.copy()
depth[depth == 0] = np.nan
self.output.rgba = rgba
self.output.line = with_line
self.output.depth = depth
@contract(rgba='array[HxWx4](uint8)', returns='float,>=0,<=H-1')
def get_depth(rgba):
alpha = rgba[:, :, 3]
H, _ = alpha.shape
a = 0
w = 0
for i in range(H):
line = alpha[i, :].astype('float32')
a += i * np.sum(line)
w += np.sum(line)
a = a / w
return a
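

if __name__ == '__main__':
    # Hedged sanity check: if the only opaque pixels sit on row 2, the
    # alpha-weighted depth should come out as exactly 2.0.
    frame = np.zeros((4, 3, 4), dtype='uint8')
    frame[2, :, 3] = 255
    print(get_depth(frame))  # -> 2.0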
| spillai/procgraph | src/procgraph_mplayer/depth_buffer.py | Python | lgpl-3.0 | 1,391 | 0.008627 |
import os.path
from zope import component
from zope import interface
from zope.component.factory import Factory
from sparc.configuration import container
import mellon
from mellon.factories.filesystem.file import MellonByteFileFromFilePathAndConfig
from mellon.factories.filesystem.file import \
MellonUnicodeFileFromFilePathAndConfig
@interface.implementer(mellon.IByteMellonFile)
class MellonByteFileFromGitRepoCommitPathAndConfig(
MellonByteFileFromFilePathAndConfig):
def __init__(self, commit, file_path, config):
self.commit = commit
super(MellonByteFileFromGitRepoCommitPathAndConfig, self).\
__init__(file_path, config)
def __str__(self):
return "Git byte file in repo at {} for commit {} at location {}".\
format(self.commit.repo.working_dir,
str(self.commit), self.file_path)
mellonByteFileFromGitRepoCommitPathAndConfigFactory = \
Factory(MellonByteFileFromGitRepoCommitPathAndConfig)
@interface.implementer(mellon.IUnicodeMellonFile)
class MellonUnicodeFileFromGitRepoCommitPathAndConfig(
MellonUnicodeFileFromFilePathAndConfig):
def __init__(self, commit, file_path, config):
self.commit = commit
super(MellonUnicodeFileFromGitRepoCommitPathAndConfig, \
self).__init__(file_path, config)
def __str__(self):
return "Git unicode file in repo at {} for commit {} at location {}".\
format(self.commit.repo.working_dir,
str(self.commit), self.file_path)
mellonUnicodeFileFromGitRepoCommitPathAndConfigFactory = \
Factory(MellonUnicodeFileFromGitRepoCommitPathAndConfig)
@interface.implementer(mellon.IMellonFileProvider)
class MellonFileProviderForGitReposBaseDirectory(object):
def __init__(self, config):
"""Init
Args:
config: sparc.configuration.container.ISparcAppPyContainerConfiguration
provider with sparc.git[configure.yaml:GitReposBaseDir]
and mellon[configure.yaml:MellonSnippet] entries.
"""
self.config = config
def __iter__(self):
repos_base_dir = container.IPyContainerConfigValue(self.config).\
get('GitReposBaseDir')['directory']
repo_iter = component.createObject(\
u'sparc.git.repos.repos_from_recursive_dir', repos_base_dir)
for repo in repo_iter:
# iterate through the commits
examined = set()
for commit in repo.iter_commits('--all'):
# we need to reset the working tree to gain filesystem access
# to the blob data. This will allow us to pass in a path
# (needed to determine if file is binary or not)
repo.head.reference = commit
repo.head.reset(index=True, working_tree=True)
# iter through commit blobs (e.g. files)
for blob in commit.tree.traverse():
if blob.type != 'blob' or blob.hexsha in examined:
continue
else:
examined.add(blob.hexsha)
if not os.path.isfile(blob.abspath):
continue
path = component.createObject(u"mellon.filesystem_path", blob.abspath)
#path = provider(blob.abspath)
#interface.alsoProvides(path, mellon.IPath)
if mellon.IBinaryChecker(path).check():
yield component.createObject(\
u'mellon.factories.git.byte_file_from_commit_path_and_config',
commit,
path,
self.config)
else:
yield component.createObject(\
u'mellon.factories.git.unicode_file_from_commit_path_and_config',
commit,
path,
self.config)
MellonFileProviderForGitReposBaseDirectoryFactory = Factory(MellonFileProviderForGitReposBaseDirectory)
interface.alsoProvides(MellonFileProviderForGitReposBaseDirectoryFactory, mellon.IMellonFileProviderFactory) | CrowdStrike/mellon | mellon/factories/git/file.py | Python | mit | 4,601 | 0.00739 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_switch_leaf_selector
short_description: Add a leaf Selector with Node Block Range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics
description:
- Add a leaf Selector with Node Block range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics.
- More information from the internal APIC class I(infra:LeafS), I(infra:NodeBlk), I(infra:RsAccNodePGrp) at
U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
notes:
- This module is to be used with M(aci_switch_policy_leaf_profile)
    One first creates a leaf profile (infra:NodeP) and then creates an associated selector (infra:LeafS).
options:
description:
description:
- The description to assign to the C(leaf)
leaf_profile:
description:
- Name of the Leaf Profile to which we add a Selector.
aliases: [ leaf_profile_name ]
leaf:
description:
- Name of Leaf Selector.
aliases: [ name, leaf_name, leaf_profile_leaf_name, leaf_selector_name ]
leaf_node_blk:
description:
- Name of Node Block range to be added to Leaf Selector of given Leaf Profile
aliases: [ leaf_node_blk_name, node_blk_name ]
leaf_node_blk_description:
description:
- The description to assign to the C(leaf_node_blk)
from:
description:
- Start of Node Block Range
aliases: [ node_blk_range_from, from_range, range_from ]
to:
description:
    - End of Node Block Range
aliases: [ node_blk_range_to, to_range, range_to ]
policy_group:
description:
- Name of the Policy Group to be added to Leaf Selector of given Leaf Profile
    aliases: [ policy_group_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: adding a switch policy leaf profile selector associated Node Block range (w/ policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
policy_group: somepolicygroupname
state: present
- name: adding a switch policy leaf profile selector associated Node Block range (w/o policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
state: present
- name: Removing a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: absent
- name: Querying a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update({
'description': dict(type='str'),
'leaf_profile': dict(type='str', aliases=['leaf_profile_name']),
'leaf': dict(type='str', aliases=['name', 'leaf_name', 'leaf_profile_leaf_name', 'leaf_selector_name']),
'leaf_node_blk': dict(type='str', aliases=['leaf_node_blk_name', 'node_blk_name']),
'leaf_node_blk_description': dict(type='str'),
'from': dict(type='int', aliases=['node_blk_range_from', 'from_range', 'range_from']),
'to': dict(type='int', aliases=['node_blk_range_to', 'to_range', 'range_to']),
'policy_group': dict(type='str', aliases=['policy_group_name']),
'state': dict(type='str', default='present', choices=['absent', 'present', 'query']),
})
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['leaf_profile', 'leaf']],
['state', 'present', ['leaf_profile', 'leaf', 'leaf_node_blk', 'from', 'to']]
]
)
description = module.params['description']
leaf_profile = module.params['leaf_profile']
leaf = module.params['leaf']
leaf_node_blk = module.params['leaf_node_blk']
leaf_node_blk_description = module.params['leaf_node_blk_description']
from_ = module.params['from']
to_ = module.params['to']
policy_group = module.params['policy_group']
state = module.params['state']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='infraNodeP',
aci_rn='infra/nprof-{0}'.format(leaf_profile),
filter_target='eq(infraNodeP.name, "{0}")'.format(leaf_profile),
module_object=leaf_profile
),
subclass_1=dict(
aci_class='infraLeafS',
# NOTE: normal rn: leaves-{name}-typ-{type}, hence here hardcoded to range for purposes of module
aci_rn='leaves-{0}-typ-range'.format(leaf),
filter_target='eq(infraLeafS.name, "{0}")'.format(leaf),
module_object=leaf,
),
# NOTE: infraNodeBlk is not made into a subclass because there is a 1-1 mapping between node block and leaf selector name
child_classes=['infraNodeBlk', 'infraRsAccNodePGrp']
)
aci.get_existing()
if state == 'present':
# Filter out module params with null values
aci.payload(
aci_class='infraLeafS',
class_config=dict(
descr=description,
name=leaf,
),
child_configs=[
dict(
infraNodeBlk=dict(
attributes=dict(
descr=leaf_node_blk_description,
name=leaf_node_blk,
from_=from_,
to_=to_,
)
)
),
dict(
infraRsAccNodePGrp=dict(
attributes=dict(
tDn='uni/infra/funcprof/accnodepgrp-{0}'.format(policy_group),
)
)
),
],
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class='infraLeafS')
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| tdtrask/ansible | lib/ansible/modules/network/aci/aci_switch_leaf_selector.py | Python | gpl-3.0 | 10,067 | 0.002384 |
"""Tests for the directory module"""
import os
from translate.storage import directory
class TestDirectory:
"""a test class to run tests on a test Pootle Server"""
def setup_method(self, method):
"""sets up a test directory"""
print("setup_method called on", self.__class__.__name__)
self.testdir = "%s_testdir" % (self.__class__.__name__)
self.cleardir(self.testdir)
os.mkdir(self.testdir)
def teardown_method(self, method):
"""removes the attributes set up by setup_method"""
self.cleardir(self.testdir)
def cleardir(self, dirname):
"""removes the given directory"""
if os.path.exists(dirname):
for dirpath, subdirs, filenames in os.walk(dirname, topdown=False):
for name in filenames:
os.remove(os.path.join(dirpath, name))
for name in subdirs:
os.rmdir(os.path.join(dirpath, name))
if os.path.exists(dirname):
os.rmdir(dirname)
assert not os.path.exists(dirname)
def touchfiles(self, dir, filenames, content=None):
for filename in filenames:
with open(os.path.join(dir, filename), "w") as fh:
if content:
fh.write(content)
def mkdir(self, dir):
"""Makes a directory inside self.testdir."""
os.mkdir(os.path.join(self.testdir, dir))
def test_created(self):
"""test that the directory actually exists"""
print(self.testdir)
assert os.path.isdir(self.testdir)
def test_basic(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
files.sort()
self.touchfiles(self.testdir, files)
d = directory.Directory(self.testdir)
filenames = [name for dir, name in d.getfiles()]
filenames.sort()
assert filenames == files
def test_structure(self):
"""Tests a small directory structure."""
files = ["a.po", "b.po", "c.po"]
self.touchfiles(self.testdir, files)
self.mkdir("bla")
self.touchfiles(os.path.join(self.testdir, "bla"), files)
d = directory.Directory(self.testdir)
filenames = [name for dirname, name in d.getfiles()]
filenames.sort()
files = files * 2
files.sort()
assert filenames == files
def test_getunits(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
posource = '''msgid "bla"\nmsgstr "blabla"\n'''
self.touchfiles(self.testdir, files, posource)
d = directory.Directory(self.testdir)
for unit in d.getunits():
assert unit.target == "blabla"
assert len(d.getunits()) == 3
| unho/translate | translate/storage/test_directory.py | Python | gpl-2.0 | 2,774 | 0 |
from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param sync_tx_create_ext_bit_counter: {"description": "Conn Sync Create with Ext Sent counter", "format": "counter", "type": "number", "oid": "29", "optional": true, "size": "8"}
:param sync_tx_max_packed: {"description": "Min Sync Msg Per Packet", "format": "counter", "type": "number", "oid": "32", "optional": true, "size": "2"}
:param sync_persist_rx_no_such_sg_group: {"description": "Persist Conn Sync No Service Group Found", "format": "counter", "type": "number", "oid": "42", "optional": true, "size": "8"}
:param sync_rx_persist_update_age_counter: {"description": "Conn Sync Update Persist Age Pkts Received counter", "format": "counter", "type": "number", "oid": "11", "optional": true, "size": "2"}
:param sync_rx_type_invalid: {"description": "Conn Sync Type Invalid", "format": "counter", "type": "number", "oid": "57", "optional": true, "size": "8"}
:param sync_rx_ext_rtsp: {"description": "Conn Sync Ext RTSP", "format": "counter", "type": "number", "oid": "74", "optional": true, "size": "8"}
:param sync_tx_lsn_fullcone: {"description": "Conn Sync Update LSN Fullcone Sent counter", "format": "counter", "type": "number", "oid": "81", "optional": true, "size": "8"}
:param sync_rx_update_seqnos_counter: {"description": "Conn Sync Update Seq Num Received counter", "format": "counter", "type": "number", "oid": "61", "optional": true, "size": "8"}
:param sync_rx_create_ext_bit_counter: {"description": "Conn Sync Create with Ext Received counter", "format": "counter", "type": "number", "oid": "49", "optional": true, "size": "8"}
:param sync_persist_rx_no_such_rport: {"description": "Persist Conn Sync Real Port Not Found", "format": "counter", "type": "number", "oid": "41", "optional": true, "size": "8"}
:param sync_tx_persist_del_counter: {"description": "Conn Sync Delete Persist Session Pkts Sent counter", "format": "counter", "type": "number", "oid": "13", "optional": true, "size": "2"}
:param sync_persist_rx_no_such_vport: {"description": "Persist Conn Sync Virt Port Not Found", "format": "counter", "type": "number", "oid": "39", "optional": true, "size": "8"}
:param sync_rx_reserve_ha: {"description": "Conn Sync Reserve HA Conn", "format": "counter", "type": "number", "oid": "75", "optional": true, "size": "8"}
:param sync_get_buff_failed_rt: {"description": "Conn Sync Get Buff Failure No Route", "format": "counter", "type": "number", "oid": "65", "optional": true, "size": "8"}
:param sync_rx_conn_exists: {"description": "Conn Sync Create Conn Exists", "format": "counter", "type": "number", "oid": "50", "optional": true, "size": "8"}
:param sync_err_lsn_fullcone: {"description": "Conn Sync LSN Fullcone Failure", "format": "counter", "type": "number", "oid": "83", "optional": true, "size": "8"}
:param sync_get_buff_failed_port: {"description": "Conn Sync Get Buff Failure Wrong Port", "format": "counter", "type": "number", "oid": "66", "optional": true, "size": "8"}
:param query_tx_get_buff_failed: {"description": "Conn Query Get Buff Failure", "format": "counter", "type": "number", "oid": "22", "optional": true, "size": "8"}
:param sync_rx_ext_nat_mac: {"description": "Conn Sync NAT MAC Failure", "format": "counter", "type": "number", "oid": "80", "optional": true, "size": "8"}
:param sync_pkt_rcv_counter: {"description": "Conn Sync Received counter", "format": "counter", "type": "number", "oid": "2", "optional": true, "size": "2"}
:param sync_tx_smp_radius_table_counter: {"description": "Conn Sync Update LSN RADIUS Sent counter", "format": "counter", "type": "number", "oid": "17", "optional": true, "size": "2"}
:param sync_rx_proto_not_supported: {"description": "Conn Sync Protocol Invalid", "format": "counter", "type": "number", "oid": "52", "optional": true, "size": "8"}
:param sync_tx_persist_update_age_counter: {"description": "Conn Sync Update Persist Age Pkts Sent counter", "format": "counter", "type": "number", "oid": "14", "optional": true, "size": "2"}
:param query_rx_zero_info_counter: {"description": "Conn Query Packet Empty", "format": "counter", "type": "number", "oid": "23", "optional": true, "size": "8"}
:param sync_rx_len_invalid: {"description": "Conn Sync Length Invalid", "format": "counter", "type": "number", "oid": "33", "optional": true, "size": "8"}
:param query_pkt_invalid_idx_counter: {"description": "Conn Query Invalid Interface", "format": "counter", "type": "number", "oid": "21", "optional": true, "size": "8"}
:param sync_rx_create_static_sby: {"description": "Conn Sync Create Static Standby", "format": "counter", "type": "number", "oid": "72", "optional": true, "size": "8"}
:param sync_persist_rx_cannot_process_mandatory: {"description": "Persist Conn Sync Process Mandatory Invalid", "format": "counter", "type": "number", "oid": "37", "optional": true, "size": "8"}
:param sync_persist_rx_len_invalid: {"description": "Persist Conn Sync Length Invalid", "format": "counter", "type": "number", "oid": "34", "optional": true, "size": "8"}
:param sync_tx_total_info_counter: {"description": "Conn Sync Total Info Pkts Sent counter", "format": "counter", "type": "number", "oid": "28", "optional": true, "size": "8"}
:param sync_pkt_invalid_idx_counter: {"description": "Conn Sync Invalid Interface", "format": "counter", "type": "number", "oid": "26", "optional": true, "size": "8"}
:param sync_rx_nat_alloc_sby: {"description": "Conn Sync NAT Alloc Standby", "format": "counter", "type": "number", "oid": "69", "optional": true, "size": "8"}
:param sync_rx_persist_del_counter: {"description": "Conn Sync Delete Persist Session Pkts Received counter", "format": "counter", "type": "number", "oid": "10", "optional": true, "size": "2"}
:param sync_rx_update_age_counter: {"description": "Conn Sync Update Age Received counter", "format": "counter", "type": "number", "oid": "5", "optional": true, "size": "2"}
:param sync_persist_rx_proto_not_supported: {"description": "Persist Conn Sync Protocol Invalid", "format": "counter", "type": "number", "oid": "35", "optional": true, "size": "8"}
:param sync_rx_dcmsg_counter: {"description": "Conn Sync forward CPU", "format": "counter", "type": "number", "oid": "59", "optional": true, "size": "8"}
:param query_tx_min_packed: {"description": "Min Query Msg Per Packet", "format": "counter", "type": "number", "oid": "20", "optional": true, "size": "2"}
    :param sync_tx_min_packed: {"description": "Min Sync Msg Per Packet", "format": "counter", "type": "number", "oid": "31", "optional": true, "size": "2"}
:param sync_persist_rx_conn_get_failed: {"description": "Persist Conn Sync Get Conn Failure", "format": "counter", "type": "number", "oid": "44", "optional": true, "size": "8"}
:param query_tx_max_packed: {"description": "Max Query Msg Per Packet", "format": "counter", "type": "number", "oid": "19", "optional": true, "size": "2"}
:param sync_persist_rx_ext_bit_process_error: {"description": "Persist Conn Sync Proc Ext Bit Failure", "format": "counter", "type": "number", "oid": "38", "optional": true, "size": "8"}
:param sync_rx_seq_deltas: {"description": "Conn Sync Seq Deltas Failure", "format": "counter", "type": "number", "oid": "76", "optional": true, "size": "8"}
:param query_pkt_rcv_counter: {"description": "Conn Query Received counter", "format": "counter", "type": "number", "oid": "16", "optional": true, "size": "2"}
:param sync_query_dcmsg_counter: {"description": "Conn Sync query forward CPU", "format": "counter", "type": "number", "oid": "64", "optional": true, "size": "8"}
:param sync_rx_zero_info_counter: {"description": "Conn Sync Packet Empty", "format": "counter", "type": "number", "oid": "58", "optional": true, "size": "8"}
:param sync_rx_ftp_control: {"description": "Conn Sync FTP Control Failure", "format": "counter", "type": "number", "oid": "77", "optional": true, "size": "8"}
:param sync_pkt_tx_counter: {"description": "Conn Sync Sent counter", "format": "counter", "type": "number", "oid": "1", "optional": true, "size": "2"}
:param sync_rx_ext_sip_alg: {"description": "Conn Sync SIP TCP ALG Failure", "format": "counter", "type": "number", "oid": "79", "optional": true, "size": "8"}
:param sync_rx_lsn_fullcone: {"description": "Conn Sync Update LSN Fullcone Received counter", "format": "counter", "type": "number", "oid": "82", "optional": true, "size": "8"}
:param query_pkt_tx_counter: {"description": "Conn Query sent counter", "format": "counter", "type": "number", "oid": "15", "optional": true, "size": "2"}
:param sync_tx_persist_create_counter: {"description": "Conn Sync Create Persist Session Pkts Sent counter", "format": "counter", "type": "number", "oid": "12", "optional": true, "size": "2"}
:param sync_rx_smp_radius_table_counter: {"description": "Conn Sync Update LSN RADIUS Received counter", "format": "counter", "type": "number", "oid": "18", "optional": true, "size": "2"}
:param query_rx_unk_counter: {"description": "Conn Query Unknown Type", "format": "counter", "type": "number", "oid": "25", "optional": true, "size": "8"}
:param sync_rx_total_info_counter: {"description": "Conn Sync Total Info Pkts Received counter", "format": "counter", "type": "number", "oid": "60", "optional": true, "size": "8"}
:param sync_rx_no_dst_for_vport_inline: {"description": "Conn Sync 'dst' not found for vport inline", "format": "counter", "type": "number", "oid": "53", "optional": true, "size": "8"}
:param sync_rx_sfw: {"description": "Conn Sync SFW", "format": "counter", "type": "number", "oid": "71", "optional": true, "size": "8"}
:param sync_rx_unk_counter: {"description": "Conn Sync Unknown Type", "format": "counter", "type": "number", "oid": "62", "optional": true, "size": "8"}
:param sync_rx_ext_lsn_acl: {"description": "Conn Sync LSN ACL Failure", "format": "counter", "type": "number", "oid": "78", "optional": true, "size": "8"}
:param sync_tx_create_counter: {"description": "Conn Sync Create Session Sent counter", "format": "counter", "type": "number", "oid": "6", "optional": true, "size": "2"}
:param sync_rx_del_no_such_session: {"description": "Conn Sync Del Conn not Found", "format": "counter", "type": "number", "oid": "56", "optional": true, "size": "8"}
:param sync_tx_get_buff_failed: {"description": "Conn Sync Get Buff Failure", "format": "counter", "type": "number", "oid": "27", "optional": true, "size": "8"}
:param sync_persist_rx_vporttype_not_supported: {"description": "Persist Conn Sync Virt Port Type Invalid", "format": "counter", "type": "number", "oid": "40", "optional": true, "size": "8"}
:param sync_persist_rx_no_sg_group_info: {"description": "Persist Conn Sync No Service Group Info Found", "format": "counter", "type": "number", "oid": "43", "optional": true, "size": "8"}
:param sync_rx_no_such_vport: {"description": "Conn Sync Virt Port Not Found", "format": "counter", "type": "number", "oid": "45", "optional": true, "size": "8"}
:param sync_rx_create_counter: {"description": "Conn Sync Create Session Received counter", "format": "counter", "type": "number", "oid": "3", "optional": true, "size": "2"}
:param sync_rx_conn_get_failed: {"description": "Conn Sync Get Conn Failure", "format": "counter", "type": "number", "oid": "51", "optional": true, "size": "8"}
:param sync_persist_rx_type_invalid: {"description": "Persist Conn Sync Type Invalid", "format": "counter", "type": "number", "oid": "36", "optional": true, "size": "8"}
:param sync_rx_no_such_rport: {"description": "Conn Sync Real Port Not Found", "format": "counter", "type": "number", "oid": "46", "optional": true, "size": "8"}
:param sync_rx_nat_create_sby: {"description": "Conn Sync NAT Create Standby", "format": "counter", "type": "number", "oid": "68", "optional": true, "size": "8"}
:param sync_tx_update_age_counter: {"description": "Conn Sync Update Age Sent counter", "format": "counter", "type": "number", "oid": "8", "optional": true, "size": "2"}
:param sync_rx_del_counter: {"description": "Conn Sync Del Session Received counter", "format": "counter", "type": "number", "oid": "4", "optional": true, "size": "2"}
:param sync_rx_no_such_nat_pool: {"description": "Conn Sync NAT Pool Error", "format": "counter", "type": "number", "oid": "54", "optional": true, "size": "8"}
:param sync_tx_del_counter: {"description": "Conn Sync Del Session Sent counter", "format": "counter", "type": "number", "oid": "7", "optional": true, "size": "2"}
:param query_rx_full_info_counter: {"description": "Conn Query Packet Full", "format": "counter", "type": "number", "oid": "24", "optional": true, "size": "8"}
:param sync_rx_lsn_create_sby: {"description": "Conn Sync LSN Create Standby", "format": "counter", "type": "number", "oid": "67", "optional": true, "size": "8"}
:param sync_rx_ext_bit_process_error: {"description": "Conn Sync Proc Ext Bit Failure", "format": "counter", "type": "number", "oid": "48", "optional": true, "size": "8"}
:param sync_rx_cannot_process_mandatory: {"description": "Conn Sync Process Mandatory Invalid", "format": "counter", "type": "number", "oid": "47", "optional": true, "size": "8"}
:param sync_rx_insert_tuple: {"description": "Conn Sync Insert Tuple", "format": "counter", "type": "number", "oid": "70", "optional": true, "size": "8"}
:param sync_tx_update_seqnos_counter: {"description": "Conn Sync Update Seq Num Sent counter", "format": "counter", "type": "number", "oid": "30", "optional": true, "size": "8"}
:param sync_rx_persist_create_counter: {"description": "Conn Sync Create Persist Session Pkts Received counter", "format": "counter", "type": "number", "oid": "9", "optional": true, "size": "2"}
:param sync_rx_no_such_sg_node: {"description": "Conn Sync no SG node found", "format": "counter", "type": "number", "oid": "55", "optional": true, "size": "8"}
:param sync_rx_ext_pptp: {"description": "Conn Sync Ext PPTP", "format": "counter", "type": "number", "oid": "73", "optional": true, "size": "8"}
:param sync_rx_apptype_not_supported: {"description": "Conn Sync App Type Invalid", "format": "counter", "type": "number", "oid": "63", "optional": true, "size": "8"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "stats"
self.DeviceProxy = ""
self.sync_tx_create_ext_bit_counter = ""
self.sync_tx_max_packed = ""
self.sync_persist_rx_no_such_sg_group = ""
self.sync_rx_persist_update_age_counter = ""
self.sync_rx_type_invalid = ""
self.sync_rx_ext_rtsp = ""
self.sync_tx_lsn_fullcone = ""
self.sync_rx_update_seqnos_counter = ""
self.sync_rx_create_ext_bit_counter = ""
self.sync_persist_rx_no_such_rport = ""
self.sync_tx_persist_del_counter = ""
self.sync_persist_rx_no_such_vport = ""
self.sync_rx_reserve_ha = ""
self.sync_get_buff_failed_rt = ""
self.sync_rx_conn_exists = ""
self.sync_err_lsn_fullcone = ""
self.sync_get_buff_failed_port = ""
self.query_tx_get_buff_failed = ""
self.sync_rx_ext_nat_mac = ""
self.sync_pkt_rcv_counter = ""
self.sync_tx_smp_radius_table_counter = ""
self.sync_rx_proto_not_supported = ""
self.sync_tx_persist_update_age_counter = ""
self.query_rx_zero_info_counter = ""
self.sync_rx_len_invalid = ""
self.query_pkt_invalid_idx_counter = ""
self.sync_rx_create_static_sby = ""
self.sync_persist_rx_cannot_process_mandatory = ""
self.sync_persist_rx_len_invalid = ""
self.sync_tx_total_info_counter = ""
self.sync_pkt_invalid_idx_counter = ""
self.sync_rx_nat_alloc_sby = ""
self.sync_rx_persist_del_counter = ""
self.sync_rx_update_age_counter = ""
self.sync_persist_rx_proto_not_supported = ""
self.sync_rx_dcmsg_counter = ""
self.query_tx_min_packed = ""
self.sync_tx_min_packed = ""
self.sync_persist_rx_conn_get_failed = ""
self.query_tx_max_packed = ""
self.sync_persist_rx_ext_bit_process_error = ""
self.sync_rx_seq_deltas = ""
self.query_pkt_rcv_counter = ""
self.sync_query_dcmsg_counter = ""
self.sync_rx_zero_info_counter = ""
self.sync_rx_ftp_control = ""
self.sync_pkt_tx_counter = ""
self.sync_rx_ext_sip_alg = ""
self.sync_rx_lsn_fullcone = ""
self.query_pkt_tx_counter = ""
self.sync_tx_persist_create_counter = ""
self.sync_rx_smp_radius_table_counter = ""
self.query_rx_unk_counter = ""
self.sync_rx_total_info_counter = ""
self.sync_rx_no_dst_for_vport_inline = ""
self.sync_rx_sfw = ""
self.sync_rx_unk_counter = ""
self.sync_rx_ext_lsn_acl = ""
self.sync_tx_create_counter = ""
self.sync_rx_del_no_such_session = ""
self.sync_tx_get_buff_failed = ""
self.sync_persist_rx_vporttype_not_supported = ""
self.sync_persist_rx_no_sg_group_info = ""
self.sync_rx_no_such_vport = ""
self.sync_rx_create_counter = ""
self.sync_rx_conn_get_failed = ""
self.sync_persist_rx_type_invalid = ""
self.sync_rx_no_such_rport = ""
self.sync_rx_nat_create_sby = ""
self.sync_tx_update_age_counter = ""
self.sync_rx_del_counter = ""
self.sync_rx_no_such_nat_pool = ""
self.sync_tx_del_counter = ""
self.query_rx_full_info_counter = ""
self.sync_rx_lsn_create_sby = ""
self.sync_rx_ext_bit_process_error = ""
self.sync_rx_cannot_process_mandatory = ""
self.sync_rx_insert_tuple = ""
self.sync_tx_update_seqnos_counter = ""
self.sync_rx_persist_create_counter = ""
self.sync_rx_no_such_sg_node = ""
self.sync_rx_ext_pptp = ""
self.sync_rx_apptype_not_supported = ""
        for key, value in kwargs.items():
            setattr(self, key, value)
class State(A10BaseClass):
"""Class Description::
Statistics for the object state.
Class state supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/vrrp-a/state/stats`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "state"
self.a10_url="/axapi/v3/vrrp-a/state/stats"
self.DeviceProxy = ""
self.stats = {}
        for key, value in kwargs.items():
            setattr(self, key, value)
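# Illustrative usage sketch (not part of the original module). The proxy
# wiring below is hypothetical -- the real constructor arguments live in
# `common/device_proxy.py` -- but it shows the intended pattern: hand a
# DeviceProxy to the class and let the CRUD helpers inherited from
# A10BaseClass operate on `a10_url`.
#
# device = DeviceProxy(...)               # authenticated proxy to the A10 box
# state_stats = State(DeviceProxy=device)
# current = state_stats.GET(state_stats.a10_url)  # hypothetical read helper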
| amwelch/a10sdk-python | a10sdk/core/vrrp/vrrp_a_state_stats.py | Python | apache-2.0 | 19,224 | 0.00515 |
# shipBonusCarrierC4WarfareLinksBonus
#
# Used by:
# Ship: Chimera
type = "passive"
def handler(fit, src, context):
    # All five warfare-buff attributes on shield/information command burst
    # modules receive the same Caldari Carrier hull bonus, so apply it in a
    # loop rather than repeating the call five times.
    def affects(mod):
        return (mod.item.requiresSkill("Shield Command") or
                mod.item.requiresSkill("Information Command"))

    bonus = src.getModifiedItemAttr("shipBonusCarrierC4")
    for attr in ("warfareBuff1Value", "warfareBuff2Value", "warfareBuff3Value",
                 "warfareBuff4Value", "buffDuration"):
        fit.modules.filteredItemBoost(affects, attr, bonus,
                                      skill="Caldari Carrier")
| Ebag333/Pyfa | eos/effects/shipbonuscarrierc4warfarelinksbonus.py | Python | gpl-3.0 | 1,348 | 0.007418 |
import unittest
from indicoio import config, fer
class TestBatchSize(unittest.TestCase):
def setUp(self):
self.api_key = config.api_key
if not self.api_key:
raise unittest.SkipTest
def test_url_support(self):
test_url = "https://s3-us-west-2.amazonaws.com/indico-test-data/face.jpg"
response = fer(test_url)
self.assertTrue(isinstance(response, dict))
self.assertEqual(len(response.keys()), 6)
| IndicoDataSolutions/IndicoIo-python | tests/etc/test_url.py | Python | mit | 467 | 0.002141 |
# Copyright 2014-2017 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""Test for the smooth solvers."""
from __future__ import division
import pytest
import odl
from odl.operator import OpNotImplementedError
nonlinear_cg_beta = odl.util.testutils.simple_fixture('nonlinear_cg_beta',
['FR', 'PR', 'HS', 'DY'])
@pytest.fixture(scope="module", params=['l2_squared', 'l2_squared_scaled',
'rosenbrock', 'quadratic_form'])
def functional(request):
"""functional with optimum 0 at 0."""
name = request.param
if name == 'l2_squared':
space = odl.rn(3)
return odl.solvers.L2NormSquared(space)
elif name == 'l2_squared_scaled':
space = odl.uniform_discr(0, 1, 3)
scaling = odl.MultiplyOperator(space.element([1, 2, 3]),
domain=space)
return odl.solvers.L2NormSquared(space) * scaling
elif name == 'quadratic_form':
space = odl.rn(3)
# Symmetric and diagonally dominant matrix
matrix = odl.MatrixOperator([[7.0, 1, 2],
[1, 5, -3],
[2, -3, 8]])
vector = space.element([1, 2, 3])
# Calibrate so that functional is zero in optimal point
constant = 1 / 4 * vector.inner(matrix.inverse(vector))
return odl.solvers.QuadraticForm(
operator=matrix, vector=vector, constant=constant)
elif name == 'rosenbrock':
        # Moderately ill-behaved Rosenbrock functional.
rosenbrock = odl.solvers.RosenbrockFunctional(odl.rn(2), scale=2)
# Center at zero
return rosenbrock.translated([-1, -1])
else:
assert False
@pytest.fixture(scope="module", params=['constant', 'backtracking'])
def functional_and_linesearch(request, functional):
"""Return functional with optimum 0 at 0 and a line search."""
name = request.param
if name == 'constant':
return functional, 1.0
else:
return functional, odl.solvers.BacktrackingLineSearch(functional)
@pytest.fixture(scope="module", params=['first', 'second'])
def broyden_impl(request):
return request.param
def test_newton_solver(functional_and_linesearch):
"""Test the Newton solver."""
functional, line_search = functional_and_linesearch
try:
# Test if derivative exists
functional.gradient.derivative(functional.domain.zero())
except OpNotImplementedError:
return
# Solving the problem
x = functional.domain.one()
odl.solvers.newtons_method(functional, x, tol=1e-6,
line_search=line_search)
# Assert x is close to the optimum at [1, 1]
assert functional(x) < 1e-3
def test_bfgs_solver(functional_and_linesearch):
"""Test the BFGS quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.bfgs_method(functional, x, tol=1e-3,
line_search=line_search)
assert functional(x) < 1e-3
def test_lbfgs_solver(functional_and_linesearch):
"""Test limited memory BFGS quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.bfgs_method(functional, x, tol=1e-3,
line_search=line_search, num_store=5)
assert functional(x) < 1e-3
def test_broydens_method(broyden_impl, functional_and_linesearch):
"""Test the ``broydens_method`` quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.broydens_method(functional, x, tol=1e-3,
line_search=line_search, impl=broyden_impl)
assert functional(x) < 1e-3
def test_steepest_descent(functional):
"""Test the ``steepest_descent`` solver."""
line_search = odl.solvers.BacktrackingLineSearch(functional)
x = functional.domain.one()
odl.solvers.steepest_descent(functional, x, tol=1e-3,
line_search=line_search)
assert functional(x) < 1e-3
def test_adam(functional):
"""Test the ``adam`` solver."""
x = functional.domain.one()
odl.solvers.adam(functional, x, tol=1e-2, learning_rate=0.5)
assert functional(x) < 1e-3
def test_conjugate_gradient_nonlinear(functional, nonlinear_cg_beta):
"""Test the ``conjugate_gradient_nonlinear`` solver."""
line_search = odl.solvers.BacktrackingLineSearch(functional)
x = functional.domain.one()
odl.solvers.conjugate_gradient_nonlinear(functional, x, tol=1e-3,
line_search=line_search,
beta_method=nonlinear_cg_beta)
assert functional(x) < 1e-3
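# Minimal usage sketch (illustrative, not part of the original tests): calling
# one of the smooth solvers directly on a simple functional. The tolerance and
# starting point are arbitrary choices.
def _example_steepest_descent():
    space = odl.rn(3)
    func = odl.solvers.L2NormSquared(space)
    x = space.one()
    odl.solvers.steepest_descent(
        func, x, tol=1e-6,
        line_search=odl.solvers.BacktrackingLineSearch(func))
    return x  # approximately the zero vector, the minimizer of ||x||^2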
if __name__ == '__main__':
odl.util.test_file(__file__)
| kohr-h/odl | odl/test/solvers/smooth/smooth_test.py | Python | mpl-2.0 | 5,137 | 0 |
"""Test the Dyson fan component."""
import json
import unittest
from unittest import mock
import asynctest
from libpurecool.const import FanMode, FanSpeed, NightMode, Oscillation
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from libpurecool.dyson_pure_state import DysonPureCoolState
from libpurecool.dyson_pure_state_v2 import DysonPureCoolV2State
from homeassistant.components import dyson as dyson_parent
from homeassistant.components.dyson import DYSON_DEVICES
import homeassistant.components.dyson.fan as dyson
from homeassistant.components.fan import (
ATTR_OSCILLATING,
ATTR_SPEED,
DOMAIN,
SERVICE_OSCILLATE,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component
from tests.common import get_test_home_assistant
class MockDysonState(DysonPureCoolState):
"""Mock Dyson state."""
def __init__(self):
"""Create new Mock Dyson State."""
pass
def _get_dyson_purecool_device():
"""Return a valid device as provided by the Dyson web services."""
device = mock.Mock(spec=DysonPureCool)
device.serial = "XX-XXXXX-XX"
device.name = "Living room"
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.state = mock.Mock()
device.state.oscillation = "OION"
device.state.fan_power = "ON"
device.state.speed = FanSpeed.FAN_SPEED_AUTO.value
device.state.night_mode = "OFF"
device.state.auto_mode = "ON"
device.state.oscillation_angle_low = "0090"
device.state.oscillation_angle_high = "0180"
device.state.front_direction = "ON"
device.state.sleep_timer = 60
device.state.hepa_filter_state = "0090"
device.state.carbon_filter_state = "0080"
return device
def _get_dyson_purecoollink_device():
"""Return a valid device as provided by the Dyson web services."""
device = mock.Mock(spec=DysonPureCoolLink)
device.serial = "XX-XXXXX-XX"
device.name = "Living room"
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.state = mock.Mock()
device.state.oscillation = "ON"
device.state.fan_mode = "FAN"
device.state.speed = FanSpeed.FAN_SPEED_AUTO.value
device.state.night_mode = "OFF"
return device
def _get_supported_speeds():
return [
int(FanSpeed.FAN_SPEED_1.value),
int(FanSpeed.FAN_SPEED_2.value),
int(FanSpeed.FAN_SPEED_3.value),
int(FanSpeed.FAN_SPEED_4.value),
int(FanSpeed.FAN_SPEED_5.value),
int(FanSpeed.FAN_SPEED_6.value),
int(FanSpeed.FAN_SPEED_7.value),
int(FanSpeed.FAN_SPEED_8.value),
int(FanSpeed.FAN_SPEED_9.value),
int(FanSpeed.FAN_SPEED_10.value),
]
def _get_config():
"""Return a config dictionary."""
return {
dyson_parent.DOMAIN: {
dyson_parent.CONF_USERNAME: "email",
dyson_parent.CONF_PASSWORD: "password",
dyson_parent.CONF_LANGUAGE: "GB",
dyson_parent.CONF_DEVICES: [
{"device_id": "XX-XXXXX-XX", "device_ip": "192.168.0.1"}
],
}
}
def _get_device_with_no_state():
"""Return a device with no state."""
device = mock.Mock()
device.name = "Device_name"
device.state = None
return device
def _get_device_off():
"""Return a device with state off."""
device = mock.Mock()
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "OFF"
device.state.night_mode = "ON"
device.state.speed = "0004"
return device
def _get_device_auto():
"""Return a device with state auto."""
device = mock.Mock()
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "AUTO"
device.state.night_mode = "ON"
device.state.speed = "AUTO"
return device
def _get_device_on():
"""Return a valid state on."""
device = mock.Mock(spec=DysonPureCoolLink)
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "FAN"
device.state.fan_state = "FAN"
device.state.oscillation = "ON"
device.state.night_mode = "OFF"
device.state.speed = "0001"
return device
class DysonSetupTest(unittest.TestCase):
"""Dyson component setup tests."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component_with_no_devices(self):
"""Test setup component with no devices."""
self.hass.data[dyson.DYSON_DEVICES] = []
add_entities = mock.MagicMock()
dyson.setup_platform(self.hass, None, add_entities, mock.Mock())
add_entities.assert_called_with([])
def test_setup_component(self):
"""Test setup component with devices."""
def _add_device(devices):
assert len(devices) == 2
assert devices[0].name == "Device_name"
device_fan = _get_device_on()
device_purecool_fan = _get_dyson_purecool_device()
device_non_fan = _get_device_off()
self.hass.data[dyson.DYSON_DEVICES] = [
device_fan,
device_purecool_fan,
device_non_fan,
]
dyson.setup_platform(self.hass, None, _add_device)
class DysonTest(unittest.TestCase):
"""Dyson fan component test class."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_dyson_set_speed(self):
"""Test set fan speed."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_speed("1")
set_config = device.set_configuration
set_config.assert_called_with(
fan_mode=FanMode.FAN, fan_speed=FanSpeed.FAN_SPEED_1
)
component.set_speed("AUTO")
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.AUTO)
def test_dyson_turn_on(self):
"""Test turn on fan."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.turn_on()
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.FAN)
def test_dyson_turn_night_mode(self):
"""Test turn on fan with night mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_night_mode(True)
set_config = device.set_configuration
set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_ON)
component.set_night_mode(False)
set_config = device.set_configuration
set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_OFF)
def test_is_night_mode(self):
"""Test night mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.night_mode
device = _get_device_off()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.night_mode
def test_dyson_turn_auto_mode(self):
"""Test turn on/off fan with auto mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_auto_mode(True)
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.AUTO)
component.set_auto_mode(False)
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.FAN)
def test_is_auto_mode(self):
"""Test auto mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.auto_mode
device = _get_device_auto()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.auto_mode
def test_dyson_turn_on_speed(self):
"""Test turn on fan with specified speed."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.turn_on("1")
set_config = device.set_configuration
set_config.assert_called_with(
fan_mode=FanMode.FAN, fan_speed=FanSpeed.FAN_SPEED_1
)
component.turn_on("AUTO")
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.AUTO)
def test_dyson_turn_off(self):
"""Test turn off fan."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.turn_off()
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.OFF)
def test_dyson_oscillate_off(self):
"""Test turn off oscillation."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
component.oscillate(False)
set_config = device.set_configuration
set_config.assert_called_with(oscillation=Oscillation.OSCILLATION_OFF)
def test_dyson_oscillate_on(self):
"""Test turn on oscillation."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
component.oscillate(True)
set_config = device.set_configuration
set_config.assert_called_with(oscillation=Oscillation.OSCILLATION_ON)
def test_dyson_oscillate_value_on(self):
"""Test get oscillation value on."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.oscillating
def test_dyson_oscillate_value_off(self):
"""Test get oscillation value off."""
device = _get_device_off()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.oscillating
def test_dyson_on(self):
"""Test device is on."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.is_on
def test_dyson_off(self):
"""Test device is off."""
device = _get_device_off()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.is_on
device = _get_device_with_no_state()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.is_on
def test_dyson_get_speed(self):
"""Test get device speed."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.speed == 1
device = _get_device_off()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.speed == 4
device = _get_device_with_no_state()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.speed is None
device = _get_device_auto()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.speed == "AUTO"
def test_dyson_get_direction(self):
"""Test get device direction."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.current_direction is None
def test_dyson_get_speed_list(self):
"""Test get speeds list."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert len(component.speed_list) == 11
def test_dyson_supported_features(self):
"""Test supported features."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.supported_features == 3
def test_on_message(self):
"""Test when message is received."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
component.entity_id = "entity_id"
component.schedule_update_ha_state = mock.Mock()
component.on_message(MockDysonState())
component.schedule_update_ha_state.assert_called_with()
def test_service_set_night_mode(self):
"""Test set night mode service."""
dyson_device = mock.MagicMock()
self.hass.data[DYSON_DEVICES] = []
dyson_device.entity_id = "fan.living_room"
self.hass.data[dyson.DYSON_FAN_DEVICES] = [dyson_device]
dyson.setup_platform(self.hass, None, mock.MagicMock(), mock.MagicMock())
self.hass.services.call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_NIGHT_MODE,
{"entity_id": "fan.bed_room", "night_mode": True},
True,
)
assert dyson_device.set_night_mode.call_count == 0
self.hass.services.call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_NIGHT_MODE,
{"entity_id": "fan.living_room", "night_mode": True},
True,
)
dyson_device.set_night_mode.assert_called_with(True)
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecoollink_device()],
)
async def test_purecoollink_attributes(devices, login, hass):
"""Test state attributes."""
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
fan_state = hass.states.get("fan.living_room")
attributes = fan_state.attributes
assert fan_state.state == "on"
assert attributes[dyson.ATTR_NIGHT_MODE] is False
assert attributes[ATTR_SPEED] == FanSpeed.FAN_SPEED_AUTO.value
assert attributes[ATTR_OSCILLATING] is True
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_turn_on(devices, login, hass):
"""Test turn on."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "fan.bed_room"}, True
)
assert device.turn_on.call_count == 0
await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "fan.living_room"}, True
)
assert device.turn_on.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_speed(devices, login, hass):
"""Test set speed."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "fan.bed_room", ATTR_SPEED: SPEED_LOW},
True,
)
assert device.set_fan_speed.call_count == 0
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "fan.living_room", ATTR_SPEED: SPEED_LOW},
True,
)
device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_4)
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "fan.living_room", ATTR_SPEED: SPEED_MEDIUM},
True,
)
device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_7)
await hass.services.async_call(
DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "fan.living_room", ATTR_SPEED: SPEED_HIGH},
True,
)
device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_10)
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_turn_off(devices, login, hass):
"""Test turn off."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "fan.bed_room"}, True
)
assert device.turn_off.call_count == 0
await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "fan.living_room"}, True
)
assert device.turn_off.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_dyson_speed(devices, login, hass):
"""Test set exact dyson speed."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_DYSON_SPEED,
{
ATTR_ENTITY_ID: "fan.bed_room",
dyson.ATTR_DYSON_SPEED: int(FanSpeed.FAN_SPEED_2.value),
},
True,
)
assert device.set_fan_speed.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_DYSON_SPEED,
{
ATTR_ENTITY_ID: "fan.living_room",
dyson.ATTR_DYSON_SPEED: int(FanSpeed.FAN_SPEED_2.value),
},
True,
)
device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_2)
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_oscillate(devices, login, hass):
"""Test set oscillation."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN,
SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: "fan.bed_room", ATTR_OSCILLATING: True},
True,
)
assert device.enable_oscillation.call_count == 0
await hass.services.async_call(
DOMAIN,
SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: "fan.living_room", ATTR_OSCILLATING: True},
True,
)
assert device.enable_oscillation.call_count == 1
await hass.services.async_call(
DOMAIN,
SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: "fan.living_room", ATTR_OSCILLATING: False},
True,
)
assert device.disable_oscillation.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_night_mode(devices, login, hass):
"""Test set night mode."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_NIGHT_MODE,
{"entity_id": "fan.bed_room", "night_mode": True},
True,
)
assert device.enable_night_mode.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_NIGHT_MODE,
{"entity_id": "fan.living_room", "night_mode": True},
True,
)
assert device.enable_night_mode.call_count == 1
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_NIGHT_MODE,
{"entity_id": "fan.living_room", "night_mode": False},
True,
)
assert device.disable_night_mode.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_auto_mode(devices, login, hass):
"""Test set auto mode."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_AUTO_MODE,
{ATTR_ENTITY_ID: "fan.bed_room", dyson.ATTR_AUTO_MODE: True},
True,
)
assert device.enable_auto_mode.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_AUTO_MODE,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_AUTO_MODE: True},
True,
)
assert device.enable_auto_mode.call_count == 1
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_AUTO_MODE,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_AUTO_MODE: False},
True,
)
assert device.disable_auto_mode.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_angle(devices, login, hass):
"""Test set angle."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_ANGLE,
{
ATTR_ENTITY_ID: "fan.bed_room",
dyson.ATTR_ANGLE_LOW: 90,
dyson.ATTR_ANGLE_HIGH: 180,
},
True,
)
assert device.enable_oscillation.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_ANGLE,
{
ATTR_ENTITY_ID: "fan.living_room",
dyson.ATTR_ANGLE_LOW: 90,
dyson.ATTR_ANGLE_HIGH: 180,
},
True,
)
device.enable_oscillation.assert_called_with(90, 180)
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_flow_direction_front(devices, login, hass):
"""Test set frontal flow direction."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_FLOW_DIRECTION_FRONT,
{ATTR_ENTITY_ID: "fan.bed_room", dyson.ATTR_FLOW_DIRECTION_FRONT: True},
True,
)
assert device.enable_frontal_direction.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_FLOW_DIRECTION_FRONT,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_FLOW_DIRECTION_FRONT: True},
True,
)
assert device.enable_frontal_direction.call_count == 1
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_FLOW_DIRECTION_FRONT,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_FLOW_DIRECTION_FRONT: False},
True,
)
assert device.disable_frontal_direction.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_set_timer(devices, login, hass):
"""Test set timer."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_TIMER,
{ATTR_ENTITY_ID: "fan.bed_room", dyson.ATTR_TIMER: 60},
True,
)
assert device.enable_frontal_direction.call_count == 0
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_TIMER,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_TIMER: 60},
True,
)
device.enable_sleep_timer.assert_called_with(60)
await hass.services.async_call(
dyson.DYSON_DOMAIN,
dyson.SERVICE_SET_TIMER,
{ATTR_ENTITY_ID: "fan.living_room", dyson.ATTR_TIMER: 0},
True,
)
assert device.disable_sleep_timer.call_count == 1
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_update_state(devices, login, hass):
"""Test state update."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
event = {
"msg": "CURRENT-STATE",
"product-state": {
"fpwr": "OFF",
"fdir": "OFF",
"auto": "OFF",
"oscs": "ON",
"oson": "ON",
"nmod": "OFF",
"rhtm": "ON",
"fnst": "FAN",
"ercd": "11E1",
"wacd": "NONE",
"nmdv": "0004",
"fnsp": "0002",
"bril": "0002",
"corf": "ON",
"cflr": "0085",
"hflr": "0095",
"sltm": "OFF",
"osal": "0045",
"osau": "0095",
"ancp": "CUST",
},
}
device.state = DysonPureCoolV2State(json.dumps(event))
for call in device.add_message_listener.call_args_list:
callback = call[0][0]
if type(callback.__self__) == dyson.DysonPureCoolDevice:
callback(device.state)
await hass.async_block_till_done()
fan_state = hass.states.get("fan.living_room")
attributes = fan_state.attributes
assert fan_state.state == "off"
assert attributes[dyson.ATTR_NIGHT_MODE] is False
assert attributes[dyson.ATTR_AUTO_MODE] is False
assert attributes[dyson.ATTR_ANGLE_LOW] == 45
assert attributes[dyson.ATTR_ANGLE_HIGH] == 95
assert attributes[dyson.ATTR_FLOW_DIRECTION_FRONT] is False
assert attributes[dyson.ATTR_TIMER] == "OFF"
assert attributes[dyson.ATTR_HEPA_FILTER] == 95
assert attributes[dyson.ATTR_CARBON_FILTER] == 85
assert attributes[dyson.ATTR_DYSON_SPEED] == int(FanSpeed.FAN_SPEED_2.value)
assert attributes[ATTR_SPEED] is SPEED_LOW
assert attributes[ATTR_OSCILLATING] is False
assert attributes[dyson.ATTR_DYSON_SPEED_LIST] == _get_supported_speeds()
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_update_state_filter_inv(devices, login, hass):
"""Test state TP06 carbon filter state."""
device = devices.return_value[0]
await async_setup_component(hass, dyson.DYSON_DOMAIN, _get_config())
await hass.async_block_till_done()
event = {
"msg": "CURRENT-STATE",
"product-state": {
"fpwr": "OFF",
"fdir": "ON",
"auto": "ON",
"oscs": "ON",
"oson": "ON",
"nmod": "ON",
"rhtm": "ON",
"fnst": "FAN",
"ercd": "11E1",
"wacd": "NONE",
"nmdv": "0004",
"fnsp": "0002",
"bril": "0002",
"corf": "ON",
"cflr": "INV",
"hflr": "0075",
"sltm": "OFF",
"osal": "0055",
"osau": "0105",
"ancp": "CUST",
},
}
device.state = DysonPureCoolV2State(json.dumps(event))
for call in device.add_message_listener.call_args_list:
callback = call[0][0]
if type(callback.__self__) == dyson.DysonPureCoolDevice:
callback(device.state)
await hass.async_block_till_done()
fan_state = hass.states.get("fan.living_room")
attributes = fan_state.attributes
assert fan_state.state == "off"
assert attributes[dyson.ATTR_NIGHT_MODE] is True
assert attributes[dyson.ATTR_AUTO_MODE] is True
assert attributes[dyson.ATTR_ANGLE_LOW] == 55
assert attributes[dyson.ATTR_ANGLE_HIGH] == 105
assert attributes[dyson.ATTR_FLOW_DIRECTION_FRONT] is True
assert attributes[dyson.ATTR_TIMER] == "OFF"
assert attributes[dyson.ATTR_HEPA_FILTER] == 75
assert attributes[dyson.ATTR_CARBON_FILTER] == "INV"
assert attributes[dyson.ATTR_DYSON_SPEED] == int(FanSpeed.FAN_SPEED_2.value)
assert attributes[ATTR_SPEED] is SPEED_LOW
assert attributes[ATTR_OSCILLATING] is False
assert attributes[dyson.ATTR_DYSON_SPEED_LIST] == _get_supported_speeds()
@asynctest.patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@asynctest.patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_component_setup_only_once(devices, login, hass):
"""Test if entities are created only once."""
config = _get_config()
await async_setup_component(hass, dyson_parent.DOMAIN, config)
await hass.async_block_till_done()
discovery.load_platform(hass, "fan", dyson_parent.DOMAIN, {}, config)
await hass.async_block_till_done()
fans = [
fan
for fan in hass.data[DOMAIN].entities
if fan.platform.platform_name == dyson_parent.DOMAIN
]
assert len(fans) == 1
assert fans[0].device_serial == "XX-XXXXX-XX"
| Teagan42/home-assistant | tests/components/dyson/test_fan.py | Python | apache-2.0 | 30,624 | 0.000229 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
    This uses numpy to interpolate linearly between tabular values and
    calculate the exact runoff for a given precipitation value.
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
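# Worked sketch of the interpolation step above (illustrative; the tabular
# values are made up -- the real ones come from lookup_pitt_runoff):
#
# >>> np.interp(1.5, [0.5, 1.0, 2.0], [0.1, 0.2, 0.4])
# 0.3
#
# i.e. a 1.5 in storm halfway between the 1.0 in and 2.0 in table rows gets a
# runoff ratio halfway between 0.2 and 0.4.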
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
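# Worked example (illustrative, not part of the original module) tracing the
# arithmetic above for a hypothetical curve number of 80 and 3.0 in of rain:
#
#   potential_retention = 1000.0 / 80 - 10                       # = 2.5
#   initial_abs         = 0.2 * 2.5                              # = 0.5
#   runoff              = (3.0 - 0.5)**2 / ((3.0 - 0.5) + 2.5)   # = 1.25 in
#
# and the returned value is additionally capped at precip - evaptrans.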
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
    ET for the cell after taking the crop/landscape factor into account;
    it is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
        ordinarily would be.)
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
    # When the land-use is a built-type, use the Pitt Small Storm Hydrology
    # Model until the runoff predicted by the NRCS model is greater than
    # that predicted by the Pitt model, i.e. take the larger of the two.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
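# Usage sketch (illustrative): cell strings follow the "soil:land_use:bmp"
# convention parsed above; the soil and land-use keys below are hypothetical
# table entries.
#
# result = simulate_cell_day(precip=1.0, evaptrans=0.1,
#                            cell='b:developed_med:', cell_count=100)
# # result['runoff-vol'], result['et-vol'] and result['inf-vol'] are volumes
# # in inches * #cells that approximately partition the 100 inch-cells of rain.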
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
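# Shape sketch (illustrative; cell-type keys are hypothetical): a census with
# a single modification converting 40 of 100 cells to no-till.
#
# census = {
#     'cell_count': 100,
#     'distribution': {'b:developed_med': {'cell_count': 100}},
#     'modifications': [{
#         'change': '::no_till',
#         'distribution': {'b:developed_med': {'cell_count': 40}},
#     }],
# }
# mod = create_modified_census(census)
# # In mod, each cell type nests its unchanged and changed parts: 60 cells
# # stay 'b:developed_med', 40 become 'b:developed_med:no_till'.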
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
pct, cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
result['runoff-vol'] -= runoff_adjustment
result['inf-vol'] += runoff_adjustment
tree.update(result)
# perform water quality calculation
if n != 0:
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def compute_bmp_effect(census, m2_per_pixel, precip):
"""
Compute the overall amount of water retained by infiltration/retention
type BMP's.
Result is a percent of runoff remaining after water is trapped in
infiltration/retention BMP's
"""
meters_per_inch = 0.0254
cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel
# 'runoff-vol' in census is in inches*#cells
bmp_dict = census.get('BMPs', {})
bmp_keys = set(bmp_dict.keys())
reduction = 0.0
for bmp in set.intersection(set(get_bmps()), bmp_keys):
bmp_area = bmp_dict[bmp]
storage_space = (lookup_bmp_storage(bmp) * bmp_area)
max_reduction = lookup_bmp_drainage_ratio(bmp) * bmp_area * precip * meters_per_inch
bmp_reduction = min(max_reduction, storage_space)
reduction += bmp_reduction
return 0 if not cubic_meters else \
max(0.0, cubic_meters - reduction) / cubic_meters
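# Worked sketch (illustrative): 100 cells of runoff at 1 inch each over
# 10 m^2 pixels is 100 * 0.0254 * 10 = 25.4 m^3 of runoff. BMPs that can
# trap 5.4 m^3 leave (25.4 - 5.4) / 25.4 ~= 0.79, i.e. 79% of the runoff
# volume remains after retention.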
def simulate_modifications(census, fn, cell_res, precip, pc=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
pct = compute_bmp_effect(mod, cell_res, precip)
simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
# From the EPA WaterSense data finder for the Philadelphia airport (19153)
# Converted to daily number in inches per day.
# http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
# TODO: include Potential Max ET as a data layer from CGIAR
# http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precip, precolumbian)
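# End-to-end usage sketch (illustrative; the cell-type key is a hypothetical
# table entry):
#
# census = {
#     'cell_count': 100,
#     'distribution': {'b:developed_med': {'cell_count': 100}},
# }
# result = simulate_day(census, precip=2.0)
# result['unmodified']['runoff']  # inches of runoff per cell; pollutant loads
#                                 # appear under keys from get_pollutants()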
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")
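# A minimal usage sketch (hypothetical census layout -- real cell keys are
# 'soil:landuse' or 'soil:landuse:bmp' strings accepted by lookup_ki):
#
#     census = {'cell_count': 2,
#               'distribution': {'a:open_water': {'cell_count': 1},
#                                'a:rock': {'cell_count': 1}}}
#     result = simulate_day(census, precip=0.984)
#     # result['unmodified'] and result['modified'] then carry per-cell
#     # 'runoff', 'et' and 'inf' depths in inches.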
| WikiWatershed/tr-55 | tr55/model.py | Python | apache-2.0 | 13,671 | 0.000658 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest import exceptions
from tempest import test
class VolumeQuotasNegativeTestJSON(base.BaseVolumeV1AdminTest):
_interface = "json"
force_tenant_isolation = True
@classmethod
@test.safe_setup
def setUpClass(cls):
super(VolumeQuotasNegativeTestJSON, cls).setUpClass()
demo_user = cls.isolated_creds.get_primary_creds()
cls.demo_tenant_id = demo_user.tenant_id
cls.shared_quota_set = {'gigabytes': 3, 'volumes': 1, 'snapshots': 1}
# NOTE(gfidente): no need to restore original quota set
# after the tests as they only work with tenant isolation.
resp, quota_set = cls.quotas_client.update_quota_set(
cls.demo_tenant_id,
**cls.shared_quota_set)
# NOTE(gfidente): no need to delete in tearDown as
# they are created using utility wrapper methods.
cls.volume = cls.create_volume()
cls.snapshot = cls.create_snapshot(cls.volume['id'])
@test.attr(type='negative')
def test_quota_volumes(self):
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
@test.attr(type='negative')
def test_quota_volume_snapshots(self):
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
@test.attr(type='negative')
def test_quota_volume_gigabytes(self):
# NOTE(gfidente): quota set needs to be changed for this test
# or we may be limited by the volumes or snaps quota number, not by
# actual gigs usage; next line ensures shared set is restored.
self.addCleanup(self.quotas_client.update_quota_set,
self.demo_tenant_id,
**self.shared_quota_set)
new_quota_set = {'gigabytes': 2, 'volumes': 2, 'snapshots': 1}
resp, quota_set = self.quotas_client.update_quota_set(
self.demo_tenant_id,
**new_quota_set)
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
new_quota_set = {'gigabytes': 2, 'volumes': 1, 'snapshots': 2}
resp, quota_set = self.quotas_client.update_quota_set(
self.demo_tenant_id,
**self.shared_quota_set)
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
class VolumeQuotasNegativeTestXML(VolumeQuotasNegativeTestJSON):
_interface = "xml"
| Mirantis/tempest | tempest/api/volume/admin/test_volume_quotas_negative.py | Python | apache-2.0 | 3,331 | 0 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
def execute(filters=None):
if not filters: filters = {}
columns = get_columns(filters)
item_map = get_item_details(filters)
iwb_map = get_item_warehouse_batch_map(filters)
data = []
for item in sorted(iwb_map):
for wh in sorted(iwb_map[item]):
for batch in sorted(iwb_map[item][wh]):
qty_dict = iwb_map[item][wh][batch]
data.append([item, item_map[item]["item_name"],
item_map[item]["description"], wh, batch,
qty_dict.opening_qty, qty_dict.in_qty,
qty_dict.out_qty, qty_dict.bal_qty
])
return columns, data
def get_columns(filters):
"""return columns based on filters"""
columns = [_("Item") + ":Link/Item:100"] + [_("Item Name") + "::150"] + [_("Description") + "::150"] + \
[_("Warehouse") + ":Link/Warehouse:100"] + [_("Batch") + ":Link/Batch:100"] + [_("Opening Qty") + "::90"] + \
[_("In Qty") + "::80"] + [_("Out Qty") + "::80"] + [_("Balance Qty") + "::90"]
return columns
def get_conditions(filters):
conditions = ""
if not filters.get("from_date"):
frappe.throw(_("'From Date' is required"))
if filters.get("to_date"):
conditions += " and posting_date <= '%s'" % filters["to_date"]
else:
frappe.throw(_("'To Date' is required"))
return conditions
#get all details
def get_stock_ledger_entries(filters):
conditions = get_conditions(filters)
return frappe.db.sql("""select item_code, batch_no, warehouse,
posting_date, actual_qty
from `tabStock Ledger Entry`
where docstatus < 2 %s order by item_code, warehouse""" %
conditions, as_dict=1)
def get_item_warehouse_batch_map(filters):
sle = get_stock_ledger_entries(filters)
iwb_map = {}
for d in sle:
iwb_map.setdefault(d.item_code, {}).setdefault(d.warehouse, {})\
.setdefault(d.batch_no, frappe._dict({
"opening_qty": 0.0, "in_qty": 0.0, "out_qty": 0.0, "bal_qty": 0.0
}))
qty_dict = iwb_map[d.item_code][d.warehouse][d.batch_no]
if d.posting_date < filters["from_date"]:
qty_dict.opening_qty += flt(d.actual_qty)
elif d.posting_date >= filters["from_date"] and d.posting_date <= filters["to_date"]:
if flt(d.actual_qty) > 0:
qty_dict.in_qty += flt(d.actual_qty)
else:
qty_dict.out_qty += abs(flt(d.actual_qty))
qty_dict.bal_qty += flt(d.actual_qty)
return iwb_map
def get_item_details(filters):
item_map = {}
for d in frappe.db.sql("select name, item_name, description from tabItem", as_dict=1):
item_map.setdefault(d.name, d)
return item_map
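# A usage sketch (hypothetical dates -- both filters are mandatory, as
# enforced by get_conditions above):
#
#     columns, data = execute({"from_date": "2013-01-01",
#                              "to_date": "2013-12-31"})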
| gangadharkadam/tailorerp | erpnext/stock/report/batch_wise_balance_history/batch_wise_balance_history.py | Python | agpl-3.0 | 2,666 | 0.026632 |
f = open("font.txt","rb")
fontdata = f.read()
f.close()
out = "static char font[128][5] = {"
for i in xrange(128):
out += "{"
#Encode these into 7 bit "byte"s, with MSB=0
#We don't save anything by using 8-bit bytes; the last five bits of the last byte spill over, so we still get 5 bytes/char
bits = []
for j in xrange(5*7):
bits.append( ord(fontdata[(5*7)*i+j])&0x01 )
## print bits
for j in xrange(5):
byte_bits = bits[7*j:7*j+7]
## print byte_bits
        #use a fresh index here: a py2 list comp would otherwise clobber the outer i
        byte_bits = [byte_bits[b]<<(7-b-1) for b in xrange(7)]
## print byte_bits
byte = 0x00
for k in byte_bits: byte |= k
## print byte
byte_str = "0x%x" % byte
## print byte_str
## raw_input()
out += byte_str
if j != 5-1: out += ","
out += "}"
if i != 128-1: out += ","
out +="};"
print out
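#Worked example of the packing above: bits [1,0,1,1,0,0,1] become
#(1<<6)|(1<<4)|(1<<3)|(1<<0) = 64+16+8+1 = 0x59, so each glyph's 35 pixels
#fit into five 7-bit values whose MSB is always 0.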
| imallett/MOSS | resources/bytemap font/echo as array.py | Python | mit | 923 | 0.021668 |
'''
Nanometer Movement Module
@author: P1R0
import ObjSerial, sys;
ObjSer = ObjSerial.ObjSerial(0,9600)
ObjSer.cts = True
ObjSer.dtr = True
ObjSer.bytesize = 8
'''
SxN = 59.71 #Motor calibration constant (microsteps per nanometer)
#Function to initialize the monochromator
def init(ObjSer,A):
ObjSer.flushOutput()
ObjSer.write(unicode("A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0R\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0U1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0Y1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0Y0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T1000\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0F-\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T4000\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0M99999\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
    #at position zero
ObjSer.write(unicode("0M-3925\r\n"))
echo(ObjSer)
    #if out of range, we pass 1 as the A parameter
if A == 1:
ObjSer.write(unicode("0M3925\r\n"))
echo(ObjSer)
return 0
#Function to approximate errors via linear interpolation
def Error(x):
Y = [0,
0.010373807,
-0.05124284,
-0.227092782,
-0.572418858,
-1.150211522,
-2.019461229,
-3.247663205,
-4.904050745,
-7.062119076,
-9.803353877,
-13.21724083,
-17.39877039,
-22.45717585,
-28.51818573,
-35.71928571,
-44.22644716,
-54.22539859,
-65.94810183,
-79.66102345,
    -95.70661095,
-114.4980595,
-136.5895354,
-162.693691,
-193.8151306,
-231.3914014,
-277.6754313,
-336.5191712,
-415.6610186,
-536.5034235,
-763.8268297,
-804.7677106];
X = [0,
50.002,
99.999,
149.999,
199.997,
249.997,
300.007,
349.993,
400.003,
449.997,
499.994,
550.005,
600.002,
649.993,
700.003,
749.995,
800.004,
849.995,
900.004,
949.999,
1000.006,
1049.997,
1100.004,
1150.001,
1200.005,
1250.002,
1300,
1349.999,
1399.998,
    1449.998,
1490,
1492];
i = 0;
while x > X[i]:
x0=X[i];
y0=Y[i];
x1=X[i+1];
y1=Y[i+1];
i=i+1;
r=y1-y0;
d=r/(x1-x0);
y=y0+(d*(x-x0));
return y
#Function to compute microsteps and move the motor
def Calcula(ObjSer,Nm,LastPos):
Er=Error(Nm);
NmyEr = Nm - Er;
uS = NmyEr * SxN;
dif = uS - int(uS);
if dif > 0.5:
uS = int(uS) + 1;
else:
uS = int(uS);
Mover = uS - LastPos;
print "La diferencia a mover es: %d" % Mover;
Mueve(ObjSer,Mover);
LastPos = uS;
return LastPos
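#usage sketch (assuming an open serial object, as in the commented-out
#main below): LastPos = Calcula(ObjSer, 500.0, 0) corrects 500 nm via
#Error(), converts to microsteps with SxN, moves the motor and returns
#the new absolute position in microsteps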
#Function to read the echo from the serial object
def echo(ObjSer):
line = ObjSer.readline()
print line
#Function to move the motor
def Mueve(ObjSer, Mover):
    #move full steps, given microsteps as the parameter
MoverFS = ((Mover-3) / 5);
ObjSer.flushOutput();
ObjSer.write(unicode("0U0\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0V1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0T1000\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0M%d\r\n" % MoverFS));
echo(ObjSer);
ObjSer.write(unicode("0U1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0V1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0T400\r\n"));
echo(ObjSer);
    #last 3 microsteps for a smoother approach.
ObjSer.write(unicode("0M3\r\n"));
echo(ObjSer);
'''
if __name__ == "__main__":
N = 0;
LastPos = 0;
init(0);
while 1:
while type(N)!= float:
try:
N = raw_input("Ingresa Nanometros o quit para cerrar:");
if N == "quit":
ObjSer.close();
sys.exit(0);
N = float(N);
except (ValueError, TypeError):
print "error, el valor debe ObjSer entero o flotante";
LastPos = Calcula(N,LastPos);
print "los microspasos totales son: %d" % LastPos;
N=0
''' | P1R/freeMonoCrom | MM.py | Python | gpl-2.0 | 5,424 | 0.014381 |
from django.conf.urls import patterns, url
from rest_framework.urlpatterns import format_suffix_patterns
from snippets.v3_0 import views
urlpatterns = patterns('',
url(r'^v3_0/snippets/$', views.SnippetList.as_view()),
url(r'^v3_0/snippets/(?P<pk>[0-9]+)/$', views.SnippetDetail.as_view()),
)
urlpatterns = format_suffix_patterns(urlpatterns)
| lgarest/django_snippets_api | snippets/v3_0/urls.py | Python | gpl-2.0 | 353 | 0.002833 |
"""
Turtle RDF graph serializer for RDFLib.
See <http://www.w3.org/TeamSubmission/turtle/> for syntax specification.
"""
from collections import defaultdict
from rdflib.term import BNode, Literal, URIRef
from rdflib.exceptions import Error
from rdflib.serializer import Serializer
from rdflib.namespace import RDF, RDFS
__all__ = ['RecursiveSerializer', 'TurtleSerializer']
class RecursiveSerializer(Serializer):
topClasses = [RDFS.Class]
predicateOrder = [RDF.type, RDFS.label]
maxDepth = 10
indentString = u" "
def __init__(self, store):
super(RecursiveSerializer, self).__init__(store)
self.stream = None
self.reset()
def addNamespace(self, prefix, uri):
self.namespaces[prefix] = uri
def checkSubject(self, subject):
"""Check to see if the subject should be serialized yet"""
if ((self.isDone(subject))
or (subject not in self._subjects)
or ((subject in self._topLevels) and (self.depth > 1))
or (isinstance(subject, URIRef)
and (self.depth >= self.maxDepth))):
return False
return True
def isDone(self, subject):
"""Return true if subject is serialized"""
return subject in self._serialized
def orderSubjects(self):
seen = {}
subjects = []
for classURI in self.topClasses:
members = list(self.store.subjects(RDF.type, classURI))
members.sort()
for member in members:
subjects.append(member)
self._topLevels[member] = True
seen[member] = True
recursable = [
(isinstance(subject, BNode),
self._references[subject], subject)
for subject in self._subjects if subject not in seen]
recursable.sort()
subjects.extend([subject for (isbnode, refs, subject) in recursable])
return subjects
def preprocess(self):
for triple in self.store.triples((None, None, None)):
self.preprocessTriple(triple)
def preprocessTriple(self, (s, p, o)):
self._references[o]+=1
self._subjects[s] = True
def reset(self):
self.depth = 0
self.lists = {}
self.namespaces = {}
self._references = defaultdict(int)
self._serialized = {}
self._subjects = {}
self._topLevels = {}
for prefix, ns in self.store.namespaces():
self.addNamespace(prefix, ns)
def buildPredicateHash(self, subject):
"""
Build a hash key by predicate to a list of objects for the given
subject
"""
properties = {}
for s, p, o in self.store.triples((subject, None, None)):
oList = properties.get(p, [])
oList.append(o)
properties[p] = oList
return properties
def sortProperties(self, properties):
"""Take a hash from predicate uris to lists of values.
Sort the lists of values. Return a sorted list of properties."""
# Sort object lists
for prop, objects in properties.items():
objects.sort()
# Make sorted list of properties
propList = []
seen = {}
for prop in self.predicateOrder:
if (prop in properties) and (prop not in seen):
propList.append(prop)
seen[prop] = True
props = properties.keys()
props.sort()
for prop in props:
if prop not in seen:
propList.append(prop)
seen[prop] = True
return propList
def subjectDone(self, subject):
"""Mark a subject as done."""
self._serialized[subject] = True
def indent(self, modifier=0):
"""Returns indent string multiplied by the depth"""
return (self.depth + modifier) * self.indentString
def write(self, text):
"""Write text in given encoding."""
self.stream.write(text.encode(self.encoding, 'replace'))
SUBJECT = 0
VERB = 1
OBJECT = 2
_GEN_QNAME_FOR_DT = False
_SPACIOUS_OUTPUT = False
class TurtleSerializer(RecursiveSerializer):
short_name = "turtle"
indentString = ' '
def __init__(self, store):
self._ns_rewrite = {}
super(TurtleSerializer, self).__init__(store)
self.keywords = {
RDF.type: 'a'
}
self.reset()
self.stream = None
self._spacious = _SPACIOUS_OUTPUT
def addNamespace(self, prefix, namespace):
# Turtle does not support prefix that start with _
# if they occur in the graph, rewrite to p_blah
# this is more complicated since we need to make sure p_blah
# does not already exist. And we register namespaces as we go, i.e.
# we may first see a triple with prefix _9 - rewrite it to p_9
# and then later find a triple with a "real" p_9 prefix
# so we need to keep track of ns rewrites we made so far.
if (prefix > '' and prefix[0] == '_') \
or self.namespaces.get(prefix, namespace) != namespace:
if prefix not in self._ns_rewrite:
p = "p" + prefix
while p in self.namespaces:
p = "p" + p
self._ns_rewrite[prefix] = p
prefix = self._ns_rewrite.get(prefix, prefix)
super(TurtleSerializer, self).addNamespace(prefix, namespace)
return prefix
def reset(self):
super(TurtleSerializer, self).reset()
self._shortNames = {}
self._started = False
self._ns_rewrite = {}
def serialize(self, stream, base=None, encoding=None,
spacious=None, **args):
self.reset()
self.stream = stream
self.base = base
if spacious is not None:
self._spacious = spacious
self.preprocess()
subjects_list = self.orderSubjects()
self.startDocument()
firstTime = True
for subject in subjects_list:
if self.isDone(subject):
continue
if firstTime:
firstTime = False
if self.statement(subject) and not firstTime:
self.write('\n')
self.endDocument()
stream.write(u"\n".encode('ascii'))
def preprocessTriple(self, triple):
super(TurtleSerializer, self).preprocessTriple(triple)
for i, node in enumerate(triple):
if node in self.keywords:
continue
# Don't use generated prefixes for subjects and objects
self.getQName(node, gen_prefix=(i == VERB))
if isinstance(node, Literal) and node.datatype:
self.getQName(node.datatype, gen_prefix=_GEN_QNAME_FOR_DT)
p = triple[1]
if isinstance(p, BNode): # hmm - when is P ever a bnode?
self._references[p]+=1
def getQName(self, uri, gen_prefix=True):
if not isinstance(uri, URIRef):
return None
parts = None
try:
parts = self.store.compute_qname(uri, generate=gen_prefix)
except:
# is the uri a namespace in itself?
pfx = self.store.store.prefix(uri)
if pfx is not None:
parts = (pfx, uri, '')
else:
# nothing worked
return None
prefix, namespace, local = parts
# QName cannot end with .
if local.endswith("."): return None
prefix = self.addNamespace(prefix, namespace)
return u'%s:%s' % (prefix, local)
def startDocument(self):
self._started = True
ns_list = sorted(self.namespaces.items())
for prefix, uri in ns_list:
self.write(self.indent() + '@prefix %s: <%s> .\n' % (prefix, uri))
if ns_list and self._spacious:
self.write('\n')
def endDocument(self):
if self._spacious:
self.write('\n')
def statement(self, subject):
self.subjectDone(subject)
return self.s_squared(subject) or self.s_default(subject)
def s_default(self, subject):
self.write('\n' + self.indent())
self.path(subject, SUBJECT)
self.predicateList(subject)
self.write(' .')
return True
def s_squared(self, subject):
if (self._references[subject] > 0) or not isinstance(subject, BNode):
return False
self.write('\n' + self.indent() + '[]')
self.predicateList(subject)
self.write(' .')
return True
def path(self, node, position, newline=False):
if not (self.p_squared(node, position, newline)
or self.p_default(node, position, newline)):
raise Error("Cannot serialize node '%s'" % (node, ))
def p_default(self, node, position, newline=False):
if position != SUBJECT and not newline:
self.write(' ')
self.write(self.label(node, position))
return True
def label(self, node, position):
if node == RDF.nil:
return '()'
if position is VERB and node in self.keywords:
return self.keywords[node]
if isinstance(node, Literal):
return node._literal_n3(
use_plain=True,
qname_callback=lambda dt: self.getQName(
dt, _GEN_QNAME_FOR_DT))
else:
node = self.relativize(node)
return self.getQName(node, position == VERB) or node.n3()
def p_squared(self, node, position, newline=False):
if (not isinstance(node, BNode)
or node in self._serialized
or self._references[node] > 1
or position == SUBJECT):
return False
if not newline:
self.write(' ')
if self.isValidList(node):
# this is a list
self.write('(')
self.depth += 1 # 2
self.doList(node)
self.depth -= 1 # 2
self.write(' )')
else:
self.subjectDone(node)
self.depth += 2
# self.write('[\n' + self.indent())
self.write('[')
self.depth -= 1
# self.predicateList(node, newline=True)
self.predicateList(node, newline=False)
# self.write('\n' + self.indent() + ']')
self.write(' ]')
self.depth -= 1
return True
def isValidList(self, l):
"""
Checks if l is a valid RDF list, i.e. no nodes have other properties.
"""
try:
if not self.store.value(l, RDF.first):
return False
except:
return False
while l:
if l != RDF.nil and len(
list(self.store.predicate_objects(l))) != 2:
return False
l = self.store.value(l, RDF.rest)
return True
def doList(self, l):
while l:
item = self.store.value(l, RDF.first)
if item is not None:
self.path(item, OBJECT)
self.subjectDone(l)
l = self.store.value(l, RDF.rest)
def predicateList(self, subject, newline=False):
properties = self.buildPredicateHash(subject)
propList = self.sortProperties(properties)
if len(propList) == 0:
return
self.verb(propList[0], newline=newline)
self.objectList(properties[propList[0]])
for predicate in propList[1:]:
self.write(' ;\n' + self.indent(1))
self.verb(predicate, newline=True)
self.objectList(properties[predicate])
def verb(self, node, newline=False):
self.path(node, VERB, newline)
def objectList(self, objects):
count = len(objects)
if count == 0:
return
depthmod = (count == 1) and 0 or 1
self.depth += depthmod
self.path(objects[0], OBJECT)
for obj in objects[1:]:
self.write(',\n' + self.indent(1))
self.path(obj, OBJECT, newline=True)
self.depth -= depthmod
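# A usage sketch -- rdflib normally registers this serializer itself under
# the name "turtle", so only the standard Graph API is needed:
#
#     from rdflib import Graph
#     g = Graph()
#     g.parse(data="<http://a> <http://b> <http://c> .", format="nt")
#     print g.serialize(format="turtle")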
| gloaec/trifle | src/rdflib/plugins/serializers/turtle.py | Python | gpl-3.0 | 12,175 | 0.000903 |
# -*- coding: utf-8 -*-
import unittest
from linked_list import (delete_node, list_cycle, remove_elements,
reverse_list)
from public import ListNode
class TestLinkedList(unittest.TestCase):
def test_delete_node(self):
so = delete_node.Solution()
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
head.next.next.next = ListNode(4)
so.deleteNode(head.next)
self.assertEqual(head.next.val, 3)
def test_has_cycle(self):
so = list_cycle.Solution()
self.assertFalse(so.hasCycle(None))
head = ListNode(1)
self.assertFalse(so.hasCycle(head))
head.next = head
self.assertTrue(so.hasCycle(head))
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertFalse(so.hasCycle(head))
head.next.next.next = head
self.assertTrue(so.hasCycle(head))
def test_detect_cycle(self):
so = list_cycle.Solution()
head = ListNode(1)
self.assertFalse(so.detectCycle(head))
self.assertFalse(so.detectCycle(None))
head.next = ListNode(2)
self.assertFalse(so.detectCycle(head))
cross = ListNode(3)
head.next.next = cross
head.next.next.next = ListNode(4)
head.next.next.next.next = ListNode(5)
head.next.next.next.next.next = cross
self.assertEqual(so.detectCycle(head), cross)
def test_remove_elements(self):
so = remove_elements.Solution()
self.assertFalse(so.removeElements(None, 0))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(2)
head.next.next.next = ListNode(3)
head.next.next.next.next = ListNode(4)
head = so.removeElements(head, 1)
self.assertEqual(head.val, 2)
head = so.removeElements(head, 2)
self.assertEqual(head.val, 3)
head = so.removeElements(head, 4)
self.assertFalse(head.next)
def test_reverse_linked_list(self):
so = reverse_list.Solution()
self.assertFalse(so.reverseList_iteratively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_iteratively(head).val, 3)
self.assertFalse(so.reverseList_recursively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_recursively(head).val, 3)
| lycheng/leetcode | tests/test_linked_list.py | Python | mit | 2,549 | 0 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
from numpy import ma
from numpy import float32
class total_improvement_value_per_residential_unit_within_walking_distance(Variable):
total_improvement_value_within_walking_distance = "total_improvement_value_within_walking_distance"
residential_units_within_walking_distance = "residential_units_within_walking_distance"
def dependencies(self):
return [my_attribute_label(self.total_improvement_value_within_walking_distance),
my_attribute_label(self.residential_units_within_walking_distance)]
def compute(self, dataset_pool):
units_wwd = self.get_dataset().get_attribute(self.residential_units_within_walking_distance)
return self.get_dataset().get_attribute(self.total_improvement_value_within_walking_distance) /\
ma.masked_where(units_wwd == 0, units_wwd.astype(float32))
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
'gridcell':{
'grid_id': array([1,2,3,4]),
'relative_x': array([1,2,1,2]),
'relative_y': array([1,1,2,2]),
'total_residential_value': array([100, 500, 1000, 1500]),
'governmental_improvement_value': array([100, 500, 1000, 1500]),
# The four items below are 'throw-away' items to allow this Variable to test -
# they can be anything and not affect the outcome of this variable.
'commercial_improvement_value': array([0, 0, 0, 0]),
'industrial_improvement_value': array([0, 0, 0, 0]),
'residential_improvement_value': array([0, 0, 0, 0]),
'residential_units': array([0, 0, 0, 0]),
},
'urbansim_constant':{
"walking_distance_circle_radius": array([150]),
'cell_size': array([150]),
}
}
)
should_be = array([1800, 3100, 4600, 6000])
tester.test_is_equal_for_variable_defined_by_this_module(self, should_be)
if __name__=='__main__':
opus_unittest.main() | christianurich/VIBe2UrbanSim | 3rdparty/opus/src/urbansim/gridcell/total_improvement_value_per_residential_unit_within_walking_distance.py | Python | gpl-2.0 | 2,680 | 0.011194 |
import directory
import scanner
import mapper
import board
import os
class Klopfer(object):
def __init__(self, import_dir, export_dir):
self.import_dir = import_dir
self.export_dir = export_dir
print "Klopfer class"
def run(self):
# open dir and get oldest file with the given extension
dir = directory.Directory(os, self.import_dir, ['jpg', 'jpeg'])
self.imagefile = dir.get_oldest_file()
# open image
scan = scanner.Scanner(self.imagefile.name)
self.remove_image()
informations = scan.scan()
# load board_id and cards
mapping = mapper.Mapper(informations)
board_id = mapping.board_id
cards = mapping.get_cards()
# create board
current_board = board.Board(board_id, cards)
# write board to json
current_board.export_json(self.export_dir)
# remove old image
def remove_image(self):
# Uncomment in production version when multiple input files are present
# os.remove(self.imagefile.name)
pass
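# Usage sketch (hypothetical directories):
#
#     Klopfer("/var/kanban/import", "/var/kanban/export").run()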
| slx-dev/digital-kanban | src/klopfer.py | Python | mit | 1,083 | 0 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'Url', fields ['url', 'site']
db.delete_unique('seo_url', ['url', 'site_id'])
# Deleting model 'Url'
db.delete_table('seo_url')
def backwards(self, orm):
# Adding model 'Url'
db.create_table('seo_url', (
('url', self.gf('django.db.models.fields.CharField')(default='/', max_length=200)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'], null=True, blank=True)),
))
db.send_create_signal('seo', ['Url'])
# Adding unique constraint on 'Url', fields ['url', 'site']
db.create_unique('seo_url', ['url', 'site_id'])
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'seo.seo': {
'Meta': {'unique_together': "(('content_type', 'object_id'),)", 'object_name': 'Seo'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['seo'] | bashu/django-easy-seo | seo/south_migrations/0004_auto__del_url__del_unique_url_url_site.py | Python | gpl-3.0 | 2,904 | 0.00792 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, random, erpnext
from datetime import timedelta
from frappe.utils.make_random import how_many
from frappe.desk import query_report
from erpnext.manufacturing.doctype.workstation.workstation import WorkstationHolidayError
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
def work():
if random.random() < 0.3: return
frappe.set_user(frappe.db.get_global('demo_manufacturing_user'))
if not frappe.get_all('Sales Order'): return
from erpnext.projects.doctype.timesheet.timesheet import OverlapError
ppt = frappe.new_doc("Production Plan")
ppt.company = erpnext.get_default_company()
# ppt.use_multi_level_bom = 1 #refactored
ppt.get_items_from = "Sales Order"
# ppt.purchase_request_for_warehouse = "Stores - WPL" # refactored
ppt.run_method("get_open_sales_orders")
if not ppt.get("sales_orders"): return
ppt.run_method("get_items")
ppt.run_method("raise_material_requests")
ppt.save()
ppt.submit()
ppt.run_method("raise_work_orders")
frappe.db.commit()
# submit work orders
for pro in frappe.db.get_values("Work Order", {"docstatus": 0}, "name"):
b = frappe.get_doc("Work Order", pro[0])
b.wip_warehouse = "Work in Progress - WPL"
b.submit()
frappe.db.commit()
# submit material requests
for pro in frappe.db.get_values("Material Request", {"docstatus": 0}, "name"):
b = frappe.get_doc("Material Request", pro[0])
b.submit()
frappe.db.commit()
# stores -> wip
if random.random() < 0.4:
for pro in query_report.run("Open Work Orders")["result"][:how_many("Stock Entry for WIP")]:
make_stock_entry_from_pro(pro[0], "Material Transfer for Manufacture")
# wip -> fg
if random.random() < 0.4:
for pro in query_report.run("Work Orders in Progress")["result"][:how_many("Stock Entry for FG")]:
make_stock_entry_from_pro(pro[0], "Manufacture")
for bom in frappe.get_all('BOM', fields=['item'], filters = {'with_operations': 1}):
pro_order = make_wo_order_test_record(item=bom.item, qty=2,
source_warehouse="Stores - WPL", wip_warehouse = "Work in Progress - WPL",
fg_warehouse = "Stores - WPL", company = erpnext.get_default_company(),
stock_uom = frappe.db.get_value('Item', bom.item, 'stock_uom'),
planned_start_date = frappe.flags.current_date)
# submit job card
if random.random() < 0.4:
submit_job_cards()
def make_stock_entry_from_pro(pro_id, purpose):
from erpnext.manufacturing.doctype.work_order.work_order import make_stock_entry
from erpnext.stock.stock_ledger import NegativeStockError
from erpnext.stock.doctype.stock_entry.stock_entry import IncorrectValuationRateError, \
DuplicateEntryForWorkOrderError, OperationsNotCompleteError
try:
st = frappe.get_doc(make_stock_entry(pro_id, purpose))
st.posting_date = frappe.flags.current_date
st.fiscal_year = str(frappe.flags.current_date.year)
for d in st.get("items"):
d.cost_center = "Main - " + frappe.get_cached_value('Company', st.company, 'abbr')
st.insert()
frappe.db.commit()
st.submit()
frappe.db.commit()
except (NegativeStockError, IncorrectValuationRateError, DuplicateEntryForWorkOrderError,
OperationsNotCompleteError):
frappe.db.rollback()
def submit_job_cards():
work_orders = frappe.get_all("Work Order", ["name", "creation"], {"docstatus": 1, "status": "Not Started"})
work_order = random.choice(work_orders)
# for work_order in work_orders:
start_date = work_order.creation
work_order = frappe.get_doc("Work Order", work_order.name)
job = frappe.get_all("Job Card", ["name", "operation", "work_order"],
{"docstatus": 0, "work_order": work_order.name})
if not job: return
job_map = {}
for d in job:
job_map[d.operation] = frappe.get_doc("Job Card", d.name)
for operation in work_order.operations:
job = job_map[operation.operation]
job_time_log = frappe.new_doc("Job Card Time Log")
job_time_log.from_time = start_date
minutes = operation.get("time_in_mins")
job_time_log.time_in_mins = random.randint(int(minutes/2), minutes)
job_time_log.to_time = job_time_log.from_time + \
timedelta(minutes=job_time_log.time_in_mins)
job_time_log.parent = job.name
job_time_log.parenttype = 'Job Card'
job_time_log.parentfield = 'time_logs'
job_time_log.completed_qty = work_order.qty
job_time_log.save(ignore_permissions=True)
job.time_logs.append(job_time_log)
job.save(ignore_permissions=True)
job.submit()
start_date = job_time_log.to_time
| ebukoz/thrive | erpnext/demo/user/manufacturing.py | Python | gpl-3.0 | 4,593 | 0.027433 |
# VMware vSphere Python SDK
# Copyright (c) 2008-2015 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from datetime import timedelta
import tests
import vcr
from pyVim import connect
from pyVmomi.Iso8601 import TZManager
class Iso8601Tests(tests.VCRTestBase):
@vcr.use_cassette('test_vm_config_iso8601.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once')
def test_vm_config_iso8601(self):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "5001ad1b-c78d-179e-ecd7-1cc0e1cf1b96"
vm = search_index.FindByUuid(None, uuid, True, True)
boot_time = vm.runtime.bootTime
# NOTE (hartsock): assertIsNone does not work in Python 2.6
self.assertTrue(boot_time is not None)
# 2014-08-05T17:50:20.594958Z
expected_time = datetime(2014, 8, 5, 17, 50, 20, 594958,
boot_time.tzinfo)
self.assertEqual(expected_time, boot_time)
def test_iso8601_set_datetime(self):
# NOTE (hartsock): This test is an example of how to register
# a fixture based test to compare the XML document that pyVmomi
# is transmitting. We needed to invent a set of tools to effectively
# compare logical XML documents to each other. In this case we are
# only interested in the 'soapenv:Body' tag and its children.
now_string = "2014-08-19T04:29:36.070918-04:00"
# NOTE (hartsock): the strptime formatter has a bug in python 2.x
# http://bugs.python.org/issue6641 so we're building the date time
# using the constructor arguments instead of parsing it.
now = datetime(2014, 8, 19, 4, 29, 36, 70918,
TZManager.GetTZInfo(
tzname='EDT',
utcOffset=timedelta(hours=-4, minutes=0)))
def has_tag(doc):
if doc is None:
return False
return '<dateTime>' in doc
def correct_time_string(doc):
return '<dateTime>{0}</dateTime>'.format(now_string) in doc
def check_date_time_value(r1, r2):
for r in [r1, r2]:
if has_tag(r.body):
if not correct_time_string(r.body):
return False
return True
my_vcr = vcr.VCR()
my_vcr.register_matcher('document', check_date_time_value)
# NOTE (hartsock): the `match_on` option is altered to use the
# look at the XML body sent to the server
with my_vcr.use_cassette('iso8601_set_datetime.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once',
match_on=['method', 'scheme', 'host', 'port',
'path', 'query', 'document']):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "4c4c4544-0043-4d10-8056-b1c04f4c5331"
host = search_index.FindByUuid(None, uuid, False)
date_time_system = host.configManager.dateTimeSystem
# NOTE (hartsock): sending the date time 'now' to host.
date_time_system.UpdateDateTime(now)
| alexkolar/pyvmomi | tests/test_iso8601.py | Python | apache-2.0 | 4,117 | 0 |
# -*- coding: utf-8 -*-
#
# Test links:
# https://www.oboom.com/B7CYZIEB/10Mio.dat
import re
from module.common.json_layer import json_loads
from module.plugins.internal.Hoster import Hoster
from module.plugins.captcha.ReCaptcha import ReCaptcha
class OboomCom(Hoster):
__name__ = "OboomCom"
__type__ = "hoster"
__version__ = "0.38"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?oboom\.com/(?:#(?:id=|/)?)?(?P<ID>\w{8})'
__description__ = """Oboom.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("stanley", "stanley.foerster@gmail.com")]
RECAPTCHA_KEY = "6LdqpO0SAAAAAJGHXo63HyalP7H4qlRs_vff0kJX"
def setup(self):
self.chunk_limit = 1
self.multiDL = self.resume_download = self.premium
def process(self, pyfile):
self.pyfile.url.replace(".com/#id=", ".com/#")
self.pyfile.url.replace(".com/#/", ".com/#")
self.html = self.load(pyfile.url)
self.get_file_id(self.pyfile.url)
self.get_session_token()
self.get_fileInfo(self.session_token, self.file_id)
self.pyfile.name = self.file_name
self.pyfile.size = self.file_size
if not self.premium:
self.solve_captcha()
self.get_download_ticket()
self.download("http://%s/1.0/dlh" % self.download_domain, get={'ticket': self.download_ticket, 'http_errors': 0})
def load_url(self, url, get=None):
if get is None:
get = {}
return json_loads(self.load(url, get))
def get_file_id(self, url):
self.file_id = re.match(OboomCom.__pattern__, url).group('ID')
def get_session_token(self):
if self.premium:
accountInfo = self.account.get_data()
if "session" in accountInfo:
self.session_token = accountInfo['session']
else:
self.fail(_("Could not retrieve premium session"))
else:
apiUrl = "http://www.oboom.com/1.0/guestsession"
result = self.load_url(apiUrl)
if result[0] == 200:
self.session_token = result[1]
else:
self.fail(_("Could not retrieve token for guest session. Error code: %s") % result[0])
def solve_captcha(self):
recaptcha = ReCaptcha(self)
response, challenge = recaptcha.challenge(self.RECAPTCHA_KEY)
apiUrl = "http://www.oboom.com/1.0/download/ticket"
params = {'recaptcha_challenge_field': challenge,
'recaptcha_response_field': response,
'download_id': self.file_id,
'token': self.session_token}
result = self.load_url(apiUrl, params)
if result[0] == 200:
self.download_token = result[1]
self.download_auth = result[2]
self.captcha.correct()
self.wait(30)
else:
if result[0] == 403:
if result[1] == -1: #: Another download is running
self.set_wait(15 * 60)
else:
self.set_wait(result[1], True)
self.wait()
self.retry(5)
elif result[0] == 400 and result[1] == "forbidden":
self.retry(5, 15 * 60, _("Service unavailable"))
self.retry_captcha()
def get_fileInfo(self, token, fileId):
apiUrl = "http://api.oboom.com/1.0/info"
params = {'token': token, 'items': fileId, 'http_errors': 0}
result = self.load_url(apiUrl, params)
if result[0] == 200:
item = result[1][0]
if item['state'] == "online":
self.file_size = item['size']
self.file_name = item['name']
else:
self.offline()
else:
self.fail(_("Could not retrieve file info. Error code %s: %s") % (result[0], result[1]))
def get_download_ticket(self):
apiUrl = "http://api.oboom.com/1/dl"
params = {'item': self.file_id, 'http_errors': 0}
if self.premium:
params['token'] = self.session_token
else:
params['token'] = self.download_token
params['auth'] = self.download_auth
result = self.load_url(apiUrl, params)
if result[0] == 200:
self.download_domain = result[1]
self.download_ticket = result[2]
elif result[0] == 421:
self.retry(wait=result[2] + 60, msg=_("Connection limit exceeded"))
else:
self.fail(_("Could not retrieve download ticket. Error code: %s") % result[0])
| jansohn/pyload | module/plugins/hoster/OboomCom.py | Python | gpl-3.0 | 4,627 | 0.004322 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.utils import string_types
import re
import unittest
def _assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj, *args, **kwds):
"""
Asserts that the message in a raised exception matches a regexp.
This is a simple clone of unittest.TestCase.assertRaisesRegexp() method
introduced in python 2.7. The goal for this function is to behave exactly
as assertRaisesRegexp() in standard library.
"""
exception = None
try:
callable_obj(*args, **kwds)
except expected_exception as ex: # let unexpected exceptions pass through
exception = ex
if exception is None:
self.fail("%s not raised" % str(expected_exception.__name__))
if isinstance(expected_regexp, string_types):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exception)):
self.fail('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exception)))
def patch():
hasAssertRaisesRegexp = getattr(unittest.TestCase, "assertRaisesRegexp", None)
hasAssertRaisesRegex = getattr(unittest.TestCase, "assertRaisesRegex", None)
if not hasAssertRaisesRegexp:
# Python 2.6
unittest.TestCase.assertRaisesRegexp = _assertRaisesRegexp
if not hasAssertRaisesRegex:
# Python 2.6 and Python 2.7
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
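# Usage sketch: call patch() once at import time; afterwards test cases can
# use assertRaisesRegex uniformly on any supported Python version, e.g.
#
#     patch()
#     class Example(unittest.TestCase):
#         def test_bad_int(self):
#             self.assertRaisesRegex(ValueError, "invalid", int, "x")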
| sbidoul/buildbot | worker/buildbot_worker/monkeypatches/testcase_assert.py | Python | gpl-2.0 | 2,243 | 0.001337 |
#!/usr/bin/python
"""
Author: rockylinux
E-Mail: Jingzheng.W@gmail.com
"""
import commands
#display the live connections
#prints a list of [count, remote-address] pairs for live connections
class netstat:
def __init__(self):
self.__name = 'netstat'
def getData(self):
(status, output) = commands.getstatusoutput('netstat -ntu | /usr/bin/awk \'NR>2 {sub(/:[^:]+$/, ""); print $5}\' | sort | uniq -c')
#return output.split('\n')
rst = [i.strip().split() for i in output.split("\n")]
if len(rst[0]) == 0:
print [["", ""]]
else:
print rst
def testGetData(self,test):
if type(test) == type([]):
for i in test:
print i
else:
print test
if __name__ == '__main__':
a = netstat()
test = a.getData()
#a.testGetData(test)
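#the awk pipeline yields lines like "  3 192.168.1.5" (connection count,
#then remote address), so getData() prints e.g. [['3', '192.168.1.5']]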
| china-x-orion/infoeye | tools/netstat.py | Python | mit | 966 | 0.024845 |
#!/usr/bin/env python
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
def __init__(self, junctionData, minGreenTime=10, maxGreenTime=60, scanRange=250, packetRate=0.2):
super(HybridVAControl, self).__init__()
self.junctionData = junctionData
self.firstCalled = self.getCurrentSUMOtime()
self.lastCalled = self.getCurrentSUMOtime()
self.lastStageIndex = 0
traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString)
self.packetRate = int(1000*packetRate)
self.transition = False
self.CAMactive = False
# dict[vehID] = [position, heading, velocity, Tdetect]
self.newVehicleInfo = {}
self.oldVehicleInfo = {}
self.scanRange = scanRange
self.jcnCtrlRegion = self._getJncCtrlRegion()
# print(self.junctionData.id)
# print(self.jcnCtrlRegion)
self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
# dict[laneID] = [heading, shape]
self.laneDetectionInfo = self._getIncomingLaneInfo()
self.stageTime = 0.0
self.minGreenTime = minGreenTime
self.maxGreenTime = maxGreenTime
self.secondsPerMeterTraffic = 0.45
self.nearVehicleCatchDistance = 25
self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
self.laneInductors = self._getLaneInductors()
def process(self):
# Packets sent on this step
# packet delay + only get packets towards the end of the second
if (not self.getCurrentSUMOtime() % self.packetRate) and (self.getCurrentSUMOtime() % 1000 > 500):
self.CAMactive = True
self._getCAMinfo()
else:
self.CAMactive = False
# Update stage decisions
# If there's no ITS enabled vehicles present use VA ctrl
if len(self.oldVehicleInfo) < 1 and not self.getCurrentSUMOtime() % 1000:
detectTimePerLane = self._getLaneDetectTime()
#print(detectTimePerLane)
# Set adaptive time limit
#print(detectTimePerLane < 3)
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If active and on the second, or transition then make stage descision
elif (self.CAMactive and not self.getCurrentSUMOtime() % 1000) or self.transition:
oncomingVeh = self._getOncomingVehicles()
# If new stage get furthest from stop line whose velocity < 5% speed
# limit and determine queue length
if self.transition:
furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
if furthestVeh[0] != '':
meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
self.stageTime = max(self.minGreenTime, meteredTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If we're in this state this should never happen but just in case
else:
self.stageTime = self.minGreenTime
# If currently staging then extend time if there are vehicles close
# to the stop line
else:
nearestVeh = self._getNearestVehicle(oncomingVeh)
# If a vehicle detected
                if nearestVeh[0] != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
else:
meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
elapsedTime = 0.001*(self.getCurrentSUMOtime() - self.lastCalled)
Tremaining = self.stageTime - elapsedTime
self.stageTime = elapsedTime + max(meteredTime, Tremaining)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# no detectable near vehicle try inductive loop info
                elif nearestVeh[0] == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
detectTimePerLane = self._getLaneDetectTime()
print('Loops2')
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
else:
pass
# process stage as normal
else:
pass
# print(self.stageTime)
self.transition = False
if self.transitionObject.active:
# If the transition object is active i.e. processing a transition
pass
elif (self.getCurrentSUMOtime() - self.firstCalled) < (self.junctionData.offset*1000):
# Process offset first
pass
elif (self.getCurrentSUMOtime() - self.lastCalled) < self.stageTime*1000:
# Before the period of the next stage
pass
else:
# Not active, not in offset, stage not finished
if len(self.junctionData.stages) != (self.lastStageIndex)+1:
# Loop from final stage to first stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[self.lastStageIndex+1].controlString)
self.lastStageIndex += 1
else:
# Proceed to next stage
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[0].controlString)
self.lastStageIndex = 0
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.lastCalled = self.getCurrentSUMOtime()
self.transition = True
self.stageTime = 0.0
super(HybridVAControl, self).process()
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
heading = 450 - heading
return heading
def _getJncCtrlRegion(self):
jncPosition = traci.junction.getPosition(self.junctionData.id)
otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
ctrlRegion = {'N':jncPosition[1]+self.scanRange, 'S':jncPosition[1]-self.scanRange,
'E':jncPosition[0]+self.scanRange, 'W':jncPosition[0]-self.scanRange}
TOL = 10 # Exclusion region around junction boundary
if otherJuncPos != []:
for pos in otherJuncPos:
dx = jncPosition[0] - pos[0]
dy = jncPosition[1] - pos[1]
# North/South Boundary
if abs(dy) < self.scanRange:
if dy < -TOL:
ctrlRegion['N'] = min(pos[1] - TOL, ctrlRegion['N'])
elif dy > TOL:
ctrlRegion['S'] = max(pos[1] + TOL, ctrlRegion['S'])
else:
pass
else:
pass
# East/West Boundary
if abs(dx) < self.scanRange:
if dx < -TOL:
ctrlRegion['E'] = min(pos[0] - TOL, ctrlRegion['E'])
elif dx > TOL:
ctrlRegion['W'] = max(pos[0] + TOL, ctrlRegion['W'])
else:
pass
else:
pass
return ctrlRegion
def _isInRange(self, vehID):
vehPosition = np.array(traci.vehicle.getPosition(vehID))
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
distance = np.linalg.norm(vehPosition - jcnPosition)
if (distance < self.scanRange
and self.jcnCtrlRegion['W'] <= vehPosition[0] <= self.jcnCtrlRegion['E']
and self.jcnCtrlRegion['S'] <= vehPosition[1] <= self.jcnCtrlRegion['N']):
return True
else:
return False
def _getVelocity(self, vehID, vehPosition, Tdetect):
if vehID in self.oldVehicleInfo.keys():
oldX = np.array(self.oldVehicleInfo[vehID][0])
newX = np.array(vehPosition)
dx = np.linalg.norm(newX - oldX)
dt = Tdetect - self.oldVehicleInfo[vehID][3]
velocity = dx/dt
return velocity
else:
return 1e6
def _getCAMinfo(self):
self.oldVehicleInfo = self.newVehicleInfo.copy()
self.newVehicleInfo = {}
Tdetect = 0.001*self.getCurrentSUMOtime()
for vehID in traci.vehicle.getIDList():
if traci.vehicle.getTypeID(vehID) == 'typeITSCV' and self._isInRange(vehID):
vehPosition = traci.vehicle.getPosition(vehID)
vehHeading = traci.vehicle.getAngle(vehID)
vehVelocity = self._getVelocity(vehID, vehPosition, Tdetect)
self.newVehicleInfo[vehID] = [vehPosition, vehHeading, vehVelocity, Tdetect]
def _getIncomingLaneInfo(self):
laneInfo = defaultdict(list)
for lane in list(np.unique(np.array(self.controlledLanes))):
shape = traci.lane.getShape(lane)
width = traci.lane.getWidth(lane)
heading = self._getHeading(shape[1], shape[0])
dx = shape[0][0] - shape[1][0]
dy = shape[0][1] - shape[1][1]
if abs(dx) > abs(dy):
roadBounds = ((shape[0][0], shape[0][1] + width), (shape[1][0], shape[1][1] - width))
else:
roadBounds = ((shape[0][0] + width, shape[0][1]), (shape[1][0] - width, shape[1][1]))
laneInfo[lane] = [heading, roadBounds]
return laneInfo
def _getOncomingVehicles(self):
# Oncoming if (in active lane & heading matches oncoming heading &
# is in lane bounds)
activeLanes = self._getActiveLanes()
vehicles = []
for lane in activeLanes:
for vehID in self.oldVehicleInfo.keys():
# If on correct heading pm 10deg
if (np.isclose(self.oldVehicleInfo[vehID][1], self.laneDetectionInfo[lane][0], atol=10)
# If in lane x bounds
and min(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0]) <
self.oldVehicleInfo[vehID][0][0] <
max(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0])
# If in lane y bounds
and min(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1]) <
self.oldVehicleInfo[vehID][0][1] <
max(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1])):
# Then append vehicle
vehicles.append(vehID)
vehicles = list(np.unique(np.array(vehicles)))
return vehicles
def _getActiveLanes(self):
# Get the current control string to find the green lights
stageCtrlString = self.junctionData.stages[self.lastStageIndex].controlString
activeLanes = []
for i, letter in enumerate(stageCtrlString):
if letter == 'G':
activeLanes.append(self.controlledLanes[i])
# Get a list of the unique active lanes
activeLanes = list(np.unique(np.array(activeLanes)))
return activeLanes
def _getLaneInductors(self):
laneInductors = defaultdict(list)
for loop in traci.inductionloop.getIDList():
loopLane = traci.inductionloop.getLaneID(loop)
if loopLane in self.controlledLanes and 'upstream' not in loop:
laneInductors[loopLane].append(loop)
return laneInductors
def _getFurthestStationaryVehicle(self, vehIDs):
furthestID = ''
maxDistance = -1
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
speedLimit = traci.lane.getMaxSpeed(self._getActiveLanes()[0])
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - jcnPosition)
if distance > maxDistance and self.oldVehicleInfo[ID][2] < 0.05*speedLimit:
furthestID = ID
maxDistance = distance
return [furthestID, maxDistance]
def _getNearestVehicle(self, vehIDs):
nearestID = ''
minDistance = self.nearVehicleCatchDistance + 1
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - jcnPosition)
if distance < minDistance:
nearestID = ID
minDistance = distance
return [nearestID, minDistance]
def _getLaneDetectTime(self):
activeLanes = self._getActiveLanes()
meanDetectTimePerLane = np.zeros(len(activeLanes))
for i, lane in enumerate(activeLanes):
detectTimes = []
for loop in self.laneInductors[lane]:
detectTimes.append(traci.inductionloop.getTimeSinceDetection(loop))
meanDetectTimePerLane[i] = np.mean(detectTimes)
return meanDetectTimePerLane
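# Usage sketch (assuming an active TraCI connection and a junction data
# object from readJunctionData): instantiate once, then call process()
# every simulation step, e.g.
#
#     controller = HybridVAControl(junctionData, minGreenTime=10)
#     while traci.simulation.getMinExpectedNumber() > 0:
#         controller.process()
#         traci.simulationStep()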
| cbrafter/TRB18_GPSVA | codes/sumoAPI/HybridVAControl_PROFILED.py | Python | mit | 14,996 | 0.005802 |
"""reminders URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/', views.display_login, name='login'),
url(r'^logout/', views.logout_view, name='logout'),
url(r'^api/authenticate', views.authenticate_user, name='authenticate_user'),
url(r'^api/reminders', views.get_reminders, name='get_reminders'),
url(r'^api/add', views.add_reminder, name='add_reminder'),
url(r'^api/remove', views.remove_reminder, name='remove_reminder'),
url(r'^$', views.display_index, name='index'),
]
| Silvian/Reminders-App | reminders/urls.py | Python | gpl-3.0 | 1,236 | 0.000809 |
# xVector Engine Client
# Copyright (c) 2011 James Buchwald
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
Screen with the title menu, metaserver, and automatic updater.
The startup screen can be thought of as a central hub for the client. Its
primary role is to allow the player to select a server to connect to, then
prepare the local files for that connection. Its final step is to hand
off control to the Game screen, the first state of which is the login screen.
'''
import os.path
import logging
import traceback
import sys
import time
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import SIGNAL
from . import ClientPaths, ClientGlobals, Metaserver, ClientNetworking
from .ui.TitleWidgetUI import Ui_TitleWidget
from .ui.PrivateServerWidgetUI import Ui_PrivateServerWidget
mainlog = logging.getLogger("Client.Main")
class TitleMenu(QtGui.QWidget):
'''Widget that displays the title menu.'''
def __init__(self, parent=None):
'''
Sets up the title menu widget.
@type parent: QtGui.QWidget
@param parent: Parent widget.
'''
# Inherit base class behavior.
super(TitleMenu, self).__init__(parent=parent)
# Set up our UI.
self.ui = Ui_TitleWidget()
'''Automatically-generated user interface object.'''
self.ui.setupUi(self)
# Connect buttons.
self.connect(self.ui.btnPublic, SIGNAL("clicked()"),
self.OnPublicServer)
self.connect(self.ui.btnPrivate, SIGNAL("clicked()"),
self.OnPrivateServer)
self.connect(self.ui.btnSettings, SIGNAL("clicked()"),
self.OnSettings)
self.connect(self.ui.btnExit, SIGNAL("clicked()"),
self.OnExit)
def OnPublicServer(self):
'''Called when the "Public Servers" button is clicked.'''
# Notify the main widget.
self.parent().OnPublicServer()
def OnPrivateServer(self):
'''Called when the "Private Servers" button is clicked.'''
# Notify the main widget.
self.parent().OnPrivateServer()
def OnSettings(self):
'''Called when the "Settings" button is clicked.'''
pass # TODO: Implement
def OnExit(self):
'''Called when the "Exit" button is clicked.'''
self.parent().OnClose()
def paintEvent(self, event):
'''
Called from Qt when the widget is redrawn.
@type event: QtGui.QPaintEvent
@param event: Paint event.
'''
# Enable stylesheets on this widget.
opt = QtGui.QStyleOption()
opt.init(self)
painter = QtGui.QPainter(self)
self.style().drawPrimitive(QtGui.QStyle.PE_Widget, opt,
painter, self)
class PrivateServerWidget(QtGui.QWidget):
'''
Widget that allows the user to connect to a private server by address.
'''
def __init__(self, parent=None):
'''
Creates a new private server widget.
'''
# Inherit base class behavior.
super(PrivateServerWidget, self).__init__(parent)
# Set up UI.
self.ui = Ui_PrivateServerWidget()
'''Automatically-generated user interface object.'''
self.ui.setupUi(self)
# Create our validators.
self.PortValidator = QtGui.QIntValidator(1, 65535, self)
'''Port number validator.'''
self.ui.PortEdit.setValidator(self.PortValidator)
# Connect buttons to their callbacks.
self.connect(self.ui.ConnectButton, SIGNAL("clicked()"),
self.OnConnect)
self.connect(self.ui.BackButton, SIGNAL("clicked()"), self.OnBack)
def OnConnect(self):
'''Called when the Connect button is clicked.'''
# Validate user input.
host = self.ui.HostEdit.text()
if host == "":
msg = "Host must not be empty."
mainlog.error(msg)
return
try:
port = int(self.ui.PortEdit.text())
if port < 1 or port > 65535: raise Exception
except:
# Port must be an integer.
msg = "Port must be a number between 1 and 65535."
mainlog.error(msg)
return
# Connect.
address = (host, port)
self.parent().ConnectToServer(address)
def OnBack(self):
'''Called when the Back button is clicked.'''
self.parent().BackToMain()
def paintEvent(self, event):
'''
Called from Qt when the widget is redrawn.
@type event: QtGui.QPaintEvent
@param event: Paint event.
'''
# Enable stylesheets on this widget.
opt = QtGui.QStyleOption()
opt.init(self)
painter = QtGui.QPainter(self)
self.style().drawPrimitive(QtGui.QStyle.PE_Widget, opt,
painter, self)
class StartupScreen(QtGui.QWidget):
'''
Game startup screen; handles the title menu, metaserver, and auto-updater.
'''
##
## Startup state constants
##
StartupState_None = 0
'''This state shows nothing.'''
StartupState_Title = 1
'''This state shows the title menu.'''
StartupState_Metaserver = 2
'''This state allows a player to choose a server from the list.'''
StartupState_PrivateServer = 3
'''This state allows a player to connect to a server by address.'''
StartupState_Settings = 4
'''This state allows a player to change the local settings.'''
StartupState_Updater = 5
'''This state retrieves updated files from the server.'''
##
## Control constants
##
FramesPerSecond = 30
'''Framecap for the startup screen.'''
FadeTime = 1.0
'''Time, in seconds, of the fade effect.'''
BackgroundFile = os.path.join("ui", "backgrounds", "startup.png")
'''Path (relative to the master resource root) of the background.'''
def __init__(self, parent=None):
'''
Creates a new startup screen object.
@type parent: QtGui.QWidget
@param parent: Parent object of the screen (usually the main window)
'''
# Inherit base class behavior.
super(StartupScreen,self).__init__(parent)
App = ClientGlobals.Application
# Declare attributes which will hold our widgets.
self.TitleMenu = None
'''The TitleMenu widget to display.'''
self.PublicServersMenu = None
'''The public servers menu to display.'''
self.PrivateServersMenu = None
'''The private servers menu to display.'''
self.SettingsScreen = None
'''The settings screen to display.'''
# Create our layout.
self.Layout = QtGui.QVBoxLayout()
'''Main layout of the startup screen.'''
self.setLayout(self.Layout)
# Set our initial state.
self.State = self.StartupState_Title
'''Current state of the startup screen.'''
self.FadeIn = True
'''Whether or not we are fading in.'''
self.FadeOut = False
'''Whether or not we are fading out.'''
self.FadeAlpha = 0.0
'''Alpha of the widget; used for fading effects.'''
self.FadeBrush = QtGui.QBrush(QtCore.Qt.black)
'''Brush used to draw the fade effect.'''
self.OnFadeComplete = None
'''Callback for when the current fade operation is complete.'''
self.LastFrame = 0
'''Time of the last rendered frame. Used for animation.'''
# Load the background image.
bkgpath = os.path.join(ClientPaths.BaseMasterPath, self.BackgroundFile)
try:
self.BackgroundImage = QtGui.QPixmap(bkgpath)
'''Image shown in the background of the startup screen.'''
except:
msg = "Could not open %s.\n" % bkgpath
msg += "Defaulting to blank background.\n"
msg += traceback.format_exc()
mainlog.error(msg)
# default to a black background
x, y = App.MainWindow.Resolution
self.BackgroundImage = QtGui.QPixmap(x,y)
self.BackgroundImage.fill(QtCore.Qt.black)
# Start our control timer.
self.LastFrame = time.time()
self.startTimer(1000 // self.FramesPerSecond)
# Get everything going with a fade-in to the title menu.
self.StartFadein(self._AfterInitialFadein)
def StartFadeout(self, callback_when_done=None):
'''
Starts a fade-out effect.
@type callback_when_done: Callable object.
@param callback_when_done: Callback to call when the fade is complete.
'''
self.FadeOut = True
self.FadeIn = False
self.FadeAlpha = 1.0
self.OnFadeComplete = callback_when_done
def StartFadein(self, callback_when_done=None):
'''
Starts a fade-in effect.
@type callback_when_done: Callable object.
@param callback_when_done: Callback to call when the fade is complete.
'''
self.FadeOut = False
self.FadeIn = True
self.FadeAlpha = 0.0
self.OnFadeComplete = callback_when_done
def timerEvent(self, event):
'''
Regularly scheduled timer callback. Controls animation.
In the startup screen, the main purpose of the timer is to control
        smooth fade-in and fade-out effects of the various widgets. This
timer is called once every 1/30 of a second.
'''
# Compute time delta.
now = time.time()
delta = now - self.LastFrame
# Process fade effects.
if self.FadeIn:
self.FadeAlpha += 1.0 * (delta / self.FadeTime)
if self.FadeAlpha > 1.0:
# Fade complete.
self.FadeAlpha = 1.0
self.repaint()
self.FadeIn = False
# Any callbacks to call?
if self.OnFadeComplete:
self.OnFadeComplete()
else:
self.repaint()
elif self.FadeOut:
self.FadeAlpha -= 1.0 * (delta / self.FadeTime)
if self.FadeAlpha < 0.0:
# Fade complete.
self.FadeAlpha = 0.0
self.repaint()
self.FadeOut = False
# Any callbacks to call?
if self.OnFadeComplete:
self.OnFadeComplete()
else:
self.repaint()
self.LastFrame = now
def paintEvent(self, event):
'''
Called when the screen needs to be repainted.
'''
# Get a painter.
painter = QtGui.QPainter()
painter.begin(self)
# Clear to black.
App = ClientGlobals.Application
res_x, res_y = App.MainWindow.Resolution
target = QtCore.QRect(0, 0, res_x, res_y)
painter.fillRect(target, QtCore.Qt.black)
# Draw the background, faded as needed.
painter.save()
        FadeBrush = QtGui.QBrush(QtGui.QColor(0, 0, 0, int(255 - 255 * self.FadeAlpha)))
painter.drawPixmap(0, 0, self.BackgroundImage)
painter.fillRect(target, FadeBrush)
painter.restore()
# Finish up.
painter.end()
def ChangeWidget(self, widget=None):
'''
Changes the displayed widget.
@type widget: QtGui.QWidget
@param widget: New widget to display, or None for no widget.
'''
# Is there already a widget there?
if self.Layout.count() == 1:
# Yes, remove it.
old = self.Layout.itemAt(0)
old.widget().setVisible(False)
self.Layout.removeItem(old)
# Add the new widget if needed.
if widget:
self.Layout.addWidget(widget)
alignment = QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
self.Layout.setAlignment(widget, alignment)
widget.setVisible(True)
def _AfterInitialFadein(self):
'''
Called after the initial fade-in operation is completed.
'''
# Create the title menu and display it.
self.TitleMenu = TitleMenu(parent=self)
self.ChangeWidget(self.TitleMenu)
def OnPublicServer(self):
'''
Called when "Connect to Public Server" is clicked on the title menu.
'''
# Create the metaserver widget and display it.
self.PublicServersMenu = Metaserver.MetaserverWidget(parent=self)
self.ChangeWidget(self.PublicServersMenu)
def OnPrivateServer(self):
'''
Called when "Connect to Private Server" is clicked on the title menu.
'''
# Create the private server widget and display it.
self.PrivateServersMenu = PrivateServerWidget(parent=self)
self.ChangeWidget(self.PrivateServersMenu)
def BackToMain(self):
'''
Switches back to the title menu widget.
'''
self.ChangeWidget(self.TitleMenu)
def ConnectToServer(self, address):
'''
Tries to connect to a server at the given address.
@type address: tuple
@param address: Network address of the server as a tuple (host, port)
'''
# Try to connect.
try:
ClientNetworking.ConnectToServer(address)
except ClientNetworking.ConnectionFailed:
return
# Disappear the current widget.
self.ChangeWidget(None)
def OnClose(self):
'''
Called when the client must exit.
'''
self.ChangeWidget(None)
self.StartFadeout(sys.exit)
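# --- Hedged illustration (not part of the original client) ---
# timerEvent() advances FadeAlpha by delta/FadeTime each tick, so a full fade
# always lasts FadeTime seconds regardless of frame rate. The plain Python
# loop below simulates that stepping at the 30 Hz tick rate implied by
# FramesPerSecond above.
if __name__ == '__main__':
    fade_time = 1.0       # seconds, mirrors StartupScreen.FadeTime
    tick = 1.0 / 30.0     # seconds per frame, mirrors FramesPerSecond
    alpha = 0.0
    elapsed = 0.0
    while alpha < 1.0:
        alpha = min(1.0, alpha + tick / fade_time)
        elapsed += tick
    print('fade-in completed after %.2f simulated seconds' % elapsed)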
| buchwj/xvector | client/xVClient/StartupScreen.py | Python | gpl-3.0 | 14,622 | 0.004514 |
# coding: utf-8
# This file is part of Thomas Aquinas.
#
# Thomas Aquinas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Thomas Aquinas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Thomas Aquinas. If not, see <http://www.gnu.org/licenses/>.
#
# veni, Sancte Spiritus.
import ctypes
import logging
| shackra/thomas-aquinas | summa/audio/system.py | Python | bsd-3-clause | 776 | 0 |
import os
import subprocess
from pathlib import Path
import pyinstaller_versionfile
import tomli
packaging_path = Path(__file__).resolve().parent
def get_version() -> str:
project_dir = Path(__file__).resolve().parent.parent
f = project_dir / "pyproject.toml"
return str(tomli.loads(f.read_text())["tool"]["poetry"]["version"])
def make_gaphor_script():
pyproject_toml = packaging_path.parent / "pyproject.toml"
with open(pyproject_toml, "rb") as f:
toml = tomli.load(f)
gaphor_script = packaging_path / "gaphor-script.py"
with open(gaphor_script, "w") as file:
# https://github.com/pyinstaller/pyinstaller/issues/6100
        # On one Windows computer, PyInstaller was adding a ';' to the
        # end of the path; this removes it if it exists.
file.write("import os\n")
file.write("if os.environ['PATH'][-1] == ';':\n")
file.write(" os.environ['PATH'] = os.environ['PATH'][:-1]\n")
# Check for and remove two semicolons in path
file.write("os.environ['PATH'] = os.environ['PATH'].replace(';;', ';')\n")
plugins = toml["tool"]["poetry"]["plugins"]
for cat in plugins.values():
for entrypoint in cat.values():
file.write(f"import {entrypoint.split(':')[0]}\n")
file.write("from gaphor.ui import main\n")
file.write("import sys\n")
file.write("main(sys.argv)\n")
def make_file_version_info():
win_packaging_path = packaging_path / "windows"
metadata = win_packaging_path / "versionfile_metadata.yml"
file_version_out = win_packaging_path / "file_version_info.txt"
version = get_version()
if "dev" in version:
version = version[: version.rfind(".dev")]
pyinstaller_versionfile.create_versionfile_from_input_file(
output_file=file_version_out,
input_file=metadata,
version=version,
)
def make_pyinstaller():
os.chdir(packaging_path)
subprocess.run(["pyinstaller", "-y", "gaphor.spec"])
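# Hedged usage sketch: one plausible way to chain these helpers when
# producing a Windows build. Whether the project actually invokes this
# module directly is an assumption; the guard below is illustrative only.
if __name__ == "__main__":
    make_gaphor_script()
    make_file_version_info()
    make_pyinstaller()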
| amolenaar/gaphor | packaging/make-script.py | Python | lgpl-2.1 | 2,017 | 0.000496 |
def auto_fill_cuts(myMesh,user_cuts,weight_function):
'''
    fill in the user_cuts list (or, if empty, create a new one), preferring
    edges with larger weight as cuts, as given by weight_function.
    NOTE: naked edges are excluded from the returned cut list (see getCutList)
'''
sorted_edges = get_edge_weights(myMesh,user_cuts,weight_function)
fold_list = getSpanningKruskal(sorted_edges,myMesh.mesh)
cuts = getCutList(myMesh,fold_list)
myMesh.set_cuts(cuts)
return cuts
def get_edge_weights(myMesh, userCuts,weight_function):
edges_with_weights= []
for i in xrange(myMesh.mesh.TopologyEdges.Count):
if userCuts:
if i not in userCuts:
edges_with_weights.append((i,weight_function(myMesh, i)))
else:
edges_with_weights.append((i, float('inf')))
else:
edges_with_weights.append((i,weight_function(myMesh, i)))
return edges_with_weights
def getSpanningKruskal(edges_with_weights, mesh):
'''
    this set-scanning implementation should be updated to use the union-find
    trick (see the _UnionFind sketch after this function).
input:
edges_with_weights = list of tuples (edgeIdx, weight)
mesh = Rhino.Geometry mesh
output:
foldList = list of edgeIdx's that are to be folded
'''
    # sorted from smallest to greatest; user cuts, which get inf weight, have low likelihood of becoming fold edges
sorted_edges = sorted(edges_with_weights, key=lambda tup: tup[1], reverse=False)
treeSets = []
foldList = []
for tupEdge in sorted_edges:
edgeIdx = tupEdge[0]
arrConnFaces = mesh.TopologyEdges.GetConnectedFaces(edgeIdx)
if(len(arrConnFaces) > 1): # this avoids problems with naked edges
setConnFaces = set(
[arrConnFaces.GetValue(0), arrConnFaces.GetValue(1)])
parentSets = []
# print"edgeSet:"
# print setConnFaces
isLegal = True
for i, treeSet in enumerate(treeSets):
if setConnFaces.issubset(treeSet):
# print"--was illegal"
isLegal = False
break
elif not setConnFaces.isdisjoint(treeSet):
# print"overlapped"
parentSets.append(i)
            if isLegal:
                # do not save edge as a fold if the user set it as a cut
if tupEdge[1] != float('inf'):
foldList.append(edgeIdx)
if len(parentSets) == 0:
treeSets.append(setConnFaces)
elif len(parentSets) == 1:
treeSets[parentSets[0]].update(setConnFaces)
elif len(parentSets) == 2:
treeSets[parentSets[0]].update(treeSets[parentSets[1]])
treeSets.pop(parentSets[1])
elif len(parentSets) > 2:
                    print "Error in m.s.t: more than two sets overlapped with edgeSet!"
                    print "len parentSets: %d\n" % len(parentSets)
                    print treeSets
                    print parentSets
                    print setConnFaces
    # TODO: simplify this with set operations (union-find would remove the
    # need to scan treeSets); the if statements above could also be cleaned up.
return foldList
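# Hedged reference sketch for the union-find ("disjoint set") structure the
# docstring above suggests: find/union run in near-constant amortized time,
# replacing the linear scan over treeSets for every edge. It is not wired
# into getSpanningKruskal; the names here are illustrative only.
class _UnionFind(object):
    def __init__(self):
        self.parent = {}

    def find(self, x):
        self.parent.setdefault(x, x)
        while self.parent[x] != x:
            self.parent[x] = self.parent[self.parent[x]]  # path halving
            x = self.parent[x]
        return x

    def union(self, a, b):
        rootA, rootB = self.find(a), self.find(b)
        if rootA == rootB:
            return False  # faces already connected; edge would close a loop
        self.parent[rootB] = rootA
        return True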
def getCutList(myMesh, foldList):
all_edges = myMesh.get_set_of_edges()
cut_set = all_edges.difference(set(foldList))
cut_list = []
for edge in cut_set:
if not myMesh.is_naked_edge(edge):
cut_list.append(edge)
return cut_list
| jlopezbi/rhinoUnfolder | rhino_unwrapper/cutSelection/autoCuts.py | Python | gpl-3.0 | 3,550 | 0.006197 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Middleware provided and used by Horizon.
"""
import logging
from django import shortcuts
from django.contrib import messages
from django.utils.translation import ugettext as _
from horizon import api
from horizon import exceptions
from horizon import users
LOG = logging.getLogger(__name__)
class HorizonMiddleware(object):
""" The main Horizon middleware class. Required for use of Horizon. """
def process_request(self, request):
""" Adds data necessary for Horizon to function to the request.
Adds the current "active" :class:`~horizon.Dashboard` and
:class:`~horizon.Panel` to ``request.horizon``.
Adds a :class:`~horizon.users.User` object to ``request.user``.
"""
request.__class__.user = users.LazyUser()
request.horizon = {'dashboard': None, 'panel': None}
if request.user.is_authenticated() and \
request.user.authorized_tenants is None:
try:
authd = api.tenant_list_for_token(request,
request.user.token,
endpoint_type='internalURL')
except Exception, e:
authd = []
LOG.exception('Could not retrieve tenant list.')
if hasattr(request.user, 'message_set'):
messages.error(request,
_("Unable to retrieve tenant list."))
request.user.authorized_tenants = authd
def process_exception(self, request, exception):
""" Catch NotAuthorized and Http302 and handle them gracefully. """
if isinstance(exception, exceptions.NotAuthorized):
messages.error(request, unicode(exception))
return shortcuts.redirect('/auth/login')
if isinstance(exception, exceptions.Http302):
if exception.message:
messages.error(request, exception.message)
return shortcuts.redirect(exception.location)
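# Hedged configuration sketch (assumption: this module is importable as
# ``horizon.middleware``). Django applies middleware in list order, so the
# class is typically registered after the session/auth layers it reads from:
#
#     MIDDLEWARE_CLASSES = (
#         'django.contrib.sessions.middleware.SessionMiddleware',
#         'django.contrib.auth.middleware.AuthenticationMiddleware',
#         'horizon.middleware.HorizonMiddleware',
#     )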
| rcbops/horizon-buildpackage | horizon/middleware.py | Python | apache-2.0 | 2,838 | 0 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
import sys
import os
import urllib
import logging
import re
import time
import errno
import uuid
import datetime
from bs4 import BeautifulSoup
import geoserver
import httplib2
from urlparse import urlparse
from urlparse import urlsplit
from threading import local
from collections import namedtuple
from itertools import cycle, izip
from lxml import etree
import xml.etree.ElementTree as ET
from decimal import Decimal
from owslib.wcs import WebCoverageService
from owslib.util import http_post
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import pre_delete
from django.template.loader import render_to_string
from django.conf import settings
from django.utils.translation import ugettext as _
from dialogos.models import Comment
from agon_ratings.models import OverallRating
from gsimporter import Client
from owslib.wms import WebMapService
from geoserver.store import CoverageStore, DataStore, datastore_from_index,\
coveragestore_from_index, wmsstore_from_index
from geoserver.workspace import Workspace
from geoserver.catalog import Catalog
from geoserver.catalog import FailedRequestError, UploadError
from geoserver.catalog import ConflictingDataError
from geoserver.resource import FeatureType, Coverage
from geoserver.support import DimensionInfo
from geonode import GeoNodeException
from geonode.layers.utils import layer_type, get_files
from geonode.layers.models import Layer, Attribute, Style
from geonode.layers.enumerations import LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES
logger = logging.getLogger(__name__)
if not hasattr(settings, 'OGC_SERVER'):
msg = (
'Please configure OGC_SERVER when enabling geonode.geoserver.'
' More info can be found at '
'http://docs.geonode.org/en/master/reference/developers/settings.html#ogc-server')
raise ImproperlyConfigured(msg)
def check_geoserver_is_up():
    """Verifies that GeoServer is running;
    this is needed to be able to upload.
"""
url = "%sweb/" % ogc_server_settings.LOCATION
resp, content = http_client.request(url, "GET")
msg = ('Cannot connect to the GeoServer at %s\nPlease make sure you '
'have started it.' % ogc_server_settings.LOCATION)
assert resp['status'] == '200', msg
def _add_sld_boilerplate(symbolizer):
"""
Wrap an XML snippet representing a single symbolizer in the appropriate
elements to make it a valid SLD which applies that symbolizer to all features,
including format strings to allow interpolating a "name" variable in.
"""
return """
<StyledLayerDescriptor version="1.0.0" xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd">
<NamedLayer>
<Name>%(name)s</Name>
<UserStyle>
<Name>%(name)s</Name>
<Title>%(name)s</Title>
<FeatureTypeStyle>
<Rule>
""" + symbolizer + """
</Rule>
</FeatureTypeStyle>
</UserStyle>
</NamedLayer>
</StyledLayerDescriptor>
"""
_raster_template = """
<RasterSymbolizer>
<Opacity>1.0</Opacity>
</RasterSymbolizer>
"""
_polygon_template = """
<PolygonSymbolizer>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
<CssParameter name="stroke-width">0.7</CssParameter>
</Stroke>
</PolygonSymbolizer>
"""
_line_template = """
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(bg)s</CssParameter>
<CssParameter name="stroke-width">3</CssParameter>
</Stroke>
</LineSymbolizer>
</Rule>
</FeatureTypeStyle>
<FeatureTypeStyle>
<Rule>
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</LineSymbolizer>
"""
_point_template = """
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>%(mark)s</WellKnownName>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</Mark>
<Size>10</Size>
</Graphic>
</PointSymbolizer>
"""
_style_templates = dict(
raster=_add_sld_boilerplate(_raster_template),
polygon=_add_sld_boilerplate(_polygon_template),
line=_add_sld_boilerplate(_line_template),
point=_add_sld_boilerplate(_point_template)
)
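# Quick hedged illustration (not used by the module itself): each template is
# a plain %-format string, so a complete SLD document can be produced by
# interpolating a layer name plus a colour/mark context. The values below are
# arbitrary sample inputs.
#
#     sld = _style_templates['point'] % dict(
#         name='demo_layer', fg='#880000', bg='#ffbbbb', mark='circle')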
def _style_name(resource):
return _punc.sub("_", resource.store.workspace.name + ":" + resource.name)
def get_sld_for(layer):
    # FIXME: GeoServer sometimes fails to associate a style with the data, so
    # for now we default to using a point style (it works for lines and
    # polygons; hopefully this doesn't happen for rasters).
name = layer.default_style.name if layer.default_style is not None else "point"
# FIXME: When gsconfig.py exposes the default geometry type for vector
# layers we should use that rather than guessing based on the auto-detected
# style.
if name in _style_templates:
fg, bg, mark = _style_contexts.next()
return _style_templates[name] % dict(
name=layer.name,
fg=fg,
bg=bg,
mark=mark)
else:
return None
def fixup_style(cat, resource, style):
logger.debug("Creating styles for layers associated with [%s]", resource)
layers = cat.get_layers(resource=resource)
logger.info("Found %d layers associated with [%s]", len(layers), resource)
for lyr in layers:
if lyr.default_style.name in _style_templates:
logger.info("%s uses a default style, generating a new one", lyr)
name = _style_name(resource)
if style is None:
sld = get_sld_for(lyr)
else:
sld = style.read()
logger.info("Creating style [%s]", name)
style = cat.create_style(name, sld)
lyr.default_style = cat.get_style(name)
logger.info("Saving changes to %s", lyr)
cat.save(lyr)
logger.info("Successfully updated %s", lyr)
def cascading_delete(cat, layer_name):
resource = None
try:
if layer_name.find(':') != -1:
workspace, name = layer_name.split(':')
ws = cat.get_workspace(workspace)
try:
store = get_store(cat, name, workspace=ws)
except FailedRequestError:
if ogc_server_settings.DATASTORE:
try:
store = get_store(cat, ogc_server_settings.DATASTORE, workspace=ws)
except FailedRequestError:
logger.debug(
'the store was not found in geoserver')
return
else:
logger.debug(
'the store was not found in geoserver')
return
if ws is None:
logger.debug(
'cascading delete was called on a layer where the workspace was not found')
return
resource = cat.get_resource(name, store=store, workspace=workspace)
else:
resource = cat.get_resource(layer_name)
except EnvironmentError as e:
if e.errno == errno.ECONNREFUSED:
            msg = ('Could not connect to geoserver at "%s" '
                   'to save information for layer "%s"' % (
                       ogc_server_settings.LOCATION, layer_name)
                   )
            logger.warn(msg, exc_info=True)
return None
else:
raise e
if resource is None:
# If there is no associated resource,
        # this method cannot delete anything.
# Let's return and make a note in the log.
logger.debug(
'cascading_delete was called with a non existent resource')
return
resource_name = resource.name
lyr = cat.get_layer(resource_name)
    if lyr is not None:  # if None, the layer was already deleted
store = resource.store
styles = lyr.styles + [lyr.default_style]
cat.delete(lyr)
for s in styles:
if s is not None and s.name not in _default_style_names:
try:
cat.delete(s, purge='true')
except FailedRequestError as e:
# Trying to delete a shared style will fail
# We'll catch the exception and log it.
logger.debug(e)
        # Due to a possible GeoServer bug, we need this trick for now
# TODO: inspect the issue reported by this hack. Should be solved
# with GS 2.7+
try:
cat.delete(resource, recurse=True) # This may fail
except:
            cat.reload()  # this preserves the integrity of geoserver
if store.resource_type == 'dataStore' and 'dbtype' in store.connection_parameters and \
store.connection_parameters['dbtype'] == 'postgis':
delete_from_postgis(resource_name)
elif store.type and store.type.lower() == 'geogig':
# Prevent the entire store from being removed when the store is a
# GeoGig repository.
return
else:
if store.resource_type == 'coverageStore':
try:
logger.info(" - Going to purge the " + store.resource_type + " : " + store.href)
cat.reset() # this resets the coverage readers and unlocks the files
cat.delete(store, purge='all', recurse=True)
                cat.reload()  # this preserves the integrity of geoserver
except FailedRequestError as e:
# Trying to recursively purge a store may fail
# We'll catch the exception and log it.
logger.debug(e)
else:
try:
if not store.get_resources():
cat.delete(store, recurse=True)
except FailedRequestError as e:
# Catch the exception and log it.
logger.debug(e)
def delete_from_postgis(resource_name):
"""
Delete a table from PostGIS (because Geoserver won't do it yet);
to be used after deleting a layer from the system.
"""
import psycopg2
db = ogc_server_settings.datastore_db
    conn = psycopg2.connect(
        "dbname='%s' user='%s' password='%s' port=%s host='%s'" % (
            db['NAME'], db['USER'], db['PASSWORD'], db['PORT'], db['HOST']))
try:
cur = conn.cursor()
cur.execute("SELECT DropGeometryTable ('%s')" % resource_name)
conn.commit()
except Exception as e:
logger.error(
"Error deleting PostGIS table %s:%s",
resource_name,
str(e))
finally:
conn.close()
def gs_slurp(
ignore_errors=True,
verbosity=1,
console=None,
owner=None,
workspace=None,
store=None,
filter=None,
skip_unadvertised=False,
skip_geonode_registered=False,
remove_deleted=False):
"""Configure the layers available in GeoServer in GeoNode.
It returns a list of dictionaries with the name of the layer,
the result of the operation and the errors and traceback if it failed.
"""
if console is None:
console = open(os.devnull, 'w')
if verbosity > 1:
print >> console, "Inspecting the available layers in GeoServer ..."
cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
if workspace is not None:
workspace = cat.get_workspace(workspace)
if workspace is None:
resources = []
else:
# obtain the store from within the workspace. if it exists, obtain resources
# directly from store, otherwise return an empty list:
if store is not None:
store = get_store(cat, store, workspace=workspace)
if store is None:
resources = []
else:
resources = cat.get_resources(store=store)
else:
resources = cat.get_resources(workspace=workspace)
elif store is not None:
store = get_store(cat, store)
resources = cat.get_resources(store=store)
else:
resources = cat.get_resources()
if remove_deleted:
resources_for_delete_compare = resources[:]
workspace_for_delete_compare = workspace
        # filter out layers for delete comparison with GeoNode layers by the following criteria:
# enabled = true, if --skip-unadvertised: advertised = true, but
# disregard the filter parameter in the case of deleting layers
resources_for_delete_compare = [
k for k in resources_for_delete_compare if k.enabled in ["true", True]]
if skip_unadvertised:
resources_for_delete_compare = [
k for k in resources_for_delete_compare if k.advertised in ["true", True]]
if filter:
resources = [k for k in resources if filter in k.name]
# filter out layers depending on enabled, advertised status:
resources = [k for k in resources if k.enabled in ["true", True]]
if skip_unadvertised:
resources = [k for k in resources if k.advertised in ["true", True]]
# filter out layers already registered in geonode
layer_names = Layer.objects.all().values_list('typename', flat=True)
if skip_geonode_registered:
resources = [k for k in resources
if not '%s:%s' % (k.workspace.name, k.name) in layer_names]
# TODO: Should we do something with these?
# i.e. look for matching layers in GeoNode and also disable?
# disabled_resources = [k for k in resources if k.enabled == "false"]
number = len(resources)
if verbosity > 1:
msg = "Found %d layers, starting processing" % number
print >> console, msg
output = {
'stats': {
'failed': 0,
'updated': 0,
'created': 0,
'deleted': 0,
},
'layers': [],
'deleted_layers': []
}
start = datetime.datetime.now()
for i, resource in enumerate(resources):
name = resource.name
the_store = resource.store
workspace = the_store.workspace
try:
layer, created = Layer.objects.get_or_create(name=name, defaults={
"workspace": workspace.name,
"store": the_store.name,
"storeType": the_store.resource_type,
"typename": "%s:%s" % (workspace.name.encode('utf-8'), resource.name.encode('utf-8')),
"title": resource.title or 'No title provided',
"abstract": resource.abstract or 'No abstract provided',
"owner": owner,
"uuid": str(uuid.uuid4()),
"bbox_x0": Decimal(resource.latlon_bbox[0]),
"bbox_x1": Decimal(resource.latlon_bbox[1]),
"bbox_y0": Decimal(resource.latlon_bbox[2]),
"bbox_y1": Decimal(resource.latlon_bbox[3])
})
# recalculate the layer statistics
set_attributes(layer, overwrite=True)
# Fix metadata links if the ip has changed
if layer.link_set.metadata().count() > 0:
if not created and settings.SITEURL not in layer.link_set.metadata()[0].url:
layer.link_set.metadata().delete()
layer.save()
metadata_links = []
for link in layer.link_set.metadata():
metadata_links.append((link.mime, link.name, link.url))
resource.metadata_links = metadata_links
cat.save(resource)
except Exception as e:
if ignore_errors:
status = 'failed'
exception_type, error, traceback = sys.exc_info()
else:
if verbosity > 0:
msg = "Stopping process because --ignore-errors was not set and an error was found."
print >> sys.stderr, msg
raise Exception(
'Failed to process %s' %
resource.name.encode('utf-8'), e), None, sys.exc_info()[2]
else:
if created:
layer.set_default_permissions()
status = 'created'
output['stats']['created'] += 1
else:
status = 'updated'
output['stats']['updated'] += 1
msg = "[%s] Layer %s (%d/%d)" % (status, name, i + 1, number)
info = {'name': name, 'status': status}
if status == 'failed':
output['stats']['failed'] += 1
info['traceback'] = traceback
info['exception_type'] = exception_type
info['error'] = error
output['layers'].append(info)
if verbosity > 0:
print >> console, msg
if remove_deleted:
q = Layer.objects.filter()
if workspace_for_delete_compare is not None:
if isinstance(workspace_for_delete_compare, Workspace):
q = q.filter(
workspace__exact=workspace_for_delete_compare.name)
else:
q = q.filter(workspace__exact=workspace_for_delete_compare)
if store is not None:
if isinstance(
store,
CoverageStore) or isinstance(
store,
DataStore):
q = q.filter(store__exact=store.name)
else:
q = q.filter(store__exact=store)
logger.debug("Executing 'remove_deleted' logic")
logger.debug("GeoNode Layers Found:")
# compare the list of GeoNode layers obtained via query/filter with valid resources found in GeoServer
# filtered per options passed to updatelayers: --workspace, --store, --skip-unadvertised
# add any layers not found in GeoServer to deleted_layers (must match
# workspace and store as well):
deleted_layers = []
for layer in q:
logger.debug(
"GeoNode Layer info: name: %s, workspace: %s, store: %s",
layer.name,
layer.workspace,
layer.store)
layer_found_in_geoserver = False
for resource in resources_for_delete_compare:
# if layer.name matches a GeoServer resource, check also that
# workspace and store match, mark valid:
if layer.name == resource.name:
if layer.workspace == resource.workspace.name and layer.store == resource.store.name:
logger.debug(
"Matches GeoServer layer: name: %s, workspace: %s, store: %s",
resource.name,
resource.workspace.name,
resource.store.name)
layer_found_in_geoserver = True
if not layer_found_in_geoserver:
logger.debug(
"----- Layer %s not matched, marked for deletion ---------------",
layer.name)
deleted_layers.append(layer)
number_deleted = len(deleted_layers)
if verbosity > 1:
msg = "\nFound %d layers to delete, starting processing" % number_deleted if number_deleted > 0 else \
"\nFound %d layers to delete" % number_deleted
print >> console, msg
for i, layer in enumerate(deleted_layers):
logger.debug(
"GeoNode Layer to delete: name: %s, workspace: %s, store: %s",
layer.name,
layer.workspace,
layer.store)
try:
# delete ratings, comments, and taggit tags:
ct = ContentType.objects.get_for_model(layer)
OverallRating.objects.filter(
content_type=ct,
object_id=layer.id).delete()
Comment.objects.filter(
content_type=ct,
object_id=layer.id).delete()
layer.keywords.clear()
layer.delete()
output['stats']['deleted'] += 1
status = "delete_succeeded"
except Exception as e:
status = "delete_failed"
finally:
from .signals import geoserver_pre_delete
pre_delete.connect(geoserver_pre_delete, sender=Layer)
msg = "[%s] Layer %s (%d/%d)" % (status,
layer.name,
i + 1,
number_deleted)
info = {'name': layer.name, 'status': status}
if status == "delete_failed":
exception_type, error, traceback = sys.exc_info()
info['traceback'] = traceback
info['exception_type'] = exception_type
info['error'] = error
output['deleted_layers'].append(info)
if verbosity > 0:
print >> console, msg
finish = datetime.datetime.now()
td = finish - start
output['stats']['duration_sec'] = td.microseconds / \
1000000 + td.seconds + td.days * 24 * 3600
return output
def get_stores(store_type=None):
cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
stores = cat.get_stores()
store_list = []
for store in stores:
store.fetch()
stype = store.dom.find('type').text.lower()
if store_type and store_type.lower() == stype:
store_list.append({'name': store.name, 'type': stype})
elif store_type is None:
store_list.append({'name': store.name, 'type': stype})
return store_list
def set_attributes(layer, overwrite=False):
"""
Retrieve layer attribute names & types from Geoserver,
then store in GeoNode database using Attribute model
"""
attribute_map = []
server_url = ogc_server_settings.LOCATION if layer.storeType != "remoteStore" else layer.service.base_url
if layer.storeType == "remoteStore" and layer.service.ptype == "gxp_arcrestsource":
dft_url = server_url + ("%s?f=json" % layer.typename)
try:
# The code below will fail if http_client cannot be imported
body = json.loads(http_client.request(dft_url)[1])
attribute_map = [[n["name"], _esri_types[n["type"]]]
for n in body["fields"] if n.get("name") and n.get("type")]
except Exception:
attribute_map = []
elif layer.storeType in ["dataStore", "remoteStore", "wmsStore"]:
        dft_url = re.sub(r"/wms/?$",
"/",
server_url) + "wfs?" + urllib.urlencode({"service": "wfs",
"version": "1.0.0",
"request": "DescribeFeatureType",
"typename": layer.typename.encode('utf-8'),
})
try:
# The code below will fail if http_client cannot be imported or
# WFS not supported
body = http_client.request(dft_url)[1]
doc = etree.fromstring(body)
path = ".//{xsd}extension/{xsd}sequence/{xsd}element".format(
xsd="{http://www.w3.org/2001/XMLSchema}")
attribute_map = [[n.attrib["name"], n.attrib["type"]] for n in doc.findall(
path) if n.attrib.get("name") and n.attrib.get("type")]
except Exception:
attribute_map = []
# Try WMS instead
dft_url = server_url + "?" + urllib.urlencode({
"service": "wms",
"version": "1.0.0",
"request": "GetFeatureInfo",
"bbox": ','.join([str(x) for x in layer.bbox]),
"LAYERS": layer.typename.encode('utf-8'),
"QUERY_LAYERS": layer.typename.encode('utf-8'),
"feature_count": 1,
"width": 1,
"height": 1,
"srs": "EPSG:4326",
"info_format": "text/html",
"x": 1,
"y": 1
})
try:
body = http_client.request(dft_url)[1]
soup = BeautifulSoup(body)
for field in soup.findAll('th'):
if(field.string is None):
field_name = field.contents[0].string
else:
field_name = field.string
attribute_map.append([field_name, "xsd:string"])
except Exception:
attribute_map = []
elif layer.storeType in ["coverageStore"]:
dc_url = server_url + "wcs?" + urllib.urlencode({
"service": "wcs",
"version": "1.1.0",
"request": "DescribeCoverage",
"identifiers": layer.typename.encode('utf-8')
})
try:
response, body = http_client.request(dc_url)
doc = etree.fromstring(body)
path = ".//{wcs}Axis/{wcs}AvailableKeys/{wcs}Key".format(
wcs="{http://www.opengis.net/wcs/1.1.1}")
attribute_map = [[n.text, "raster"] for n in doc.findall(path)]
except Exception:
attribute_map = []
# we need 3 more items for description, attribute_label and display_order
attribute_map_dict = {
'field': 0,
'ftype': 1,
'description': 2,
'label': 3,
'display_order': 4,
}
for attribute in attribute_map:
attribute.extend((None, None, 0))
attributes = layer.attribute_set.all()
# Delete existing attributes if they no longer exist in an updated layer
for la in attributes:
lafound = False
for attribute in attribute_map:
field, ftype, description, label, display_order = attribute
if field == la.attribute:
lafound = True
# store description and attribute_label in attribute_map
attribute[attribute_map_dict['description']] = la.description
attribute[attribute_map_dict['label']] = la.attribute_label
attribute[attribute_map_dict['display_order']] = la.display_order
if overwrite or not lafound:
logger.debug(
"Going to delete [%s] for [%s]",
la.attribute,
layer.name.encode('utf-8'))
la.delete()
# Add new layer attributes if they don't already exist
if attribute_map is not None:
iter = len(Attribute.objects.filter(layer=layer)) + 1
for attribute in attribute_map:
field, ftype, description, label, display_order = attribute
if field is not None:
la, created = Attribute.objects.get_or_create(
layer=layer, attribute=field, attribute_type=ftype,
description=description, attribute_label=label,
display_order=display_order)
if created:
if is_layer_attribute_aggregable(
layer.storeType,
field,
ftype):
logger.debug("Generating layer attribute statistics")
result = get_attribute_statistics(layer.name, field)
if result is not None:
la.count = result['Count']
la.min = result['Min']
la.max = result['Max']
la.average = result['Average']
la.median = result['Median']
la.stddev = result['StandardDeviation']
la.sum = result['Sum']
la.unique_values = result['unique_values']
la.last_stats_updated = datetime.datetime.now()
la.visible = ftype.find("gml:") != 0
la.display_order = iter
la.save()
iter += 1
logger.debug(
"Created [%s] attribute for [%s]",
field,
layer.name.encode('utf-8'))
else:
logger.debug("No attributes found")
def set_styles(layer, gs_catalog):
style_set = []
gs_layer = gs_catalog.get_layer(layer.name)
default_style = gs_layer.default_style
layer.default_style = save_style(default_style)
# FIXME: This should remove styles that are no longer valid
style_set.append(layer.default_style)
alt_styles = gs_layer.styles
for alt_style in alt_styles:
style_set.append(save_style(alt_style))
layer.styles = style_set
return layer
def save_style(gs_style):
style, created = Style.objects.get_or_create(name=gs_style.name)
style.sld_title = gs_style.sld_title
style.sld_body = gs_style.sld_body
style.sld_url = gs_style.body_href
style.save()
return style
def is_layer_attribute_aggregable(store_type, field_name, field_type):
"""
Decipher whether layer attribute is suitable for statistical derivation
"""
# must be vector layer
if store_type != 'dataStore':
return False
# must be a numeric data type
if field_type not in LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES:
return False
# must not be an identifier type field
if field_name.lower() in ['id', 'identifier']:
return False
return True
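# Hedged examples of the rule above (assuming 'xsd:int' is listed among
# LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES, as numeric XSD types normally are):
#
#     is_layer_attribute_aggregable('dataStore', 'population', 'xsd:int')
#     # -> True
#     is_layer_attribute_aggregable('coverageStore', 'population', 'xsd:int')
#     # -> False: raster stores are excluded
#     is_layer_attribute_aggregable('dataStore', 'id', 'xsd:int')
#     # -> False: identifier-like field names are excluded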
def get_attribute_statistics(layer_name, field):
"""
Generate statistics (range, mean, median, standard deviation, unique values)
for layer attribute
"""
logger.debug('Deriving aggregate statistics for attribute %s', field)
if not ogc_server_settings.WPS_ENABLED:
return None
try:
return wps_execute_layer_attribute_statistics(layer_name, field)
except Exception:
logger.exception('Error generating layer aggregate statistics')
def get_wcs_record(instance, retry=True):
wcs = WebCoverageService(ogc_server_settings.LOCATION + 'wcs', '1.0.0')
key = instance.workspace + ':' + instance.name
logger.debug(wcs.contents)
if key in wcs.contents:
return wcs.contents[key]
else:
msg = ("Layer '%s' was not found in WCS service at %s." %
(key, ogc_server_settings.public_url)
)
if retry:
logger.debug(
msg +
' Waiting a couple of seconds before trying again.')
time.sleep(2)
return get_wcs_record(instance, retry=False)
else:
raise GeoNodeException(msg)
def get_coverage_grid_extent(instance):
"""
Returns a list of integers with the size of the coverage
extent in pixels
"""
instance_wcs = get_wcs_record(instance)
grid = instance_wcs.grid
return [(int(h) - int(l) + 1) for
h, l in zip(grid.highlimits, grid.lowlimits)]
GEOSERVER_LAYER_TYPES = {
'vector': FeatureType.resource_type,
'raster': Coverage.resource_type,
}
def geoserver_layer_type(filename):
the_type = layer_type(filename)
return GEOSERVER_LAYER_TYPES[the_type]
def cleanup(name, uuid):
"""Deletes GeoServer and Catalogue records for a given name.
Useful to clean the mess when something goes terribly wrong.
It also verifies if the Django record existed, in which case
it performs no action.
"""
try:
Layer.objects.get(name=name)
except Layer.DoesNotExist as e:
pass
else:
msg = ('Not doing any cleanup because the layer %s exists in the '
'Django db.' % name)
raise GeoNodeException(msg)
cat = gs_catalog
gs_store = None
gs_layer = None
gs_resource = None
    # FIXME: Could this lead to someone deleting, for example, a postgis db
    # with the same name as the uploaded file?
try:
gs_store = cat.get_store(name)
if gs_store is not None:
gs_layer = cat.get_layer(name)
if gs_layer is not None:
gs_resource = gs_layer.resource
else:
gs_layer = None
gs_resource = None
except FailedRequestError as e:
        msg = ('Couldn\'t connect to GeoServer while cleaning up layer '
               '[%s] !!' % str(e))
        logger.warning(msg)
if gs_layer is not None:
try:
cat.delete(gs_layer)
except:
logger.warning("Couldn't delete GeoServer layer during cleanup()")
if gs_resource is not None:
try:
cat.delete(gs_resource)
except:
msg = 'Couldn\'t delete GeoServer resource during cleanup()'
logger.warning(msg)
if gs_store is not None:
try:
cat.delete(gs_store)
except:
logger.warning("Couldn't delete GeoServer store during cleanup()")
logger.warning('Deleting dangling Catalogue record for [%s] '
'(no Django record to match)', name)
if 'geonode.catalogue' in settings.INSTALLED_APPS:
from geonode.catalogue import get_catalogue
catalogue = get_catalogue()
catalogue.remove_record(uuid)
logger.warning('Finished cleanup after failed Catalogue/Django '
'import for layer: %s', name)
def _create_featurestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
cat = gs_catalog
cat.create_featurestore(name, data, overwrite=overwrite, charset=charset)
store = get_store(cat, name, workspace=workspace)
return store, cat.get_resource(name, store=store, workspace=workspace)
def _create_coveragestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
cat = gs_catalog
cat.create_coveragestore(name, data, overwrite=overwrite)
store = get_store(cat, name, workspace=workspace)
return store, cat.get_resource(name, store=store, workspace=workspace)
def _create_db_featurestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
"""Create a database store then use it to import a shapefile.
If the import into the database fails then delete the store
(and delete the PostGIS table for it).
"""
cat = gs_catalog
dsname = ogc_server_settings.DATASTORE
try:
ds = get_store(cat, dsname, workspace=workspace)
except FailedRequestError:
ds = cat.create_datastore(dsname, workspace=workspace)
db = ogc_server_settings.datastore_db
db_engine = 'postgis' if \
'postgis' in db['ENGINE'] else db['ENGINE']
ds.connection_parameters.update(
{'validate connections': 'true',
'max connections': '10',
'min connections': '1',
'fetch size': '1000',
'host': db['HOST'],
'port': db['PORT'],
'database': db['NAME'],
'user': db['USER'],
'passwd': db['PASSWORD'],
'dbtype': db_engine}
)
cat.save(ds)
ds = get_store(cat, dsname, workspace=workspace)
try:
cat.add_data_to_store(ds, name, data,
overwrite=overwrite,
charset=charset)
return ds, cat.get_resource(name, store=ds, workspace=workspace)
except Exception:
msg = _("An exception occurred loading data to PostGIS")
msg += "- %s" % (sys.exc_info()[1])
try:
delete_from_postgis(name)
except Exception:
            msg += _(" Additionally an error occurred during database cleanup")
msg += "- %s" % (sys.exc_info()[1])
raise GeoNodeException(msg)
def get_store(cat, name, workspace=None):
# Make sure workspace is a workspace object and not a string.
# If the workspace does not exist, continue as if no workspace had been defined.
if isinstance(workspace, basestring):
workspace = cat.get_workspace(workspace)
if workspace is None:
workspace = cat.get_default_workspace()
try:
store = cat.get_xml('%s/%s.xml' % (workspace.datastore_url[:-4], name))
except FailedRequestError:
try:
store = cat.get_xml('%s/%s.xml' % (workspace.coveragestore_url[:-4], name))
except FailedRequestError:
try:
store = cat.get_xml('%s/%s.xml' % (workspace.wmsstore_url[:-4], name))
except FailedRequestError:
raise FailedRequestError("No store found named: " + name)
if store.tag == 'dataStore':
store = datastore_from_index(cat, workspace, store)
elif store.tag == 'coverageStore':
store = coveragestore_from_index(cat, workspace, store)
elif store.tag == 'wmsStore':
store = wmsstore_from_index(cat, workspace, store)
return store
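# Hedged usage sketch: resolving a store by name against a catalog. The store
# and workspace names below are placeholders; ``gs_catalog`` is the
# module-level catalog used elsewhere in this file.
#
#     store = get_store(gs_catalog, 'my_datastore', workspace='geonode')
#     resource = gs_catalog.get_resource('my_layer', store=store)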
def geoserver_upload(
layer,
base_file,
user,
name,
overwrite=True,
title=None,
abstract=None,
permissions=None,
keywords=(),
charset='UTF-8'):
# Step 2. Check that it is uploading to the same resource type as
# the existing resource
    logger.info('>>> Step 2. Make sure we are not trying to overwrite an '
                'existing resource named [%s] with the wrong type', name)
the_layer_type = geoserver_layer_type(base_file)
# Get a short handle to the gsconfig geoserver catalog
cat = gs_catalog
# Fix bug on layer replace #2642
# https://github.com/GeoNode/geonode/issues/2462
cat.reload()
workspace = cat.get_default_workspace()
# Check if the store exists in geoserver
try:
store = get_store(cat, name, workspace=workspace)
except geoserver.catalog.FailedRequestError as e:
# There is no store, ergo the road is clear
pass
else:
# If we get a store, we do the following:
resources = store.get_resources()
# If the store is empty, we just delete it.
if len(resources) == 0:
cat.delete(store)
else:
# If our resource is already configured in the store it needs
# to have the right resource type
for resource in resources:
if resource.name == name:
msg = 'Name already in use and overwrite is False'
assert overwrite, msg
existing_type = resource.resource_type
if existing_type != the_layer_type:
msg = ('Type of uploaded file %s (%s) '
'does not match type of existing '
'resource type '
'%s' % (name, the_layer_type, existing_type))
logger.info(msg)
raise GeoNodeException(msg)
# Step 3. Identify whether it is vector or raster and which extra files
# are needed.
logger.info('>>> Step 3. Identifying if [%s] is vector or raster and '
'gathering extra files', name)
if the_layer_type == FeatureType.resource_type:
logger.debug('Uploading vector layer: [%s]', base_file)
if ogc_server_settings.DATASTORE:
create_store_and_resource = _create_db_featurestore
else:
create_store_and_resource = _create_featurestore
elif the_layer_type == Coverage.resource_type:
logger.debug("Uploading raster layer: [%s]", base_file)
create_store_and_resource = _create_coveragestore
else:
msg = ('The layer type for name %s is %s. It should be '
               '%s or %s.' % (name,
the_layer_type,
FeatureType.resource_type,
Coverage.resource_type))
logger.warn(msg)
raise GeoNodeException(msg)
# Step 4. Create the store in GeoServer
logger.info('>>> Step 4. Starting upload of [%s] to GeoServer...', name)
# Get the helper files if they exist
files = get_files(base_file)
data = files
if 'shp' not in files:
data = base_file
try:
store, gs_resource = create_store_and_resource(name,
data,
charset=charset,
overwrite=overwrite,
workspace=workspace)
except UploadError as e:
msg = ('Could not save the layer %s, there was an upload '
'error: %s' % (name, str(e)))
logger.warn(msg)
e.args = (msg,)
raise
except ConflictingDataError as e:
# A datastore of this name already exists
msg = ('GeoServer reported a conflict creating a store with name %s: '
'"%s". This should never happen because a brand new name '
'should have been generated. But since it happened, '
'try renaming the file or deleting the store in '
'GeoServer.' % (name, str(e)))
logger.warn(msg)
e.args = (msg,)
raise
else:
logger.debug('Finished upload of [%s] to GeoServer without '
'errors.', name)
# Step 5. Create the resource in GeoServer
logger.info('>>> Step 5. Generating the metadata for [%s] after '
'successful import to GeoSever', name)
# Verify the resource was created
if gs_resource is not None:
assert gs_resource.name == name
else:
        msg = ('GeoNode encountered problems when creating layer %s. '
               'It cannot find the Layer that matches this Workspace. '
               'Try renaming your files.' % name)
logger.warn(msg)
raise GeoNodeException(msg)
# Step 6. Make sure our data always has a valid projection
# FIXME: Put this in gsconfig.py
logger.info('>>> Step 6. Making sure [%s] has a valid projection' % name)
if gs_resource.latlon_bbox is None:
box = gs_resource.native_bbox[:4]
minx, maxx, miny, maxy = [float(a) for a in box]
if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
-90 <= miny <= 90 and -90 <= maxy <= 90:
logger.info('GeoServer failed to detect the projection for layer '
'[%s]. Guessing EPSG:4326', name)
# If GeoServer couldn't figure out the projection, we just
# assume it's lat/lon to avoid a bad GeoServer configuration
gs_resource.latlon_bbox = gs_resource.native_bbox
gs_resource.projection = "EPSG:4326"
cat.save(gs_resource)
else:
msg = ('GeoServer failed to detect the projection for layer '
'[%s]. It doesn\'t look like EPSG:4326, so backing out '
'the layer.')
logger.info(msg, name)
cascading_delete(cat, name)
raise GeoNodeException(msg % name)
# Step 7. Create the style and assign it to the created resource
# FIXME: Put this in gsconfig.py
logger.info('>>> Step 7. Creating style for [%s]' % name)
publishing = cat.get_layer(name)
if 'sld' in files:
f = open(files['sld'], 'r')
sld = f.read()
f.close()
else:
sld = get_sld_for(publishing)
if sld is not None:
try:
cat.create_style(name, sld)
except geoserver.catalog.ConflictingDataError as e:
msg = ('There was already a style named %s in GeoServer, '
'cannot overwrite: "%s"' % (name, str(e)))
logger.warn(msg)
e.args = (msg,)
# FIXME: Should we use the fully qualified typename?
publishing.default_style = cat.get_style(name)
cat.save(publishing)
# Step 10. Create the Django record for the layer
logger.info('>>> Step 10. Creating Django record for [%s]', name)
# FIXME: Do this inside the layer object
typename = workspace.name + ':' + gs_resource.name
layer_uuid = str(uuid.uuid1())
defaults = dict(store=gs_resource.store.name,
storeType=gs_resource.store.resource_type,
typename=typename,
title=title or gs_resource.title,
uuid=layer_uuid,
abstract=abstract or gs_resource.abstract or '',
owner=user)
return name, workspace.name, defaults, gs_resource
class ServerDoesNotExist(Exception):
pass
class OGC_Server(object):
"""
OGC Server object.
"""
def __init__(self, ogc_server, alias):
self.alias = alias
self.server = ogc_server
def __getattr__(self, item):
return self.server.get(item)
@property
def credentials(self):
"""
Returns a tuple of the server's credentials.
"""
creds = namedtuple('OGC_SERVER_CREDENTIALS', ['username', 'password'])
return creds(username=self.USER, password=self.PASSWORD)
@property
def datastore_db(self):
"""
Returns the server's datastore dict or None.
"""
if self.DATASTORE and settings.DATABASES.get(self.DATASTORE, None):
return settings.DATABASES.get(self.DATASTORE, dict())
else:
return dict()
@property
def ows(self):
"""
The Open Web Service url for the server.
"""
location = self.PUBLIC_LOCATION if self.PUBLIC_LOCATION else self.LOCATION
return self.OWS_LOCATION if self.OWS_LOCATION else location + 'ows'
@property
def rest(self):
"""
The REST endpoint for the server.
"""
        return self.REST_LOCATION if self.REST_LOCATION else self.LOCATION + 'rest'
@property
def public_url(self):
"""
The global public endpoint for the server.
"""
return self.LOCATION if not self.PUBLIC_LOCATION else self.PUBLIC_LOCATION
@property
def internal_ows(self):
"""
The Open Web Service url for the server used by GeoNode internally.
"""
location = self.LOCATION
return location + 'ows'
@property
def internal_rest(self):
"""
The internal REST endpoint for the server.
"""
return self.LOCATION + 'rest'
@property
def hostname(self):
return urlsplit(self.LOCATION).hostname
@property
def netloc(self):
return urlsplit(self.LOCATION).netloc
def __str__(self):
return self.alias
class OGC_Servers_Handler(object):
"""
OGC Server Settings Convenience dict.
"""
def __init__(self, ogc_server_dict):
self.servers = ogc_server_dict
# FIXME(Ariel): Are there better ways to do this without involving
# local?
self._servers = local()
def ensure_valid_configuration(self, alias):
"""
Ensures the settings are valid.
"""
try:
server = self.servers[alias]
except KeyError:
raise ServerDoesNotExist("The server %s doesn't exist" % alias)
datastore = server.get('DATASTORE')
uploader_backend = getattr(
settings,
'UPLOADER',
dict()).get(
'BACKEND',
'geonode.rest')
if uploader_backend == 'geonode.importer' and datastore and not settings.DATABASES.get(
datastore):
raise ImproperlyConfigured(
'The OGC_SERVER setting specifies a datastore '
'but no connection parameters are present.')
if uploader_backend == 'geonode.importer' and not datastore:
raise ImproperlyConfigured(
'The UPLOADER BACKEND is set to geonode.importer but no DATASTORE is specified.')
if 'PRINTNG_ENABLED' in server:
raise ImproperlyConfigured("The PRINTNG_ENABLED setting has been removed, use 'PRINT_NG_ENABLED' instead.")
def ensure_defaults(self, alias):
"""
        Puts the defaults into the settings dictionary for a given connection where no settings are provided.
"""
try:
server = self.servers[alias]
except KeyError:
raise ServerDoesNotExist("The server %s doesn't exist" % alias)
server.setdefault('BACKEND', 'geonode.geoserver')
server.setdefault('LOCATION', 'http://localhost:8080/geoserver/')
server.setdefault('USER', 'admin')
server.setdefault('PASSWORD', 'geoserver')
server.setdefault('DATASTORE', str())
server.setdefault('GEOGIG_DATASTORE_DIR', str())
for option in ['MAPFISH_PRINT_ENABLED', 'PRINT_NG_ENABLED', 'GEONODE_SECURITY_ENABLED',
'BACKEND_WRITE_ENABLED']:
server.setdefault(option, True)
for option in ['GEOGIG_ENABLED', 'WMST_ENABLED', 'WPS_ENABLED']:
server.setdefault(option, False)
def __getitem__(self, alias):
if hasattr(self._servers, alias):
return getattr(self._servers, alias)
self.ensure_defaults(alias)
self.ensure_valid_configuration(alias)
server = self.servers[alias]
server = OGC_Server(alias=alias, ogc_server=server)
setattr(self._servers, alias, server)
return server
def __setitem__(self, key, value):
setattr(self._servers, key, value)
def __iter__(self):
return iter(self.servers)
def all(self):
return [self[alias] for alias in self]
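# Illustrative usage sketch (not part of the original module). The 'default'
# alias and the values shown are assumptions based on ensure_defaults() above:
#   >>> handler = OGC_Servers_Handler(settings.OGC_SERVER)
#   >>> server = handler['default']      # applies defaults and validates
#   >>> server.LOCATION                  # proxied via OGC_Server.__getattr__
#   'http://localhost:8080/geoserver/'
#   >>> server.credentials
#   OGC_SERVER_CREDENTIALS(username='admin', password='geoserver')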
def get_wms():
wms_url = ogc_server_settings.internal_ows + \
"?service=WMS&request=GetCapabilities&version=1.1.0"
netloc = urlparse(wms_url).netloc
http = httplib2.Http()
http.add_credentials(_user, _password)
http.authorizations.append(
httplib2.BasicAuthentication(
(_user, _password),
netloc,
wms_url,
{},
None,
None,
http
)
)
body = http.request(wms_url)[1]
_wms = WebMapService(wms_url, xml=body)
return _wms
def wps_execute_layer_attribute_statistics(layer_name, field):
"""Derive aggregate statistics from WPS endpoint"""
# generate statistics using WPS
url = '%s/ows' % (ogc_server_settings.LOCATION)
# TODO: use owslib.wps.WebProcessingService for WPS interaction
# this requires GeoServer's WPS gs:Aggregate function to
# return a proper wps:ExecuteResponse
request = render_to_string('layers/wps_execute_gs_aggregate.xml', {
'layer_name': 'geonode:%s' % layer_name,
'field': field
})
response = http_post(
url,
request,
timeout=ogc_server_settings.TIMEOUT,
username=ogc_server_settings.credentials.username,
password=ogc_server_settings.credentials.password)
exml = etree.fromstring(response)
result = {}
for f in ['Min', 'Max', 'Average', 'Median', 'StandardDeviation', 'Sum']:
fr = exml.find(f)
if fr is not None:
result[f] = fr.text
else:
result[f] = 'NA'
count = exml.find('Count')
if count is not None:
result['Count'] = int(count.text)
else:
result['Count'] = 0
result['unique_values'] = 'NA'
return result
# TODO: find way of figuring out threshold better
# Looks incomplete: what is the purpose of the next lines?
# if result['Count'] < 10000:
# request = render_to_string('layers/wps_execute_gs_unique.xml', {
# 'layer_name': 'geonode:%s' % layer_name,
# 'field': field
# })
# response = http_post(
# url,
# request,
# timeout=ogc_server_settings.TIMEOUT,
# username=ogc_server_settings.credentials.username,
# password=ogc_server_settings.credentials.password)
# exml = etree.fromstring(response)
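# Illustrative shape of the dict returned by the function above (values are
# assumptions; the real numbers depend on the attribute and the WPS response):
#   {'Min': '0.0', 'Max': '42.0', 'Average': '21.3', 'Median': '20.9',
#    'StandardDeviation': '7.1', 'Sum': '1065.0', 'Count': 50,
#    'unique_values': 'NA'}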
def style_update(request, url):
"""
    Sync styles from GeoServer to GeoNode.
    Ideally we would call this from a view straight from GXP, and we would use
    gsConfig, which at this time does not support style updates. Until gsConfig
    is updated, we need to parse the XML ourselves.
    In case of a DELETE, we need to query request.path to get the style name,
    and then remove it.
    In case of a POST or PUT, we need to parse the SLD XML from
    request.body.
"""
if request.method in ('POST', 'PUT'): # we need to parse xml
# Need to remove NSx from IE11
if "HTTP_USER_AGENT" in request.META:
if ('Trident/7.0' in request.META['HTTP_USER_AGENT'] and
'rv:11.0' in request.META['HTTP_USER_AGENT']):
txml = re.sub(r'xmlns:NS[0-9]=""', '', request.body)
txml = re.sub(r'NS[0-9]:', '', txml)
request._body = txml
tree = ET.ElementTree(ET.fromstring(request.body))
elm_namedlayer_name = tree.findall(
'.//{http://www.opengis.net/sld}Name')[0]
elm_user_style_name = tree.findall(
'.//{http://www.opengis.net/sld}Name')[1]
elm_user_style_title = tree.find(
'.//{http://www.opengis.net/sld}Title')
if not elm_user_style_title:
elm_user_style_title = elm_user_style_name
layer_name = elm_namedlayer_name.text
style_name = elm_user_style_name.text
sld_body = '<?xml version="1.0" encoding="UTF-8"?>%s' % request.body
# add style in GN and associate it to layer
if request.method == 'POST':
style = Style(name=style_name, sld_body=sld_body, sld_url=url)
style.save()
layer = Layer.objects.all().filter(typename=layer_name)[0]
style.layer_styles.add(layer)
style.save()
if request.method == 'PUT': # update style in GN
style = Style.objects.all().filter(name=style_name)[0]
style.sld_body = sld_body
style.sld_url = url
if len(elm_user_style_title.text) > 0:
style.sld_title = elm_user_style_title.text
style.save()
for layer in style.layer_styles.all():
layer.save()
if request.method == 'DELETE': # delete style from GN
style_name = os.path.basename(request.path)
style = Style.objects.all().filter(name=style_name)[0]
style.delete()
def set_time_info(layer, attribute, end_attribute, presentation,
precision_value, precision_step, enabled=True):
'''Configure the time dimension for a layer.
:param layer: the layer to configure
:param attribute: the attribute used to represent the instant or period
start
:param end_attribute: the optional attribute used to represent the end
period
:param presentation: either 'LIST', 'DISCRETE_INTERVAL', or
'CONTINUOUS_INTERVAL'
:param precision_value: number representing number of steps
:param precision_step: one of 'seconds', 'minutes', 'hours', 'days',
'months', 'years'
:param enabled: defaults to True
'''
layer = gs_catalog.get_layer(layer.name)
if layer is None:
raise ValueError('no such layer: %s' % layer.name)
resource = layer.resource
resolution = None
if precision_value and precision_step:
resolution = '%s %s' % (precision_value, precision_step)
info = DimensionInfo("time", enabled, presentation, resolution, "ISO8601",
None, attribute=attribute, end_attribute=end_attribute)
metadata = dict(resource.metadata or {})
metadata['time'] = info
resource.metadata = metadata
gs_catalog.save(resource)
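# Hypothetical usage sketch (not in the original module); 'my_layer' is an
# assumed Layer instance and the attribute names are made up:
#   >>> set_time_info(my_layer, attribute='date_start', end_attribute=None,
#   ...               presentation='LIST', precision_value=1,
#   ...               precision_step='years')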
def get_time_info(layer):
'''Get the configured time dimension metadata for the layer as a dict.
The keys of the dict will be those of the parameters of `set_time_info`.
:returns: dict of values or None if not configured
'''
layer = gs_catalog.get_layer(layer.name)
if layer is None:
raise ValueError('no such layer: %s' % layer.name)
resource = layer.resource
info = resource.metadata.get('time', None) if resource.metadata else None
vals = None
if info:
value = step = None
resolution = info.resolution_str()
if resolution:
value, step = resolution.split()
vals = dict(
enabled=info.enabled,
attribute=info.attribute,
end_attribute=info.end_attribute,
presentation=info.presentation,
precision_value=value,
precision_step=step,
)
return vals
ogc_server_settings = OGC_Servers_Handler(settings.OGC_SERVER)['default']
_wms = None
_csw = None
_user, _password = ogc_server_settings.credentials
http_client = httplib2.Http()
http_client.add_credentials(_user, _password)
_netloc = urlparse(ogc_server_settings.LOCATION).netloc
http_client.authorizations.append(
httplib2.BasicAuthentication(
(_user, _password),
_netloc,
ogc_server_settings.LOCATION,
{},
None,
None,
http_client
)
)
url = ogc_server_settings.rest
gs_catalog = Catalog(url, _user, _password)
gs_uploader = Client(url, _user, _password)
_punc = re.compile(r"[\.:]") # regex for punctuation that confuses restconfig
_foregrounds = [
"#ffbbbb",
"#bbffbb",
"#bbbbff",
"#ffffbb",
"#bbffff",
"#ffbbff"]
_backgrounds = [
"#880000",
"#008800",
"#000088",
"#888800",
"#008888",
"#880088"]
_marks = ["square", "circle", "cross", "x", "triangle"]
_style_contexts = izip(cycle(_foregrounds), cycle(_backgrounds), cycle(_marks))
_default_style_names = ["point", "line", "polygon", "raster"]
_esri_types = {
"esriFieldTypeDouble": "xsd:double",
"esriFieldTypeString": "xsd:string",
"esriFieldTypeSmallInteger": "xsd:int",
"esriFieldTypeInteger": "xsd:int",
"esriFieldTypeDate": "xsd:dateTime",
"esriFieldTypeOID": "xsd:long",
"esriFieldTypeGeometry": "xsd:geometry",
"esriFieldTypeBlob": "xsd:base64Binary",
"esriFieldTypeRaster": "raster",
"esriFieldTypeGUID": "xsd:string",
"esriFieldTypeGlobalID": "xsd:string",
"esriFieldTypeXML": "xsd:anyType"}
def _render_thumbnail(req_body):
spec = _fixup_ows_url(req_body)
url = "%srest/printng/render.png" % ogc_server_settings.LOCATION
hostname = urlparse(settings.SITEURL).hostname
params = dict(width=240, height=180, auth="%s,%s,%s" % (hostname, _user, _password))
url = url + "?" + urllib.urlencode(params)
    # @todo annoying but not critical
    # OpenLayers controls posted back contain a bad character. This seems
    # to come from a &minus; entity in the HTML: it gets converted to a
    # unicode en-dash but is not encoded properly during transmission.
    # 'ignore' the error for now, as the controls are not being rendered...
data = spec
if type(data) == unicode:
# make sure any stored bad values are wiped out
# don't use keyword for errors - 2.6 compat
# though unicode accepts them (as seen below)
data = data.encode('ASCII', 'ignore')
data = unicode(data, errors='ignore').encode('UTF-8')
try:
resp, content = http_client.request(url, "POST", data, {
'Content-type': 'text/html'
})
except Exception:
logging.warning('Error generating thumbnail')
return
return content
def _fixup_ows_url(thumb_spec):
# @HACK - for whatever reason, a map's maplayers ows_url contains only /geoserver/wms
# so rendering of thumbnails fails - replace those uri's with full geoserver URL
import re
gspath = '"' + ogc_server_settings.public_url # this should be in img src attributes
repl = '"' + ogc_server_settings.LOCATION
return re.sub(gspath, repl, thumb_spec)
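# Illustrative before/after for the substitution above (URLs are assumptions):
# a thumbnail spec referencing '"https://example.org/geoserver/' (public_url)
# is rewritten to '"http://localhost:8080/geoserver/' (LOCATION) so the
# internal renderer can reach GeoServer directly.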
| PhilLidar-DAD/geonode | geonode/geoserver/helpers.py | Python | gpl-3.0 | 61,811 | 0.001052 |
from PyQt4 import QtGui, QtCore
from shapely.geometry import Point
from shapely import affinity
from math import sqrt
from copy import copy
import FlatCAMApp
from GUIElements import *
from FlatCAMObj import FlatCAMGerber, FlatCAMExcellon
class FlatCAMTool(QtGui.QWidget):
toolName = "FlatCAM Generic Tool"
def __init__(self, app, parent=None):
"""
:param app: The application this tool will run in.
:type app: App
:param parent: Qt Parent
:return: FlatCAMTool
"""
QtGui.QWidget.__init__(self, parent)
# self.setSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
self.layout = QtGui.QVBoxLayout()
self.setLayout(self.layout)
self.app = app
self.menuAction = None
def install(self):
self.menuAction = self.app.ui.menutool.addAction(self.toolName)
self.menuAction.triggered.connect(self.run)
def run(self):
# Remove anything else in the GUI
self.app.ui.tool_scroll_area.takeWidget()
# Put ourself in the GUI
self.app.ui.tool_scroll_area.setWidget(self)
# Switch notebook to tool page
self.app.ui.notebook.setCurrentWidget(self.app.ui.tool_tab)
self.show()
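# Minimal subclass sketch (illustrative only, not part of FlatCAM): a tool
# needs a toolName plus whatever widgets it adds to self.layout; install() and
# run() above wire it into the Tools menu and the tool tab. 'app' is an
# assumed FlatCAMApp.App instance.
#   >>> class HelloTool(FlatCAMTool):
#   ...     toolName = "Hello Tool"
#   ...     def __init__(self, app):
#   ...         FlatCAMTool.__init__(self, app)
#   ...         self.layout.addWidget(QtGui.QLabel("Hello!"))
#   >>> HelloTool(app).install()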
class DblSidedTool(FlatCAMTool):
toolName = "Double-Sided PCB Tool"
def __init__(self, app):
FlatCAMTool.__init__(self, app)
## Title
title_label = QtGui.QLabel("<font size=4><b>%s</b></font>" % self.toolName)
self.layout.addWidget(title_label)
## Form Layout
form_layout = QtGui.QFormLayout()
self.layout.addLayout(form_layout)
## Layer to mirror
self.object_combo = QtGui.QComboBox()
self.object_combo.setModel(self.app.collection)
form_layout.addRow("Bottom Layer:", self.object_combo)
## Axis
self.mirror_axis = RadioSet([{'label': 'X', 'value': 'X'},
{'label': 'Y', 'value': 'Y'}])
form_layout.addRow("Mirror Axis:", self.mirror_axis)
## Axis Location
self.axis_location = RadioSet([{'label': 'Point', 'value': 'point'},
{'label': 'Box', 'value': 'box'}])
form_layout.addRow("Axis Location:", self.axis_location)
## Point/Box
self.point_box_container = QtGui.QVBoxLayout()
form_layout.addRow("Point/Box:", self.point_box_container)
self.point = EvalEntry()
self.point_box_container.addWidget(self.point)
self.box_combo = QtGui.QComboBox()
self.box_combo.setModel(self.app.collection)
self.point_box_container.addWidget(self.box_combo)
self.box_combo.hide()
## Alignment holes
self.alignment_holes = EvalEntry()
form_layout.addRow("Alignment Holes:", self.alignment_holes)
## Drill diameter for alignment holes
self.drill_dia = LengthEntry()
form_layout.addRow("Drill diam.:", self.drill_dia)
## Buttons
hlay = QtGui.QHBoxLayout()
self.layout.addLayout(hlay)
hlay.addStretch()
self.create_alignment_hole_button = QtGui.QPushButton("Create Alignment Drill")
self.mirror_object_button = QtGui.QPushButton("Mirror Object")
hlay.addWidget(self.create_alignment_hole_button)
hlay.addWidget(self.mirror_object_button)
self.layout.addStretch()
## Signals
self.create_alignment_hole_button.clicked.connect(self.on_create_alignment_holes)
self.mirror_object_button.clicked.connect(self.on_mirror)
self.axis_location.group_toggle_fn = self.on_toggle_pointbox
## Initialize form
self.mirror_axis.set_value('X')
self.axis_location.set_value('point')
def on_create_alignment_holes(self):
axis = self.mirror_axis.get_value()
mode = self.axis_location.get_value()
if mode == "point":
px, py = self.point.get_value()
else:
selection_index = self.box_combo.currentIndex()
bb_obj = self.app.collection.object_list[selection_index] # TODO: Direct access??
xmin, ymin, xmax, ymax = bb_obj.bounds()
px = 0.5*(xmin+xmax)
py = 0.5*(ymin+ymax)
xscale, yscale = {"X": (1.0, -1.0), "Y": (-1.0, 1.0)}[axis]
dia = self.drill_dia.get_value()
tools = {"1": {"C": dia}}
holes = self.alignment_holes.get_value()
drills = []
for hole in holes:
point = Point(hole)
point_mirror = affinity.scale(point, xscale, yscale, origin=(px, py))
drills.append({"point": point, "tool": "1"})
drills.append({"point": point_mirror, "tool": "1"})
def obj_init(obj_inst, app_inst):
obj_inst.tools = tools
obj_inst.drills = drills
obj_inst.create_geometry()
self.app.new_object("excellon", "Alignment Drills", obj_init)
def on_mirror(self):
selection_index = self.object_combo.currentIndex()
fcobj = self.app.collection.object_list[selection_index]
# For now, lets limit to Gerbers and Excellons.
# assert isinstance(gerb, FlatCAMGerber)
if not isinstance(fcobj, FlatCAMGerber) and not isinstance(fcobj, FlatCAMExcellon):
            self.app.info("ERROR: Only Gerber and Excellon objects can be mirrored.")
return
axis = self.mirror_axis.get_value()
mode = self.axis_location.get_value()
if mode == "point":
px, py = self.point.get_value()
else:
selection_index = self.box_combo.currentIndex()
bb_obj = self.app.collection.object_list[selection_index] # TODO: Direct access??
xmin, ymin, xmax, ymax = bb_obj.bounds()
px = 0.5*(xmin+xmax)
py = 0.5*(ymin+ymax)
fcobj.mirror(axis, [px, py])
fcobj.plot()
def on_toggle_pointbox(self):
if self.axis_location.get_value() == "point":
self.point.show()
self.box_combo.hide()
else:
self.point.hide()
self.box_combo.show()
class Measurement(FlatCAMTool):
toolName = "Measurement Tool"
def __init__(self, app):
FlatCAMTool.__init__(self, app)
# self.setContentsMargins(0, 0, 0, 0)
self.layout.setMargin(0)
self.layout.setContentsMargins(0, 0, 3, 0)
self.setSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Maximum)
self.point1 = None
self.point2 = None
self.label = QtGui.QLabel("Click on a reference point ...")
self.label.setFrameStyle(QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain)
self.label.setMargin(3)
self.layout.addWidget(self.label)
# self.layout.setMargin(0)
self.setVisible(False)
self.click_subscription = None
self.move_subscription = None
def install(self):
FlatCAMTool.install(self)
self.app.ui.right_layout.addWidget(self)
self.app.plotcanvas.mpl_connect('key_press_event', self.on_key_press)
def run(self):
self.toggle()
def on_click(self, event):
if self.point1 is None:
self.point1 = (event.xdata, event.ydata)
else:
self.point2 = copy(self.point1)
self.point1 = (event.xdata, event.ydata)
self.on_move(event)
def on_key_press(self, event):
if event.key == 'm':
self.toggle()
def toggle(self):
if self.isVisible():
self.setVisible(False)
self.app.plotcanvas.mpl_disconnect(self.move_subscription)
self.app.plotcanvas.mpl_disconnect(self.click_subscription)
else:
self.setVisible(True)
self.move_subscription = self.app.plotcanvas.mpl_connect('motion_notify_event', self.on_move)
self.click_subscription = self.app.plotcanvas.mpl_connect('button_press_event', self.on_click)
def on_move(self, event):
if self.point1 is None:
self.label.setText("Click on a reference point...")
else:
try:
dx = event.xdata - self.point1[0]
dy = event.ydata - self.point1[1]
d = sqrt(dx**2 + dy**2)
self.label.setText("D = %.4f D(x) = %.4f D(y) = %.4f" % (d, dx, dy))
except TypeError:
pass
if self.update is not None:
self.update()
| silasb/flatcam | FlatCAMTool.py | Python | mit | 8,497 | 0.002942 |
#!/usr/bin/env python
#fractoe.py
from AISuite.game import Game as Game
import AISuite.player as player
from AISuite.alphabeta import UPPER_BOUND, LOWER_BOUND, shallowest_first
import AISuite.recorder as recorder
import AISuite.PythonLibraries.prgm_lib as prgm_lib
import fractoe_tictactoe as tictactoe
Tictactoe = tictactoe.Tictactoe
from fractoe_heuristics import fractoe_heuristic
BOARD_SIZE = 3
def coor_split(num):
col = num % BOARD_SIZE
row = (num - col) / BOARD_SIZE
return [row,col]
def coor_splice(row,col):
return row*BOARD_SIZE + col
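# Doctest-style check (illustrative): coor_split and coor_splice are inverses
# for the 3x3 board, mapping a flat index to [row, col] and back:
#   >>> coor_split(7)
#   [2, 1]
#   >>> coor_splice(2, 1)
#   7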
class Fractoe(Game):
def __init__(self, player1, player2, be_quiet = False, show_game = False):
super(self.__class__, self).__init__(player1, player2, be_quiet)
self.grid = [[Tictactoe(), Tictactoe(), Tictactoe()], [Tictactoe(), Tictactoe(), Tictactoe()], [Tictactoe(), Tictactoe(), Tictactoe()]]
self.rows = BOARD_SIZE
self.cols = BOARD_SIZE
self.show_board = show_game
self.current_box = -1
self.current_row = -1
self.current_col = -1
self.thinking = False
self.boards_won = [-1,-1,-1,-1,-1,-1,-1,-1,-1]
self.player_token = [" ","X","O"]
self.last_moves = [ [[-1,-1],[-1,-1]], [[-1,-1], [-1,-1]] ]
def load_state_from_string(self, state_string):
class_data = state_string.split(";")
self.boards_won = [-1,-1,-1,-1,-1,-1,-1,-1,-1]
for num in range(9):
col = num % 3
row = (num - (num % 3))/3
self.grid[row][col].load(class_data[num])
self.boards_won[num] = self.grid[row][col].get_winner()
self.turn = int(class_data[9])
self.current_box = int(class_data[10])
if self.current_box != -1:
x = self.current_box
self.current_col = x % 3
self.current_row = (x - (x % 3))/3
else:
self.current_row = -1
self.current_col = -1
self.check_for_winner()
def __str__(self):
value = ""
for row in range(3):
for col in range(3):
value += str(self.grid[row][col]) + ';'
value += str(self.turn) + ';'
value += str(self.current_box)
return value
@staticmethod
def parse_state(game_state):
split_list = game_state.split(';')
        split_list = split_list[:-2] + [split_list[-1]]
return ';'.join(split_list)
def get_child_states(self):
root = str(self)
moves = self.get_child_moves()
states = []
for m in moves:
self.current_box = int(str(m)[0])
self.current_col = self.current_box % 3
self.current_row = (self.current_box - self.current_col)/3
num = int(str(m)[1])
self.try_placing_square(num)
self.turn += 1
states += [str(self)]
self.load_state_from_string(root)
return states
def get_child_moves(self):
children = []
if self.current_box == -1:
for box in range(9):
if self.boards_won[box] == -1:
for x in range(9):
out_c = box % 3
out_r = (box - out_c)/3
in_c = x % 3
in_r = (x - in_c)/3
if self.grid[out_r][out_c].get_square(in_r,in_c) == " ":
children += [str(box) + str(x)]
else:
for x in range(9):
out_c = self.current_box % 3
out_r = (self.current_box - out_c)/3
in_c = x % 3
in_r = (x - in_c)/3
if self.grid[out_r][out_c].get_square(in_r,in_c) == " ":
children += [str(self.current_box) + str(x)]
return children
def do_turn(self):
human = self.is_human_turn()
if human or self.show_board:
self.opg()
if not human:
if not self.quiet and not self.thinking:
print "Player" + str(self.get_player_num()) + " (the computer) is thinking..."
self.thinking = True
finished_playing = False
valid_moves = self.get_child_moves()
while not finished_playing:
if human:
if self.current_box != -1:
print "Current board is " + str(self.current_box)
self.grid[self.current_row][self.current_col].opg()
print "Player" + str(self.get_player_num()) + ", it is your turn to play."
print "Please enter a valid move string."
print "a valid move string is two numbers, such as 34"
print "this indicates the 4-th square on the 3-rd board (both 0-indexed)"
move = self.current_player().choose_move(self)
if human and move in self.escapes:
self.handle_escape(move)
elif str(move) in valid_moves:
self.current_box = int(str(move)[0])
self.current_col = self.current_box % 3
self.current_row = (self.current_box - self.current_col)/3
num = int(str(move)[1])
inner_col = num % 3
inner_row = (num - (num % 3))/3
turn_descriptor = [[self.current_row,self.current_col], [inner_row, inner_col]]
self.try_placing_square(num)
self.turn += 1
finished_playing = True
self.thinking = False
self.last_moves[self.get_player_num()-1] = turn_descriptor
else:
if human:
print 'That wasn\'t a valid move.'
print 'Valid moves look like: 08 or 27'
self.opg()
self.check_winner()
def make_new_instance(self):
return Fractoe(player.Player(), player.Player())
def opg(self):
prgm_lib.cls(100)
for x in range(len(self.grid)):
size = 0
string0 = ''
for z in range(3):
string1 = ''
string2 = ''
for y in range(len(self.grid[x])):
special = self.get_last_moves_in(x,y,z)
string3 = self.grid[x][y].get_row(z,special)
for var in range(len(string3) - 9 * len(special)):
string2 += "-"
string1 += string3 + " || "
string2 += " || "
print string1[:-4]
if z != 2:
print string2[:-4]
size = len(string2)-4
for var in range(size):
string0 += "="
if x != 2:
print string0
print
def check_for_winner(self):
for x in range(3):
if self.boards_won[3*x] == self.boards_won[3*x+1] == self.boards_won[3*x+2] > 0:
self.winner = self.boards_won[3*x]
if self.boards_won[x] == self.boards_won[x+3] == self.boards_won[x+6] > 0:
self.winner = self.boards_won[x]
if self.boards_won[0] == self.boards_won[4] == self.boards_won[8] > 0:
self.winner = self.boards_won[4]
if self.boards_won[2] == self.boards_won[4] == self.boards_won[6] > 0:
self.winner = self.boards_won[4]
if self.winner == -1 and self.check_full():
self.winner = 0
return self.winner
def check_full(self):
full = True
for x in self.boards_won:
if x == -1:
full = False
return full
def is_board_won(self, board):
return self.boards_won[board]
def get_current_box(self):
return self.current_box
def get_board_string(self,row,col):
return str(self.grid[row][col])
def get_last_moves_in(self,x,y,z):
special = []
if self.last_moves[0][0][0] == x and self.last_moves[0][0][1] == y and self.last_moves[0][1][0] == z:
special += [self.last_moves[0][1][1]]
if self.last_moves[1][0][0] == x and self.last_moves[1][0][1] == y and self.last_moves[1][1][0] == z:
special += [self.last_moves[1][1][1]]
return special
def try_placing_square(self, num):
inner_col = num % 3
inner_row = (num - (num % 3))/3
value = False
if self.grid[self.current_row][self.current_col].get_square(inner_row,inner_col) == " ":
token = self.player_token[self.get_player_num()]
self.grid[self.current_row][self.current_col].set_square(inner_row,inner_col,token)
if self.grid[self.current_row][self.current_col].is_finished():
box_winner = self.grid[self.current_row][self.current_col].get_winner()
self.boards_won[self.current_box] = box_winner
self.check_for_winner()
if not self.grid[inner_row][inner_col].is_finished():
self.current_box = num
self.current_row = inner_row
self.current_col = inner_col
else:
self.current_box = -1
self.current_row = -1
self.current_col = -1
value = True
return value
if __name__ == "__main__":
option = "simulate_d2_end"
filename = "fr_game_data_d2.txt"
num_games = 0
win_counts = [0,0,0]
if option == "simulate_all":
filename = "fr_game_data_all.txt"
num_games = 10000
FILE = open(filename, 'a')
for x in range(num_games):
g = Fractoe(player.RandomAI(),player.RandomAI(),True)
w = g.play()
g.record_history_to_file(FILE)
if x % 100 == 0:
print x
win_counts[w] += 1
FILE.close()
elif option == "simulate_end":
filename = "fr_game_data.txt"
num_games = 50000
FILE = open(filename, 'a')
for x in range(num_games):
g = Fractoe(player.RandomAI(),player.RandomAI(),True)
w = g.play()
FILE.write(str(g) + '~' + str(w) + '\n')
if x % 100 == 0:
print x
win_counts[w] += 1
FILE.close()
elif option == "simulate_d2_end":
filename = "fr_game_data_d2.txt"
num_games = 1000
FILE = open(filename, 'a')
for x in range(num_games):
ai1 = player.AI_ABPruning(fractoe_heuristic, depth_lim = 2)
ai1.set_child_selector(shallowest_first)
ai2 = player.AI_ABPruning(fractoe_heuristic, depth_lim = 2)
ai2.set_child_selector(shallowest_first)
g = Fractoe(ai1,ai2,True)
w = g.play()
FILE.write(str(g) + '~' + str(w) + '\n')
if x % 10 == 0:
print x
win_counts[w] += 1
FILE.close()
elif option == "human_2p":
g = Fractoe(player.Human(), player.Human())
g.play()
elif option == "human_1pX":
ai = player.AI_ABPruning(fractoe_heuristic, depth_lim = 5)
ai.set_child_selector(shallowest_first)
g = Fractoe(player.Human(), ai)
g.play()
elif option == "human_1pO":
ai = player.AI_ABPruning(fractoe_heuristic, depth_lim = 5)
ai.set_child_selector(shallowest_first)
g = Fractoe(ai, player.Human())
g.play()
elif option == "recorder_test":
rec = recorder.Recorder(filename, BOARD_SIZE**2, BOARD_SIZE**2, ['X','O',' '])
num_games = 5
for x in range(num_games):
print "Beginning game %i" % (x)
ai1 = player.AI_ABPruning(rec.recorder_heuristic, depth_lim = 4)
ai1.set_child_selector(shallowest_first)
g = Fractoe(ai1, player.RandomAI(), False, True)
w = g.play()
win_counts[w] += 1
elif option == "heuristic_test":
num_games = 5
for x in range(num_games):
print "Beginning game %i" % (x)
ai1 = player.AI_ABPruning(fractoe_heuristic, depth_lim = 2)
ai1.set_child_selector(shallowest_first)
ai2 = player.AI_ABPruning(fractoe_heuristic, depth_lim = 2)
ai2.set_child_selector(shallowest_first)
g = Fractoe(ai1,ai2,False, True)
w = g.play()
win_counts[w] += 1
elif option == "vs_mode":
rec = recorder.Recorder(filename, BOARD_SIZE**2, BOARD_SIZE**2, ['X','O',' '])
num_games = 5
for x in range(num_games):
print "Beginning game %i" % (x)
ai1 = player.AI_ABPruning(rec.recorder_heuristic, depth_lim = 5)
ai1.set_child_selector(shallowest_first)
ai2 = player.AI_ABPruning(fractoe_heuristic, depth_lim = 3)
ai2.set_child_selector(shallowest_first)
g = Fractoe(ai1,ai2,False, True)
w = g.play()
win_counts[w] += 1
print win_counts
for w in win_counts:
print str(w) + "/" + str(num_games) + " : " + str(w/float(num_games))
print
| blamed-cloud/PythonGames | fractoe.py | Python | mit | 10,693 | 0.041242 |
#python
import k3d
import testing
setup = testing.setup_bitmap_modifier_test("BitmapReader", "BitmapAdd")
setup.source.file = k3d.filesystem.generic_path(testing.source_path() + "/bitmaps/" + "test_rgb_8.png")
setup.modifier.value = 0.5
testing.require_similar_bitmap(setup.document, setup.modifier.get_property("output_bitmap"), "BitmapAdd", 0)
| barche/k3d | tests/bitmap/bitmap.modifier.BitmapAdd.py | Python | gpl-2.0 | 350 | 0.011429 |
# python
# This file is generated by a program (mib2py).
import HPR_IP_MIB
OIDMAP = {
'1.3.6.1.2.1.34.6.1.5': HPR_IP_MIB.hprIp,
'1.3.6.1.2.1.34.6.1.5.1.1.1': HPR_IP_MIB.hprIpActiveLsLsName,
'1.3.6.1.2.1.34.6.1.5.1.1.2': HPR_IP_MIB.hprIpActiveLsAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.1.1.3': HPR_IP_MIB.hprIpActiveLsUdpPackets,
'1.3.6.1.2.1.34.6.1.5.2.1.1': HPR_IP_MIB.hprIpAppnPortName,
'1.3.6.1.2.1.34.6.1.5.2.1.2': HPR_IP_MIB.hprIpAppnPortAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.2.1.3': HPR_IP_MIB.hprIpAppnPortTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.3.1.1': HPR_IP_MIB.hprIpLsLsName,
'1.3.6.1.2.1.34.6.1.5.3.1.2': HPR_IP_MIB.hprIpLsAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.3.1.3': HPR_IP_MIB.hprIpLsTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.3.1.4': HPR_IP_MIB.hprIpLsRowStatus,
'1.3.6.1.2.1.34.6.1.5.4.1.1': HPR_IP_MIB.hprIpCnVrnName,
'1.3.6.1.2.1.34.6.1.5.4.1.2': HPR_IP_MIB.hprIpCnAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.4.1.3': HPR_IP_MIB.hprIpCnTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.4.1.4': HPR_IP_MIB.hprIpCnRowStatus,
'1.3.6.1.2.1.34.6.2.2.5': HPR_IP_MIB.hprIpMonitoringGroup,
'1.3.6.1.2.1.34.6.2.2.6': HPR_IP_MIB.hprIpConfigurationGroup,
}
| xiangke/pycopia | mibs/pycopia/mibs/HPR_IP_MIB_OID.py | Python | lgpl-2.1 | 1,140 | 0.015789 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitConnectionsOperations:
"""ExpressRouteCircuitConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified Express Route Circuit Connection from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
connection_name=connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
async def get(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitConnection":
"""Gets the specified Express Route Circuit Connection from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.ExpressRouteCircuitConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
express_route_circuit_connection_parameters: "_models.ExpressRouteCircuitConnection",
**kwargs: Any
) -> "_models.ExpressRouteCircuitConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(express_route_circuit_connection_parameters, 'ExpressRouteCircuitConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
express_route_circuit_connection_parameters: "_models.ExpressRouteCircuitConnection",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitConnection"]:
"""Creates or updates a Express Route Circuit Connection in the specified express route circuits.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the express route circuit connection.
:type connection_name: str
:param express_route_circuit_connection_parameters: Parameters supplied to the create or update
express route circuit connection operation.
:type express_route_circuit_connection_parameters: ~azure.mgmt.network.v2018_06_01.models.ExpressRouteCircuitConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitConnection or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.ExpressRouteCircuitConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
connection_name=connection_name,
express_route_circuit_connection_parameters=express_route_circuit_connection_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
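# Illustrative usage sketch (hand-written, not generated code; the resource
# names are assumptions): driving the long-running delete from an async client.
#
#   async def remove_connection(network_client):
#       poller = await network_client.express_route_circuit_connections.begin_delete(
#           resource_group_name="my-rg",
#           circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering",
#           connection_name="my-connection",
#       )
#       await poller.result()  # waits for the LRO to complete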
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_express_route_circuit_connections_operations.py | Python | mit | 19,491 | 0.005541 |
#!/usr/bin/env python2.7
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
''' Sample usage of function 'acl_list'.
Print the function's documentation.
Apply the function to a network device.
Print the function output.
If no ACLs are found then retry with a different network device.
'''
from __future__ import print_function as _print_function
from pydoc import plain
from pydoc import render_doc as doc
from basics.context import sys_exit, EX_OK, EX_TEMPFAIL
from basics.acl import acl_list, inventory_acl
def demonstrate(device_name):
''' Apply function 'acl_list' to the specified device.
Return True if an ACL was found.
'''
print('acl_list(' + device_name, end=')\n')
result = acl_list(device_name)
print(result)
return bool(result)
def main():
''' Select a device and demonstrate.'''
print(plain(doc(acl_list)))
inventory = inventory_acl()
if not inventory:
print('There are no ACL capable devices to examine. Demonstration cancelled.')
else:
for device_name in inventory:
if demonstrate(device_name):
return EX_OK
return EX_TEMPFAIL
if __name__ == "__main__":
sys_exit(main())
| tbarrongh/cosc-learning-labs | src/learning_lab/05_acl_list.py | Python | apache-2.0 | 1,742 | 0.006889 |
#!/usr/bin/python
import urllib
import httplib
import time
import datetime
id = '123456789012345'
server = 'localhost:5055'
points = [
('2017-01-01 00:00:00', 59.93211887, 30.33050537, 0.0),
('2017-01-01 00:05:00', 59.93266715, 30.33190012, 50.0),
('2017-01-01 00:10:00', 59.93329069, 30.33333778, 50.0),
('2017-01-01 00:15:00', 59.93390346, 30.33468962, 0.0),
('2017-01-01 00:20:00', 59.93390346, 30.33468962, 0.0),
('2017-01-01 00:25:00', 59.93416146, 30.33580542, 50.0),
('2017-01-01 00:30:00', 59.93389271, 30.33790827, 50.0),
('2017-01-01 00:35:00', 59.93357020, 30.34033298, 50.0),
('2017-01-01 00:40:00', 59.93330144, 30.34252167, 0.0),
('2017-01-01 00:44:00', 59.93355945, 30.34413099, 50.0),
('2017-01-01 00:50:00', 59.93458072, 30.34458160, 0.0),
('2017-01-01 00:55:00', 59.93458072, 30.34458160, 0.0),
]
def send(conn, time, lat, lon, speed):
params = (('id', id), ('timestamp', int(time)), ('lat', lat), ('lon', lon), ('speed', speed))
conn.request('POST', '?' + urllib.urlencode(params))
conn.getresponse().read()
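# For reference (an assumption based on the params built above), each call
# issues a request like the following; the exact timestamp value depends on
# the local timezone used by mktime():
#   POST /?id=123456789012345&timestamp=1483228800&lat=59.93211887&lon=30.33050537&speed=0.0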
conn = httplib.HTTPConnection(server)
for i in range(0, len(points)):
(moment, lat, lon, speed) = points[i]
send(conn, time.mktime(datetime.datetime.strptime(moment, "%Y-%m-%d %H:%M:%S").timetuple()), lat, lon, speed)
| tananaev/traccar | tools/test-trips.py | Python | apache-2.0 | 1,319 | 0.003033 |
"""
WSGI config for roastdog project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.base")
application = Cling(get_wsgi_application())
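# Illustrative (assumed) invocation with a WSGI server such as gunicorn,
# assuming this module is importable as config.wsgi:
#   gunicorn config.wsgi:application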
| chrisvans/roastdoge | config/wsgi.py | Python | mit | 422 | 0 |
#!-*- coding:utf-8 -*-
import time
def retries(times=3, timeout=1):
"""对未捕获异常进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
try:
return func(*args, **kw)
                except Exception:
att += timeout
if retry < times:
time.sleep(att)
return _wrapper
return decorator
def empty_content_retries(times=3, timeout=2):
"""响应为空的进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
ret = func(*args, **kw)
if ret:
return ret
att += timeout
time.sleep(att)
return _wrapper
return decorator
def use_logging(level):
"""带参数的装饰器"""
def decorator(func):
print func.__name__
def wrapper(*args, **kwargs):
if level == "warn":
print ("level:%s, %s is running" % (level, func.__name__))
elif level == "info":
print ("level:%s, %s is running" % (level, func.__name__))
return func(*args, **kwargs)
return wrapper
return decorator
if __name__ == "__main__":
@use_logging(level="warn")
def foo(name='foo'):
print("i am %s" % name)
foo() | wanghuafeng/spider_tools | decorator.py | Python | mit | 1,524 | 0.004076 |
# This file is part of django-doubleoptin-contactform.
# django-doubleoptin-contactform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# django-doubleoptin-contactform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with django-doubleoptin-contactform. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns('',
(r'^$', 'doptincf.views.contact'),
(r'^received/$', direct_to_template, {'template': 'contact/received.html'}),
(r'^(?P<contact_id>\d+)/verify/$', 'doptincf.views.verify'),
    (r'^verified/$', direct_to_template, {'template': 'contact/verified.html'}),
)
| MyersGer/django-doubleoptin-contactform | doptincf/urls.py | Python | gpl-3.0 | 1,171 | 0.005978 |
# coding: utf-8
example_traceback = """*** HARAKIRI ON WORKER 16 (pid: 2259, try: 1) ***
*** uWSGI Python tracebacker output ***
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = Thread-2 filename = /home/project/envs/project_prod/lib/python2.6/site-packages/raven/transport/threaded.py lineno = 79 function = _target line = record = self._queue.get()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/Queue.py lineno = 168 function = get line = self.not_empty.wait()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 239 function = wait line = waiter.acquire()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = NR-Harvest-Thread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/core/agent.py lineno = 511 function = _harvest_loop line = self._harvest_shutdown.wait(delay)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 395 function = wait line = self.__cond.wait(timeout)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 258 function = wait line = _sleep(delay)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 828 function = __call__ line = result = application(environ, _start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/function_trace.py lineno = 93 function = literal_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 717 function = __call__ line = return self._nr_next_object(environ, start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/wsgi.py lineno = 241 function = __call__ line = response = self.get_response(request)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/base.py lineno = 111 function = get_response line = response = callback(request, *callback_args, **callback_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/framework_django.py lineno = 475 function = wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/views/decorators/csrf.py lineno = 77 function = wrapped_view line = return view_func(*args, **kwargs)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/views.py lineno = 74 function = complete line = return backend.complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/vkontakte.py lineno = 59 function = complete line = redirect = super(VkontakteBackend, self).complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 175 function = complete line = self.fill_extra_fields(request, extra)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 114 function = fill_extra_fields line = form = str_to_class(settings.EXTRA_FORM)(data)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/forms.py lineno = 43 function = __init__ line = files = {'avatar': ContentFile(requests.get(url).content, name=str(uuid.uuid4()))}
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 55 function = get line = return request('get', url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 44 function = request line = return session.request(method=method, url=url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 335 function = request line = resp = self.send(prep, **send_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 438 function = send line = r = adapter.send(request, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/adapters.py lineno = 292 function = send line = timeout=timeout
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 428 function = urlopen line = body=body, headers=headers)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 280 function = _make_request line = conn.request(method, url, **httplib_request_kw)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 914 function = request line = self._send_request(method, url, body, headers)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 951 function = _send_request line = self.endheaders()
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/external_httplib.py lineno = 49 function = httplib_endheaders_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 908 function = endheaders line = self._send_output()
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 780 function = _send_output line = self.send(msg)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 739 function = send line = self.connect()
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/external_httplib.py lineno = 25 function = httplib_connect_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 720 function = connect line = self.timeout)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py lineno = 554 function = create_connection line = sock.connect(sa)
*** backtrace of 2259 ***
/home/project/envs/project_prod/bin/uwsgi(uwsgi_backtrace+0x25) [0x456085]
/home/project/envs/project_prod/bin/uwsgi(uwsgi_segfault+0x21) [0x456161]
/lib/libc.so.6(+0x32230) [0x7f2c43376230]
/lib/libc.so.6(+0x108052) [0x7f2c4344c052]
/home/project/envs/project_prod/bin/uwsgi(uwsgi_python_tracebacker_thread+0x430) [0x471950]
/lib/libpthread.so.0(+0x68ca) [0x7f2c44b4a8ca]
/lib/libc.so.6(clone+0x6d) [0x7f2c43413b6d]"""
from raven_harakiri import convert_traceback
def test_convert():
frames = convert_traceback(example_traceback)
assert frames[-1] == {
'abs_path': '/home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py',
'context_line': 'sock.connect(sa)',
'filename': '/home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py',
'function': 'create_connection',
'lineno': 554,
'module': None,
'post_context': [],
'pre_context': [],
'vars': {}
}
| futurecolors/raven-harakiri | test_harakiri.py | Python | mit | 10,635 | 0.00489 |
# devices/md.py
#
# Copyright (C) 2009-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <dlehman@redhat.com>
#
import os
import six
from gi.repository import BlockDev as blockdev
from ..devicelibs import mdraid, raid
from .. import errors
from .. import util
from ..flags import flags
from ..storage_log import log_method_call
from .. import udev
from ..size import Size
from ..tasks import availability
import logging
log = logging.getLogger("blivet")
from .storage import StorageDevice
from .container import ContainerDevice
from .raid import RaidDevice
class MDRaidArrayDevice(ContainerDevice, RaidDevice):
""" An mdraid (Linux RAID) device. """
_type = "mdarray"
_packages = ["mdadm"]
_devDir = "/dev/md"
_formatClassName = property(lambda s: "mdmember")
_formatUUIDAttr = property(lambda s: "mdUuid")
_external_dependencies = [availability.BLOCKDEV_MDRAID_PLUGIN]
def __init__(self, name, level=None, major=None, minor=None, size=None,
memberDevices=None, totalDevices=None,
uuid=None, fmt=None, exists=False, metadataVersion=None,
parents=None, sysfsPath=''):
"""
:param name: the device name (generally a device node's basename)
:type name: str
:keyword exists: does this device exist?
:type exists: bool
:keyword size: the device's size
:type size: :class:`~.size.Size`
:keyword parents: a list of parent devices
:type parents: list of :class:`StorageDevice`
:keyword fmt: this device's formatting
:type fmt: :class:`~.formats.DeviceFormat` or a subclass of it
:keyword sysfsPath: sysfs device path
:type sysfsPath: str
:keyword uuid: the device UUID
:type uuid: str
:keyword level: the device's RAID level
:type level: any valid RAID level descriptor
:keyword int memberDevices: the number of active member devices
:keyword int totalDevices: the total number of member devices
:keyword metadataVersion: the version of the device's md metadata
:type metadataVersion: str (eg: "0.90")
:keyword minor: the device minor (obsolete?)
:type minor: int
"""
# pylint: disable=unused-argument
# These attributes are used by _addParent, so they must be initialized
# prior to instantiating the superclass.
self._memberDevices = 0 # the number of active (non-spare) members
self._totalDevices = 0 # the total number of members
# avoid attribute-defined-outside-init pylint warning
self._level = None
super(MDRaidArrayDevice, self).__init__(name, fmt=fmt, uuid=uuid,
exists=exists, size=size,
parents=parents,
sysfsPath=sysfsPath)
try:
self.level = level
except errors.DeviceError as e:
# Could not set the level, so set loose the parents that were
# added in superclass constructor.
for dev in self.parents:
dev.removeChild()
raise e
self.uuid = uuid
self._totalDevices = util.numeric_type(totalDevices)
self.memberDevices = util.numeric_type(memberDevices)
self.chunkSize = mdraid.MD_CHUNK_SIZE
if not self.exists and not isinstance(metadataVersion, str):
self.metadataVersion = "default"
else:
self.metadataVersion = metadataVersion
if self.parents and self.parents[0].type == "mdcontainer" and self.type != "mdbiosraidarray":
raise errors.DeviceError("A device with mdcontainer member must be mdbiosraidarray.")
if self.exists and self.mdadmFormatUUID and not flags.testing:
# this is a hack to work around mdadm's insistence on giving
# really high minors to arrays it has no config entry for
with open("/etc/mdadm.conf", "a") as c:
c.write("ARRAY %s UUID=%s\n" % (self.path, self.mdadmFormatUUID))
@property
def mdadmFormatUUID(self):
""" This array's UUID, formatted for external use.
:returns: the array's UUID in mdadm format, if available
:rtype: str or NoneType
"""
formatted_uuid = None
if self.uuid is not None:
try:
formatted_uuid = blockdev.md.get_md_uuid(self.uuid)
except blockdev.MDRaidError:
pass
return formatted_uuid
@property
def level(self):
""" Return the raid level
:returns: raid level value
:rtype: an object that represents a RAID level
"""
return self._level
@property
def _levels(self):
""" Allowed RAID level for this type of device."""
return mdraid.RAID_levels
@level.setter
def level(self, value):
""" Set the RAID level and enforce restrictions based on it.
:param value: new raid level
        :type value: object
:raises :class:`~.errors.DeviceError`: if value does not describe
a valid RAID level
:returns: None
"""
try:
level = self._getLevel(value, self._levels)
except ValueError as e:
raise errors.DeviceError(e)
self._level = level
@property
def createBitmap(self):
""" Whether or not a bitmap should be created on the array.
        If the array is sufficiently small, a bitmap yields no benefit.
If the array has no redundancy, a bitmap is just pointless.
"""
try:
return self.level.has_redundancy() and self.size >= Size(1000) and self.format.type != "swap"
except errors.RaidError:
# If has_redundancy() raises an exception then this device has
# a level for which the redundancy question is meaningless. In
# that case, creating a write-intent bitmap would be a meaningless
# action.
return False
def getSuperBlockSize(self, raw_array_size):
"""Estimate the superblock size for a member of an array,
given the total available memory for this array and raid level.
:param raw_array_size: total available for this array and level
:type raw_array_size: :class:`~.size.Size`
:returns: estimated superblock size
:rtype: :class:`~.size.Size`
"""
return blockdev.md.get_superblock_size(raw_array_size,
version=self.metadataVersion)
@property
def size(self):
"""Returns the actual or estimated size depending on whether or
not the array exists.
"""
if not self.exists or not self.mediaPresent:
try:
size = self.level.get_size([d.size for d in self.devices],
self.memberDevices,
self.chunkSize,
self.getSuperBlockSize)
except (blockdev.MDRaidError, errors.RaidError) as e:
log.info("could not calculate size of device %s for raid level %s: %s", self.name, self.level, e)
size = Size(0)
log.debug("non-existent RAID %s size == %s", self.level, size)
else:
size = self.currentSize
log.debug("existing RAID %s size == %s", self.level, size)
return size
def updateSize(self):
# pylint: disable=bad-super-call
super(ContainerDevice, self).updateSize()
@property
def description(self):
levelstr = self.level.nick if self.level.nick else self.level.name
return "MDRAID set (%s)" % levelstr
def __repr__(self):
s = StorageDevice.__repr__(self)
s += (" level = %(level)s spares = %(spares)s\n"
" members = %(memberDevices)s\n"
" total devices = %(totalDevices)s"
" metadata version = %(metadataVersion)s" %
{"level": self.level, "spares": self.spares,
"memberDevices": self.memberDevices,
"totalDevices": self.totalDevices,
"metadataVersion": self.metadataVersion})
return s
@property
def dict(self):
d = super(MDRaidArrayDevice, self).dict
d.update({"level": str(self.level),
"spares": self.spares, "memberDevices": self.memberDevices,
"totalDevices": self.totalDevices,
"metadataVersion": self.metadataVersion})
return d
@property
def mdadmConfEntry(self):
""" This array's mdadm.conf entry. """
uuid = self.mdadmFormatUUID
if self.memberDevices is None or not uuid:
raise errors.DeviceError("array is not fully defined", self.name)
fmt = "ARRAY %s level=%s num-devices=%d UUID=%s\n"
return fmt % (self.path, self.level, self.memberDevices, uuid)
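    # Illustrative result of the property above (the path, level, device
    # count, and UUID are made-up example values; real ones come from the
    # array itself):
    #   ARRAY /dev/md/data level=raid1 num-devices=2 UUID=6db72a47:f1b5e0e8:4d1c1e81:fa3a3b9c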
@property
def totalDevices(self):
""" Total number of devices in the array, including spares. """
if not self.exists:
return self._totalDevices
else:
return len(self.parents)
def _getMemberDevices(self):
return self._memberDevices
def _setMemberDevices(self, number):
if not isinstance(number, six.integer_types):
raise ValueError("memberDevices must be an integer")
if not self.exists and number > self.totalDevices:
raise ValueError("memberDevices cannot be greater than totalDevices")
self._memberDevices = number
memberDevices = property(_getMemberDevices, _setMemberDevices,
doc="number of member devices")
def _getSpares(self):
spares = 0
if self.memberDevices is not None:
if self.totalDevices is not None and \
self.totalDevices > self.memberDevices:
spares = self.totalDevices - self.memberDevices
elif self.totalDevices is None:
spares = self.memberDevices
self._totalDevices = self.memberDevices
return spares
def _setSpares(self, spares):
max_spares = self.level.get_max_spares(len(self.parents))
if spares > max_spares:
log.debug("failed to set new spares value %d (max is %d)",
spares, max_spares)
raise errors.DeviceError("new spares value is too large")
if self.totalDevices > spares:
self.memberDevices = self.totalDevices - spares
spares = property(_getSpares, _setSpares)
def _addParent(self, member):
super(MDRaidArrayDevice, self)._addParent(member)
if self.status and member.format.exists:
# we always probe since the device may not be set up when we want
# information about it
self._size = self.currentSize
# These should be incremented when adding new member devices except
# during devicetree.populate. When detecting existing arrays we will
# have gotten these values from udev and will use them to determine
# whether we found all of the members, so we shouldn't change them in
# that case.
if not member.format.exists:
self._totalDevices += 1
self.memberDevices += 1
def _removeParent(self, member):
error_msg = self._validateParentRemoval(self.level, member)
if error_msg:
raise errors.DeviceError(error_msg)
super(MDRaidArrayDevice, self)._removeParent(member)
self.memberDevices -= 1
@property
def _trueStatusStrings(self):
""" Strings in state file for which status() should return True."""
return ("clean", "active", "active-idle", "readonly", "read-auto")
@property
def status(self):
""" This device's status.
For now, this should return a boolean:
True the device is open and ready for use
False the device is not open
"""
# check the status in sysfs
status = False
if not self.exists:
return status
if os.path.exists(self.path) and not self.sysfsPath:
# the array has been activated from outside of blivet
self.updateSysfsPath()
# make sure the active array is the one we expect
info = udev.get_device(self.sysfsPath)
uuid = udev.device_get_md_uuid(info)
if uuid and uuid != self.uuid:
log.warning("md array %s is active, but has UUID %s -- not %s",
self.path, uuid, self.uuid)
self.sysfsPath = ""
return status
state_file = "%s/md/array_state" % self.sysfsPath
try:
state = open(state_file).read().strip()
if state in self._trueStatusStrings:
status = True
except IOError:
status = False
return status
def memberStatus(self, member):
if not (self.status and member.status):
return
member_name = os.path.basename(member.sysfsPath)
path = "/sys/%s/md/dev-%s/state" % (self.sysfsPath, member_name)
try:
state = open(path).read().strip()
except IOError:
state = None
return state
@property
def degraded(self):
""" Return True if the array is running in degraded mode. """
rc = False
degraded_file = "%s/md/degraded" % self.sysfsPath
if os.access(degraded_file, os.R_OK):
val = open(degraded_file).read().strip()
if val == "1":
rc = True
return rc
@property
def members(self):
""" Returns this array's members.
:rtype: list of :class:`StorageDevice`
"""
return list(self.parents)
@property
def complete(self):
""" An MDRaidArrayDevice is complete if it has at least as many
component devices as its count of active devices.
"""
return (self.memberDevices <= len(self.members)) or not self.exists
@property
def devices(self):
""" Return a list of this array's member device instances. """
return self.parents
def _postSetup(self):
super(MDRaidArrayDevice, self)._postSetup()
self.updateSysfsPath()
def _setup(self, orig=False):
""" Open, or set up, a device. """
log_method_call(self, self.name, orig=orig, status=self.status,
controllable=self.controllable)
disks = []
for member in self.devices:
member.setup(orig=orig)
disks.append(member.path)
blockdev.md.activate(self.path, members=disks, uuid=self.mdadmFormatUUID)
def _postTeardown(self, recursive=False):
super(MDRaidArrayDevice, self)._postTeardown(recursive=recursive)
        # mdadm reuses minors indiscriminately when there is no mdadm.conf, so
# we need to clear the sysfs path now so our status method continues to
# give valid results
self.sysfsPath = ''
def teardown(self, recursive=None):
""" Close, or tear down, a device. """
log_method_call(self, self.name, status=self.status,
controllable=self.controllable)
# we don't really care about the return value of _preTeardown here.
# see comment just above md_deactivate call
self._preTeardown(recursive=recursive)
# We don't really care what the array's state is. If the device
# file exists, we want to deactivate it. mdraid has too many
# states.
if self.exists and os.path.exists(self.path):
blockdev.md.deactivate(self.path)
self._postTeardown(recursive=recursive)
def preCommitFixup(self, *args, **kwargs):
""" Determine create parameters for this set """
mountpoints = kwargs.pop("mountpoints")
log_method_call(self, self.name, mountpoints)
if "/boot" in mountpoints:
bootmountpoint = "/boot"
else:
bootmountpoint = "/"
        # If this array is used to boot from, we cannot use 1.1 metadata
if getattr(self.format, "mountpoint", None) == bootmountpoint or \
getattr(self.format, "mountpoint", None) == "/boot/efi" or \
self.format.type == "prepboot":
self.metadataVersion = "1.0"
def _postCreate(self):
# this is critical since our status method requires a valid sysfs path
self.exists = True # this is needed to run updateSysfsPath
self.updateSysfsPath()
StorageDevice._postCreate(self)
# update our uuid attribute with the new array's UUID
# XXX this won't work for containers since no UUID is reported for them
info = blockdev.md.detail(self.path)
self.uuid = info.uuid
for member in self.devices:
member.format.mdUuid = self.uuid
def _create(self):
""" Create the device. """
log_method_call(self, self.name, status=self.status)
disks = [disk.path for disk in self.devices]
spares = len(self.devices) - self.memberDevices
level = None
if self.level:
level = str(self.level)
blockdev.md.create(self.path, level, disks, spares,
version=self.metadataVersion,
bitmap=self.createBitmap)
udev.settle()
def _remove(self, member):
self.setup()
# see if the device must be marked as failed before it can be removed
fail = (self.memberStatus(member) == "in_sync")
blockdev.md.remove(self.path, member.path, fail)
def _add(self, member):
""" Add a member device to an array.
:param str member: the member's path
:raises: blockdev.MDRaidError
"""
self.setup()
raid_devices = None
try:
if not self.level.has_redundancy():
if self.level is not raid.Linear:
raid_devices = int(blockdev.md.detail(self.name).raid_devices) + 1
except errors.RaidError:
pass
blockdev.md.add(self.path, member.path, raid_devs=raid_devices)
@property
def formatArgs(self):
formatArgs = []
if self.format.type == "ext2":
recommended_stride = self.level.get_recommended_stride(self.memberDevices)
if recommended_stride:
formatArgs = ['-R', 'stride=%d' % recommended_stride ]
return formatArgs
@property
def model(self):
return self.description
def dracutSetupArgs(self):
return set(["rd.md.uuid=%s" % self.mdadmFormatUUID])
def populateKSData(self, data):
if self.isDisk:
return
super(MDRaidArrayDevice, self).populateKSData(data)
data.level = self.level.name
data.spares = self.spares
data.members = ["raid.%d" % p.id for p in self.parents]
data.preexist = self.exists
data.device = self.name
class MDContainerDevice(MDRaidArrayDevice):
_type = "mdcontainer"
def __init__(self, name, **kwargs):
kwargs['level'] = raid.Container
super(MDContainerDevice, self).__init__(name, **kwargs)
@property
def _levels(self):
return mdraid.MDRaidLevels(["container"])
@property
def description(self):
return "BIOS RAID container"
@property
def mdadmConfEntry(self):
uuid = self.mdadmFormatUUID
if not uuid:
raise errors.DeviceError("array is not fully defined", self.name)
return "ARRAY %s UUID=%s\n" % (self.path, uuid)
@property
def _trueStatusStrings(self):
return ("clean", "active", "active-idle", "readonly", "read-auto", "inactive")
def teardown(self, recursive=None):
log_method_call(self, self.name, status=self.status,
controllable=self.controllable)
# we don't really care about the return value of _preTeardown here.
# see comment just above md_deactivate call
self._preTeardown(recursive=recursive)
# Since BIOS RAID sets (containers in mdraid terminology) never change
# there is no need to stop them and later restart them. Not stopping
# (and thus also not starting) them also works around bug 523334
return
@property
def mediaPresent(self):
# Containers should not get any format handling done
# (the device node does not allow read / write calls)
return False
class MDBiosRaidArrayDevice(MDRaidArrayDevice):
_type = "mdbiosraidarray"
_formatClassName = property(lambda s: None)
_isDisk = True
_partitionable = True
def __init__(self, name, **kwargs):
super(MDBiosRaidArrayDevice, self).__init__(name, **kwargs)
        # For container members, probe the size now, as we cannot determine
        # it once the array is torn down.
self._size = self.currentSize
@property
def size(self):
        # For container members, return the probed size, as we cannot
        # determine it once the array is torn down.
return self._size
@property
def description(self):
levelstr = self.level.nick if self.level.nick else self.level.name
return "BIOS RAID set (%s)" % levelstr
@property
def mdadmConfEntry(self):
uuid = self.mdadmFormatUUID
if not uuid:
raise errors.DeviceError("array is not fully defined", self.name)
return "ARRAY %s UUID=%s\n" % (self.path, uuid)
@property
def members(self):
# If the array is a BIOS RAID array then its unique parent
# is a container and its actual member devices are the
# container's parents.
return list(self.parents[0].parents)
def teardown(self, recursive=None):
log_method_call(self, self.name, status=self.status,
controllable=self.controllable)
# we don't really care about the return value of _preTeardown here.
# see comment just above md_deactivate call
self._preTeardown(recursive=recursive)
# Since BIOS RAID sets (containers in mdraid terminology) never change
# there is no need to stop them and later restart them. Not stopping
# (and thus also not starting) them also works around bug 523334
return
| bcl/blivet | blivet/devices/md.py | Python | gpl-2.0 | 23,663 | 0.000887 |
from django.conf.urls import include, url
urlpatterns = (
url(r"^", include("pinax.forums.urls", namespace="pinax_forums")),
)
| pinax/pinax-forums | pinax/forums/tests/urls.py | Python | mit | 132 | 0 |
"""cellprofiler.gui.tests.__init__.py
CellProfiler is distributed under the GNU General Public License.
See the accompanying file LICENSE for details.
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2015 Broad Institute
All rights reserved.
Please see the AUTHORS file for credits.
Website: http://www.cellprofiler.org
"""
if __name__ == "__main__":
import nose
nose.main()
| LeeKamentsky/CellProfiler | cellprofiler/gui/tests/__init__.py | Python | gpl-2.0 | 421 | 0 |
#!/usr/bin/env python
"""
Task message queue consumer server.
"""
import click
import logging
from zencore.conf import Settings
from zencore.redtask import server
from zencore.utils.debug import setup_simple_logger
CONFIG = Settings()
logger = logging.getLogger(__name__)
@click.group()
@click.option("-c", "--config", help="Config file path, default etc/config.yaml")
def zrts(config):
"""Task message queue consumer server.
"""
CONFIG.setup(config)
setup_simple_logger(CONFIG.get("logging"))
logger.debug("Logger setup done.")
@zrts.command()
def start():
"""Start zencore-redtask consumer server.
"""
logger.debug("Server starting...")
server.start(CONFIG)
@zrts.command()
def stop():
"""Stop zencore-redtask consumer server.
"""
logger.debug("Server stopping...")
server.stop(CONFIG)
@zrts.command()
def reload():
"""Reload zencore-redtask consumer server.
"""
logger.debug("Server reloading...")
server.reload(CONFIG)
@zrts.command()
def status():
"""Get zencore-redtask consumer server's status.
"""
logger.debug("Server get status...")
server.status(CONFIG)
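# Example invocations (assuming a config file at etc/config.yaml and this
# script installed as a ``zrts`` console entry point):
#   zrts -c etc/config.yaml start
#   zrts -c etc/config.yaml status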
if __name__ == "__main__":
zrts()
| zencore-dobetter/zencore-redtask | src/scripts/zrts.py | Python | mit | 1,207 | 0.002486 |
"""
Django settings for kiki project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l99imh-s+-4ijwo+!gejon7!xp@$hmun43gck7t($1(g-rdtsu'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
'wiki',
'docclass'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'kiki.urls'
WSGI_APPLICATION = 'kiki.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': 3306
}
}
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
| after12am/expecto | machine_learning/kiki/kiki/settings.py | Python | mit | 2,210 | 0 |
import pytz
import urllib
from datetime import datetime
from pupa.scrape import Scraper, Bill, VoteEvent
from openstates.utils import LXMLMixin
TIMEZONE = pytz.timezone("US/Central")
VOTE_TYPE_MAP = {"yes": "yes", "no": "no"}
class NEBillScraper(Scraper, LXMLMixin):
def scrape(self, session=None):
if session is None:
session = self.jurisdiction.legislative_sessions[-1]
self.info("no session specified, using %s", session["identifier"])
start_year = datetime.strptime(session["start_date"], "%Y-%m-%d").year
end_year = datetime.strptime(session["end_date"], "%Y-%m-%d").year
yield from self.scrape_year(session["identifier"], start_year)
if start_year != end_year:
yield from self.scrape_year(session["identifier"], end_year)
def scrape_year(self, session, year):
main_url = (
"https://nebraskalegislature.gov/bills/search_by_date.php?"
"SessionDay={}".format(year)
)
page = self.lxmlize(main_url)
document_links = self.get_nodes(
page,
'//div[@class="main-content"]//div[@class="table-responsive"]//'
'table[@class="table"]/tbody/tr/td[1]/a',
)
for document_link in document_links:
# bill_number = document_link.text
bill_link = document_link.attrib["href"]
# POST request for search form
# post_dict = {'DocumentNumber': bill_number, 'Legislature': session}
# headers = urllib.urlencode(post_dict)
# bill_resp = self.post('http://nebraskalegislature.gov/bills/'
# 'search_by_number.php', data=post_dict)
# bill_link = bill_resp.url
# bill_page = bill_resp.text
yield from self.bill_info(bill_link, session, main_url)
def bill_info(self, bill_link, session, main_url):
bill_page = self.lxmlize(bill_link)
long_title = self.get_node(
bill_page, '//div[@class="main-content"]//h2'
).text.split()
bill_number = long_title[0]
title = ""
for x in range(2, len(long_title)):
title += long_title[x] + " "
title = title[0:-1]
if not title:
self.error("no title, skipping %s", bill_number)
return
bill_type = "resolution" if "LR" in bill_number else "bill"
bill = Bill(bill_number, session, title, classification=bill_type)
bill.add_source(main_url)
bill.add_source(bill_link)
introduced_by = self.get_node(
bill_page,
"//body/div[3]/div[2]/div[2]/div/div[3]/div[1]/ul/li[1]/a[1]/text()",
)
if not introduced_by:
introduced_by = self.get_node(
bill_page,
"//body/div[3]/div[2]/div[2]/div/div[2]/div[1]/ul/li[1]/text()",
)
introduced_by = introduced_by.split("Introduced By:")[1].strip()
introduced_by = introduced_by.strip()
bill.add_sponsorship(
name=introduced_by,
entity_type="person",
primary=True,
classification="primary",
)
action_nodes = self.get_nodes(
bill_page, '//div[@class="main-content"]/div[5]//table/tbody/tr'
)
for action_node in action_nodes:
date = self.get_node(action_node, "./td[1]").text
date = datetime.strptime(date, "%b %d, %Y")
# The action node may have an anchor element within it, so
# we grab all the text within.
action = self.get_node(action_node, "./td[2]").text_content()
if "Governor" in action:
actor = "executive"
elif "Speaker" in action:
actor = "legislature"
else:
actor = "legislature"
action_type = self.action_types(action)
bill.add_action(
action,
date.strftime("%Y-%m-%d"),
chamber=actor,
classification=action_type,
)
# Grabs bill version documents.
version_links = self.get_nodes(
bill_page, "/html/body/div[3]/div[2]/div[2]/div/" "div[3]/div[2]/ul/li/a"
)
for version_link in version_links:
version_name = version_link.text
version_url = version_link.attrib["href"]
# replace Current w/ session number
version_url = version_url.replace("Current", session)
bill.add_version_link(
version_name, version_url, media_type="application/pdf"
)
soi = self.get_nodes(bill_page, ".//a[contains(text(), 'Statement of Intent')]")
if soi:
bill.add_document_link(
"Statement of Intent", soi[0].get("href"), media_type="application/pdf"
)
comstmt = self.get_nodes(
bill_page, ".//a[contains(text(), 'Committee Statement')]"
)
if comstmt:
bill.add_document_link(
"Committee Statement",
comstmt[0].get("href"),
media_type="application/pdf",
)
fn = self.get_nodes(bill_page, ".//a[contains(text(), 'Fiscal Note')]")
if fn:
bill.add_document_link(
"Fiscal Note", fn[0].get("href"), media_type="application/pdf"
)
# Adds any documents related to amendments.
amendment_links = self.get_nodes(
bill_page, ".//div[contains(@class, 'amend-link')]/a"
)
for amendment_link in amendment_links:
amendment_name = amendment_link.text
amendment_url = amendment_link.attrib["href"]
# skip over transcripts
if "/AM/" not in amendment_url:
continue
bill.add_document_link(
amendment_name, amendment_url, media_type="application/pdf"
)
yield bill
yield from self.scrape_votes(bill, bill_page, actor)
def scrape_amendments(self, bill, bill_page):
amd_xpath = '//div[contains(@class,"amends") and not(contains(@class,"mb-3"))]'
for row in bill_page.xpath(amd_xpath):
status = row.xpath("string(./div[2])").strip()
if "adopted" in status.lower():
version_url = row.xpath("./div[1]/a/@href")[0]
version_name = row.xpath("./div[1]/a/text()")[0]
bill.add_version_link(
version_name,
version_url,
media_type="application/pdf",
on_duplicate="ignore",
)
def scrape_votes(self, bill, bill_page, chamber):
vote_links = bill_page.xpath(
'//table[contains(@class,"history")]//a[contains(@href, "view_votes")]'
)
for vote_link in vote_links:
vote_url = vote_link.attrib["href"]
date_td, motion_td, *_ = vote_link.xpath("ancestor::tr/td")
date = datetime.strptime(date_td.text, "%b %d, %Y")
motion_text = motion_td.text_content()
vote_page = self.lxmlize(vote_url)
passed = "Passed" in motion_text or "Advanced" in motion_text
cells = vote_page.xpath(
'//div[contains(@class,"table-responsive")]/table//td'
)
vote = VoteEvent(
bill=bill,
chamber=chamber,
start_date=TIMEZONE.localize(date),
motion_text=motion_text,
classification="passage",
result="pass" if passed else "fail",
)
yes_count = self.process_count(vote_page, "Yes:")
no_count = self.process_count(vote_page, "No:")
exc_count = self.process_count(vote_page, "Excused - Not Voting:")
absent_count = self.process_count(vote_page, "Absent - Not Voting:")
present_count = self.process_count(vote_page, "Present - Not Voting:")
vote.set_count("yes", yes_count)
vote.set_count("no", no_count)
vote.set_count("excused", exc_count)
vote.set_count("absent", absent_count)
vote.set_count("abstain", present_count)
query_params = urllib.parse.parse_qs(urllib.parse.urlparse(vote_url).query)
vote.pupa_id = query_params["KeyID"][0]
vote.add_source(vote_url)
for chunk in range(0, len(cells), 2):
name = cells[chunk].text
vote_type = cells[chunk + 1].text
if name and vote_type:
vote.vote(VOTE_TYPE_MAP.get(vote_type.lower(), "other"), name)
yield vote
# Find the vote count row containing row_string, and return the integer count
def process_count(self, page, row_string):
count_xpath = (
'string(//ul[contains(@class,"list-unstyled")]/li[contains(text(),"{}")])'
)
count_text = page.xpath(count_xpath.format(row_string))
return int("".join(x for x in count_text if x.isdigit()))
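    # For example, with row_string "Yes:" a row whose text is "Yes: 32"
    # yields 32; all non-digit characters are stripped, so a row reading
    # "Excused - Not Voting: 3" yields 3. (Sample texts are illustrative.)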
def action_types(self, action):
if "Date of introduction" in action:
action_type = "introduction"
elif "Referred to" in action:
action_type = "referral-committee"
elif "Indefinitely postponed" in action:
action_type = "committee-failure"
elif ("File" in action) or ("filed" in action):
action_type = "filing"
elif "Placed on Final Reading" in action:
action_type = "reading-3"
elif "Passed" in action or "President/Speaker signed" in action:
action_type = "passage"
elif "Presented to Governor" in action:
action_type = "executive-receipt"
elif "Approved by Governor" in action:
action_type = "executive-signature"
elif "Failed to pass notwithstanding the objections of the Governor" in action:
action_type = "executive-veto"
elif "Failed" in action:
action_type = "failure"
elif "Bill withdrawn" in action:
action_type = "withdrawal"
else:
action_type = None
return action_type
| openstates/openstates | openstates/ne/bills.py | Python | gpl-3.0 | 10,305 | 0.001456 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
import subprocess
import os
import shutil
import signal
import time
import threading
from ceilometer import service
from ceilometer.tests import base
class ServiceTestCase(base.TestCase):
def test_prepare_service(self):
service.prepare_service([])
#NOTE(Fengqian): I have to set up a thread to parse the output of
#subprocess.Popen, because readline() may block the process under
#some conditions.
class ParseOutput(threading.Thread):
def __init__(self, input_stream, str_flag):
super(ParseOutput, self).__init__()
self.input_stream = input_stream
self.str_flag = str_flag
self.ret_stream = None
self.ret = False
self.thread_stop = False
def run(self):
while not self.thread_stop:
next_line = self.input_stream.readline()
if next_line == '':
break
if self.str_flag in next_line:
self.ret = True
self.ret_stream = next_line[(next_line.find(self.str_flag) +
len(self.str_flag)):]
self.stop()
def stop(self):
self.thread_stop = True
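# Minimal usage sketch for ParseOutput (``proc`` is a hypothetical
# subprocess.Popen object; parse_output() below is the canonical caller):
#   parser = ParseOutput(proc.stderr, "Starting")
#   parser.start()
#   parser.join(3)
#   parser.stop()
#   if parser.ret:                # the flag text was seen
#       rest = parser.ret_stream  # text following the flag on that line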
class ServiceRestartTest(base.TestCase):
def setUp(self):
super(ServiceRestartTest, self).setUp()
self.tempfile = self.temp_config_file_path()
self.pipeline_cfg_file = self.temp_config_file_path(name=
'pipeline.yaml')
shutil.copy(self.path_get('etc/ceilometer/pipeline.yaml'),
self.pipeline_cfg_file)
self.pipelinecfg_read_from_file()
policy_file = self.path_get('tests/policy.json')
with open(self.tempfile, 'w') as tmp:
tmp.write("[DEFAULT]\n")
tmp.write(
"rpc_backend=ceilometer.openstack.common.rpc.impl_fake\n")
tmp.write(
"auth_strategy=noauth\n")
tmp.write(
"debug=true\n")
tmp.write(
"pipeline_cfg_file=%s\n" % self.pipeline_cfg_file)
tmp.write(
"policy_file=%s\n" % policy_file)
tmp.write("[database]\n")
tmp.write("connection=log://localhost\n")
def _modify_pipeline_file(self):
with open(self.pipeline_cfg_file, 'w') as pipe_fd:
pipe_fd.truncate()
pipe_fd.write(yaml.safe_dump(self.pipeline_cfg[1]))
def pipelinecfg_read_from_file(self):
with open(self.pipeline_cfg_file) as fd:
data = fd.read()
self.pipeline_cfg = yaml.safe_load(data)
def tearDown(self):
super(ServiceRestartTest, self).tearDown()
self.sub.kill()
self.sub.wait()
@staticmethod
def _check_process_alive(pid):
try:
with open("/proc/%d/status" % pid) as fd_proc:
for line in fd_proc.readlines():
if line.startswith("State:"):
state = line.split(":", 1)[1].strip().split(' ')[0]
return state not in ['Z', 'T', 'Z+']
except IOError:
return False
def check_process_alive(self):
cond = lambda: self._check_process_alive(self.sub.pid)
return self._wait(cond, 60)
def parse_output(self, str_flag, timeout=3):
parse = ParseOutput(self.sub.stderr, str_flag)
parse.start()
parse.join(timeout)
parse.stop()
return parse
@staticmethod
def _wait(cond, timeout):
start = time.time()
while not cond():
if time.time() - start > timeout:
break
time.sleep(.1)
return cond()
def _spawn_service(self, cmd, conf_file=None):
if conf_file is None:
conf_file = self.tempfile
self.sub = subprocess.Popen([cmd, '--config-file=%s' % conf_file],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
#NOTE(Fengqian): Parse the output to see if the service started
self.assertTrue(self.parse_output("Starting").ret)
self.check_process_alive()
def _service_restart(self, cmd):
self._spawn_service(cmd)
self.assertTrue(self.sub.pid)
        #NOTE(Fengqian): Modify the pipeline configuration file to see
        #if the file is reloaded correctly.
self._modify_pipeline_file()
self.pipelinecfg_read_from_file()
os.kill(self.sub.pid, signal.SIGHUP)
self.assertTrue(self.check_process_alive())
self.assertTrue(self.parse_output("Caught SIGHUP").ret)
self.assertEqual(self.pipeline_cfg,
yaml.safe_load(
self.parse_output("Pipeline config: ").ret_stream))
def test_compute_service_restart(self):
self._service_restart('ceilometer-agent-compute')
def test_central_service_restart(self):
self._service_restart('ceilometer-agent-central')
| citrix-openstack-build/ceilometer | tests/test_service.py | Python | apache-2.0 | 5,711 | 0.001576 |
#!/usr/bin/env python
"""Plot scheduled flight times for AA flights between JFK and LAX.
For a given year and month, visualize dist vs sch time, run a regression,
and look at error. Filter based on whether the destination is in the Pacific,
and study the regression and error for each group."""
import os
import sys
from analysis.filter import get_jetstream, get_pacific
from analysis.plot import plot_schtime, plot_regression, plot_error, plot_regression_coef
from analysis.regression import regression
def main():
year = 2015
month = 1
os.system('mkdir -p graphs') #Create directory to place graphs, if it doesn't exist.
plot_schtime(12478, 12892, 'AA') #Plot sch flight time from JFK to LAX
plot_schtime(12892, 12478, 'AA') #Plot sch flight time from LAX to JFK
flights = get_jetstream(year, month) #Get flight info.
#Get info whether destination is in the Pacific and filter.
df_pac = get_pacific(flights)
overseas = df_pac[df_pac.DestOverseas]
not_overseas = df_pac[~df_pac.DestOverseas]
analysislist = [[flights, 'Regression Error'],
[overseas, 'Pacific Regression Error'],
[not_overseas, 'US Regression Error']]
#Plot dist vs sch time, regression, and error for filtered flight data.
for i, [df, title] in enumerate(analysislist):
plot_regression(year, month, df)
print regression(year, month, df)
plot_error(year, month, df, title)
plot_regression_coef() #Plot monthly US and Pacific regression coefficients over time.
if __name__ == '__main__':
sys.exit(main()) | mtb0/flightmodel | src/drivers/AnalysisDriver.py | Python | mit | 1,592 | 0.011935 |
# -*- coding: utf-8 -*-
"""Main Controller"""
import pylons
from tg import expose, flash, require, url, lurl, request, redirect, tmpl_context
from tg.i18n import ugettext as _, lazy_ugettext as l_
from tg import predicates
from ebetl import model
from ebetl.controllers.secure import SecureController
from ebetl.model import DBSession, metadata
from tgext.admin.tgadminconfig import TGAdminConfig
from tgext.admin.controller import AdminController
from ebetl.lib.base import BaseController
from ebetl.controllers.error import ErrorController
from ebetl.model import *
from ebetl.model.zerobi import FACT_B2B,FACT_B2B_PRICE
import json
from ebetl.lib import views
from ebetl.lib.views import get_latest_cogs as gcogs
from webhelpers import paginate
from babel.numbers import format_currency, format_decimal
from decimal import Decimal
from sqlalchemy.sql import label
from sqlalchemy import func
try:
from collections import OrderedDict
except:
from ordereddict import OrderedDict
from tg.predicates import has_permission
import datetime
#from tgext.asyncjob import asyncjob_perform
def testme(arg):
print "====================== TESTME"
__all__ = ['MarketingController']
MEASURES = [
label('qta', func.sum(Movimentir.qtamovimento)),
label('net_total', func.sum(Movimentir.totalenetto)),
label('gross_total', func.sum(Movimentir.totale)),
#label('vat_total', func.sum(vat_total)),
#label('gross_total', func.sum(gross_total)),
#label('lis_ct', func.sum(lis_ct)),
#label('disc', func.sum(
# Factb2b.b2b_net_total - lis_ct
#))
]
class RepartiController(BaseController):
"""
    Reparti Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Reparti).all()
return dict(data=data)
class TipologieController(BaseController):
"""
    Tipologie Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Tipologieprodotti).all()
return dict(data=data)
class ProduttoriController(BaseController):
"""
    Produttori Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Produttori).all()
return dict(data=data)
class MarketingController(BaseController):
"""
"""
# The predicate that must be met for all the actions in this controller:
#allow_only = has_permission('manage',
# msg=l_('Only for people with the "manage" permission'))
@expose('ebetl.templates.marketing')
def index(self, *args, **kw):
"""Handle the front-page."""
print kw
group_by = [Clientifid.codiceclientefid, Clientifid.nome, Clientifid.cognome, Clientifid.email]
fltr = group_by + MEASURES
ret = DBSession.query(*fltr)
start = kw.get('start')
if start:
start_obj = datetime.datetime.strptime(start, "%Y/%m/%d")
else:
start_obj = datetime.datetime.now()
kw['start'] = start_obj.strftime("%Y/%m/%d")
print start_obj
ret=ret.filter(Movimentit.datadocumento>=start_obj)
end = kw.get('end')
if end:
end_obj = datetime.datetime.strptime(end, "%Y/%m/%d")
else:
end_obj = datetime.datetime.now()
kw['end'] = end_obj.strftime("%Y/%m/%d")
ret=ret.filter(Movimentit.datadocumento<=end_obj)
produttori = kw.get('produttori')
if produttori:
produttori = json.loads(produttori)
prdlist = []
if produttori:
for p in produttori:
prdlist.append(Produttori.numeroproduttore==p)
if prdlist:
ret=ret.filter(or_(*prdlist))
reparti = kw.get('reparti')
if reparti:
reparti = json.loads(reparti)
replist = []
if reparti:
for r in reparti:
replist.append(Prodotti.numeroreparto==r)
if replist:
ret=ret.filter(or_(*replist))
# join on document header table
join_condition=Clientifid.numeroclientefid==Ricevutet.numeroclientefid
ret = ret.join(Ricevutet, join_condition)
join_condition=Ricevutet.numeromovimento==Movimentit.numeromovimento
ret = ret.join(Movimentit, join_condition)
join_condition=Movimentit.numeromovimento==Movimentir.numeromovimento
ret = ret.join(Movimentir, join_condition)
join_condition=Prodotti.numeroprodotto==Movimentir.idprodotto
ret = ret.join(Prodotti, join_condition)
join_condition=Produttori.numeroproduttore==Prodotti.numeroproduttore
ret = ret.outerjoin(Produttori, join_condition)
results = ret.group_by(*group_by).all()
#results=[]
#join_condition=Movimentit.numeromovimento==Movimentir.numeromovimento
#ret = ret.join(Movimentir, join_condition)
columns = [l.key for l in fltr]
return dict(page='marketing', columns=columns, results=results, kw=kw)
def create(self):
"""POST /stocks: Create a new item"""
# url('stocks')
def new(self, format='html'):
"""GET /stocks/new: Form to create a new item"""
# url('new_stock')
@expose()
def export(self, id, *args, **kw):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
#DBSession.query(Inputb2b).filter(
# Inputb2b.b2b_id==id).update(dict(exported=1))
from ebetl.lib.etl.b2b import B2bObj
b2bobj=B2bObj(config)
#b2bobj.write_out()
#if self.options.export:
print asyncjob_perform(testme, 2)
return redirect(url('/b2b/show/%s'%id))
@expose()
def book(self, id, *args, **kw):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
DBSession.query(Inputb2b).filter(
Inputb2b.b2b_id==id).update(dict(booked=1))
DBSession.query(Factb2b).filter_by(inputb2b_id=id).update(dict(booked=1))
return redirect(url('/b2b/show/%s'%id))
@expose()
def update(self, id):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
ret = DBSession.query(Inputb2b).filter(
Inputb2b.b2b_id==id).one()
from ebetl.lib.etl.filconad import FilconadObj
fobj = FilconadObj(config, ret.record)
fobj.write_out(inputb2b_id=id)
return redirect(url('/b2b/show/%s'%id))
@expose()
def updatedoc(self, id, doc_num, *args, **kw):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
print [args]
print [kw]
DBSession.query(Factb2b).filter_by(inputb2b_id=id,doc_num=doc_num ).update(dict(validated=1))
redirect(url('/b2b/showdoc/%s/%s'%(id,doc_num)))
def delete(self, id):
"""DELETE /stocks/id: Delete an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="DELETE" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='delete')
# url('stock', id=ID)
    def _datagrid(self, query_lst, groupby, fltr):
ret = DBSession.query(*query_lst)
#ret = ret.order_by(Factb2b.row)
ret = ret.group_by(*groupby).filter(and_(*fltr))
return ret.all()
@expose('ebetl.templates.b2b_show')
def show(self, id):
"""GET /b2b/id: Show a specific item"""
        inputb2b = DBSession.query(Inputb2b).filter(Inputb2b.b2b_id==id).one()
groupby = [Factb2b.booked, Factb2b.validated, Provenienze, Factb2b.doc_date, Factb2b.doc_num]
query_lst = groupby + FACT_B2B
fltr = [and_(
Provenienze.numeroprovenienza==Factb2b.supplier_id,
Factb2b.inputb2b_id == id
)
]
results = self._datagrid(query_lst, groupby, fltr)
return dict(page='b2b', id=id, inputb2b=inputb2b, results=results)
@expose('ebetl.templates.b2b_showdoc')
def showdoc(self, id, doc_num):
"""Handle the front-page."""
"""GET /shodoc/id: Show a specific item"""
results = OrderedDict()
for i in ['total' , 'account_code', 'vat_code',
'recs', 'issues']:
results[i]=None
# Total
groupby = [Factb2b.validated, Factb2b.closed,
Provenienze, Factb2b.inputb2b_id, Factb2b.doc_date, Factb2b.doc_num]
query_lst = groupby + FACT_B2B
fltr = [and_(Factb2b.inputb2b_id==id,
Factb2b.doc_num == doc_num,
Factb2b.supplier_id==Provenienze.numeroprovenienza)]
results['total'] = self._datagrid(query_lst, groupby, fltr)
# Account
groupby = [Factb2b.doc_num,Factb2b.cost_center_code, Factb2b.account_code, Factb2b.b2b_vat_code]
query_lst = groupby + FACT_B2B
fltr = [and_(Factb2b.inputb2b_id==id,
Factb2b.doc_num == doc_num,
Factb2b.supplier_id==Provenienze.numeroprovenienza)]
results['account_code'] = self._datagrid(query_lst, groupby, fltr)
# Vat
groupby = [Factb2b.doc_num, Factb2b.cost_center_code, Factb2b.b2b_vat_code]
query_lst = groupby + FACT_B2B
fltr = [and_(Factb2b.inputb2b_id==id,
Factb2b.doc_num == doc_num,
Factb2b.supplier_id==Provenienze.numeroprovenienza)]
results['vat_code'] = self._datagrid(query_lst, groupby, fltr)
# Receipts
recs = [i[0] for i in DBSession.query(Factb2b.rec_num).filter(
and_(Factb2b.doc_num==doc_num,
Factb2b.inputb2b_id==id)).distinct().all()]
results['recs'] = []
for rec_num in recs:
groupby = [Factb2b.doc_num, Factb2b.rec_num]
query_lst = groupby + FACT_B2B
fltr = [Factb2b.doc_num==doc_num,Factb2b.inputb2b_id==id,
Factb2b.rec_num==rec_num
]
results['recs'].append(self._datagrid(query_lst, groupby, fltr)[0])
# Issues
fltr = [Factb2b.inputb2b_id==id,
Factb2b.doc_num == doc_num,
Factb2b.account_id==None ]
results['issues'] = DBSession.query(Factb2b).filter(and_(*fltr)).all()
# Products
groupby = [Factb2b.doc_num, Factb2b.account_code,
Factb2b.b2b_code, Factb2b.b2b_desc]
query_lst = groupby + FACT_B2B_PRICE
fltr = [Factb2b.inputb2b_id==id,
Factb2b.doc_num == doc_num,]
results['products'] = self._datagrid(query_lst, groupby, fltr)
return dict(page='b2b', results=results, id=id, doc_num=doc_num)
def edit(self, id, format='html'):
"""GET /stocks/id/edit: Form to edit an existing item"""
# url('edit_stock', id=ID)
| nomed/ebetl | ebetl/controllers/marketing.py | Python | artistic-2.0 | 12,560 | 0.017596 |
"""Webhooks for external integrations."""
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.models import get_client, UserProfile
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.lib.notifications import convert_html_to_markdown
from zerver.decorator import REQ, has_request_variables, authenticated_rest_api_view
import logging
import ujson
from typing import Any, Dict, List, Optional, Tuple, Union, Text
class TicketDict(dict):
"""
A helper class to turn a dictionary with ticket information into
an object where each of the keys is an attribute for easy access.
"""
def __getattr__(self, field):
# type: (str) -> Any
if "_" in field:
return self.get(field)
else:
return self.get("ticket_" + field)
def property_name(property, index):
# type: (str, int) -> str
"""The Freshdesk API is currently pretty broken: statuses are customizable
but the API will only tell you the number associated with the status, not
the name. While we engage the Freshdesk developers about exposing this
information through the API, since only FlightCar uses this integration,
hardcode their statuses.
"""
statuses = ["", "", "Open", "Pending", "Resolved", "Closed",
"Waiting on Customer", "Job Application", "Monthly"]
priorities = ["", "Low", "Medium", "High", "Urgent"]
if property == "status":
return statuses[index] if index < len(statuses) else str(index)
elif property == "priority":
return priorities[index] if index < len(priorities) else str(index)
else:
raise ValueError("Unknown property")
def parse_freshdesk_event(event_string):
# type: (str) -> List[str]
"""These are always of the form "{ticket_action:created}" or
"{status:{from:4,to:6}}". Note the lack of string quoting: this isn't
valid JSON so we have to parse it ourselves.
"""
data = event_string.replace("{", "").replace("}", "").replace(",", ":").split(":")
if len(data) == 2:
# This is a simple ticket action event, like
# {ticket_action:created}.
return data
else:
# This is a property change event, like {status:{from:4,to:6}}. Pull out
# the property, from, and to states.
property, _, from_state, _, to_state = data
return [property, property_name(property, int(from_state)),
property_name(property, int(to_state))]
def format_freshdesk_note_message(ticket, event_info):
# type: (TicketDict, List[str]) -> str
"""There are public (visible to customers) and private note types."""
note_type = event_info[1]
content = "%s <%s> added a %s note to [ticket #%s](%s)." % (
ticket.requester_name, ticket.requester_email, note_type,
ticket.id, ticket.url)
return content
def format_freshdesk_property_change_message(ticket, event_info):
# type: (TicketDict, List[str]) -> str
"""Freshdesk will only tell us the first event to match our webhook
configuration, so if we change multiple properties, we only get the before
and after data for the first one.
"""
content = "%s <%s> updated [ticket #%s](%s):\n\n" % (
ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)
# Why not `"%s %s %s" % event_info`? Because the linter doesn't like it.
content += "%s: **%s** => **%s**" % (
event_info[0].capitalize(), event_info[1], event_info[2])
return content
def format_freshdesk_ticket_creation_message(ticket):
# type: (TicketDict) -> str
"""They send us the description as HTML."""
cleaned_description = convert_html_to_markdown(ticket.description)
content = "%s <%s> created [ticket #%s](%s):\n\n" % (
ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)
content += """~~~ quote
%s
~~~\n
""" % (cleaned_description,)
content += "Type: **%s**\nPriority: **%s**\nStatus: **%s**" % (
ticket.type, ticket.priority, ticket.status)
return content
@authenticated_rest_api_view(is_webhook=True)
@has_request_variables
def api_freshdesk_webhook(request, user_profile, payload=REQ(argument_type='body'),
stream=REQ(default='freshdesk')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text) -> HttpResponse
ticket_data = payload["freshdesk_webhook"]
required_keys = [
"triggered_event", "ticket_id", "ticket_url", "ticket_type",
"ticket_subject", "ticket_description", "ticket_status",
"ticket_priority", "requester_name", "requester_email",
]
for key in required_keys:
if ticket_data.get(key) is None:
logging.warning("Freshdesk webhook error. Payload was:")
logging.warning(request.body)
return json_error(_("Missing key %s in JSON") % (key,))
ticket = TicketDict(ticket_data)
subject = "#%s: %s" % (ticket.id, ticket.subject)
try:
event_info = parse_freshdesk_event(ticket.triggered_event)
except ValueError:
return json_error(_("Malformed event %s") % (ticket.triggered_event,))
if event_info[1] == "created":
content = format_freshdesk_ticket_creation_message(ticket)
elif event_info[0] == "note_type":
content = format_freshdesk_note_message(ticket, event_info)
elif event_info[0] in ("status", "priority"):
content = format_freshdesk_property_change_message(ticket, event_info)
else:
        # Not an event we know how to handle; do nothing.
return json_success()
check_send_message(user_profile, get_client("ZulipFreshdeskWebhook"), "stream",
[stream], subject, content)
return json_success()
| dawran6/zulip | zerver/webhooks/freshdesk/view.py | Python | apache-2.0 | 5,886 | 0.001019 |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import api, foxycart
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^review/', TemplateView.as_view(template_name='base.html')),
url(r'^feedback/', TemplateView.as_view(template_name='feedback.html')),
# Examples:
# url(r'^$', 'review_app.views.home', name='home'),
# url(r'^review_app/', include('review_app.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
#url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
url(r'^api/', include('api.urls')),
url(r"^foxycart/", include('foxycart.urls')),
url(r"^foxycart/checkout", TemplateView.as_view(template_name='foxycart_checkout_template.html')),
url(r'^accounts/login/$', 'django.contrib.auth.views.login', {"template_name": "login.html"}),
url(r'^accounts/logout/$', 'django.contrib.auth.views.logout',
{"template_name": "base.html", "next_page": "/"}),
url(r"^protected/", 'review_app.views.protected_method', name="protected"),
url(r"^packages/", 'review_app.views.packages_method', name="packages"),
url(r'^package/(?P<package_id>[0-9]+)/$', 'review_app.views.package_method', name="package"),
)
| hacknashvillereview/review_application | review_app/review_app/urls.py | Python | mit | 1,606 | 0.004981 |
from __future__ import absolute_import, division, print_function, unicode_literals
import ossaudiodev
def print_fmts(rw):
print(rw == 'r' and 'read' or 'write')
sound = ossaudiodev.open(rw)
fmts = sound.getfmts()
for name in dir(ossaudiodev):
if name.startswith('AFMT'):
attr = getattr(ossaudiodev, name)
if attr & fmts:
print(name)
print()
sound.close()
print_fmts('w')
print_fmts('r')
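# Output is hardware dependent: for each direction this prints 'write' or
# 'read' followed by the AFMT_* constants (e.g. AFMT_U8, AFMT_S16_LE) whose
# bits are set in the device's getfmts() mask.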
| rec/echomesh | code/python/experiments/ossaudiodev/GetFormats.py | Python | mit | 429 | 0.025641 |
#!/usr/bin/env python
#
# Copyright 2006,2007,2010,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import numpy as np
class test_random(gr_unittest.TestCase):
# NOTE: For tests on the output distribution of the random numbers, see gnuradio-runtime/apps/evaluation_random_numbers.py.
# Check for range [0,1) of uniform distributed random numbers
def test_1(self):
num_tests = 10000
values = np.zeros(num_tests)
rndm = gr.random()
for k in range(num_tests):
values[k] = rndm.ran1()
for value in values:
self.assertLess(value, 1)
self.assertGreaterEqual(value, 0)
# Check reseed method (init with time and seed as fix number)
def test_2(self):
num = 5
        rndm0 = gr.random(42)  # init with fixed seed
        rndm1 = gr.random(42)  # init with the same fixed seed
for k in range(num):
x = rndm0.ran1();
y = rndm1.ran1();
self.assertEqual(x,y)
x = np.zeros(num)
y = np.zeros(num)
        rndm0 = gr.random(42)  # init with fixed seed 1
        for k in range(num):
            x[k] = rndm0.ran1()
        rndm1.reseed(43)  # reseed with fixed seed 2
        for k in range(num):
            y[k] = rndm1.ran1()  # sample the reseeded generator, not rndm0
for k in range(num):
self.assertNotEqual(x[k],y[k])
if __name__ == '__main__':
gr_unittest.run(test_random, "test_random.xml")
| surligas/gnuradio | gnuradio-runtime/python/gnuradio/gr/qa_random.py | Python | gpl-3.0 | 2,171 | 0.00783 |
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
import os,unittest
from pyasm.security import Batch
from pyasm.command import Command
from pyasm.prod.biz import Asset
from pyasm.prod.maya import *
from maya_checkin import *
class MayaCheckinTest(unittest.TestCase):
def setUp(self):
batch = Batch()
def test_all(self):
# create a scene that will be checked in
asset_code = "prp101"
sid = "12345"
# create an asset
mel('sphere -n sphere1')
mel('circle -n circle1')
mel('group -n |%s |circle1 |sphere1' % asset_code )
# convert node into a maya asset
node = MayaNode("|%s" % asset_code )
asset_node = MayaAssetNode.add_sid( node, sid )
# checkin the asset
checkin = MayaAssetNodeCheckin(asset_node)
Command.execute_cmd(checkin)
# create a file from this node
asset_node.export()
if __name__ == '__main__':
unittest.main()
| Southpaw-TACTIC/TACTIC | src/pyasm/prod/checkin/maya_checkin_test.py | Python | epl-1.0 | 1,267 | 0.008682 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('project_admin', '0020_project_fiscal_year'),
]
operations = [
migrations.AddField(
model_name='projectgoal',
name='fiscal_year',
            field=models.CharField(max_length=4, validators=[django.core.validators.RegexValidator(regex='^AF\\d{2}$')], default='AF16'),
preserve_default=False,
),
]
| luiscberrocal/homeworkpal | homeworkpal_project/project_admin/migrations/0021_projectgoal_fiscal_year.py | Python | mit | 568 | 0.001761 |
from StringIO import StringIO
#import sys
import json
from os.path import join
from .base import BaseCase
import validate
import jsonschema
class TestArticleValidate(BaseCase):
def setUp(self):
self.doc_json = join(self.fixtures_dir, 'elife-09560-v1.xml.json')
def tearDown(self):
pass
def test_main_bootstrap(self):
"valid output is returned"
valid, results = validate.main(open(self.doc_json, 'r'))
self.assertTrue(isinstance(results, dict))
self.assertTrue(isinstance(valid, bool))
def test_main_bootstrap_fails(self):
"invalid output raises a validation error"
data = json.load(open(self.doc_json, 'r'))
data['article']['type'] = 'unknown type that will cause a failure'
strbuffer = StringIO(json.dumps(data))
strbuffer.name = self.doc_json
self.assertRaises(jsonschema.ValidationError, validate.main, strbuffer)
def test_add_placeholders_for_validation(self):
article = {'article': {'id': 12345, 'version': 2}}
expected = {
'article': {
'-patched': True,
'id': 12345,
'version': 2,
'stage': 'published',
'versionDate': '2099-01-01T00:00:00Z',
'statusDate': '2099-01-01T00:00:00Z',
}}
validate.add_placeholders_for_validation(article)
self.assertEqual(article, expected)
def test_is_poa_not_poa(self):
# For test coverage
self.assertFalse(validate.is_poa({}))
| gnott/bot-lax-adaptor | src/tests/test_validate.py | Python | gpl-3.0 | 1,558 | 0.001284 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# system
import os
import sys
dir = os.path.split(os.path.split(os.path.split(os.path.realpath(__file__))[0])[0])[0]
sys.path.append(os.path.join(dir, 'scripts'))
# testing
import mock
import unittest
from mock import patch
# program
import setup.load as Config
import setup.database as DB
#
# Global variables.
#
TEST_DATA = 'test_flood_portal_output.json'
class CheckConfigurationStructure(unittest.TestCase):
'''Unit tests for the configuration files.'''
def test_that_load_config_fails_gracefully(self):
assert Config.LoadConfig('xxx.json') == False
## Object type tests.
def test_config_is_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d) is dict
def test_config_returns_a_table_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d['database']) is list
def test_config_checks_api_key(self):
Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert Config.LoadConfig(os.path.join(dir, 'tests', 'data', 'test_config.json')) == False
class CheckDatabaseCreation(unittest.TestCase):
'''Unit tests for the setting up the database.'''
## Structural tests.
def test_wrapper_database_function_works(self):
assert DB.Main() != False
## Failed config file.
def test_database_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_database_fail.json')) == False
def test_that_odd_table_names_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_fail_column_names.json')) == False
| luiscape/hdxscraper-violation-documentation-center-syria | tests/unit/test_setup.py | Python | mit | 1,648 | 0.018204 |
"""
Copyright (c) 2019 John Robinson
Author: John Robinson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# Global Imports
import logging
import unittest
import RPi.GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
# Local Imports
from max31856 import MAX31856 as MAX31856
logging.basicConfig(
filename='test_MAX31856.log',
level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
_logger = logging.getLogger(__name__)
class Adafruit_MAX31856(unittest.TestCase):
def tearDown(self):
GPIO.cleanup()
#def test_software_spi_initialize(self):
#"""Checks to see if the sensor can initialize on the software SPI interface.
#Will fail if it cannot find the MAX31856 library or any dependencies.
#Test only checks to see that the sensor can be initialized in Software, does not check the
#hardware connection.
#"""
#_logger.debug('test_software_SPI_initialize()')
## Raspberry Pi software SPI configuration.
#software_spi = {"clk": 25, "cs": 8, "do": 9, "di": 10}
#sensor = MAX31856(software_spi=software_spi)
#if sensor:
#self.assertTrue(True)
#else:
#self.assertTrue(False)
def test_hardware_spi_initialize(self):
"""
Checks to see if the sensor can initialize on the hardware SPI interface.
Will fail if it cannot find the MAX31856 library or any dependencies.
Test only checks to see that the sensor can be initialized in Software, does not check the
hardware connection.
"""
_logger.debug('test_hardware_SPI_initialize()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
if sensor:
self.assertTrue(True)
else:
self.assertTrue(False)
def test_get_register_reading(self):
"""
Checks to see if we can read a register from the device. Good test for correct
connectivity.
"""
_logger.debug('test_get_register_reading()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
value = sensor._read_register(MAX31856.MAX31856_REG_READ_CR0)
for ii in range(0x00, 0x10):
# Read all of the registers, will store data to log
sensor._read_register(ii) # pylint: disable-msg=protected-access
if value:
self.assertTrue(True)
else:
self.assertTrue(False)
#def test_get_temperaure_reading_software_spi(self):
#"""Checks to see if we can read a temperature from the board, using software SPI
#"""
#_logger.debug('test_get_temperature_reading_software_spi')
## Raspberry Pi software SPI configuration.
#software_spi = {"clk": 25, "cs": 8, "do": 9, "di": 10}
#sensor = MAX31856(software_spi=software_spi)
#temp = sensor.read_temp_c()
#if temp:
#self.assertTrue(True)
#else:
#self.assertTrue(False)
    def test_get_temperature_reading(self):
"""
Checks to see if we can read a temperature from the board, using Hardware SPI
"""
_logger.debug('test_get_temperaure_reading')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
temp = sensor.read_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
    def test_get_internal_temperature_reading(self):
        """
        Checks to see if we can read the internal (cold-junction) temperature from the board, using hardware SPI
"""
_logger.debug('test_get_internal_temperature_reading()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
temp = sensor.read_internal_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
    def test_get_internal_temperature_reading_k_type(self):
        """
        Checks to see if we can read the internal temperature from the board, using hardware SPI and a K-type thermocouple
"""
_logger.debug('test_get_internal_temperature_reading()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device), tc_type=MAX31856.MAX31856_K_TYPE)
temp = sensor.read_internal_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
def test_temperature_byte_conversions(self):
"""
Checks the byte conversion for various known temperature byte values.
"""
_logger.debug('test_temperature_byte_conversions()')
#-------------------------------------------#
# Test Thermocouple Temperature Conversions #
byte2 = 0x01
byte1 = 0x70
byte0 = 0x20
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 23.0078125)
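        # Sanity check of the expected value: the three bytes form a 19-bit
        # signed reading with 2**-7 degC resolution:
        # (0x01 << 16 | 0x70 << 8 | 0x20) >> 5 == 2945, and 2945 * 2**-7 == 23.0078125.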
# Check a couple values from the datasheet
byte2 = 0b00000001
byte1 = 0b10010000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 25.0)
byte2 = 0b00000000
byte1 = 0b00000000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 0.0)
byte2 = 0b11111111
byte1 = 0b11110000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, -1.0)
byte2 = 0b11110000
byte1 = 0b01100000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, -250.0)
#---------------------------------#
# Test CJ Temperature Conversions #
msb = 0x1C
lsb = 0x64
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, 28.390625)
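        # Sanity check of the expected value: the cold-junction reading is a
        # 14-bit signed value with 2**-6 degC resolution:
        # (0x1C << 8 | 0x64) >> 2 == 1817, and 1817 * 2**-6 == 28.390625.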
# Check a couple values from the datasheet
msb = 0b01111111
lsb = 0b11111100
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, 127.984375)
msb = 0b00011001
lsb = 0b00000000
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, 25)
msb = 0b00000000
lsb = 0b00000000
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, 0)
msb = 0b11100111
lsb = 0b00000000
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, -25)
msb = 0b11001001
lsb = 0b00000000
decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb) # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, -55)
if __name__ == "__main__":
unittest.main()
| johnrbnsn/Adafruit_Python_MAX31856 | Adafruit_MAX31856/test_MAX31856.py | Python | mit | 8,880 | 0.00732 |
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import os
import re
from mozharness.mozilla.testing.errors import TinderBoxPrintRe
from mozharness.base.log import OutputParser, WARNING, INFO, CRITICAL
from mozharness.mozilla.buildbot import TBPL_WARNING, TBPL_FAILURE, TBPL_RETRY
from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
SUITE_CATEGORIES = ['mochitest', 'reftest', 'xpcshell']
def tbox_print_summary(pass_count, fail_count, known_fail_count=None,
crashed=False, leaked=False):
emphasize_fail_text = '<em class="testfail">%s</em>'
if pass_count < 0 or fail_count < 0 or \
(known_fail_count is not None and known_fail_count < 0):
summary = emphasize_fail_text % 'T-FAIL'
elif pass_count == 0 and fail_count == 0 and \
(known_fail_count == 0 or known_fail_count is None):
summary = emphasize_fail_text % 'T-FAIL'
else:
str_fail_count = str(fail_count)
if fail_count > 0:
str_fail_count = emphasize_fail_text % str_fail_count
summary = "%d/%s" % (pass_count, str_fail_count)
if known_fail_count is not None:
summary += "/%d" % known_fail_count
# Format the crash status.
if crashed:
summary += " %s" % emphasize_fail_text % "CRASH"
# Format the leak status.
if leaked is not False:
summary += " %s" % emphasize_fail_text % (
(leaked and "LEAK") or "L-FAIL")
return summary
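# For example, tbox_print_summary(10, 2, 1) returns
# '10/<em class="testfail">2</em>/1'.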
class TestSummaryOutputParserHelper(OutputParser):
def __init__(self, regex=re.compile(r'(passed|failed|todo): (\d+)'), **kwargs):
self.regex = regex
self.failed = 0
self.passed = 0
self.todo = 0
self.last_line = None
super(TestSummaryOutputParserHelper, self).__init__(**kwargs)
def parse_single_line(self, line):
super(TestSummaryOutputParserHelper, self).parse_single_line(line)
self.last_line = line
m = self.regex.search(line)
if m:
try:
setattr(self, m.group(1), int(m.group(2)))
except ValueError:
# ignore bad values
pass
def evaluate_parser(self):
# generate the TinderboxPrint line for TBPL
emphasize_fail_text = '<em class="testfail">%s</em>'
failed = "0"
if self.passed == 0 and self.failed == 0:
self.tsummary = emphasize_fail_text % "T-FAIL"
else:
if self.failed > 0:
failed = emphasize_fail_text % str(self.failed)
self.tsummary = "%d/%s/%d" % (self.passed, failed, self.todo)
def print_summary(self, suite_name):
self.evaluate_parser()
self.info("TinderboxPrint: %s: %s\n" % (suite_name, self.tsummary))
class DesktopUnittestOutputParser(OutputParser):
"""
A class that extends OutputParser such that it can parse the number of
passed/failed/todo tests from the output.
"""
def __init__(self, suite_category, **kwargs):
# worst_log_level defined already in DesktopUnittestOutputParser
# but is here to make pylint happy
self.worst_log_level = INFO
super(DesktopUnittestOutputParser, self).__init__(**kwargs)
self.summary_suite_re = TinderBoxPrintRe.get('%s_summary' % suite_category, {})
self.harness_error_re = TinderBoxPrintRe['harness_error']['minimum_regex']
self.full_harness_error_re = TinderBoxPrintRe['harness_error']['full_regex']
self.harness_retry_re = TinderBoxPrintRe['harness_error']['retry_regex']
self.fail_count = -1
self.pass_count = -1
# known_fail_count does not exist for some suites
self.known_fail_count = self.summary_suite_re.get('known_fail_group') and -1
self.crashed, self.leaked = False, False
self.tbpl_status = TBPL_SUCCESS
def parse_single_line(self, line):
if self.summary_suite_re:
summary_m = self.summary_suite_re['regex'].match(line) # pass/fail/todo
if summary_m:
message = ' %s' % line
log_level = INFO
# remove all the none values in groups() so this will work
# with all suites including mochitest browser-chrome
summary_match_list = [group for group in summary_m.groups()
if group is not None]
r = summary_match_list[0]
if self.summary_suite_re['pass_group'] in r:
if len(summary_match_list) > 1:
self.pass_count = int(summary_match_list[-1])
else:
# This handles suites that either pass or report
# number of failures. We need to set both
# pass and fail count in the pass case.
self.pass_count = 1
self.fail_count = 0
elif self.summary_suite_re['fail_group'] in r:
self.fail_count = int(summary_match_list[-1])
if self.fail_count > 0:
message += '\n One or more unittests failed.'
log_level = WARNING
                # If self.summary_suite_re['known_fail_group'] == None,
                # then r should not match it, so this test is fine as is.
elif self.summary_suite_re['known_fail_group'] in r:
self.known_fail_count = int(summary_match_list[-1])
self.log(message, log_level)
return # skip harness check and base parse_single_line
harness_match = self.harness_error_re.match(line)
if harness_match:
self.warning(' %s' % line)
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
full_harness_match = self.full_harness_error_re.match(line)
if full_harness_match:
r = full_harness_match.group(1)
if r == "application crashed":
self.crashed = True
elif r == "missing output line for total leaks!":
self.leaked = None
else:
self.leaked = True
return # skip base parse_single_line
if self.harness_retry_re.search(line):
self.critical(' %s' % line)
self.worst_log_level = self.worst_level(CRITICAL, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_RETRY, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
return # skip base parse_single_line
super(DesktopUnittestOutputParser, self).parse_single_line(line)
def evaluate_parser(self, return_code, success_codes=None):
success_codes = success_codes or [0]
if self.num_errors: # mozharness ran into a script error
self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
        # I have to put this outside of parse_single_line because this checks not
        # only whether fail_count was more than 0 but also whether fail_count is
        # still -1 (i.e. no fail summary line was found).
if self.fail_count != 0:
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# Account for the possibility that no test summary was output.
if self.pass_count <= 0 and self.fail_count <= 0 and \
(self.known_fail_count is None or self.known_fail_count <= 0):
self.error('No tests run or test summary not found')
self.worst_log_level = self.worst_level(WARNING,
self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING,
self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
if return_code not in success_codes:
self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# we can trust in parser.worst_log_level in either case
return (self.tbpl_status, self.worst_log_level)
def append_tinderboxprint_line(self, suite_name):
        # We are duplicating a condition (fail_count) from evaluate_parser and
        # parse_single_line, but at little cost since we are not parsing the
        # log more than once. I figured this method should stay isolated, as
        # it is only here for tbpl highlighted summaries and is not part of
        # buildbot evaluation or result status IIUC.
summary = tbox_print_summary(self.pass_count,
self.fail_count,
self.known_fail_count,
self.crashed,
self.leaked)
self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, summary))
class EmulatorMixin(object):
""" Currently dependent on both TooltoolMixin and TestingMixin)"""
def install_emulator_from_tooltool(self, manifest_path):
dirs = self.query_abs_dirs()
if self.tooltool_fetch(manifest_path, output_dir=dirs['abs_work_dir']):
self.fatal("Unable to download emulator via tooltool!")
unzip = self.query_exe("unzip")
unzip_cmd = [unzip, '-q', os.path.join(dirs['abs_work_dir'], "emulator.zip")]
self.run_command(unzip_cmd, cwd=dirs['abs_emulator_dir'], halt_on_failure=True,
fatal_exit_code=3)
def install_emulator(self):
dirs = self.query_abs_dirs()
self.mkdir_p(dirs['abs_emulator_dir'])
if self.config.get('emulator_url'):
self._download_unzip(self.config['emulator_url'], dirs['abs_emulator_dir'])
elif self.config.get('emulator_manifest'):
manifest_path = self.create_tooltool_manifest(self.config['emulator_manifest'])
self.install_emulator_from_tooltool(manifest_path)
elif self.buildbot_config:
props = self.buildbot_config.get('properties')
url = 'https://hg.mozilla.org/%s/raw-file/%s/b2g/test/emulator.manifest' % (
props['repo_path'], props['revision'])
manifest_path = self.download_file(url,
file_name='tooltool.tt',
parent_dir=dirs['abs_work_dir'])
if not manifest_path:
self.fatal("Can't download emulator manifest from %s" % url)
self.install_emulator_from_tooltool(manifest_path)
else:
self.fatal("Can't get emulator; set emulator_url or emulator_manifest in the config!")
| simar7/build-mozharness | mozharness/mozilla/testing/unittest.py | Python | mpl-2.0 | 11,491 | 0.001653 |
## Automatically adapted for numpy.oldnumeric May 17, 2011 by -c
# Natural Language Toolkit: Classifiers
#
# Copyright (C) 2001 University of Pennsylvania
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
# $Id: __init__.py,v 1.2 2003/10/27 04:41:28 trevorcohn1 Exp $
# To do:
# - make sure variable names are used consistantly (fd_list, etc.)
# - remove any confusions about the type of labels (string vs
# immutable)
"""
Classes and interfaces used to classify texts into categories. A
X{category} is a coherent group of texts. This module focuses on
X{single-category text classification}, in which:
  - The set of categories is known.
- The number of categories is finite.
- Each text belongs to exactly one category.
A X{classifier} chooses the most likely category for a given text.
Classifiers can also be used to estimate the probability that a given
text belongs to a category. This module defines the C{ClassifierI}
interface for creating classifiers. Note that classifiers can operate
on any kind of text. For example, classifiers can be used:
- to group documents by topic
- to group words by part of speech
- to group acoustic signals by which phoneme they represent
- to group sentences by their author
Each category is uniquely defined by a X{label}, such as C{'sports'}
or C{'news'}. Labels are typically C{string}s or C{integer}s, but can
be any immutable type. Classified texts are represented by C{Tokens}
whose types are C{LabeledText} objects. A C{LabeledText} consists of
a label and a text.
C{ClassifierTrainerI} is a general interface for classes that build
classifiers from training data.
C{accuracy} and C{log_likelihood} provide simple metrics for
evaluating the performance of a classifier.
@group Data Types: LabeledText
@group Interfaces: ClassifierI, ClassifierTrainerI
@group Evaluation: accuracy, log_likelihood, ConfusionMatrix
@sort: ClassifierI, ClassifierTrainerI
"""
from nltk.token import Token
from nltk.chktype import chktype as _chktype
from nltk.chktype import classeq as _classeq
import math, numpy.oldnumeric as Numeric, types, operator
##//////////////////////////////////////////////////////
## Texts and Labels
##//////////////////////////////////////////////////////
# A text can be any object. Texts are required to be immutable, since
# they are used as the type of a token.
# A label can be any immutable object. Typically, labels are either
# integers or strings.
##//////////////////////////////////////////////////////
## LabeledTexts
##//////////////////////////////////////////////////////
class LabeledText:
"""
A type consisting of a text and a label. A typical example would
be a document labeled with a category, such as \"sports\".
The text and the label are both required to be immutable. Labels
    are usually short strings or integers.
@type _text: (immutable)
@ivar _text: The C{LabeledText}'s text.
@type _label: (immutable)
@ivar _label: The text type's label. This specifies which
category the text belongs to.
"""
def __init__(self, text, label):
"""
        Construct a new C{LabeledText}.
        @param text: The new C{LabeledText}'s text.
        @type text: (immutable)
        @param label: The new C{LabeledText}'s label. This specifies
which category the text belongs to.
@type label: (immutable)
"""
self._text = text
self._label = label
def text(self):
"""
        @return: this C{LabeledText}'s text.
@rtype: (immutable)
"""
return self._text
def label(self):
"""
        @return: this C{LabeledText}'s label.
@rtype: (immutable)
"""
return self._label
def __lt__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __le__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __gt__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __ge__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __cmp__(self, other):
"""
        @return: 0 if this C{LabeledText} is equal to C{other}. In
            particular, return 0 iff C{other} is a C{LabeledText},
C{self.text()==other.text()}, and
C{self.label()==other.label()}; return a nonzero number
otherwise.
@rtype: C{int}
@param other: The C{LabeledText} to compare this
C{LabeledText} with.
@type other: C{LabeledText}
"""
        if not _classeq(self, other): return 1  # different classes are never equal
return not (self._text == other._text and
self._label == other._label)
def __hash__(self):
return hash( (self._text, self._label) )
def __repr__(self):
"""
@return: a string representation of this labeled text.
@rtype: C{string}
"""
return "%r/%r" % (self._text, self._label)
##//////////////////////////////////////////////////////
## Classiifer Interface
##//////////////////////////////////////////////////////
class ClassifierI:
"""
A processing interface for categorizing texts. The set of
categories used by a classifier must be fixed, and finite. Each
category is uniquely defined by a X{label}, such as C{'sports'} or
C{'news'}. Labels are typically C{string}s or C{integer}s, but
can be any immutable type. Classified texts are represented by
C{Tokens} whose types are C{LabeledText} objects.
Classifiers are required to implement two methods:
- C{classify}: determines which label is most appropriate for a
given text token, and returns a labeled text token with that
label.
- C{labels}: returns the list of category labels that are used
by this classifier.
    Classifiers are also encouraged to implement the following
methods:
- C{distribution}: return a probability distribution that
specifies M{P(label|text)} for a given text token.
- C{prob}: returns M{P(label|text)} for a given labeled text
token.
- C{distribution_dictionary}: Return a dictionary that maps from
labels to probabilities.
- C{distribution_list}: Return a sequence, specifying the
probability of each label.
Classes implementing the ClassifierI interface may choose to only
support certain classes of tokens for input. If a method is
unable to return a correct result because it is given an
unsupported class of token, then it should raise a
NotImplementedError.
Typically, classifier classes encode specific classifier models;
but do not include the algorithms for training the classifiers.
Instead, C{ClassifierTrainer}s are used to generate classifiers
from training data.
@see: C{ClassifierTrainerI}
"""
def labels(self):
"""
@return: the list of category labels used by this classifier.
@rtype: C{list} of (immutable)
"""
raise AssertionError()
def classify(self, unlabeled_token):
"""
Determine which label is most appropriate for the given text
token, and return a C{LabeledText} token constructed from the
given text token and the chosen label.
@return: a C{LabeledText} token whose label is the most
appropriate label for the given token; whose text is the
given token's text; and whose location is the given
token's location.
@rtype: C{Token} with type C{LabeledText}
@param unlabeled_token: The text to be classified.
@type unlabeled_token: C{Token}
"""
raise AssertionError()
def distribution(self, unlabeled_token):
"""
Return a probability distribution indicating the likelihood
that C{unlabeled_token} is a member of each category.
@return: a probability distribution whose samples are
tokens derived from C{unlabeled_token}. The samples
are C{LabeledText} tokens whose text is
C{unlabeled_token}'s text; and whose location is
C{unlabeled_token}'s location. The probability of each
sample indicates the likelihood that the unlabeled token
belongs to each label's category.
@rtype: C{ProbDistI}
@param unlabeled_token: The text to be classified.
@type unlabeled_token: C{Token}
"""
raise NotImplementedError()
def prob(self, labeled_token):
"""
@return: The probability that C{labeled_token}'s text belongs
to the category indicated by C{labeled_token}'s label.
@rtype: C{float}
@param labeled_token: The labeled token for which to generate
a probability estimate.
@type labeled_token: C{Token} with type C{LabeledText}
"""
raise NotImplementedError()
def distribution_dictionary(self, unlabeled_token):
"""
Return a dictionary indicating the likelihood that
C{unlabeled_token} is a member of each category.
@return: a dictionary that maps from each label to the
probability that C{unlabeled_token} is a member of that
label's category.
@rtype: C{dictionary} from (immutable) to C{float}
@param unlabeled_token: The text to be classified.
@type unlabeled_token: C{Token}
"""
raise NotImplementedError()
def distribution_list(self, unlabeled_token):
"""
Return a list indicating the likelihood that
C{unlabeled_token} is a member of each category.
@return: a list of probabilities. The M{i}th element of the
list is the probability that C{unlabeled_text} belongs to
C{labels()[M{i}]}'s category.
@rtype: C{sequence} of C{float}
@param unlabeled_token: The text to be classified.
@type unlabeled_token: C{Token}
"""
raise NotImplementedError()
##//////////////////////////////////////////////////////
## Classiifer Trainer Interface
##//////////////////////////////////////////////////////
class ClassifierTrainerI:
"""
A processing interface for constructing new classifiers, using
training data. Classifier trainers must implement one method,
C{train}, which generates a new classifier from a list of training
samples.
"""
def train(self, labeled_tokens, **kwargs):
"""
Train a new classifier, using the given training samples.
@type labeled_tokens: C{list} of (C{Token} with type C{LabeledText})
@param labeled_tokens: A list of correctly labeled texts.
These texts will be used as training samples to construct
new classifiers.
@param kwargs: Keyword arguments.
- C{labels}: The set of possible labels. If none is
given, then the set of all labels attested in the
training data will be used instead. (type=C{list} of
(immutable)).
@return: A new classifier, trained from the given labeled
tokens.
@rtype: C{ClassifierI}
"""
raise AssertionError()
def find_labels(labeled_tokens):
"""
@return: A list of all labels that are attested in the given list
of labeled tokens.
@rtype: C{list} of (immutable)
@param labeled_tokens: The list of labeled tokens from which to
extract labels.
@type labeled_tokens: C{list} of (C{Token} with type C{LabeledText})
"""
assert _chktype(1, labeled_tokens, [Token], (Token,))
labelmap = {}
for token in labeled_tokens:
labelmap[token.type().label()] = 1
return labelmap.keys()
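# Example: tokens labeled 'spam' and 'ham' yield ['spam', 'ham'] (dictionary
# key order, so the ordering is unspecified).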
def label_tokens(unlabeled_tokens, label):
"""
@return: a list of labeled tokens, whose text and location
correspond to C{unlabeled_tokens}, and whose labels are
C{label}.
@rtype: C{list} of (C{Token} with type C{LabeledText})
@param unlabeled_tokens: The list of tokens for which a labeled
token list should be created.
@type unlabeled_tokens: C{list} of C{Token}
@param label: The label for the new labeled tokens.
@type label: (immutable)
"""
assert _chktype(1, unlabeled_tokens, [Token], (Token,))
return [Token(LabeledText(tok.type(), label), tok.loc())
for tok in unlabeled_tokens]
##//////////////////////////////////////////////////////
## Evaluation Metrics
##//////////////////////////////////////////////////////
def accuracy(classifier, labeled_tokens):
"""
@rtype: C{float}
@return: the given classifier model's accuracy on the given list
of labeled tokens. This float between zero and one indicates
what proportion of the tokens the model would label correctly.
@param labeled_tokens: The tokens for which the model's
accuracy should be computed.
@type labeled_tokens: C{list} of (C{Token} with type
C{LabeledText})
"""
assert _chktype(1, classifier, ClassifierI)
assert _chktype(2, labeled_tokens, [Token], (Token,))
total = 0
correct = 0
for ltok in labeled_tokens:
utok = Token(ltok.type().text(), ltok.loc())
if classifier.classify(utok) == ltok:
correct += 1
total += 1
return float(correct)/total
def log_likelihood(classifier, labeled_tokens):
"""
Evaluate the log likelihood of the given list of labeled
tokens for the given classifier model. This nonpositive float
gives an indication of how well the classifier models the
data. Values closer to zero indicate that it models it more
accurately.
@rtype: C{float}
@return: The log likelihood of C{labeled_tokens} for the given
classifier model.
@param labeled_tokens: The tokens whose log likelihood should
be computed.
@type labeled_tokens: C{list} of (C{Token} with type
C{LabeledText})
"""
assert _chktype(1, classifier, ClassifierI)
assert _chktype(2, labeled_tokens, [Token], (Token,))
likelihood = 0.0
for ltok in labeled_tokens:
utok = Token(ltok.type().text(), ltok.loc())
label = ltok.type().label()
dist = classifier.distribution_dictionary(utok)
if dist[label] == 0:
# Use some approximation to infinity. What this does
# depends on your system's float implementation.
likelihood -= 1e1000
else:
likelihood += math.log(dist[label])
return likelihood / len(labeled_tokens)
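# Note that this returns the *average* log likelihood per token, so scores
# from differently sized test sets are comparable.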
class ConfusionMatrix:
def __init__(self, classifier, labeled_tokens):
"""
Entry conf[i][j] is the number of times a document with label i
was given label j.
"""
assert _chktype(1, classifier, ClassifierI)
assert _chktype(2, labeled_tokens, [Token], (Token,))
try: import numpy.oldnumeric as Numeric
except: raise ImportError('ConfusionMatrix requires Numeric')
# Extract the labels.
ldict = {}
for ltok in labeled_tokens: ldict[ltok.type().label()] = 1
labels = ldict.keys()
# Construct a label->index dictionary
indices = {}
for i in range(len(labels)): indices[labels[i]] = i
confusion = Numeric.zeros( (len(labels), len(labels)) )
for ltok in labeled_tokens:
utok = Token(ltok.type().text(), ltok.loc())
ctok = classifier.classify(utok)
confusion[indices[ltok.type().label()],
indices[ctok.type().label()]] += 1
self._labels = labels
self._confusion = confusion
self._max_conf = max(Numeric.resize(confusion, (len(labels)**2,)))
def __getitem__(self, index):
assert _chktype(1, index, types.IntType)
return self._confusion[index[0], index[1]]
def __str__(self):
confusion = self._confusion
labels = self._labels
indexlen = len(`len(labels)`)
entrylen = max(indexlen, len(`self._max_conf`))
index_format = '%' + `indexlen` + 'd | '
entry_format = '%' + `entrylen` + 'd '
str = (' '*(indexlen)) + ' | '
for j in range(len(labels)):
str += (entry_format % j)
str += '\n'
str += ('-' * ((entrylen+1) * len(labels) + indexlen + 2)) + '\n'
for i in range(len(labels)):
str += index_format % i
for j in range(len(labels)):
str += entry_format % confusion[i,j]
str += '\n'
return str
def key(self):
labels = self._labels
str = 'Label key: (row = true label; col = classifier label)\n'
indexlen = len(`len(labels)`)
key_format = ' %'+`indexlen`+'d: %s\n'
for i in range(len(labels)):
str += key_format % (i, labels[i])
return str
def cross_validate(trainer, labeled_tokens, n_folds=10, target=None, trace=False):
"""
Perform N-fold cross validation on the given classifier. This divides the
tokens into N equally sized groups (subject to rounding), then performs N
    training and testing passes. Each pass involves testing on a single fold
    and training on the remaining folds. This way every instance is used
exactly once for testing. The results (predictive accuracy) are averaged
over the N trials. The mean and standard deviation are returned as a
tuple.
"""
assert len(labeled_tokens) >= n_folds
# should randomly reorder labeled_tokens first?
folds = []
n = len(labeled_tokens)
for i in range(n_folds):
start = i * n / n_folds
end = (i + 1) * n / n_folds
folds.append(labeled_tokens[start:end])
if trace:
print 'cross_validate - using %d folds of %d items each approx' \
% (n_folds, len(folds[0]))
accuracies = []
precisions = []
recalls = []
for i in range(n_folds):
training = folds[:]
testing = training[i]
del training[i]
training = reduce(operator.add, training) # flatten
if trace:
print 'cross_validate [%d] - training classifier...' % (i + 1)
import time
start = time.time()
classifier = trainer.train(training)
if trace:
end = time.time()
print 'cross_validate elapsed time %.2f seconds' % (end - start)
print 'cross_validate [%d] - testing classifier...' % (i + 1)
start = end
yes = no = 0
tp = tn = fp = fn = 0
for ltok in testing:
utok = Token(ltok.type().text(), ltok.loc())
if trace >= 2:
print 'cross_validate [%d] - given' % (i + 1), ltok
ctok = classifier.classify(utok)
if trace >= 2:
print 'cross_validate [%d] - classified' % (i + 1),
print ctok.type().label()
if ltok.type().label() == ctok.type().label():
yes += 1
else:
no += 1
if target:
if ltok.type().label() == target:
if ltok.type().label() == ctok.type().label():
tp += 1
else:
fn += 1
else:
                if ctok.type().label() == target:
                    fp += 1  # predicted the target label for a non-target item
                else:
                    tn += 1
acc = float(yes) / (yes + no)
accuracies.append(acc)
if target:
precision = recall = None
try:
recall = float(tp) / (tp + fn)
recalls.append(recall)
except ZeroDivisionError:
pass
try:
precision = float(tp) / (tp + fp)
precisions.append(precision)
except ZeroDivisionError:
pass
if trace:
end = time.time()
print 'cross_validate elapsed time %.2f seconds' % (end - start)
print 'cross_validate [%d] - accuracy %.3f' % (i + 1, acc)
if target:
print 'cross_validate [%d] - precision %s recall %s' \
% (i + 1, precision, recall)
if trace:
print 'cross_validate - calculating mean and variance'
# find the mean
mean = reduce(operator.add, accuracies) / float(len(accuracies))
if target:
        if len(recalls) > 0:
            recall = reduce(operator.add, recalls) / float(len(recalls))
        else:
            recall = None
if len(precisions) > 0:
precision = reduce(operator.add, precisions) / float(len(precisions))
else:
precision = None
# find the standard deviation
var = 0
for i in range(n_folds):
        var += (accuracies[i] - mean) ** 2
    sd = (var / n_folds) ** 0.5
if target:
return mean, sd, precision, recall
else:
return mean, sd
| ronaldahmed/robot-navigation | neural-navigation-with-lstm/MARCO/nltk_contrib/unimelb/tacohn/classifier/__init__.py | Python | mit | 21,922 | 0.003421 |