repo_name (string, len 5–100) | path (string, len 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, len 0–8.16k) | middle (string, len 3–512) | suffix (string, len 0–8.17k)
---|---|---|---|---|---|---|---|---
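The prefix, middle, and suffix columns indicate a fill-in-the-middle (FIM) layout: each row stores one source file split into three spans, with the short middle span (3–512 characters) serving as the completion target. A minimal sketch of how such a table might be consumed, assuming it is published as a Hugging Face dataset; the dataset name below is hypothetical:

# Minimal sketch: reassemble one FIM record. Assumes the table is loadable
# with the Hugging Face `datasets` library; "example/python-fim" is a
# hypothetical dataset name, the field names come from the header above.
from datasets import load_dataset

ds = load_dataset("example/python-fim", split="train")  # hypothetical name
row = ds[0]
full_source = row["prefix"] + row["middle"] + row["suffix"]  # original file text
print(row["repo_name"], row["path"], row["license"], row["score"])
print(full_source[:200])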
wavelets/keras | keras/datasets/reuters.py | Python | mit | 3,237 | 0.003089 |
# -*- coding: utf-8 -*-
from data_utils import get_file
import string
import random
import cPickle
def make_reuters_dataset(path='datasets/temp/reuters21578/', min_samples_per_topic=15):
import os
import re
from preprocessing.text import Tokenizer
wire_topics = []
topic_counts = {}
wire_bodies = []
for fname in os.listdir(path):
if 'sgm' in fname:
s = open(path + fname).read()
tag = '<TOPICS>'
while tag in s:
s = s[s.find(tag)+len(tag):]
topics = s[:s.find('</')]
if topics and not '</D><D>' in topics:
topic = topics.replace('<D>', '').replace('</D>', '')
wire_topics.append(topic)
topic_counts[topic] = topic_counts.get(topic, 0) + 1
else:
continue
bodytag = '<BODY>'
body = s[s.find(bodytag)+len(bodytag):]
body = body[:body.find('</')]
wire_bodies.append(body)
# only keep most common topics
items = topic_counts.items()
items.sort(key = lambda x: x[1])
kept_topics = set()
for x in items:
print x[0] + ': ' + str(x[1])
if x[1] >= min_samples_per_topic:
kept_topics.add(x[0])
print '-'
print 'Kept topics:', len(kept_topics)
# filter wires with rare topics
kept_wires = []
labels = []
topic_indexes = {}
for t, b in zip(wire_topics, wire_bodies):
if t in kept_topics:
if t not in topic_indexes:
topic_index = len(topic_indexes)
topic_indexes[t] = topic_index
else:
topic_index = topic_indexes[t]
labels.append(topic_index)
kept_wires.append(b)
# vectorize wires
tokenizer = Tokenizer()
tokenizer.fit(kept_wires)
X = tokenizer.transform(kept_wires)
print 'Sanity check:'
for w in ["banana", "oil", "chocolate", "the", "dsft"]:
print '...index of', w, ':', tokenizer.word_index.get(w)
dataset = (X, labels)
print '-'
print 'Saving...'
cPickle.dump(dataset, open('datasets/data/reuters.pkl', 'w'))
def load_data(path="reuters.pkl", nb_words=100000, maxlen=None, test_split=0.2, seed=113):
path = get_file(path, origin="https://s3.amazonaws.com/text-datasets/reuters.pkl")
f = open(path, 'rb')
X, labels = cPickle.load(f)
f.close()
random.seed(seed)
random.shuffle(X)
random.seed(seed)
random.shuffle(labels)
if maxlen:
new_X = []
new_labels = []
for x, y in zip(X, labels):
if len(x) < maxlen:
new_X.append(x)
new_labels.append(y)
X = new_X
labels = new_labels
X = [[1 if w >= nb_words else w for w in x] for x in X]
X_train = X[:int(len(X)*(1-test_split))]
y_train = labels[:int(len(X)*(1-test_split))]
X_test = X[int(len(X)*(1-test_split)):]
y_test = labels[int(len(X)*(1-test_split)):]
return (X_train, y_train), (X_test, y_test)
if __name__ == "__main__":
make_reuters_dataset()
(X_train, y_train), (X_test, y_test) = load_data()
enthought/pyside | tests/QtWebKit/webframe_test.py | Python | lgpl-2.1 | 899 | 0.002225 |
import unittest
import sys
from PySide.QtCore import QObject, SIGNAL, QUrl
from PySide.QtWebKit import *
from PySide.QtNetwork import QNetworkRequest
from helper import adjust_filename, UsesQApplication
class TestWebFrame(UsesQApplication):
def load_finished(self, ok):
self.assert_(ok)
page = self.view.page()
self.assert_(page)
frame = page.mainFrame()
self.assert_(frame)
meta = frame.metaData()
self.assertEqual(meta['description'], ['PySide Test METADATA.'])
self.app.quit()
def testMetaData(self):
self.view = QWebView()
QObject.connect(self.view, SIGNAL('loadFinished(bool)'),
self.load_finished)
url = QUrl.fromLocalFile(adjust_filename('fox.html', __file__))
self.view.setUrl(url)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
Elico-Corp/odoo-addons | multiprice_to_product_form/__openerp__.py | Python | agpl-3.0 | 1,385 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2015 Elico Corp (<http://www.elico-corp.com>)
# Authors: Siyuan Gu
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Add multiprice to product form view',
'version': '8.0.1.0.0',
'category': 'Sales',
'depends': ['product_multiprices'],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'support': 'support@elico-corp.com',
'data': ['views/product.xml'],
'installable': True,
'application': False
}
NUKnightLab/slackdice | app.py | Python | mit | 1,779 | 0.003373 |
from flask import Flask, request, jsonify
import random
import re
import sys
app = Flask(__name__)
SPEC = re.compile('^(\d+)d(\d+) ?(\w+)?$')
HIDDEN = ('hide', 'hidden', 'invisible', 'ephemeral', 'private')
USAGE = 'USAGE:\n' \
'`/roll [n]d[x] [options]`\n' \
'where:\n' \
' n == number of dice\n' \
' x == number of sides on each die\n' \
'e.g. `/roll 3d6` will roll 3 6-sided dice. ' \
'[options] may be any of (hide|hidden|invisible|ephemeral|private) ' \
'for a private roll.'
def do_roll(spec):
match = SPEC.match(spec)
if match is None:
return {
'response_type': 'ephemeral',
'text': 'ERROR: invalid roll command `%s`\n\n%s' % (
spec, USAGE)
}
num = int(match.group(1))
size = int(match.group(2))
flag = match.group(3)
if flag is not None and flag not in HIDDEN:
return {
'response_type': 'ephemeral',
'text': 'ERROR: unrecognized modifier `%s`' % flag
}
vals = []
for i in range(0, num):
vals.append(random.randint(1, size))
data = {
'response_type': 'ephemeral' if flag in HIDDEN else 'in_channel'
}
if num == 1:
data['text'] = str(vals[0])
else:
data['text'] = '%s = %d' % (
' + '.join([str(v) for v in vals]), sum(vals))
return data
@app.route("/", methods=['GET', 'POST'])
def roll():
try:
if request.method == 'POST':
spec = request.form['text']
else:
spec = request.args['spec']
return jsonify(do_roll(spec))
except:
return jsonify({
'response_type': 'ephemeral',
'text': USAGE
})
if __name__ == "__main__":
app.run(debug=True)
bfontaine/p7ldap | p7ldap/connection.py | Python | mit | 619 | 0.004847 |
# -*- coding: UTF-8 -*-
import ldap
class Connection:
def __init__(self, url='ldap://annuaire.math.univ-paris-diderot.fr:389', \
base='ou=users,dc=chevaleret,dc=univ-paris-diderot,dc=fr'):
self.base = base
self.url = url
self.con = ldap.initialize(self.url)
self.con.bind_s('', '')
def __del__(self):
self.con.unbind()
def search(self, kw, field='cn'):
"""
Search someone in the LDAP directory using a keyword
"""
qry = "%s=*%s*" % (field, kw)
return self.con.search_s(self.base, ldap.SCOPE_SUBTREE, qry, None)
chriskoz/OctoPrint | src/octoprint/plugins/softwareupdate/updaters/python_updater.py | Python | agpl-3.0 | 565 | 0.008881 |
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
def can_perform_update(target, check):
return "python_updater" in check and check["python_upd
|
ater"] is not None
def perform_update(target, check, target_version, log_cb=None):
return check["python_updater"].perform_update(target, check, target_version, log_cb=log_cb)
oudalab/phyllo | phyllo/extractors/asconiusDB.py | Python | apache-2.0 | 2,970 | 0.005051 |
import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
# Note: The original ordering of chapters and verses was extremely complex.
# As a result, chapters are the bold headers and subsections are each p tag.
# Case 1: Sections split by numbers (Roman or not) followed by a period, or bracketed. Subsections split by <p> tags
def parsecase1(ptags, c, colltitle, title, author, date, URL):
# ptags contains all <p> tags. c is the cursor object.
chapter = '-1'
verse = 0
for p in ptags:
# make sure it's not a paragraph without the main text
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                         'internal_navigation']:  # these are not part of the main text
continue
except:
pass
passage = ''
text = p.get_text().strip()
# Skip empty paragraphs. and skip the last part with the collection link.
if len(text) <= 0 or text.startswith('Asconius\n'):
continue
chapterb = p.find('b')
if chapterb is not None and text[0].isalpha():
test = chapterb.find(text = True)
if text == test:
chapter = text
verse = 0
continue
passage = text
verse+=1
if passage.startswith('Asconius'):
continue
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, passage.strip(), URL, 'prose'))
def main():
collURL = 'http://www.thelatinlibrary.com/asconius.html'
collOpen = urllib.request.urlopen(collURL)
collSOUP = BeautifulSoup(collOpen, 'html5lib')
author = collSOUP.title.string.strip()
colltitle = 'QUINTUS ASCONIUS PEDIANUS'
date = 'c. 9 B.C. - c. A.D. 76'
textsURL = [collURL]
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author='Asconius'")
for url in textsURL:
openurl = urllib.request.urlopen(url)
textsoup = BeautifulSoup(openurl, 'html5lib')
try:
title = textsoup.title.string.split(':')[1].strip()
except:
title = textsoup.title.string.strip()
getp = textsoup.find_all('p')
parsecase1(getp, c, colltitle, title, author, date, url)
logger.info("Program runs successfully.")
if __name__ == '__main__':
main()
mitzvotech/honorroll | lib/cleanup.py | Python | mit | 1,317 | 0.001519 |
from pymongo import MongoClient
import os
from bson import json_util
from numpy import unique
client = MongoClient(os.environ.get("MONGOLAB_URI"))
db = client[os.environ.get("MONGOLAB_DB")]
class Organization:
def __init__(self):
self.orgs = self.get_orgs()
self.count = len(self.orgs)
def get_orgs(self):
out = []
for org in db.organizations.find():
out.append(org)
return out
def get_unique_orgs(self):
out = []
for org in self.orgs:
try:
out.append(org["organization_name"].strip())
except:
pass
return unique(out)
class Attorney:
def __init__(self):
self.attorneys = self.get_attorneys()
self.count = len(self.attorneys)
def get_attorneys(self):
out = []
for org in db.attorneys.find():
out.append(org)
return out
def get_unique_orgs(self):
out = []
for attorney in self.attorneys:
try:
out.append(attorney["organization_name"].strip())
except:
pass
return unique(out)
if __name__ == "__main__":
org = Organization()
attorney = Attorney()
print(json_util.dumps(attorney.get_unique_orgs(), indent=2))
MoarCatz/chat-server | installer.py | Python | gpl-3.0 | 2,510 | 0.001195 |
import os, psycopg2, rsa
from urllib.parse import urlparse
class Installer:
def connect(self):
self.url = urlparse(os.environ["DATABASE_URL"])
self.db = psycopg2.connect(database=self.url.path[1:],
user=self.url.username,
password=self.url.password,
host=self.url.hostname,
port=self.url.port)
def create_database(self):
c = self.db.cursor()
c.execute('''CREATE TABLE users (name text PRIMARY KEY,
password text,
friends text ARRAY,
favorites text ARRAY,
blacklist text ARRAY,
dialogs text ARRAY)''')
c.execute('''CREATE TABLE profiles (name text PRIMARY KEY
REFERENCES users(name),
status text,
email text,
birthday bigint,
about text,
image bytea)''')
c.execute('''CREATE TABLE sessions (name text,
pub_key text ARRAY,
ip text UNIQUE,
last_active bigint)''')
c.execute('''CREATE TABLE requests (from_who text,
to_who text,
message text)''')
c.execute('''CREATE TABLE key (pub_key text ARRAY,
priv_key text ARRAY)''')
self.db.commit()
c.close()
def seed_database(self):
pubkey, privkey = rsa.newkeys(2048, accurate = False)
c = self.db.cursor()
c.execute('''INSERT INTO key VALUES (%s, %s)''',
(list(map(str, pubkey.__getstate__())),
list(map(str, privkey.__getstate__()))))
c.close()
self.db.commit()
self.db.close()
def install(self):
self.connect()
self.create_database()
self.seed_database()
if __name__ == '__main__':
Installer().install()
thuydang/ocrfeeder | src/ocrfeeder/odf/office.py | Python | gpl-3.0 | 3,311 | 0.028399 |
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import OFFICENS
from element import Element
from draw import StyleRefElement
# Autogenerated
def Annotation(**args):
return StyleRefElement(qname = (OFFICENS,'annotation'), **args)
def AutomaticStyles(**args):
return Element(qname = (OFFICENS, 'automatic-styles'), **args)
def BinaryData(**args):
return Element(qname = (OFFICENS,'binary-data'), **args)
def Body(**args):
return Element(qname = (OFFICENS, 'body'), **args)
def ChangeInfo(**args):
return Element(qname = (OFFICENS,'change-info'), **args)
def Chart(**args):
return Element(qname = (OFFICENS,'chart'), **args)
def DdeSource(**args):
return Element(qname = (OFFICENS,'dde-source'), **args)
def Document(**args):
return Element(qname = (OFFICENS,'document'), **args)
def DocumentContent(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-content'), version=version, **args)
def DocumentMeta(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-meta'), version=version, **args)
def DocumentSettings(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-settings'), version=version, **args)
def DocumentStyles(version="1.0", **args):
return Element(qname = (OFFICENS, 'document-styles'), version=version, **args)
def Drawing(**args):
return Element(qname = (OFFICENS,'drawing'), **args)
def EventListeners(**args):
return Element(qname = (OFFICENS,'event-listeners'), **args)
def FontFaceDecls(**args):
return Element(qname = (OFFICENS, 'font-face-decls'), **args)
def Forms(**args):
return Element(qname = (OFFICENS,'forms'), **args)
def Image(**args):
return Element(qname = (OFFICENS,'image'), **args)
def MasterStyles(**args):
return Element(qname = (OFFICENS, 'master-styles'), **args)
def Meta(**args):
return Element(qname = (OFFICENS, 'meta'), **args)
def Presentation(**args):
return Element(qname = (OFFICENS,'presentation'), **args)
def Script(**args):
return Element(qname = (OFFICENS, 'script'), **args)
def Scripts(**args):
return Element(qname = (OFFICENS, 'scripts'), **args)
def Settings(**args):
return Element(qname = (OFFICENS, 'settings'), **args)
def Spreadsheet(**args):
return Element(qname = (OFFICENS, 'spreadsheet'), **args)
def Styles(**args):
return Element(qname = (OFFICENS, 'styles'), **args)
def Text(**args):
return Element(qname = (OFFICENS, 'text'), **args)
# Autogenerated end
jdepoix/goto_cloud | goto_cloud/test_assets/remote_host_patch_metaclass.py | Python | mit | 1,494 | 0.004016 |
from unittest.mock import patch
def mocked_execute(remote_executor, command, *args, **kwargs):
from .test_assets import TestAsset
return TestAsset.REMOTE_HOST_MOCKS[remote_executor.hostname].execute(
command
)
class PatchRemoteHostMeta(type):
"""
can be used as a metaclass for a TestCase to patch relevant methods, required to mock a RemoteHost
"""
MOCKED_EXECUTE = mocked_execute
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
patch('remote_execution.remote_execution.SshRemoteExecutor.connect', lambda self: None)(self)
patch('remote_execution.remote_execution.SshRemoteExecutor.close', lambda self: None)(self)
patch('remote_execution.remote_execution.SshRemoteExecutor.is_connected', lambda self: True)(self)
patch(
'remote_execution.remote_execution.SshRemoteExecutor._execute',
PatchRemoteHostMeta.MOCKED_EXECUTE
)(self)
class PatchTrackedRemoteExecutionMeta(PatchRemoteHostMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.executed_commands = set()
def tracked_mocked_execute(remote_host, command, *args, **kwargs):
self.executed_commands.add(command)
return PatchRemoteHostMeta.MOCKED_EXECUTE(remote_host, command)
patch(
'remote_execution.remote_execution.SshRemoteExecutor._execute',
tracked_mocked_execute
)(self)
anehx/anonboard-backend | core/models.py | Python | mit | 2,696 | 0.006677 |
from django.db import models
from django.utils import timezone
from user.models import User
from adminsortable.models import SortableMixin
class Topic(SortableMixin):
'''
Model to define the behaviour of a topic
'''
class Meta:
'''
Meta options for topic model
Defines the default ordering
'''
ordering = [ 'order' ]
name = models.CharField(max_length=50, unique=True)
identifier = models.SlugField(max_length=50, unique=True)
description = models.CharField(max_length=255)
order = models.PositiveIntegerField(
default=0,
editable=False,
db_index=True
)
@property
def threads_last_day(self):
'''
Counts the amount of threads in this topic
created in the last 24 hours
:return: The amount of threads
:rtype: int
'''
last_day = timezone.now() - timezone.timedelta(days=1)
return len(Thread.objects.filter(topic=self, created__gte=last_day))
def __str__(self):
'''
Represents a topic as a string
:return: The name of the topic
:rtype: str
'''
return self.name
class Thread(models.Model):
'''
Model to define the behaviour of a thread
Is related to a topic
'''
class Meta:
'''
Meta options for thread model
Defines the default ordering
'''
ordering = [ '-created' ]
user = models.ForeignKey(User)
topic = models.ForeignKey(Topic, related_name='threads')
title = models.CharField(max_length=50)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
'''
Represents a thread as a string
:return: The title of the thread
:rtype: str
'''
return self.title
class Comment(models.Model):
'''
Model to define the behaviour of a comment
Is related to a user and a thread
'''
class Meta:
'''
Meta options for comment model
Defines the default ordering
'''
ordering = [ 'created' ]
user = models.ForeignKey(User)
thread = models.ForeignKey(Thread, related_name='comments')
content = models.CharField(max_length=140)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
'''
Represents a comment as a string
:return: The user identifier and the thread title
:rtype: str
'''
return 'Comment from %s to thread %s' % (
str(self.user),
str(self.thread)
)
swirlingsand/deep-learning-foundations | play/multi-layer.py | Python | mit | 771 | 0.001297 |
import numpy as np
def sigmoid(x):
"""
Calculate sigmoid
"""
return 1 / (1 + np.exp(-x))
# Network size
N_input = 4
N_hidden = 3
N_output = 2
np.random.seed(42)
# Make some fake data
X = np.random.randn(4)
weights_input_to_hidden = np.random.normal(
0, scale=0.1, size=(N_input, N_hidden))
weights_hidden_to_output = np.random.normal(
0, scale=0.1, size=(N_hidden, N_output))
# Make a forward pass through the network
hidden_layer_in = np.dot(X, weights_input_to_hidden)
hidden_layer_out = sigmoid(hidden_layer_in)
print('Hidden-layer Output:')
print(hidden_layer_out)
output_layer_in = np.dot(hidden_layer_out, weights_hidden_to_output)
output_layer_out = sigmoid(output_layer_in)
print('Output-layer Output:')
print(output_layer_out)
cysuncn/python | spark/crm/PROC_O_FIN_FIN_PRODAREAINFO.py | Python | gpl-3.0 | 7,144 | 0.012726 |
#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_FIN_FIN_PRODAREAINFO').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates needed for processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# Previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# First day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# Last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
O_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/O_CM_FIN_PRODAREAINFO/*')
O_CM_FIN_PRODAREAINFO.registerTempTable("O_CM_FIN_PRODAREAINFO")
# Task [12] 001-01::
V_STEP = V_STEP + 1
F_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/F_CM_FIN_PRODAREAINFO_BK/'+V_DT_LD+'.parquet/*')
F_CM_FIN_PRODAREAINFO.registerTempTable("F_CM_FIN_PRODAREAINFO")
sql = """
SELECT A.PRODCODE AS PRODCODE
,A.PRODZONENO AS PRODZONENO
,A.AGTBRNO AS AGTBRNO
,A.AGTZONENO AS AGTZONENO
,A.AGTZONENA AS AGTZONENA
,A.FEERATE AS FEERATE
,A.ZONENOMINAMT AS ZONENOMINAMT
,A.ZONENOMAXAMT AS ZONENOMAXAMT
,A.AGTAMT AS AGTAMT
,A.AGTSTATE AS AGTSTATE
,A.REGZONENO AS REGZONENO
,A.REGBRNO AS REGBRNO
,A.REGTELLERNO AS REGTELLERNO
,A.REGDATE AS REGDATE
,A.REGTIME AS REGTIME
,A.ENSZONENO AS ENSZONENO
,A.ENSBRNO AS ENSBRNO
,A.ENSTELLERNO AS ENSTELLERNO
,A.ENSDATE AS ENSDATE
,A.ENSTIME AS ENSTIME
,A.NOTE1 AS NOTE1
,A.NOTE2 AS NOTE2
,A.NOTE3 AS NOTE3
,A.NOTE4 AS NOTE4
,A.NOTE5 AS NOTE5
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'FIN' AS ODS_SYS_ID
FROM O_CM_FIN_PRODAREAINFO A --product sales legal-entity scope table
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CM_FIN_PRODAREAINFO_INNTMP1 = sqlContext.sql(sql)
F_CM_FIN_PRODAREAINFO_INNTMP1.registerTempTable("F_CM_FIN_PRODAREAINFO_INNTMP1")
#F_CM_FIN_PRODAREAINFO = sqlContext.read.parquet(hdfs+'/F_CM_FIN_PRODAREAINFO/*')
#F_CM_FIN_PRODAREAINFO.registerTempTable("F_CM_FIN_PRODAREAINFO")
sql = """
SELECT DST.PRODCODE --product code: src.PRODCODE
       ,DST.PRODZONENO --product-owning legal entity: src.PRODZONENO
       ,DST.AGTBRNO --consignment-sales branch code: src.AGTBRNO
       ,DST.AGTZONENO --consignment-sales legal entity: src.AGTZONENO
       ,DST.AGTZONENA --consignment-sales legal-entity name: src.AGTZONENA
       ,DST.FEERATE --fee rate: src.FEERATE
       ,DST.ZONENOMINAMT --legal-entity minimum sales quota: src.ZONENOMINAMT
       ,DST.ZONENOMAXAMT --legal-entity maximum sales quota: src.ZONENOMAXAMT
       ,DST.AGTAMT --amount already sold: src.AGTAMT
       ,DST.AGTSTATE --sales agency status: src.AGTSTATE
       ,DST.REGZONENO --legal entity of registering branch: src.REGZONENO
       ,DST.REGBRNO --registering branch: src.REGBRNO
       ,DST.REGTELLERNO --registering teller: src.REGTELLERNO
       ,DST.REGDATE --registration date: src.REGDATE
       ,DST.REGTIME --registration time: src.REGTIME
       ,DST.ENSZONENO --legal entity of confirming branch: src.ENSZONENO
       ,DST.ENSBRNO --confirming branch: src.ENSBRNO
       ,DST.ENSTELLERNO --confirming teller: src.ENSTELLERNO
       ,DST.ENSDATE --confirmation date: src.ENSDATE
       ,DST.ENSTIME --confirmation time: src.ENSTIME
       ,DST.NOTE1 --reserved 1: src.NOTE1
       ,DST.NOTE2 --reserved 2: src.NOTE2
       ,DST.NOTE3 --reserved 3: src.NOTE3
       ,DST.NOTE4 --reserved 4: src.NOTE4
       ,DST.NOTE5 --reserved 5: src.NOTE5
       ,DST.FR_ID --legal-entity code: src.FR_ID
       ,DST.ODS_ST_DATE --system platform date: src.ODS_ST_DATE
       ,DST.ODS_SYS_ID --system code: src.ODS_SYS_ID
FROM F_CM_FIN_PRODAREAINFO DST
LEFT JOIN F_CM_FIN_PRODAREAINFO_INNTMP1 SRC
ON SRC.PRODCODE = DST.PRODCODE
WHERE SRC.PRODCODE IS NULL """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CM_FIN_PRODAREAINFO_INNTMP2 = sqlContext.sql(sql)
dfn="F_CM_FIN_PRODAREAINFO/"+V_DT+".parquet"
UNION=F_CM_FIN_PRODAREAINFO_INNTMP2.unionAll(F_CM_FIN_PRODAREAINFO_INNTMP1)
F_CM_FIN_PRODAREAINFO_INNTMP1.cache()
F_CM_FIN_PRODAREAINFO_INNTMP2.cache()
nrowsi = F_CM_FIN_PRODAREAINFO_INNTMP1.count()
nrowsa = F_CM_FIN_PRODAREAINFO_INNTMP2.count()
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CM_FIN_PRODAREAINFO/*.parquet")
UNION.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_CM_FIN_PRODAREAINFO_INNTMP1.unpersist()
F_CM_FIN_PRODAREAINFO_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_CM_FIN_PRODAREAINFO lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
ret = os.system("hdfs dfs -mv /"+dbname+"/F_CM_FIN_PRODAREAINFO/"+V_DT_LD+".parquet /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/")
# First delete the current day's data from the backup table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/"+V_DT+".parquet")
# Then copy a full current-day snapshot from the source table to the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CM_FIN_PRODAREAINFO/"+V_DT+".parquet /"+dbname+"/F_CM_FIN_PRODAREAINFO_BK/"+V_DT+".parquet")
bbarrows89/CSC110_Projects | fahrenheit.py | Python | mit | 341 | 0.01173 |
# Bryan Barrows
# CSC 110 - Winter 17
# fahrenheit.py
# The purpose of this program is to convert a user input temperature from celsius to fahrenheit.
def main():
celsius = eval(input("What is the Celsius temperature? "))
fahrenheit = (9/5) * celsius + 32
print("The temperature is ",fahrenheit," degrees Fahrenheit.")
main()
dataplumber/edge | src/main/python/plugins/slcp/indicator/Writer.py | Python | apache-2.0 | 2,746 | 0.003277 |
import logging
import urllib
import json
from edge.writer.proxywriter import ProxyWriter
class Writer(ProxyWriter):
def __init__(self, configFilePath):
super(Writer, self).__init__(configFilePath)
def _generateUrl(self, requestHandler):
url = self._configuration.get('solr', 'url')
parameters = {}
parameters['wt'] = 'json'
parameters['omitHeader'] = 'true'
parameters['q'] = '*:*'
try:
parameters['fq'] = 'id:"' + requestHandler.get_argument('id') + '"'
except:
parameters['fl'] = 'id,name,rate,uncertainties,unit,shortenUnit,abbrUnit,updated_at'
try:
if requestHandler.get_argument('latest').lower() == 'true':
parameters['fl'] = 'xLatest,yLatest,unit,abbrUnit,updated_at'
except:
pass
url += '/select?' + urllib.urlencode(parameters)
logging.debug("proxy to url : " + url)
return url
def onResponse(self, response):
if response.error:
self.requestHandler.set_status(404)
self.requestHandler.write(str(response.error))
self.requestHandler.finish()
else:
for name, value in response.headers.iteritems():
logging.debug('header: '+name+':'+value)
self.requestHandler.set_header(name, value)
self.requestHandler.set_header('Access-Control-Allow-Origin', '*')
solrJson = json.loads(response.body)
if len(solrJson['response']['docs']) > 1:
# Need to order indicators accordingly
solrJsonClone = {}
solrJsonClone['response'] = {}
solrJsonClone['response']['start'] = solrJson['response']['start']
solrJsonClone['response']['numFound'] = solrJson['response']['numFound']
solrJsonClone['response']['docs'] = []
indicators = {}
for doc in solrJson['response']['docs']:
indicators[doc['id']] = doc
for indicator in self._configuration.get('solr', 'ordering').split(','):
if indicator in indicators:
solrJsonClone['response']['docs'].append(indicators[indicator])
solrJson = solrJsonClone
for doc in solrJson['response']['docs']:
if 'uncertainties' in doc:
if doc['id'] in self._configuration.get('solr', 'uncertainties').split(','):
doc['uncertainties'] = int(round(doc['uncertainties']))
doc['rate'] = int(round(doc['rate']))
self.requestHandler.write(solrJson)
self.requestHandler.finish()
mitocw/edx-platform | openedx/tests/settings.py | Python | agpl-3.0 | 3,886 | 0.001544 |
"""
Minimal Django settings for tests of common/lib.
Required in Django 1.9+ due to imports of models in stock Django apps.
"""
import sys
import tempfile
from django.utils.translation import ugettext_lazy as _
from path import Path
# TODO: Remove the rest of the sys.path modification here and in (cms|lms)/envs/common.py
REPO_ROOT = Path(__file__).abspath().dirname().dirname().dirname() # /edx-platform/
sys.path.append(REPO_ROOT / 'common' / 'djangoapps')
sys.path.append(REPO_ROOT / 'lms' / 'djangoapps')
ALL_LANGUAGES = []
BLOCK_STRUCTURES_SETTINGS = dict(
COURSE_PUBLISH_TASK_DELAY=30,
TASK_DEFAULT_RETRY_DELAY=30,
TASK_MAX_RETRIES=5,
)
COURSE_KEY_PATTERN = r'(?P<course_key_string>[^/+]+(/|\+)[^/+]+(/|\+)[^/?]+)'
COURSE_ID_PATTERN = COURSE_KEY_PATTERN.replace('course_key_string', 'course_id')
USAGE_KEY_PATTERN = r'(?P<usage_key_string>(?:i4x://?[^/]+/[^/]+/[^/]+/[^@]+(?:@[^/]+)?)|(?:[^/]+))'
COURSE_MODE_DEFAULTS = {
'bulk_sku': None,
'currency': 'usd',
'description': None,
'expiration_datetime': None,
'min_price': 0,
'name': 'Audit',
'sku': None,
'slug': 'audit',
'suggested_prices': '',
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'default.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
PROCTORING_BACKENDS = {
'DEFAULT': 'mock',
'mock': {},
'mock_proctoring_without_rules': {},
}
FEATURES = {}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'djcelery',
'django_sites_extensions',
'openedx.core.djangoapps.django_comment_common',
'openedx.core.djangoapps.video_config',
'openedx.core.djangoapps.video_pipeline',
'openedx.core.djangoapps.bookmarks.apps.BookmarksConfig',
'edxval',
'lms.djangoapps.courseware',
'lms.djangoapps.instructor_task',
'student',
'openedx.core.djangoapps.site_configuration',
'lms.djangoapps.grades.apps.GradesConfig',
'lms.djangoapps.certificates.apps.CertificatesConfig',
'openedx.core.djangoapps.user_api',
'course_modes.apps.CourseModesConfig',
'lms.djangoapps.verify_student.apps.VerifyStudentConfig',
'openedx.core.djangoapps.dark_lang',
'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig',
'openedx.core.djangoapps.content.block_structure.apps.BlockStructureConfig',
'openedx.core.djangoapps.catalog',
'openedx.core.djangoapps.self_paced',
'openedx.core.djangoapps.schedules.apps.SchedulesConfig',
'openedx.core.djangoapps.theming.apps.ThemingConfig',
'openedx.core.djangoapps.external_user_ids',
'experiments',
'openedx.features.content_type_gating',
'openedx.features.course_duration_limits',
'openedx.features.discounts',
'milestones',
'celery_utils',
'waffle',
'edx_when',
'rest_framework_jwt',
# Django 1.11 demands to have imported models supported by installed apps.
'completion',
'entitlements',
)
LMS_ROOT_URL = "http://localhost:8000"
MEDIA_ROOT = tempfile.mkdtemp()
RECALCULATE_GRADES_ROUTING_KEY = 'edx.core.default'
POLICY_CHANGE_GRADES_ROUTING_KEY = 'edx.core.default'
POLICY_CHANGE_TASK_RATE_LIMIT = '300/h'
SECRET_KEY = 'insecure-secret-key'
SITE_ID = 1
SITE_NAME = "localhost"
PLATFORM_NAME = _('Your Platform Name Here')
DEFAULT_FROM_EMAIL = 'registration@example.com'
TRACK_MAX_EVENT = 50000
USE_TZ = True
RETIREMENT_SERVICE_WORKER_USERNAME = 'RETIREMENT_SERVICE_USER'
RETIRED_USERNAME_PREFIX = 'retired__user_'
PROCTORING_SETTINGS = {}
# Software Secure request retry settings
# Time in seconds before a retry of the task should be 60 mints.
SOFTWARE_SECURE_REQUEST_RETRY_DELAY = 60 * 60
# Maximum of 6 retries before giving up.
SOFTWARE_SECURE_RETRY_MAX_ATTEMPTS = 6
emundus/v6 | plugins/fabrik_visualization/fusionchart/libs/fusioncharts-suite-xt/integrations/django/samples/fusioncharts/samples/dynamic_chart_resize.py | Python | gpl-2.0 | 1,972 | 0.010649 |
from django.shortcuts import render
from django.http import HttpResponse
# Include the `fusioncharts.py` file which has required functions to embed the charts in html page
from ..fusioncharts import FusionCharts
# Loading Data from a Static JSON String
# It is a example to show a Column 2D chart where data is passed as JSON string format.
# The `chart` method is defined to load chart data from an JSON string.
def chart(request):
# Create an object for the column2d chart using the FusionCharts class constructor
column2d = FusionCharts("column2d", "ex1", '100%', '100%', "chartContainer", "json",
# The chart data is passed as a string to the `dataSource` parameter.
"""{
"chart":
{
"cap
|
tion": "Countries With Most Oil Reserves [2017-18]",
"subcaption": "In MMbbl = One Million barrels",
"xaxisname": "Country",
"yaxisname": "Reserves (MMbbl)",
"numbersuffix": "K",
"theme": "fusion"
},
"data": [{
"label": "Venezuela",
"value": "290"
}, {
"label": "Saudi",
"value": "260"
}, {
"label": "Canada",
"value": "180"
}, {
"label": "Iran",
"value": "140"
}, {
"label": "Russia",
"value": "115"
}, {
"label": "UAE",
"value": "100"
}, {
"label": "US",
"value": "30"
}, {
"label": "China",
"value": "30"
}]
}""")
# returning complete JavaScript and HTML code, which is used to generate chart in the browsers.
    return render(request, 'dynamic-resize.html', {'output' : column2d.render(), 'chartTitle': 'Chart Auto-Resize Sample'})
AdvancedClimateSystems/python-modbus | tests/system/responses/test_succesful_responses.py | Python | mpl-2.0 | 2,290 | 0 |
import pytest
from umodbus import conf
from umodbus.client import tcp
@pytest.fixture(scope='module', autouse=True)
def enable_signed_values(request):
""" Use signed values when running tests it this module. """
tmp = conf.SIGNED_VALUES
conf.SIGNED_VALUES = True
def fin():
conf.SIGNED_VALUES = tmp
request.addfinalizer(fin)
@pytest.mark.parametrize('function', [
tcp.read_coils,
tcp.read_discrete_inputs,
])
def test_response_on_single_bit_value_read_requests(sock, function):
""" Validate response of a succesful Read Coils or Read Discrete Inputs
request.
"""
slave_id, starting_address, quantity = (1, 0, 10)
req_adu = function(slave_id, starting_address, quantity)
assert tcp.send_message(req_adu, sock) == [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
@pytest.mark.parametrize('function', [
tcp.read_holding_registers,
tcp.read_input_registers,
])
def test_response_on_multi_bit_value_read_requests(sock, function):
""" Validate response of a succesful Read Holding Registers or Read
Input Registers request.
"""
slave_id, starting_address, quantity = (1, 0, 10)
req_adu = function(slave_id, starting_address, quantity)
assert tcp.send_message(req_adu, sock) ==\
[0, -1, -2, -3, -4, -5, -6, -7, -8, -9]
@pytest.mark.parametrize('function, value', [
(tcp.write_single_coil, 1),
(tcp.write_single_register, -1337),
])
def test_response_single_value_write_request(sock, function, value):
""" Validate responde of succesful Read Single Coil and Read Single
Register request.
"""
slave_id, starting_address, value = (1, 0, value)
req_adu = function(slave_id, starting_address, value)
assert tcp.send_message(req_adu, sock) == value
@pytest.mark.parametrize('function, values', [
(tcp.write_multiple_coils, [1, 1]),
(tcp.write_multiple_registers, [1337, 15]),
])
def test_response_multi_value_write_request(sock, function, values):
""" Validate response of succesful Write Multiple Coils and Write Multiple
Registers request.
Both requests write 2 values, starting address is 0.
"""
slave_id, starting_address = (1, 0)
req_adu = function(slave_id, starting_address, values)
assert tcp.send_message(req_adu, sock) == 2
thejeshgn/quest | quest/admin.py | Python | gpl-3.0 | 213 | 0.037559 |
from .models import *
from django.contrib import admin
class PingLogAdmin(admin.ModelAdmin):
list_display = ('id','hash_key','url','ip_address','user_agent','time')
admin.site.register(PingLog, PingLogAdmin)
Azure/azure-sdk-for-python | sdk/scheduler/azure-mgmt-scheduler/azure/mgmt/scheduler/models/_models_py3.py | Python | mit | 48,388 | 0.002232 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
import msrest.serialization
from ._scheduler_management_client_enums import *
class HttpAuthentication(msrest.serialization.Model):
"""HttpAuthentication.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: OAuthAuthentication, BasicAuthentication, ClientCertAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
_subtype_map = {
'type': {'ActiveDirectoryOAuth': 'OAuthAuthentication', 'Basic': 'BasicAuthentication', 'ClientCertificate': 'ClientCertAuthentication'}
}
def __init__(
self,
**kwargs
):
super(HttpAuthentication, self).__init__(**kwargs)
self.type = None # type: Optional[str]
class BasicAuthentication(HttpAuthentication):
"""BasicAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
:param username: Gets or sets the username.
:type username: str
:param password: Gets or sets the password, return value will always be empty.
:type password: str
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
}
def __init__(
self,
*,
username: Optional[str] = None,
password: Optional[str] = None,
**kwargs
):
super(BasicAuthentication, self).__init__(**kwargs)
self.type = 'Basic' # type: str
self.username = username
self.password = password
class ClientCertAuthentication(HttpAuthentication):
"""ClientCertAuthentication.
All required parameters must be populated in order to send to Azure.
:param type: Required. Gets or sets the HTTP authentication type.Constant filled by server.
Possible values include: "NotSpecified", "ClientCertificate", "ActiveDirectoryOAuth", "Basic".
:type type: str or ~azure.mgmt.scheduler.models.HttpAuthenticationType
:param password: Gets or sets the certificate password, return value will always be empty.
:type password: str
:param pfx: Gets or sets the pfx certificate. Accepts certification in base64 encoding, return
value will always be empty.
:type pfx: str
:param certificate_thumbprint: Gets or sets the certificate thumbprint.
:type certificate_thumbprint: str
:param certificate_expiration_date: Gets or sets the certificate expiration date.
:type certificate_expiration_date: ~datetime.datetime
:param certificate_subject_name: Gets or sets the certificate subject name.
:type certificate_subject_name: str
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'str'},
'certificate_thumbprint': {'key': 'certificateThumbprint', 'type': 'str'},
'certificate_expiration_date': {'key': 'certificateExpirationDate', 'type': 'iso-8601'},
'certificate_subject_name': {'key': 'certificateSubjectName', 'type': 'str'},
}
def __init__(
self,
*,
password: Optional[str] = None,
pfx: Optional[str] = None,
certificate_thumbprint: Optional[str] = None,
certificate_expiration_date: Optional[datetime.datetime] = None,
certificate_subject_name: Optional[str] = None,
**kwargs
):
super(ClientCertAuthentication, self).__init__(**kwargs)
self.type = 'ClientCertificate' # type: str
self.password = password
self.pfx = pfx
self.certificate_thumbprint = certificate_thumbprint
self.certificate_expiration_date = certificate_expiration_date
self.certificate_subject_name = certificate_subject_name
class HttpRequest(msrest.serialization.Model):
"""HttpRequest.
:param authentication: Gets or sets the authentication method of the request.
:type authentication: ~azure.mgmt.scheduler.models.HttpAuthentication
:param uri: Gets or sets the URI of the request.
:type uri: str
:param method: Gets or sets the method of the request.
:type method: str
:param body: Gets or sets the request body.
:type body: str
:param headers: Gets or sets the headers.
:type headers: dict[str, str]
"""
_attribute_map = {
'authentication': {'key': 'authentication', 'type': 'HttpAuthentication'},
'uri': {'key': 'uri', 'type': 'str'},
'method': {'key': 'method', 'type': 'str'},
'body': {'key': 'body', 'type': 'str'},
'headers': {'key': 'headers', 'type': '{str}'},
}
def __init__(
self,
*,
authentication: Optional["HttpAuthentication"] = None,
uri: Optional[str] = None,
method: Optional[str] = None,
body: Optional[str] = None,
headers: Optional[Dict[str, str]] = None,
**kwargs
):
super(HttpRequest, self).__init__(**kwargs)
self.authentication = authentication
self.uri = uri
self.method = method
self.body = body
self.headers = headers
class JobAction(msrest.serialization.Model):
"""JobAction.
:param type: Gets or sets the job action type. Possible values include: "Http", "Https",
"StorageQueue", "ServiceBusQueue", "ServiceBusTopic".
:type type: str or ~azure.mgmt.scheduler.models.JobActionType
:param request: Gets or sets the http requests.
:type request: ~azure.mgmt.scheduler.models.HttpRequest
    :param queue_message: Gets or sets the storage queue message.
:type queue_message: ~azure.mgmt.scheduler.models.StorageQueueMessage
    :param service_bus_queue_message: Gets or sets the service bus queue message.
:type service_bus_queue_message: ~azure.mgmt.scheduler.models.ServiceBusQueueMessage
:param service_bus_topic_message: Gets or sets the service bus topic message.
:type service_bus_topic_message: ~azure.mgmt.scheduler.models.ServiceBusTopicMessage
:param retry_policy: Gets or sets the retry policy.
:type retry_policy: ~azure.mgmt.scheduler.models.RetryPolicy
:param error_action: Gets or sets the error action.
:type error_action: ~azure.mgmt.scheduler.models.JobErrorAction
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'request': {'key': 'request', 'type': 'HttpRequest'},
'queue_message': {'key': 'queueMessage', 'type': 'StorageQueueMessage'},
'service_bus_queue_message': {'key': 'serviceBusQueueMessage', 'type': 'ServiceBusQueueMessage'},
'service_bus_topic_message': {'key': 'serviceBusTopicMessage', 'type': 'ServiceBusTopicMessage'},
'retry_policy': {'key': 'retryPolicy', 'type': 'RetryPolicy'},
'error_action': {'key': 'erro
mykonosbiennale/mykonosbiennale.github.io | tasks.py | Python | apache-2.0 | 552 | 0.018116 |
from invoke import task
@task
def runserver(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py runserver --settings=mykonosbiennale.offline_settings")
def export_artists(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py export_artists - -settings = mykonosbiennale.offline_settings")
def export_filmfestival(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py export_filmfestival - -settings = mykonosbiennale.offline_settings")
def staticsitegen(c):
c.run("PYTHONHTTPSVERIFY=0; ./manage.py staticsitegen --settings=mykonosbiennale.offline_settings")
sora7/listparse | OLD/lp2/listparse.py | Python | gpl-2.0 | 77,022 | 0.006116 |
#!/usr/bin/env python2
## PYTHON 2!!!!!!!!!!!!!!!!!!!
# -*- coding: utf-8 -*-
#==============================================================================#
import os
import sys
import re
import StringIO
import shutil
# import tkFileDialog
import HTMLParser
import re
import base64
import Tkinter
import ttk
#------------------------------------------------------------------------------#
MODULES_DIR = 'modules'
WH_DIR = 'wh'
INFO_DIR = 'info'
M_DIR = 'M'
N_DIR = 'N'
G_DIR = 'G'
OST_DIR = 'OST'
################################################################################
# AUTO DIR MAKE
################################################################################
def make_dirs(main_dir):
def check(s):
s2 = s
FORBIDDEN_CHARS = ('?', '"', '`')
for char in FORBIDDEN_CHARS:
s2 = s2.replace(char, '')
return s2
def check_n_make(fullpath):
dir_fullpath = check(fullpath)
return _make_dir(dir_fullpath)
def _make_dir(fullpath):
if not os.path.exists(fullpath):
os.mkdir(fullpath)
return True
else:
# dir exists
return False
def prepare(main_dir):
main = os.path.normpath(main_dir)
wh_path = os.path.join(main, WH_DIR)
wh_dirname_ = os.path.join(*wh_path.split(os.path.sep)[-2:])
if _make_dir(wh_path):
print 'OK:', wh_dirname_
else:
print 'EXISTS:', wh_dirname_
return None
info_path = os.path.join(wh_path, INFO_DIR)
_make_dir(info_path)
for item in os.listdir(main):
if item != 'wh':
fullpath = os.path.join(main, item)
shutil.move(fullpath, info_path)
prepare(main_dir)
main_dir = os.path.normpath(main_dir)
info_dir = os.path.join(main_dir, WH_DIR, INFO_DIR)
titles = process_info_dir(info_dir)
TITLE_PATTERN = '%s'
YEAR_PATTERN = '(%s)'
TITLE_YEAR_PATTERN = '%s (%s)'
for (title, year) in titles:
if title != None:
if year != None:
if YEAR_PATTERN % (year) in title:
dir_name = TITLE_PATTERN % (title)
else:
dir_name = TITLE_YEAR_PATTERN % (title, year)
else:
dir_name = TITLE_PATTERN % (title)
main_path = os.path.join(main_dir, dir_name)
wh_path = os.path.join(main_dir, WH_DIR, dir_name)
main_path_dirname = os.path.join(*main_path.split(os.path.sep)[-2:])
if check_n_make(main_path):
print 'OK:', main_path_dirname
else:
print 'EXISTS:', main_path_dirname
wh_path_dirname = os.path.join(*wh_path.split(os.path.sep)[-3:])
if check_n_make(wh_path):
print 'OK:', wh_path_dirname
else:
print 'EXISTS:', wh_path_dirname
else:
# cannot create dir (unknown title)
pass
def process_info_dir(info_dir):
titles = []
def is_ignore(filename):
IGNORE = ('Relations',)
for item in IGNORE:
if item in filename:
return True
return False
# if os.path.exists(info_dir) and os.path.isdir(info_dir):
for a_certain_file in os.listdir(info_dir):
fullpath = os.path.join(info_dir, a_certain_file)
if os.path.isfile(fullpath) and not is_ignore(a_certain_file):
# print a_certain_file
parser = AniDBFileParser()
with open(fullpath) as f:
text = ''.join(f.readlines())
parser.feed(text)
(title, year) = parser.parse()
# (title, year) = parse_anidb_file(fullpath)
titles.append((title, year))
return titles
def make_dirs2(path):
parser = AniDBFileParser()
print path
with open(path) as f:
text = ''.join(f.readlines())
parser.feed(text)
print parser.parse()
################################################################################
# PARSERS
################################################################################
class AniDBFileParser(HTMLParser.HTMLParser):
__year = None
__name = None
__is_header = None
__is_table = None
__is_year_row = None
__is_year_col = None
__TITLE_REGEXP = re.compile(r'Anime:\s(.+)')
__YEAR_REGEXP = re.compile(r'(\d{4})')
def __init__(self):
self.__year = None
self.__name = None
self.__is_header = False
self.__is_table = False
self.__is_year_row = False
self.__is_year_col = False
HTMLParser.HTMLParser.__init__(self)
def feed(self, data):
#data = data.replace('</tr><tr', '</tr> <tr')
try:
data = str(data)
HTMLParser.HTMLParser.feed(self, data)
except UnicodeDecodeError:
pass
## piss off
def handle_starttag(self, tag, attrs):
if tag == 'h1':
attrs = Dict(attrs)
if attrs['class'] == 'anime':
self.__is_header = True
if tag == 'table':
self.__is_table = True
if self.__is_table:
if tag == 'tr':
attrs = Dict(attrs)
if (
## <tr class="year">
## <th class="field">Year</th>
## <td class="value">18.12.2009</td>
## </tr>
attrs['class'] == 'year' or
## <tr class="g_odd year">
## <th class="field">Year</th>
## <td class="value">02.07.2013 till 17.09.2013</td>
## </tr>
attrs['class'] == 'g_odd year'
):
self.__is_year_row = True
if self.__is_year_row:
if tag == 'td':
attrs = Dict(attrs)
if attrs['class'] == 'value':
self.__is_year_col = True
def handle_endtag(self, tag):
if tag == 'h1':
if self.__is_header:
self.__is_header = False
if tag == 'table':
self.__is_table = False
if self.__is_table:
if tag == 'tr':
self.__is_year_row = False
if self.__is_year_row:
if tag == 'td':
self.__is_year_col = False
def handle_data(self, data):
if self.__is_header:
data = str(data)
if re.search(self.__TITLE_REGEXP, data) is not None:
title = re.search(self.__TITLE_REGEXP, data).group(1)
self.__name = str(title)
if self.__is_table:
if self.__is_year_row:
if self.__is_year_col:
print 'YEAR DATA:', data
if re.search(self.__YEAR_REGEXP, data) is not None:
year = str(re.search(self.__YEAR_REGEXP, data).group(1))
self.__year = year
def parse(self):
return (self.__name, self.__year)
################################################################################
# FUNCTIONS
################################################################################
# unpacking nested lists
# lst = [1,[2,3,[4,5,6]],7,8,9] to
# lst = [1,2,3,4,5,6,7,8,9]
def unpack_list(lst):
all_items = []
for item in lst:
if isinstance(item, list) or isinstance(item, tuple):
for i in unpack_list(item):
all_items.append(i)
else:
all_items.append(item)
return all_items
################################################################################
# RES COPY
################################################################################
class ResCopy():
stored_dir = None
titles_dir = None
main_dirs = None
def __init__(self):
self.stored_dir = ''
self.titles_dir = ''
self.main_dirs = []
def set_dirs(self, store, titles):
self.stored_dir = store
self.titles_dir = titles
TaliesinSkye/evennia | src/commands/default/comms.py | Python | bsd-3-clause | 38,986 | 0.003283 |
"""
Comsystem command module.
Comm commands are OOC commands and intended to be made available to
the Player at all times (they go into the PlayerCmdSet). So we
make sure to homogenize self.caller to always be the player object
for easy handling.
"""
from django.conf import settings
from src.comms.models import Channel, Msg, PlayerChannelConnection, ExternalChannelConnection
from src.comms import irc, imc2, rss
from src.comms.channelhandler import CHANNELHANDLER
from src.utils import create, utils, prettytable
from src.commands.default.muxcommand import MuxCommand, MuxPlayerCommand
# limit symbol import for API
__all__ = ("CmdAddCom", "CmdDelCom", "CmdAllCom",
"CmdChannels", "CmdCdestroy", "CmdCBoot", "CmdCemit",
"CmdCWho", "CmdChannelCreate", "CmdCset", "CmdCdesc",
"CmdPage", "CmdIRC2Chan", "CmdIMC2Chan", "CmdIMCInfo",
"CmdIMCTell", "CmdRSS2Chan")
def find_channel(caller, channelname, silent=False, noaliases=False):
"""
Helper function for searching for a single channel with
some error handling.
"""
channels = Channel.objects.channel_search(channelname)
if not channels:
if not noaliases:
channels = [chan for chan in Channel.objects.all() if channelname in chan.aliases]
if channels:
return channels[0]
if not silent:
caller.msg("Channel '%s' not found." % channelname)
return None
elif len(channels) > 1:
matches = ", ".join(["%s(%s)" % (chan.key, chan.id) for chan in channels])
if not silent:
caller.msg("Multiple channels match (be more specific): \n%s" % matches)
return None
return channels[0]
class CmdAddCom(MuxPlayerCommand):
"""
addcom - subscribe to a channel with optional alias
Usage:
addcom [alias=] <channel>
Joins a given channel. If alias is given, this will allow you to
refer to the channel by this alias rather than the full channel
name. Subsequent calls of this command can be used to add multiple
aliases to an already joined channel.
"""
key = "addcom"
aliases = ["aliaschan","chanalias"]
help_category = "Comms"
locks = "cmd:not pperm(channel_banned)"
def func(self):
"Implement the command"
caller = self.caller
args = self.args
player = caller
if not args:
self.msg("Usage: addcom [alias =] channelname.")
return
if self.rhs:
# rhs holds the channelname
channelname = self.rhs
alias = self.lhs
else:
channelname = self.args
alias = None
channel = find_channel(caller, channelname)
if not channel:
# we use the custom search method to handle errors.
return
# check permissions
if not channel.access(player, 'listen'):
self.msg("%s: You are not allowed to listen to this channel." % channel.key)
return
string = ""
if not channel.has_connection(player):
# we want to connect as well.
if not channel.connect_to(player):
# if this would have returned True, the player is connected
self.msg("%s: You are not allowed to join this channel." % channel.key)
return
else:
string += "You now listen to the channel %s. " % channel.key
else:
string += "You are already connected to channel %s." % channel.key
if alias:
# create a nick and add it to the caller.
caller.nicks.add(alias, channel.key, nick_type="channel")
string += " You can now refer to the channel %s with the alias '%s'."
self.msg(string % (channel.key, alias))
else:
string += " No alias added."
self.msg(string)
class CmdDelCom(MuxPlayerCommand):
"""
delcom - unsubscribe from channel or remove channel alias
Usage:
delcom <alias or channel>
If the full channel name is given, unsubscribe from the
channel. If an alias is given, remove the alias but don't
unsubscribe.
"""
key = "delcom"
aliases = ["delaliaschan, delchanalias"]
help_category = "Comms"
locks = "cmd:not perm(channel_banned)"
def func(self):
"Implementing the command. "
caller = self.caller
player = caller
if not self.args:
self.msg("Usage: delcom <alias or channel>")
return
ostring = self.args.lower()
channel = find_channel(caller, ostring, silent=True, noaliases=True)
if channel:
# we have given a channel name - unsubscribe
if not channel.has_connection(player):
self.msg("You are not listening to that channel.")
return
chkey = channel.key.lower()
# find all nicks linked to this channel and delete them
for nick in [nick for nick in caller.nicks.get(nick_type="channel")
if nick.db_real.lower() == chkey]:
nick.delete()
channel.disconnect_from(player)
self.msg("You stop listening to channel '%s'. Eventual aliases were removed." % channel.key)
return
else:
# we are removing a channel nick
channame = caller.nicks.get(ostring, nick_type="channel")
channel = find_channel(caller, channame, silent=True)
if not channel:
self.msg("No channel with alias '%s' was found." % ostring)
else:
if caller.nicks.has(ostring, nick_type="channel"):
caller.nicks.delete(ostring, nick_type="channel")
self.msg("Your alias '%s' for channel %s was cleared." % (ostring, channel.key))
else:
self.msg("You had no such alias defined for this channel.")
class CmdAllCom(MuxPlayerCommand):
"""
allcom - operate on all channels
Usage:
allcom [on | off | who | destroy]
Allows the user to universally turn off or on all channels they are on,
as well as perform a 'who' for all channels they are on. Destroy deletes
all channels that you control.
Without argument, works like comlist.
"""
key = "allcom"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Runs the function"
caller = self.caller
args = self.args
if not args:
caller.execute_cmd("@channels")
self.msg("(Usage: allcom on | off | who | destroy)")
return
if args == "on":
# get names of all channels available to listen to and activate them all
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'listen')]
for channel in channels:
caller.execute_cmd("addcom %s" % channel.key)
elif args == "off":
#get names all subscribed channels and disconnect from them all
channels = [conn.channel for conn in PlayerChannelConnection.objects.get_all_player_connections(caller)]
for channel in channels:
caller.execute_cmd("delcom %s" % channel.key)
elif args == "destroy":
# destroy all channels you control
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'control')]
for channel in channels:
caller.execute_cmd("@cdestroy %s" % channel.key)
elif args == "who":
# run a who, listing the subscribers on visible channels.
string = "\n{CChannel subscriptions{n"
channels = [chan for chan in Channel.objects.get_all_channels() if chan.access(caller, 'listen')]
if not channels:
string += "No channels."
for channel in channels:
string += "\n{w%s:{n\n" % channel.key
conns = PlayerChannelConnection.objects.get_all_connections(channel)
if conns:
bjornt/PyGlow | examples/bin_clock.py | Python | mit | 1,233 | 0 |
#!/usr/bin/env python2
#####
#
# PyGlow
#
#####
#
# Python module to control Pimoronis PiGlow
# [http://shop.pimoroni.com/products/piglow]
#
# * bin_clock.py - binary clock by Jiri Tyr
#
#####
from __future__ import print_function
from datetime import datetime
from PyGlow import PyGlow, ARM_LED_LIST, BOTH
from sys import stdout
from time import sleep
def int2bin(num):
return int('{0:b}'.format(num))
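# Editor's example (not in the original script): int2bin returns the binary
# digits as a decimal-looking int, which the %0.6d formats below zero-pad
# into one character per LED, e.g.:
#
#     int2bin(13)            # -> 1101
#     "%0.6d" % int2bin(13)  # -> '001101'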
def print_time(pg):
now = datetime.now()
cur_time = [now.hour, now.minute, now.second]
bin_time = tuple(list(map(int2bin, cur_time)) + cur_time)
stdout.write(' %0.5d | %0.6d | %0.6d (%0.2d:%0.2d:%0.2d)\r' % bin_time)
stdout.flush()
    lst = []
    for arm_index, arm_bin in enumerate(bin_time[0:3]):
        for led_index, c in enumerate("%0.6d" % arm_bin):
if c == '1':
lst.append(ARM_LED_LIST[arm_index][led_index])
pg.set_leds(lst).update_leds()
def main():
print(' %5s | %6s | %6s' % ('Hour', 'Minute', 'Second'))
pg = PyGlow(brightness=150, pulse=True, speed=1000, pulse_dir=BOTH)
try:
while True:
print_time(pg)
except KeyboardInterrupt:
print('')
pg.all(0)
if __name__ == '__main__':
main()
|
wisfern/vnpy
|
beta/gateway/korbitGateway/__init__.py
|
Python
|
mit
| 245 | 0.004082 |
# encoding: UTF-8
from vnpy.trader import vtConstant
from korbitGateway import korbitGateway
gatewayClass = korbitGateway
gatewayName = 'KORBIT'
gatewayDisplayName = u'KORBIT'
gatewayType = vtConstant.GATEWAYTYPE_BTC
gatewayQryEnabled = True
|
SripriyaSeetharam/tacker
|
tacker/vm/drivers/heat/heat.py
|
Python
|
apache-2.0
| 17,136 | 0.000117 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2015 Intel Corporation.
# Copyright 2015 Isaku Yamahata <isaku.yamahata at intel com>
# <isaku.yamahata at gmail com>
# All Rights Reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Isaku Yamahata, Intel Corporation.
# shamelessly many codes are stolen from gbp simplechain_driver.py
import time
import yaml
from heatclient import client as heat_client
from heatclient import exc as heatException
from keystoneclient.v2_0 import client as ks_client
from oslo_config import cfg
from tacker.common import log
from tacker.openstack.common import jsonutils
from tacker.openstack.common import log as logging
from tacker.vm.drivers import abstract_driver
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
OPTS = [
cfg.StrOpt('heat_uri',
default='http://localhost:8004/v1',
help=_("Heat server address to create services "
"specified in the service chain.")),
cfg.IntOpt('stack_retries',
default=10,
help=_("Number of attempts to retry for stack deletion")),
cfg.IntOpt('stack_retry_wait',
default=5,
help=_("Wait time between two successive stack delete "
"retries")),
]
CONF.register_opts(OPTS, group='servicevm_heat')
STACK_RETRIES = cfg.CONF.servicevm_heat.stack_retries
STACK_RETRY_WAIT = cfg.CONF.servicevm_heat.stack_retry_wait
HEAT_TEMPLATE_BASE = """
heat_template_version: 2013-05-23
"""
class DeviceHeat(abstract_driver.DeviceAbstractDriver):
"""Heat driver of hosting device."""
def __init__(self):
super(DeviceHeat, self).__init__()
def get_type(self):
return 'heat'
def get_name(self):
return 'heat'
def get_description(self):
return 'Heat infra driver'
@log.log
def create_device_template_pre(self, plugin, context, device_template):
device_template_dict = device_template['device_template']
vnfd_yaml = device_template_dict['attributes'].get('vnfd')
if vnfd_yaml is None:
return
vnfd_dict = yaml.load(vnfd_yaml)
KEY_LIST = (('name', 'template_name'), ('description', 'description'))
device_template_dict.update(
dict((key, vnfd_dict[vnfd_key]) for (key, vnfd_key) in KEY_LIST
                 if (not device_template_dict.get(key) and
vnfd_key in vnfd_dict)))
service_types = vnfd_dict.get('service_properties', {}).get('type', [])
if service_types:
device_template_dict.setdefault('service_types', []).extend(
[{'service_type': service_type}
for service_type in service_types])
for vdu in vnfd_dict.get('vdus', {}).values():
mgmt_driver = vdu.get('mgmt_driver')
if mgmt_driver:
device_template_dict['mgmt_driver'] = mgmt_driver
LOG.debug(_('device_template %s'), device_template)
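    # Editor's sketch (inferred from the parsing above and in create() below,
    # not from any spec document): the vnfd YAML this driver expects looks
    # roughly like:
    #
    #     name: sample-device-template
    #     description: example VNFD
    #     service_properties:
    #       type: [firewall]
    #     vdus:
    #       vdu1:
    #         vm_image: cirros
    #         instance_type: m1.tiny
    #         mgmt_driver: noop
    #         network_interfaces:
    #           mgmt_if: {network: mgmt-net, management: true}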
@log.log
def create(self, plugin, context, device):
LOG.debug(_('device %s'), device)
heatclient_ = HeatClient(context)
        attributes = device['device_template']['attributes'].copy()
        vnfd_yaml = attributes.pop('vnfd', None)
fields = dict((key, attributes.pop(key)) for key
in ('stack_name', 'template_url', 'template')
if key in attributes)
for key in ('files', 'parameters'):
if key in attributes:
fields[key] = jsonutils.loads(attributes.pop(key))
# overwrite parameters with given dev_attrs for device creation
dev_attrs = device['attributes'].copy()
config_yaml = dev_attrs.pop('config', None)
fields.update(dict((key, dev_attrs.pop(key)) for key
in ('stack_name', 'template_url', 'template')
if key in dev_attrs))
for key in ('files', 'parameters'):
if key in dev_attrs:
fields.setdefault(key, {}).update(
jsonutils.loads(dev_attrs.pop(key)))
LOG.debug('vnfd_yaml %s', vnfd_yaml)
if vnfd_yaml is not None:
assert 'template' not in fields
assert 'template_url' not in fields
template_dict = yaml.load(HEAT_TEMPLATE_BASE)
outputs_dict = {}
template_dict['outputs'] = outputs_dict
vnfd_dict = yaml.load(vnfd_yaml)
LOG.debug('vnfd_dict %s', vnfd_dict)
KEY_LIST = (('description', 'description'),
)
for (key, vnfd_key) in KEY_LIST:
if vnfd_key in vnfd_dict:
template_dict[key] = vnfd_dict[vnfd_key]
for vdu_id, vdu_dict in vnfd_dict.get('vdus', {}).items():
template_dict.setdefault('resources', {})[vdu_id] = {
"type": "OS::Nova::Server"
}
resource_dict = template_dict['resources'][vdu_id]
KEY_LIST = (('image', 'vm_image'),
('flavor', 'instance_type'))
resource_dict['properties'] = {}
properties = resource_dict['properties']
for (key, vdu_key) in KEY_LIST:
properties[key] = vdu_dict[vdu_key]
if 'network_interfaces' in vdu_dict:
# properties['networks'] = (
# vdu_dict['network_interfaces'].values())
networks_list = []
properties['networks'] = networks_list
for network_param in vdu_dict[
'network_interfaces'].values():
if network_param.pop('management', False):
mgmt_port = 'mgmt_port-%s' % vdu_id
mgmt_port_dict = {
'type': 'OS::Neutron::Port',
'properties': {
'port_security_enabled': False,
}
}
mgmt_port_dict['properties'].update(network_param)
template_dict['resources'][
mgmt_port] = mgmt_port_dict
network_param = {
'port': {'get_resource': mgmt_port}
}
mgmt_ip = 'mgmt_ip-%s' % vdu_id
outputs_dict[mgmt_ip] = {
'description': 'management ip address',
'value': {
'get_attr': [mgmt_port, 'fixed_ips',
0, 'ip_address']
}
}
networks_list.append(network_param)
if ('placement_policy' in vdu_dict and
'availability_zone' in vdu_dict['placement_policy']):
properties['availability_zone'] = vdu_dict[
'placement_policy']['availability_zone']
if 'config' in vdu_dict:
properties['config_drive'] = True
metadata = properties.setdefault('metadata', {})
metadata.update(vdu_dict['config'])
for key, value in metadata.items():
metadata[key] = value[:255]
# monitoring_policy = vdu_dict.get('monitoring_p
|
beav/pulp
|
devel/pulp/devel/unit/server/base.py
|
Python
|
gpl-2.0
| 2,511 | 0.003584 |
import os
import unittest
import mock
from pulp.server.db import connection
class PulpWebservicesTests(unittest.TestCase):
"""
    Base class for tests of webservice controllers. This base is used to work around
    the authentication checks performed for each method.
"""
def setUp(self):
connection.initialize()
self.patch1 = mock.patch('pulp.server.webservices.controllers.decorators.'
'check_preauthenticated')
self.patch2 = mock.patch('pulp.server.webservices.controllers.decorators.'
'is_consumer_authorized')
self.patch3 = mock.patch('pulp.server.webservices.http.resource_path')
self.patch4 = mock.patch('pulp.server.webservices.http.header')
self.patch5 = mock.patch('web.webapi.HTTPError')
self.patch6 = mock.patch('pulp.server.managers.factory.principal_manager')
self.patch7 = mock.patch('pulp.server.managers.factory.user_query_manager')
self.patch8 = mock.patch('pulp.server.webservices.http.uri_path')
self.mock_check_pre_auth = self.patch1.start()
self.mock_check_pre_auth.return_value = 'ws-user'
self.mock_check_auth = self.patch2.start()
self.mock_check_auth.return_value = True
self.mock_http_resource_path = self.patch3.start()
self.patch4.start()
self.patch5.start()
self.patch6.start()
self.mock_user_query_manager = self.patch7.start()
self.mock_user_query_manager.return_value.is_superuser.return_value = False
self.mock_user_query_manager.return_value.is_authorized.return_value = True
self.mock_uri_path = self.patch8.start()
self.mock_uri_path.return_value = "/mock/"
def tearDown(self):
self.patch1.stop()
self.patch2.stop()
self.patch3.stop()
self.patch4.stop()
self.patch5.stop()
self.patch6.stop()
self.patch7.stop()
self.patch8.stop()
def validate_auth(self, operation):
"""
        assert that an authorization check was performed for the given operation
:param operation: the operation to validate
"""
self.mock_user_query_manager.return_value.is_authorized.assert_called_once_with(mock.ANY, mock.ANY, operation)
    def get_mock_uri_path(self, *args):
        """
        :param args: the path components of the object to get the uri for
        :type args: str
"""
return os.path.join('/mock', *args) + '/'
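# Illustrative sketch (editor addition, not part of this module): a concrete
# controller test would subclass PulpWebservicesTests and assert on the
# authorization check; the `authorization.READ` constant is an assumption here.
#
#     class RepoControllerTests(PulpWebservicesTests):
#         def test_get_checks_auth(self):
#             ...exercise the controller under test...
#             self.validate_auth(authorization.READ)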
|
tallstreet/jaikuenginepatch
|
join/tests.py
|
Python
|
apache-2.0
| 4,711 | 0.005731 |
import Cookie
import os
from django.conf import settings
from common.tests import ViewTestCase
from common import api
from common import clean
from common import util
class JoinTest(ViewTestCase):
def setUp(self):
super(JoinTest, self).setUp()
self.form_data = {'nick': 'johndoe',
'first_name': 'John',
'last_name': 'Doe',
'email': 'johndoe@google.com',
'password': 'good*password',
'confirm': 'good*password',
'hide': '1',
                      #'invite': ''
}
def tearDown(self):
self.form_data = None
def assert_join_validation_error(self, response, content):
self.assertContains(response, content)
self.assertTemplateUsed(response, 'join.html')
self.assertTemplateUsed(response, 'form_error.html')
def test_join_page(self):
r = self.client.get('/join')
self.assertContains(r, 'SIGN UP')
self.assertTemplateUsed(r, 'join.html')
def test_join_with_valid_data(self):
    r = self.client.post('/join', self.form_data)
r = self.assertRedirectsPrefix(r, '/welcome')
def test_join_with_invalid_email(self):
self.form_data['email'] = 'invalid'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'supply a valid email address')
def test_join_with_used_email(self):
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already associated')
def test_join_with_deleted_email(self):
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already associated')
api.actor_remove(api.ROOT, 'popular@example.com')
self.form_data['email'] = 'popular@example.com'
r = self.client.post('/join', self.form_data)
r = self.assertRedirectsPrefix(r, '/welcome')
def test_join_with_invalid_nick(self):
self.form_data['nick'] = 'a'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'Invalid nick')
def test_join_with_reserved_nick(self):
self.form_data['nick'] = 'popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
def test_join_with_banned_nick(self):
self.form_data['nick'] = 'json'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'not allowed')
def test_join_with_used_nick(self):
self.form_data['nick'] = 'popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
def test_join_with_used_nick_case_insensitive(self):
self.form_data['nick'] = 'Popular'
r = self.client.post('/join', self.form_data)
self.assert_join_validation_error(r, 'already in use')
class WelcomeTest(ViewTestCase):
def setUp(self):
super(WelcomeTest, self).setUp()
self.login('girlfriend')
def tearDown(self):
self.logout()
def test_photo_view(self):
r = self.client.get('/welcome/1')
self.assertContains(r, 'Your photo')
self.assertTemplateUsed(r, 'welcome_photo.html')
def test_photo_upload(self):
nick = 'popular'
nick = clean.nick(nick)
old_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
self.login(nick)
f = open('testdata/test_avatar.jpg')
r = self.client.post('/welcome/1',
{
'imgfile': f,
'_nonce' :
util.create_nonce('popular', 'change_photo'),
})
r = self.assertRedirectsPrefix(r, '/welcome/1?')
new_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
self.assertNotEquals(old_avatar, new_avatar)
self.assertContains(r, 'Avatar uploaded')
self.assertTemplateUsed(r, 'welcome_photo.html')
self.assertTemplateUsed(r, 'flash.html')
def test_mobile_activation_view(self):
r = self.client.get('/welcome/2')
self.assertContains(r, 'SIGN IN')
self.assertTemplateUsed(r, 'welcome_mobile.html')
def test_contacts_view(self):
r = self.client.get('/welcome/3')
self.assertContains(r, 'Find some friends')
self.assertTemplateUsed(r, 'welcome_contacts.html')
def test_done_view(self):
r = self.client.get('/welcome/done')
self.assertContains(r, 'Congratulations!')
self.assertTemplateUsed(r, 'welcome_done.html')
|
SUSE/azure-sdk-for-python
|
azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2017_06_01_preview/models/regenerate_credential_parameters.py
|
Python
|
mit
| 1,115 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RegenerateCredentialParameters(Model):
"""The parameters used to regenerate the login credential.
:param name: Specifies name of the password which should be regenerated --
password or password2. Possible values include: 'password', 'password2'
:type name: str or :class:`PasswordName
     <azure.mgmt.containerregistry.v2017_06_01_preview.models.PasswordName>`
"""
_validation = {
'name': {'required': True},
}
    _attribute_map = {
'name': {'key': 'name', 'type': 'PasswordName'},
}
def __init__(self, name):
self.name = name
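# Hedged usage sketch (editor addition, not in the generated file): building
# the parameters model; the client call shown is an assumption.
#
#     params = RegenerateCredentialParameters(name='password2')
#     # registries_client.regenerate_credential(resource_group, registry, params)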
|
google/nerfactor
|
third_party/xiuminglib/xiuminglib/io/np.py
|
Python
|
apache-2.0
| 1,973 | 0.000507 |
from os.path import dirname
import numpy as np
from ..os import open_file, exists_isdir, makedirs
from ..log import get_logger
logger = get_logger()
def read_or_write(data_f, fallback=None):
"""Loads the data file if it exists. Otherwise, if fallback is provided,
    calls fallback and saves its return to disk.
Args:
data_f (str): Path to the data file, whose extension will be used for
deciding how to load the data.
fallback (function, optional): Fallback function used if data file
doesn't exist. Its return will be saved to ``data_f`` for future
            loadings. It should not take arguments, but if yours requires taking
arguments, just wrap yours with::
fallback=lambda: your_fancy_func(var0, var1)
Returns:
Data loaded if ``data_f`` exists; otherwise, ``fallback``'s return
(``None`` if no fallback).
Writes
- Return by the fallback, if provided.
"""
# Decide data file type
ext = data_f.split('.')[-1].lower()
def load_func(path):
with open_file(path, 'rb') as h:
data = np.load(h)
return data
def save_func(data, path):
if ext == 'npy':
save = np.save
elif ext == 'npz':
save = np.savez
else:
raise NotImplementedError(ext)
with open_file(path, 'wb') as h:
save(h, data)
# Load or call fallback
if exists_isdir(data_f)[0]:
data = load_func(data_f)
msg = "Loaded: "
else:
msg = "File doesn't exist "
if fallback is None:
data = None
msg += "(fallback not provided): "
else:
data = fallback()
out_dir = dirname(data_f)
makedirs(out_dir)
save_func(data, data_f)
msg += "(fallback provided); fallback return now saved to: "
msg += data_f
logger.info(msg)
return data
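# Illustrative usage (editor addition, not in the original module), assuming
# an expensive compute_features() whose result should be cached as .npy:
#
#     feats = read_or_write('cache/feats.npy',
#                           fallback=lambda: compute_features(data))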
|
sequana/sequana
|
sequana/utils/singleton.py
|
Python
|
bsd-3-clause
| 329 | 0.015198 |
# a singleton to avoid loading specific classes such as
# sequana.taxonomy.Taxonomy
class Singleton(type):
    _instances = {}
    def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
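# Minimal usage sketch (editor addition, not in the original file;
# load_taxonomy_db is hypothetical):
#
#     class Taxonomy(metaclass=Singleton):
#         def __init__(self):
#             self.db = load_taxonomy_db()  # heavy load happens only once
#
#     assert Taxonomy() is Taxonomy()  # every call returns the same instance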
|
vguerci/Deluge.app
|
deluge/ui/tracker_icons.py
|
Python
|
gpl-3.0
| 19,201 | 0.002187 |
#
# tracker_icons.py
#
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import os
from HTMLParser import HTMLParser, HTMLParseError
from urlparse import urljoin, urlparse
from tempfile import mkstemp
from twisted.internet import defer, threads
from twisted.web.error import PageRedirect
try:
from twisted.web.resource import NoResource, ForbiddenResource
except ImportError:
# twisted 8
from twisted.web.error import NoResource, ForbiddenResource
from deluge.component import Component
from deluge.configmanager import get_config_dir
from deluge.httpdownloader import download_file
from deluge.decorators import proxy
from deluge.log import LOG as log
try:
import PIL.Image as Image
import deluge.ui.Win32IconImagePlugin
except ImportError:
PIL_INSTALLED = False
else:
PIL_INSTALLED = True
class TrackerIcon(object):
"""
Represents a tracker's icon
"""
def __init__(self, filename):
"""
Initialises a new TrackerIcon object
:param filename: the filename of the icon
:type filename: string
"""
self.filename = os.path.abspath(filename)
self.mimetype = extension_to_mimetype(self.filename.rpartition('.')[2])
self.data = None
self.icon_cache = None
def __eq__(self, other):
"""
Compares this TrackerIcon with another to determine if they're equal
:param other: the TrackerIcon to compare to
:type other: TrackerIcon
:returns: whether or not they're equal
:rtype: boolean
"""
return os.path.samefile(self.filename, other.filename) or \
self.get_mimetype() == other.get_mimetype() and \
self.get_data() == other.get_data()
def get_mimetype(self):
"""
Returns the mimetype of this TrackerIcon's image
:returns: the mimetype of the image
:rtype: string
"""
return self.mimetype
def get_data(self):
"""
Returns the TrackerIcon's image data as a string
:returns: the image data
:rtype: string
"""
if not self.data:
f = open(self.filename, "rb")
self.data = f.read()
f.close()
return self.data
def get_filename(self, full=True):
"""
Returns the TrackerIcon image's filename
:param full: an (optional) arg to indicate whether or not to
return the full path
:type full: boolean
:returns: the path of the TrackerIcon's image
:rtype: string
"""
return self.filename if full else os.path.basename(self.filename)
def set_cached_icon(self, data):
"""
Set the cached icon data.
"""
self.icon_cache = data
def get_cached_icon(self):
"""
Returns the cached icon data.
"""
return self.icon_cache
class TrackerIcons(Component):
"""
A TrackerIcon factory class
"""
def __init__(self, icon_dir=None, no_icon=None):
"""
Initialises a new TrackerIcons object
:param icon_dir: the (optional) directory of where to store the icons
:type icon_dir: string
:param no_icon: the (optional) path name of the icon to show when no icon
can be fetched
:type no_icon: string
"""
Component.__init__(self, "TrackerIcons")
if not icon_dir:
icon_dir = get_config_dir("icons")
self.dir = icon_dir
if not os.path.isdir(self.dir):
os.makedirs(self.dir)
self.icons = {}
for icon in os.listdir(self.dir):
if icon != no_icon:
host = icon_name_to_host(icon)
try:
self.icons[host] = TrackerIcon(os.path.join(self.dir, icon))
except KeyError:
log.warning("invalid icon %s", icon)
if no_icon:
self.icons[None] = TrackerIcon(no_icon)
else:
self.icons[None] = None
self.icons[''] = self.icons[None]
self.pending = {}
self.redirects = {}
def get(self, host):
"""
Returns a TrackerIcon for the given tracker's host
:param host: the host to obtain the TrackerIcon for
:type host: string
:returns: a Deferred which fires with the TrackerIcon for the given host
:rtype: Deferred
"""
host = host.lower()
if host in self.icons:
# We already have it, so let's return it
d = defer.succeed(self.icons[host])
elif host in self.pending:
# We're in the middle of getting it
# Add ourselves to the waiting list
d = defer.Deferred()
self.pending[host].append(d)
else:
# We need to fetch it
self.pending[host] = []
# Start callback chain
d = self.download_page(host)
d.addCallbacks(self.on_download_page_complete, self.on_download_page_fail,
errbackArgs=(host,))
d.addCallback(self.parse_html_page)
d.addCallbacks(self.on_parse_complete, self.on_parse_fail,
callbackArgs=(host,))
d.addCallback(self.download_icon, host)
d.addCallbacks(self.on_download_icon_complete, self.on_download_icon_fail,
callbackArgs=(host,), errbackArgs=(host,))
if PIL_INSTALLED:
d.addCallback(self.resize_icon)
d.addCallback(self.store_icon, host)
return d
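    # Editor's note (illustrative, not in the original file): callers attach
    # a callback to the returned Deferred instead of blocking, e.g.:
    #
    #     tracker_icons.get("tracker.example.org").addCallback(
    #         lambda icon: log.debug("icon file: %s", icon.get_filename()))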
def download_page(self, host, url=None):
"""
Downloads a tracker host's page
If no url is provided, it bases the url on the host
:param host: the tracker host
:type host: string
:param url: the (optional) url of the host
:type url: string
:returns: the filename of the tracker host's page
:rtype: Deferred
"""
if not url:
url = self.host_to_url(host)
log.debug("Downloading %s %s", host, url)
return download_file(url, mkstemp()[1], force_filename=True)
def on_download_page_complete(self, page):
"""
Runs any download clean up functions
:param page: the page that finished downloading
:type page: string
:returns: the page that finished downloading
:rtype: string
"""
log.debug("Finished downloading %s", page)
return page
def on_download_page_fail(self, f, host):
"""
Recovers from download error
        :param f: the failure that occurred
:type f: Failure
:param host: the name of the host whose page failed to download
:type host: string
|
davidyezsetz/kuma
|
kuma/contentflagging/utils.py
|
Python
|
mpl-2.0
| 1,154 | 0.000867 |
import hashlib
from kuma.core.utils import get_ip
def get_unique(content_type, object_pk, request=None, ip=None, user_agent=None, user=None):
"""Extract a set of unique identifiers from the request.
This set will be made up of one of the following combinations, depending
on what's available:
* user, None, None, unique_MD5_hash
* None, ip, user_agent, unique_MD5_hash
"""
if request:
if request.user.is_authenticated():
user = request.user
ip = user_agent = None
else:
user = None
ip = get_ip(request)
user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]
# HACK: Build a hash of the fields that should be unique, let MySQL
# chew on that for a unique index. Note that any changes to this algo
# will create all new unique hashes that don't match any existing ones.
hash_text = "\n".join(unicode(x).encode('utf8') for x in (
        content_type.pk, object_pk, ip, user_agent,
        (user and user.pk or 'None')
    ))
    unique_hash = hashlib.md5(hash_text).hexdigest()
return (user, ip, user_agent, unique_hash)
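# Hedged example (editor addition, not part of this module): callers unpack
# the tuple into model fields; the ContentFlag model named below is an
# assumption.
#
#     user, ip, user_agent, unique_hash = get_unique(ct, obj.pk, request=request)
#     # ContentFlag.objects.get_or_create(unique_hash=unique_hash, ...)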
|
fadawar/infinit-lunch
|
main.py
|
Python
|
gpl-3.0
| 2,316 | 0.001727 |
#!/usr/bin/env python3
import asyncio
import os
from datetime import datetime
import aiohttp
from aiohttp import web
from raven import Client
from restaurants import (FormattedMenus, SafeRestaurant, OtherRestaurant,
AvalonRestaurant, TOTORestaurant, TOTOCantinaRestaurant,
CasaInkaRestaurant, OlivaRestaurant, CityCantinaRosumRestaurant)
from slack import Channel
# SLACK_HOOK = 'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX'
SLACK_HOOK = os.environ.get('SLACK_HOOK', None)
SLACK_CHANNEL = os.environ.get('SLACK_CHANNEL', None)
SECRET_KEY = os.environ.get('SECRET_KEY', None)
DEBUG = bool(os.environ.get('DEBUG', False))
def is_work_day():
return datetime.today().weekday() in range(0, 5)
def should_send_to_slack(secret_key):
return SLACK_HOOK and secret_key == SECRET_KEY
async def retrieve_menus(session):
futures = [
SafeRestaurant(TOTORestaurant(session)).retrieve_menu(),
SafeRestaurant(TOTOCantinaRestaurant(session)).retrieve_menu(),
SafeRestaurant(AvalonRestaurant(session)).retrieve_menu(),
SafeRestaurant(OlivaRestaurant(session)).retrieve_menu(),
SafeRestaurant(CasaInkaRestaurant(session)).retrieve_menu(),
SafeRestaurant(CityCantinaRosumRestaurant(session)).retrieve_menu(),
]
    # Add list of other restaurants first, will be in header.
menus = [await SafeRestaurant(OtherRestaurant()).retrieve_menu()]
for future in asyncio.as_completed(futures):
menus.append(await future)
return menus
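# Editor's note (not in the original file): asyncio.as_completed yields the
# futures in completion order, so after the fixed "other restaurants" header
# the menus are appended fastest-response-first, not in the order listed above.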
async def index(request):
if is_work_day():
async with aiohttp.ClientSession() as session:
menus = FormattedMenus(await retrieve_menus(session))
secret_key = request.match_info.get('secret_key')
            if should_send_to_slack(secret_key):
await Channel(SLACK_HOOK, session).send(menus)
return web.Response(text=str(menus))
return web.Response(text='Come on Monday-Friday')
sentry_client = Client()  # credentials are taken from the environment variable SENTRY_DSN
app = web.Application(debug=DEBUG)
app.router.add_get('/', index)
app.router.add_get('/{secret_key}', index)
if __name__ == '__main__':
web.run_app(app, host='localhost', port=5000)
|
pacoqueen/ginn
|
extra/scripts/ipython_session_obra_su_almacen.py
|
Python
|
gpl-2.0
| 1,431 | 0.006289 |
# coding: utf-8
# %save in ipython is wonderful. Load with ipython -i to continue.
from framework import pclases
a = pclases.Obra.selectBy(nombre = "SU ALMACEN")[0]
len(a.clientes)
for c in a.clientes:
print c.nombre
vacidas = [o for o in pclases.Obra.select() if not o.clientes]
len(vacidas)
for o in vacidas:
if o.presupuestos:
for f in o.presupuestos:
o.addCliente(f.cliente)
else:
o.destroySelf()
vacidas = [o for o in pclases.Obra.select() if not o.clientes]
len(vacidas)
for c in a.clientes:
nueva_obra = a.clone(nombre = c.nombre)
nueva_obra.direccion = c.direccion
nueva_obra.cp = c.cp
nueva_obra.ciudad = c.ciudad
nueva_obra.provincia = c.provincia
nueva_obra.pais = c.pais
nueva_obra.observaciones = "[admin] splitted from SU ALMACEN. (8/9/2014)."
nueva_obra.addCliente(c)
if len(c.obras) == 1:
nueva_obra.generica = True
sus = pclases.Cliente.get(1589)
osus = sus.get_obra_generica()
osus.nombre
contactos_sus = [c for c in a.contactos if "sustraia" in c.correoe]
len(contactos_sus)
for c in contactos_sus:
c.removeObra(a)
c.addObra(osus)
c.sync()
ref = pclases.Cliente.select(pclases.Cliente.q.nombre.contains("REFRESCO I"))[0]
oref = ref.get_obra_generica()
for c in a.contactos:
c.removeObra(a)
c.addObra(oref)
c.sync()
len(a.presupuestos)
len(a.facturasVenta)
len(a.pedidosVenta)
|
spillai/procgraph
|
src/procgraph_mplayer/depth_buffer.py
|
Python
|
lgpl-3.0
| 1,391 | 0.008627 |
import numpy as np
from procgraph import Block
from contracts import contract
class DepthBuffer(Block):
Block.alias('depth_buffer')
Block.input('rgba')
Block.output('rgba')
Block.output('line')
Block.output('depth')
def init(self):
self.depth = None
def update(self):
        rgba = self.input.rgba
if self.depth is None:
H, W = rgba.shape[0:2]
self.depth = np.zeros((H, W))
self.depth.fill(0)
d = get_depth(rgba)
mask = rgba[:, :, 3] > 0
closer = np.logical_and(self.depth < d, mask)
farther = np.logical_not(closer)
self.depth[closer] = d
rgba = rgba.copy()
rgba[farther, 3] = 0
with_line = rgba[:, :, 0:3].copy()
        row = int(round(d))  # get_depth returns a float; row indexing needs an int
        with_line[row, :, 0] = 255
        with_line[row, :, 1] = 55
depth = self.depth.copy()
depth[depth == 0] = np.nan
self.output.rgba = rgba
self.output.line = with_line
self.output.depth = depth
@contract(rgba='array[HxWx4](uint8)', returns='float,>=0,<=H-1')
def get_depth(rgba):
alpha = rgba[:, :, 3]
H, _ = alpha.shape
a = 0
w = 0
for i in range(H):
line = alpha[i, :].astype('float32')
a += i * np.sum(line)
w += np.sum(line)
a = a / w
return a
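# Worked example (editor addition, not in the original file): for an alpha
# mask that is non-zero only on rows 2 and 4 with equal total weight,
# get_depth returns (2 + 4) / 2 = 3.0 -- the alpha-weighted mean row index.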
|
CrowdStrike/mellon
|
mellon/factories/git/file.py
|
Python
|
mit
| 4,601 | 0.00739 |
import os.path
from zope import component
from zope import interface
from zope.component.factory import Factory
from sparc.configuration import container
import mellon
from mellon.factories.filesystem.file import MellonByteFileFromFilePathAndConfig
from mellon.factories.filesystem.file import \
MellonUnicodeFileFromFilePathAndConfig
@interface.implementer(mellon.IByteMellonFile)
class MellonByteFileFromGitRepoCommitPathAndConfig(
MellonByteFileFromFilePathAndConfig):
def __init__(self, commit, file_path, config):
self.commit = commit
super(MellonByteFileFromGitRepoCommitPathAndConfig, self).\
__init__(file_path, config)
def __str__(self):
return "Git byte file in repo at {} for commit {} at location {}".\
format(self.commit.repo.working_dir,
str(self.commit), self.file_path)
mellonByteFileFromGitRepoCommitPathAndConfigFactory = \
Factory(MellonByteFileFromGitRepoCommitPathAndConfig)
@interface.implementer(mellon.IUnicodeMellonFile)
class MellonUnicodeFileFromGitRepoCommitPathAndConfig(
MellonUnicodeFileFromFilePathAndConfig):
def __init__(self, commit, file_path, config):
self.commit = commit
super(MellonUnicodeFileFromGitRepoCommitPathAndConfig, \
self).__init__(file_path, config)
def __str__(self):
return "Git unicode file in repo at {} for commit {} at location {}".\
format(self.commit.repo.working_dir,
str(self.commit), self.file_path)
mellonUnicodeFileFromGitRepoCommitPathAndConfigFactory = \
Factory(MellonUnicodeFileFromGitRepoCommitPathAndConfig)
@interface.implementer(mellon.IMellonFileProvider)
class MellonFileProviderForGitReposBaseDirectory(object):
def __init__(self, config):
"""Init
Args:
config: sparc.configuration.container.ISparcAppPyContainerConfiguration
provider with sparc.git[configure.yaml:GitReposBaseDir]
and mellon[configure.yaml:MellonSnippet] entries.
"""
self.config = config
def __iter__(self):
repos_base_dir = container.IPyContainerConfigValue(self.config).\
get('GitReposBaseDir')['directory']
repo_iter = component.createObject(\
u'sparc.git.repos.repos_from_recursive_dir', repos_base_dir)
for repo in repo_iter:
# iterate through the commits
examined = set()
for commit in repo.iter_commits('--all'):
# we need to reset the working tree to gain filesystem access
# to the blob data. This will allow us to pass in a path
# (needed to determine if file is binary or not)
repo.head.reference = commit
repo.head.reset(index=True, working_tree=True)
# iter through commit blobs (e.g. files)
for blob in commit.tree.traverse():
if blob.type != 'blob' or blob.hexsha in examined:
continue
else:
examined.add(blob.hexsha)
if not os.path.isfile(blob.abspath):
continue
path = component.createObject(u"mellon.filesystem_path", blob.abspath)
#path = provider(blob.abspath)
#interface.alsoProvides(path, mellon.IPath)
if mellon.IBinaryChecker(path).check():
yield component.createObject(\
u'mellon.factories.git.byte_file_from_commit_path_and_config',
commit,
path,
self.config)
else:
yield component.createObject(\
u'mellon.factories.git.unicode_file_from_commit_path_and_config',
commit,
path,
self.config)
MellonFileProviderForGitReposBaseDirectoryFactory = Factory(MellonFileProviderForGitReposBaseDirectory)
interface.alsoProvides(MellonFileProviderForGitReposBaseDirectoryFactory, mellon.IMellonFileProviderFactory)
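# Hedged usage sketch (editor addition, not part of this module): iterating
# the provider yields an IMellonFile for every unique blob in every commit
# under GitReposBaseDir; `config` is assumed to be a loaded app container
# configuration.
#
#     provider = MellonFileProviderForGitReposBaseDirectory(config)
#     for mellon_file in provider:
#         print(str(mellon_file))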
|
tdtrask/ansible
|
lib/ansible/modules/network/aci/aci_switch_leaf_selector.py
|
Python
|
gpl-3.0
| 10,067 | 0.002384 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_switch_leaf_selector
short_description: Add a leaf Selector with Node Block Range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics
description:
- Add a leaf Selector with Node Block range and Policy Group to a Switch Policy Leaf Profile on Cisco ACI fabrics.
- More information from the internal APIC class I(infra:LeafS), I(infra:NodeBlk), I(infra:RsAccNodePGrp) at
U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
notes:
- This module is to be used with M(aci_switch_policy_leaf_profile).
  One first creates a leaf profile (infra:NodeP) and then creates an associated selector (infra:LeafS).
options:
description:
description:
- The description to assign to the C(leaf)
leaf_profile:
description:
- Name of the Leaf Profile to which we add a Selector.
aliases: [ leaf_profile_name ]
leaf:
description:
- Name of Leaf Selector.
aliases: [ name, leaf_name, leaf_profile_leaf_name, leaf_selector_name ]
leaf_node_blk:
description:
- Name of Node Block range to be added to Leaf Selector of given Leaf Profile
aliases: [ leaf_node_blk_name, node_blk_name ]
leaf_node_blk_description:
description:
- The description to assign to the C(leaf_node_blk)
from:
description:
- Start of Node Block Range
aliases: [ node_blk_range_from, from_range, range_from ]
to:
description:
    - End of Node Block Range
aliases: [ node_blk_range_to, to_range, range_to ]
policy_group:
description:
- Name of the Policy Group to be added to Leaf Selector of given Leaf Profile
aliases: [ name, policy_group_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: adding a switch policy leaf profile selector associated Node Block range (w/ policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
policy_group: somepolicygroupname
state: present
- name: adding a switch policy leaf profile selector associated Node Block range (w/o policy group)
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
leaf_node_blk: node_blk_name
from: 1011
to: 1011
state: present
- name: Removing a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: absent
- name: Querying a switch policy leaf profile selector
aci_switch_leaf_selector:
host: apic
username: someusername
password: somepassword
leaf_profile: sw_name
leaf: leaf_selector_name
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update({
'description': dict(type='str'),
'leaf_profile': dict(type='str', aliases=['leaf_profile_name']),
'leaf': dict(type='str', aliases=['name', 'leaf_name', 'leaf_profile_leaf_name', 'leaf_selector_name']),
'leaf_node_blk': dict(type='str', aliases=['leaf_node_blk_name', 'node_blk_name']),
'leaf_node_blk_description': dict(type='str'),
'from': dict(type='int', aliases=['node_blk_range_from', 'from_range', 'range_from']),
'to': dict(type='int', aliases=['node_blk_range_to', 'to_range', 'range_to']),
'policy_group': dict(type='str', aliases=['policy_group_name']),
'state': dict(type='str', default='present', choices=['absent', 'present', 'query']),
})
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['leaf_profile', 'leaf']],
            ['state', 'present', ['leaf_profile', 'leaf', 'leaf_node_blk',
                                  'from', 'to']]
]
)
description = module.params['description']
leaf_profile = module.params['leaf_profile']
leaf = module.params['leaf']
leaf_node_blk = module.params['leaf_node_blk']
leaf_node_blk_description = module.params['leaf_node_blk_description']
from_ = module.params['from']
to_ = module.params['to']
policy_group = module.params['policy_group']
state = module.params['state']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='infraNodeP',
aci_rn='infra/nprof-{0}'.format(leaf_profile),
filter_target='eq(infraNodeP.name, "{0}")'.format(leaf_profile),
|
unho/translate
|
translate/storage/test_directory.py
|
Python
|
gpl-2.0
| 2,774 | 0 |
"""Tests for the directory module"""
import os
from translate.storage import directory
class TestDirectory:
"""a test class to run tests on a test Pootle Server"""
def setup_method(self, method):
"""sets up a test directory"""
print("setup_method called on", self.__class__.__name__)
self.testdir = "%s_testdir" % (self.__class__.__name__)
self.cleardir(self.testdir)
os.mkdir(self.testdir)
def teardown_method(self, method):
"""removes the attributes set up by setup_method"""
self.cleardir(self.testdir)
def cleardir(self, dirname):
"""removes the given directory"""
if os.path.exists(dirname):
for dirpath, subdirs, filenames in os.walk(dirname, topdown=False):
for name in filenames:
os.remove(os.path.join(dirpath, name))
for name in subdirs:
os.rmdir(os.path.join(dirpath, name))
if os.path.exists(dirname):
os.rmdir(dirname)
assert not os.path.exists(dirname)
def touchfiles(self, dir, filenames, content=None):
for filename in filenames:
with open(os.path.join(dir, filename), "w") as fh:
if content:
fh.write(content)
def mkdir(self, dir):
"""Makes a directory inside self.testdir."""
        os.mkdir(os.path.join(self.testdir, dir))
def test_created(self):
"""test that the directory actually exists"""
print(self.testdir)
assert os.path.isdir(self.testdir)
def test_basic(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
files.sort()
self.touchfiles(self.testdir, files)
d = directory.Directory(self.testdir)
filenames = [name for dir, name in d.getfiles()]
filenames.sort()
assert filenames == files
def test_structure(self):
"""Tests a small directory structure."""
files = ["a.po", "b.po", "c.po"]
self.touchfiles(self.testdir, files)
self.mkdir("bla")
self.touchfiles(os.path.join(self.testdir, "bla"), files)
d = directory.Directory(self.testdir)
filenames = [name for dirname, name in d.getfiles()]
filenames.sort()
files = files * 2
files.sort()
assert filenames == files
def test_getunits(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
posource = '''msgid "bla"\nmsgstr "blabla"\n'''
self.touchfiles(self.testdir, files, posource)
d = directory.Directory(self.testdir)
for unit in d.getunits():
assert unit.target == "blabla"
assert len(d.getunits()) == 3
|
amwelch/a10sdk-python
|
a10sdk/core/vrrp/vrrp_a_state_stats.py
|
Python
|
apache-2.0
| 19,224 | 0.00515 |
from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param sync_tx_create_ext_bit_counter: {"description": "Conn Sync Create with Ext Sent counter", "format": "counter", "type": "number", "oid": "29", "optional": true, "size": "8"}
:param sync_tx_max_packed: {"description": "Min Sync Msg Per Packet", "format": "counter", "type": "number", "oid": "32", "optional": true, "size": "2"}
:param sync_persist_rx_no_such_sg_group: {"description": "Persist Conn Sync No Service Group Found", "format": "counter", "type": "number", "oid": "42", "optional": true, "size": "8"}
    :param sync_rx_persist_update_age_counter: {"description": "Conn Sync Update Persist Age Pkts Received counter", "format": "counter", "type": "number", "oid": "11", "optional": true, "size": "2"}
:param sync_rx_type_invalid: {"description": "Conn Sync Type Invalid", "format": "counter", "type": "number", "oid": "57", "optional": true, "size": "8"}
:param sync_rx_ext_rtsp: {"description": "Conn Sync Ext RTSP", "format": "counter", "type": "number", "oid": "74", "optional": true, "size": "8"}
:param sync_tx_lsn_fullcone: {"description": "Conn Sync Update LSN Fullcone Sent counter", "format": "counter", "type": "number", "oid": "81", "optional": true, "size": "8"}
:param sync_rx_update_seqnos_counter: {"description": "Conn Sync Update Seq Num Received counter", "format": "counter", "type": "number", "oid": "61", "optional": true, "size": "8"}
:param sync_rx_create_ext_bit_counter: {"description": "Conn Sync Create with Ext Received counter", "format": "counter", "type": "number", "oid": "49", "optional": true, "size": "8"}
:param sync_persist_rx_no_such_rport: {"description": "Persist Conn Sync Real Port Not Found", "format": "counter", "type": "number", "oid": "41", "optional": true, "size": "8"}
:param sync_tx_persist_del_counter: {"description": "Conn Sync Delete Persist Session Pkts Sent counter", "format": "counter", "type": "number", "oid": "13", "optional": true, "size": "2"}
:param sync_persist_rx_no_such_vport: {"description": "Persist Conn Sync Virt Port Not Found", "format": "counter", "type": "number", "oid": "39", "optional": true, "size": "8"}
:param sync_rx_reserve_ha: {"description": "Conn Sync Reserve HA Conn", "format": "counter", "type": "number", "oid": "75", "optional": true, "size": "8"}
:param sync_get_buff_failed_rt: {"description": "Conn Sync Get Buff Failure No Route", "format": "counter", "type": "number", "oid": "65", "optional": true, "size": "8"}
:param sync_rx_conn_exists: {"description": "Conn Sync Create Conn Exists", "format": "counter", "type": "number", "oid": "50", "optional": true, "size": "8"}
:param sync_err_lsn_fullcone: {"description": "Conn Sync LSN Fullcone Failure", "format": "counter", "type": "number", "oid": "83", "optional": true, "size": "8"}
:param sync_get_buff_failed_port: {"description": "Conn Sync Get Buff Failure Wrong Port", "format": "counter", "type": "number", "oid": "66", "optional": true, "size": "8"}
:param query_tx_get_buff_failed: {"description": "Conn Query Get Buff Failure", "format": "counter", "type": "number", "oid": "22", "optional": true, "size": "8"}
:param sync_rx_ext_nat_mac: {"description": "Conn Sync NAT MAC Failure", "format": "counter", "type": "number", "oid": "80", "optional": true, "size": "8"}
:param sync_pkt_rcv_counter: {"description": "Conn Sync Received counter", "format": "counter", "type": "number", "oid": "2", "optional": true, "size": "2"}
:param sync_tx_smp_radius_table_counter: {"description": "Conn Sync Update LSN RADIUS Sent counter", "format": "counter", "type": "number", "oid": "17", "optional": true, "size": "2"}
:param sync_rx_proto_not_supported: {"description": "Conn Sync Protocol Invalid", "format": "counter", "type": "number", "oid": "52", "optional": true, "size": "8"}
:param sync_tx_persist_update_age_counter: {"description": "Conn Sync Update Persist Age Pkts Sent counter", "format": "counter", "type": "number", "oid": "14", "optional": true, "size": "2"}
:param query_rx_zero_info_counter: {"description": "Conn Query Packet Empty", "format": "counter", "type": "number", "oid": "23", "optional": true, "size": "8"}
:param sync_rx_len_invalid: {"description": "Conn Sync Length Invalid", "format": "counter", "type": "number", "oid": "33", "optional": true, "size": "8"}
:param query_pkt_invalid_idx_counter: {"description": "Conn Query Invalid Interface", "format": "counter", "type": "number", "oid": "21", "optional": true, "size": "8"}
:param sync_rx_create_static_sby: {"description": "Conn Sync Create Static Standby", "format": "counter", "type": "number", "oid": "72", "optional": true, "size": "8"}
:param sync_persist_rx_cannot_process_mandatory: {"description": "Persist Conn Sync Process Mandatory Invalid", "format": "counter", "type": "number", "oid": "37", "optional": true, "size": "8"}
:param sync_persist_rx_len_invalid: {"description": "Persist Conn Sync Length Invalid", "format": "counter", "type": "number", "oid": "34", "optional": true, "size": "8"}
:param sync_tx_total_info_counter: {"description": "Conn Sync Total Info Pkts Sent counter", "format": "counter", "type": "number", "oid": "28", "optional": true, "size": "8"}
:param sync_pkt_invalid_idx_counter: {"description": "Conn Sync Invalid Interface", "format": "counter", "type": "number", "oid": "26", "optional": true, "size": "8"}
:param sync_rx_nat_alloc_sby: {"description": "Conn Sync NAT Alloc Standby", "format": "counter", "type": "number", "oid": "69", "optional": true, "size": "8"}
:param sync_rx_persist_del_counter: {"description": "Conn Sync Delete Persist Session Pkts Received counter", "format": "counter", "type": "number", "oid": "10", "optional": true, "size": "2"}
:param sync_rx_update_age_counter: {"description": "Conn Sync Update Age Received counter", "format": "counter", "type": "number", "oid": "5", "optional": true, "size": "2"}
:param sync_persist_rx_proto_not_supported: {"description": "Persist Conn Sync Protocol Invalid", "format": "counter", "type": "number", "oid": "35", "optional": true, "size": "8"}
:param sync_rx_dcmsg_counter: {"description": "Conn Sync forward CPU", "format": "counter", "type": "number", "oid": "59", "optional": true, "size": "8"}
:param query_tx_min_packed: {"description": "Min Query Msg Per Packet", "format": "counter", "type": "number", "oid": "20", "optional": true, "size": "2"}
:param sync_tx_min_packed: {"description": "Max Sync Msg Per Packet", "format": "counter", "type": "number", "oid": "31", "optional": true, "size": "2"}
:param sync_persist_rx_conn_get_failed: {"description": "Persist Conn Sync Get Conn Failure", "format": "counter", "type": "number", "oid": "44", "optional": true, "size": "8"}
:param query_tx_max_packed: {"description": "Max Query Msg Per Packet", "format": "counter", "type": "number", "oid": "19", "optional": true, "size": "2"}
:param sync_persist_rx_ext_bit_process_error: {"description": "Persist Conn Sync Proc Ext Bit Failure", "format": "counter", "type": "number", "oid": "38", "optional": true, "size": "8"}
:param sync_rx_seq_deltas: {"description": "Conn Sync Seq Deltas Failure", "format": "counter", "type": "number", "oid": "76", "optional": true, "size": "8"}
:param query_pkt_rcv_counter: {"description": "Conn Query Received counter", "format": "counter", "type": "number", "oid": "16", "optional": true, "size": "2"}
:param sync_query_dcmsg_counter: {"description": "Conn Sync query forward CPU", "format": "counter", "type": "number", "oid": "64", "optional": true, "size": "8"}
:param sync_rx_zero_info_counter: {"description": "Conn Sync Packet Empty", "format": "counter", "type": "number", "oid": "58", "optional": true, "size": "8"}
:param sync_rx_ftp_control: {"description": "Conn Sync FTP Control Failure", "format": "counter", "type": "number", "oid": "77", "optional": true, "size": "8"}
:param sync_pkt_tx_counter: {"description": "Con
|
Ebag333/Pyfa
|
eos/effects/shipbonuscarrierc4warfarelinksbonus.py
|
Python
|
gpl-3.0
| 1,348 | 0.007418 |
# shipBonusCarrierC4WarfareLinksBonus
#
# Used by:
# Ship: Chimera
type = "passive"
def handler(fit, src, context):
fit.modules.filteredItemBoost(
lambda mod: mod.item.requiresSkill("Shield Command") or mod.item.requiresSkill("Information Command"),
"warfareBuff2Value", src.getModifiedItemAttr("shipBonusCarrierC4"), skill="Caldari Carrier")
fit.modules.filteredItemBoost(
lambda mod: mod.item.requiresSkill("Shield Command") or mod.item.requiresSkill("Information Command"),
"buffDuration", src.getModifiedItemAttr("shipBonusCarrierC4"), skill="Caldari Carrier")
fit.modules.filteredItemBoost(
lambda mod: mod.item.requiresSkill("Shield Command") or mod.item.requiresSkill("Information Command"),
"warfareBuff3Value", src.getModifiedItemAttr("shipBonusCarrierC4"), skill="Caldari Carrier")
fit.modules.filteredItemBoost(
lambda mod: mod.item.requiresSkill("Shield Command") or mod.item.requiresSkill("Information Command"),
"warfareBuff4Value", src.getModifiedItemAttr("shipBonusCarrierC4"), skill="
|
Caldari Carrier")
fit.modules.filteredItemBoost(
lambda mod: mod.item.requiresSkill("Shield Command") or mod.ite
|
m.requiresSkill("Information Command"),
"warfareBuff1Value", src.getModifiedItemAttr("shipBonusCarrierC4"), skill="Caldari Carrier")
|
IndicoDataSolutions/IndicoIo-python
|
tests/etc/test_url.py
|
Python
|
mit
| 467 | 0.002141 |
import unittest
from indicoio import config, fer
class TestBatchSize(unittest.TestCase):
def setUp(self):
self.api_key = config.api_key
if not self.api_key:
raise unittest.SkipTest
def test_url_support(self):
test_url = "https://s3-us-west-2.amazonaws.com/indico-test-data/face.jpg"
response = fer(test_url)
self.assertTrue(isinstance(response, dict))
        self.assertEqual(len(response.keys()), 6)
|
kohr-h/odl
|
odl/test/solvers/smooth/smooth_test.py
|
Python
|
mpl-2.0
| 5,137 | 0 |
# Copyright 2014-2017 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""Test for the smooth solvers."""
from __future__ import division
import pytest
import odl
from odl.operator import OpNotImplementedError
nonlinear_cg_beta = odl.util.testutils.simple_fixture('nonlinear_cg_beta',
['FR', 'PR', 'HS', 'DY'])
@pytest.fixture(scope="module", params=['l2_squared', 'l2_squared_scaled',
'rosenbrock', 'quadratic_form'])
def functional(request):
"""functional with optimum 0 at 0."""
name = request.param
if name == 'l2_squared':
space = odl.rn(3)
return odl.solvers.L2NormSquared(space)
elif name == 'l2_squared_scaled':
space = odl.uniform_discr(0, 1, 3)
scaling = odl.MultiplyOperator(space.element([1, 2, 3]),
domain=space)
return odl.solvers.L2NormSquared(space) * scaling
elif name == 'quadratic_form':
space = odl.rn(3)
# Symmetric and diagonally dominant matrix
matrix = odl.MatrixOperator([[7.0, 1, 2],
                                     [1, 5, -3],
[2, -3, 8]])
vector = space.element([1, 2, 3])
# Calibrate so that functional is zero in optimal point
constant = 1 / 4 * vector.inner(matrix.inverse(vector))
return odl.solvers.QuadraticForm(
operator=matrix, vector=vector, constant=constant)
elif name == 'rosenbrock':
# Moderately ill-behaved rosenbrock functional.
        rosenbrock = odl.solvers.RosenbrockFunctional(odl.rn(2), scale=2)
# Center at zero
return rosenbrock.translated([-1, -1])
else:
assert False
@pytest.fixture(scope="module", params=['constant', 'backtracking'])
def functional_and_linesearch(request, functional):
"""Return functional with optimum 0 at 0 and a line search."""
name = request.param
if name == 'constant':
return functional, 1.0
else:
return functional, odl.solvers.BacktrackingLineSearch(functional)
@pytest.fixture(scope="module", params=['first', 'second'])
def broyden_impl(request):
return request.param
def test_newton_solver(functional_and_linesearch):
"""Test the Newton solver."""
functional, line_search = functional_and_linesearch
try:
# Test if derivative exists
functional.gradient.derivative(functional.domain.zero())
except OpNotImplementedError:
return
# Solving the problem
x = functional.domain.one()
odl.solvers.newtons_method(functional, x, tol=1e-6,
line_search=line_search)
# Assert x is close to the optimum at [1, 1]
assert functional(x) < 1e-3
def test_bfgs_solver(functional_and_linesearch):
"""Test the BFGS quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.bfgs_method(functional, x, tol=1e-3,
line_search=line_search)
assert functional(x) < 1e-3
def test_lbfgs_solver(functional_and_linesearch):
"""Test limited memory BFGS quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.bfgs_method(functional, x, tol=1e-3,
line_search=line_search, num_store=5)
assert functional(x) < 1e-3
def test_broydens_method(broyden_impl, functional_and_linesearch):
"""Test the ``broydens_method`` quasi-Newton solver."""
functional, line_search = functional_and_linesearch
x = functional.domain.one()
odl.solvers.broydens_method(functional, x, tol=1e-3,
line_search=line_search, impl=broyden_impl)
assert functional(x) < 1e-3
def test_steepest_descent(functional):
"""Test the ``steepest_descent`` solver."""
line_search = odl.solvers.BacktrackingLineSearch(functional)
x = functional.domain.one()
odl.solvers.steepest_descent(functional, x, tol=1e-3,
line_search=line_search)
assert functional(x) < 1e-3
def test_adam(functional):
"""Test the ``adam`` solver."""
x = functional.domain.one()
odl.solvers.adam(functional, x, tol=1e-2, learning_rate=0.5)
assert functional(x) < 1e-3
def test_conjugate_gradient_nonlinear(functional, nonlinear_cg_beta):
"""Test the ``conjugate_gradient_nonlinear`` solver."""
line_search = odl.solvers.BacktrackingLineSearch(functional)
x = functional.domain.one()
odl.solvers.conjugate_gradient_nonlinear(functional, x, tol=1e-3,
line_search=line_search,
beta_method=nonlinear_cg_beta)
assert functional(x) < 1e-3
if __name__ == '__main__':
odl.util.test_file(__file__)
|
Teagan42/home-assistant
|
tests/components/dyson/test_fan.py
|
Python
|
apache-2.0
| 30,624 | 0.000229 |
"""Test the Dyson fan component."""
import json
import unittest
from unittest import mock
import asynctest
from libpurecool.const import FanMode, FanSpeed, NightMode, Oscillation
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from libpurecool.dyson_pure_state import DysonPureCoolState
from libpurecool.dyson_pure_state_v2 import DysonPureCoolV2State
from homeassistant.components import dyson as dyson_parent
from homeassistant.components.dyson import DYSON_DEVICES
import homeassistant.components.dyson.fan as dyson
from homeassistant.components.fan import (
ATTR_OSCILLATING,
ATTR_SPEED,
DOMAIN,
SERVICE_OSCILLATE,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component
from tests.common import get_test_home_assistant
class MockDysonState(DysonPureCoolState):
"""Mock Dyson state."""
def __init__(self):
"""Create new Mock Dyson State."""
pass
def _get_dyson_purecool_device():
"""Return a valid device as provided by the Dyson web services."""
device = mock.Mock(spec=DysonPureCool)
device.serial = "XX-XXXXX-XX"
device.name = "Living room"
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.state = mock.Mock()
device.state.oscillation = "OION"
device.state.fan_power = "ON"
device.state.speed = FanSpeed.FAN_SPEED_AUTO.value
device.state.night_mode = "OFF"
device.state.auto_mode = "ON"
device.state.oscillation_angle_low = "0090"
device.state.oscillation_angle_high = "0180"
device.state.front_direction = "ON"
device.state.sleep_timer = 60
device.state.hepa_filter_state = "0090"
device.state.carbon_filter_state = "0080"
return device
def _get_dyson_purecoollink_device():
"""Return a valid device as provided by the Dyson web services."""
device = mock.Mock(spec=DysonPureCoolLink)
device.serial = "XX-XXXXX-XX"
device.name = "Living room"
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.state = mock.Mock()
device.state.oscillation = "ON"
device.state.fan_mode = "FAN"
device.state.speed = FanSpeed.FAN_SPEED_AUTO.value
device.state.night_mode = "OFF"
return device
def _get_supported_speeds():
return [
int(FanSpeed.FAN_SPEED_1.value),
int(FanSpeed.FAN_SPEED_2.value),
int(FanSpeed.FAN_SPEED_3.value),
int(FanSpeed.FAN_SPEED_4.value),
int(FanSpeed.FAN_SPEED_5.value),
int(FanSpeed.FAN_SPEED_6.value),
int(FanSpeed.FAN_SPEED_7.value),
int(FanSpeed.FAN_SPEED_8.value),
int(FanSpeed.FAN_SPEED_9.value),
int(FanSpeed.FAN_SPEED_10.value),
]
def _get_config():
"""Return a config dictionary."""
return {
dyson_parent.DOMAIN: {
dyson_parent.CONF_USERNAME: "email",
dyson_parent.CONF_PASSWORD: "password",
dyson_parent.CONF_LANGUAGE: "GB",
dyson_parent.CONF_DEVICES: [
{"device_id": "XX-XXXXX-XX", "device_ip": "192.168.0.1"}
],
}
}
def _get_device_with_no_state():
"""Return a device with no state."""
device = mock.Mock()
device.name = "Device_name"
device.state = None
return device
def _get_device_off():
"""Return a device with state off."""
device = mock.Mock()
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "OFF"
device.state.night_mode = "ON"
device.state.speed = "0004"
return device
def _get_device_auto():
"""Return a device with state auto."""
device = mock.Mock()
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "AUTO"
device.state.night_mode = "ON"
device.state.speed = "AUTO"
return device
def _get_device_on():
"""Return a valid state on."""
device = mock.Mock(spec=DysonPureCoolLink)
device.name = "Device_name"
device.state = mock.Mock()
device.state.fan_mode = "FAN"
device.state.fan_state = "FAN"
device.state.oscillation = "ON"
device.state.night_mode = "OFF"
device.state.speed = "0001"
return device
class DysonSetupTest(unittest.TestCase):
"""Dyson component setup tests."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component_with_no_devices(self):
"""Test setup component with no devices."""
self.hass.data[dyson.DYSON_DEVICES] = []
add_entities = mock.MagicMock()
dyson.setup_platform(self.hass, None, add_entities, mock.Mock())
add_entities.assert_called_with([])
def test_setup_component(self):
"""Test setup component with devices."""
def _add_device(devices):
assert len(devices) == 2
assert devices[0].name == "Device_name"
device_fan = _get_device_on()
device_purecool_fan = _get_dyson_purecool_device()
device_non_fan = _get_device_off()
self.hass.data[dyson.DYSON_DEVICES] = [
device_fan,
device_purecool_fan,
device_non_fan,
]
dyson.setup_platform(self.hass, None, _add_device)
class DysonTest(unittest.TestCase):
"""Dyson fan component test class."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_dyson_set_speed(self):
"""Test set fan speed."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_speed("1")
        set_config = device.set_configuration
set_config.assert_called_with(
fan_mode=FanMode.FAN, fan_speed=FanSpeed.FAN_SPEED_1
)
component.set_speed("AUTO")
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.AUTO)
def test_dyson_turn_on(self):
"""Test turn on fan."""
device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.turn_on()
set_config = device.set_configuration
set_config.assert_called_with(fan_mode=FanMode.FAN)
def test_dyson_turn_night_mode(self):
"""Test turn on fan with night mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_night_mode(True)
set_config = device.set_configuration
set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_ON)
component.set_night_mode(False)
set_config = device.set_configuration
set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_OFF)
def test_is_night_mode(self):
"""Test night mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.night_mode
device = _get_device_off()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert component.night_mode
def test_dyson_turn_auto_mode(self):
"""Test turn on/off fan with auto mode."""
device = _get_device_on()
component = dyson.DysonPureCoolLinkDevice(self.hass, device)
assert not component.should_poll
component.set_auto_mode(True)
        set_config = device.set_configuration
|
WikiWatershed/tr-55
|
tr55/model.py
|
Python
|
apache-2.0
| 13,671 | 0.000658 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
This uses numpy to make a linear interpolation between tabular values to
    calculate the exact runoff for a given precipitation value.
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
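# A hand-worked instance of the equation above (illustrative numbers, not
# taken from the TR-55 tables): CN = 80 and P = 3.0 inches give
#     S  = 1000/80 - 10 = 2.5
#     Ia = 0.2 * 2.5 = 0.5
#     Q  = (3.0 - 0.5)**2 / ((3.0 - 0.5) + 2.5) = 6.25 / 5.0 = 1.25 inches
# The zero-runoff test in `nrcs_cutoff` is this same P <= Ia condition,
# since Ia = 0.2 * (1000/CN - 10) = -2 * (CN - 100) / CN.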
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
ET for the cell after taking the crop/landscape factor into account
this is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
        ordinarily would be).
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
    # When the land-use is a built type, use the Pitt Small Storm Hydrology
    # Model until the runoff predicted by the NRCS model is greater than
    # that predicted by the Pitt model.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
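# A minimal usage sketch of `simulate_cell_day` (the cell key below is
# hypothetical; the soil and land-use names must exist in the lookup
# tables):
#
#     result = simulate_cell_day(precip=2.0, evaptrans=0.2,
#                                cell='b:developed_med:', cell_count=10)
#     # result == {'runoff-vol': ..., 'et-vol': ..., 'inf-vol': ...},
#     # each value in inches * cell_count.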
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
    for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
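# A sketch of the census shape these helpers expect (the cell keys are
# hypothetical):
#
#     census = {
#         'cell_count': 10,
#         'distribution': {'a:developed_low': {'cell_count': 10}},
#         'modifications': [{
#             'change': '::no_till',
#             'distribution': {'a:developed_low': {'cell_count': 4}},
#         }],
#     }
#
# create_modified_census(census) then nests a sub-distribution under
# 'a:developed_low' that moves 4 cells to the key 'a:developed_low:no_till'.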
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
|
Mirantis/tempest
|
tempest/api/volume/admin/test_volume_quotas_negative.py
|
Python
|
apache-2.0
| 3,331 | 0 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest import exceptions
from tempest import test
class VolumeQuotasNegativeTestJSON(base.BaseVolumeV1AdminTest):
_interface = "json"
force_tenant_isolation = True
@classmethod
@test.safe_setup
def setUpClass(cls):
super(VolumeQuotasNegativeTestJSON, cls).setUpClass()
demo_user = cls.isolated_creds.get_primary_creds()
cls.demo_tenant_id = demo_user.tenant_id
cls.shared_quota_set = {'gigabytes': 3, 'volumes': 1, 'snapshots': 1}
# NOTE(gfidente): no need to restore original quota set
# after the tests as they only work with tenant isolation.
resp, quota_set = cls.quotas_client.update_quota_set(
cls.demo_tenant_id,
**cls.shared_quota_set)
# NOTE(gfidente): no need to delete in tearDown as
# they are created using utility wrapper methods.
cls.volume = cls.create_volume()
cls.snapshot = cls.create_snapshot(cls.volume['id'])
@test.attr(type='negative')
def test_quota_volumes(self):
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
@test.attr(type='negative')
def test_quota_volume_snapshots(self):
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
@test.attr(type='negative')
def test_quota_volume_gigabytes(self):
# NOTE(gfidente): quota set needs to be changed for this test
# or we may be limited by the volumes or snaps quota number, not by
# actual gigs usage; next line ensures shared set is restored.
self.addCleanup(self.quotas_client.update_quota_set,
self.demo_tenant_id,
**self.shared_quota_set)
new_quota_set = {'gigabytes': 2, 'volumes': 2, 'snapshots': 1}
        resp, quota_set = self.quotas_client.update_quota_set(
self.demo_tenant_id,
**new_quota_set)
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
new_quota_set = {'gigabytes': 2, 'volumes': 1, 'snapshots': 2}
        resp, quota_set = self.quotas_client.update_quota_set(
            self.demo_tenant_id,
            **new_quota_set)
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
class VolumeQuotasNegativeTestXML(VolumeQuotasNegativeTestJSON):
_interface = "xml"
|
gangadharkadam/tailorerp
|
erpnext/stock/report/batch_wise_balance_history/batch_wise_balance_history.py
|
Python
|
agpl-3.0
| 2,666 | 0.026632 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
def execute(filters=None):
if not filters: filters = {}
columns = get_columns(filters)
item_map = get_item_details(filters)
iwb_map = get_item_warehouse_batch_map(filters)
data = []
for item in sorted(iwb_map):
for wh in sorted(iwb_map[item]):
            for batch in sorted(iwb_map[item][wh]):
qty_dict = iwb_map[item][wh][batch]
data.append([item, item_map[item]["item_name"],
                    item_map[item]["description"], wh, batch,
qty_dict.opening_qty, qty_dict.in_qty,
qty_dict.out_qty, qty_dict.bal_qty
])
return columns, data
def get_columns(filters):
"""return columns based on filters"""
columns = [_("Item") + ":Link/Item:100"] + [_("Item Name") + "::150"] + [_("Description") + "::150"] + \
[_("Warehouse") + ":Link/Warehouse:100"] + [_("Batch") + ":Link/Batch:100"] + [_("Opening Qty") + "::90"] + \
[_("In Qty") + "::80"] + [_("Out Qty") + "::80"] + [_("Balance Qty") + "::90"]
return columns
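# Note on the column strings above: Frappe encodes report columns as
# "Label:Fieldtype/Options:Width", so "Item:Link/Item:100" is a 100px link
# column targeting the Item doctype, and "Opening Qty::90" (empty
# fieldtype) is a plain 90px data column.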
def get_conditions(filters):
conditions = ""
if not filters.get("from_date"):
frappe.throw(_("'From Date' is required"))
if filters.get("to_date"):
conditions += " and posting_date <= '%s'" % filters["to_date"]
else:
frappe.throw(_("'To Date' is required"))
return conditions
#get all details
def get_stock_ledger_entries(filters):
conditions = get_conditions(filters)
return frappe.db.sql("""select item_code, batch_no, warehouse,
posting_date, actual_qty
from `tabStock Ledger Entry`
where docstatus < 2 %s order by item_code, warehouse""" %
conditions, as_dict=1)
def get_item_warehouse_batch_map(filters):
sle = get_stock_ledger_entries(filters)
iwb_map = {}
for d in sle:
iwb_map.setdefault(d.item_code, {}).setdefault(d.warehouse, {})\
.setdefault(d.batch_no, frappe._dict({
"opening_qty": 0.0, "in_qty": 0.0, "out_qty": 0.0, "bal_qty": 0.0
}))
qty_dict = iwb_map[d.item_code][d.warehouse][d.batch_no]
if d.posting_date < filters["from_date"]:
qty_dict.opening_qty += flt(d.actual_qty)
elif d.posting_date >= filters["from_date"] and d.posting_date <= filters["to_date"]:
if flt(d.actual_qty) > 0:
qty_dict.in_qty += flt(d.actual_qty)
else:
qty_dict.out_qty += abs(flt(d.actual_qty))
qty_dict.bal_qty += flt(d.actual_qty)
return iwb_map
def get_item_details(filters):
item_map = {}
for d in frappe.db.sql("select name, item_name, description from tabItem", as_dict=1):
item_map.setdefault(d.name, d)
return item_map
|
imallett/MOSS
|
resources/bytemap font/echo as array.py
|
Python
|
mit
| 923 | 0.021668 |
f = open("font.txt","rb")
fontdata = f.read()
f.close()
out = "static char font[128][5] = {"
for i in xrange(128):
out += "{"
#Encode these into 7 bit "byte"s, with MSB=0
#We don't save anything by using 8-bit bytes; the last five bits of the last byte spill over, so we still get 5 bytes/char
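    #Hand-worked example of the packing below: bits [1,0,1,1,0,0,1] shift
    #to [1<<6, 0<<5, 1<<4, 1<<3, 0<<2, 0<<1, 1<<0] = [64, 0, 16, 8, 0, 0, 1],
    #and OR-ing them together gives 89 = 0x59.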
bits = []
    for j in xrange(5*7):
bits.append( ord(fontdata[(5*7)*i+j])&0x01 )
## print bits
for j in xrange(5):
byte_bits = bits[7*j:7*j+7]
## print byte_bits
byte_bits = [byte_bits[i]<<(7-i-1) for i in xrange(7)]
## print byte_bits
byte = 0x00
for k in byte_bits: byte |= k
## print byte
byte_str = "0x%x" % byte
## print byte_str
## raw_input()
out += byte_str
if j != 5-1: out += ","
out += "}"
if i != 128-1: out += ","
out +="};"
print out
|
P1R/freeMonoCrom
|
MM.py
|
Python
|
gpl-2.0
| 5,424 | 0.014381 |
'''
Nanometer Movement Module
@author: P1R0
import ObjSerial, sys;
ObjSer = ObjSerial.ObjSerial(0,9600)
ObjSer.cts = True
ObjSer.dtr = True
ObjSer.bytesize = 8
'''
SxN = 59.71 # Motor calibration constant
# Function to initialize the monochromator
def init(ObjSer,A):
ObjSer.flushOutput()
ObjSer.write(unicode("A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0A\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0R\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0U1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0Y1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0Y0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T1000\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0F-\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T4000\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K0\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0M99999\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0K1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0V1\r\n"))
echo(ObjSer)
ObjSer.write(unicode("0T400\r\n"))
echo(ObjSer)
    # at the zero position
ObjSer.write(unicode("0M-3925\r\n"))
echo(ObjSer)
    # If we are out of range, pass 1 as the parameter A
if A == 1:
ObjSer.write(unicode("0M3925\r\n"))
echo(ObjSer)
return 0
# Function to approximate errors by linear interpolation
def Error(x):
Y = [0,
0.010373807,
-0.05124284,
-0.227092782,
-0.572418858,
-1.150211522,
-2.019461229,
-3.247663205,
-4.904050745,
-7.062119076,
-9.803353877,
-13.21724083,
-17.39877039,
-22.45717585,
-28.51818573,
-35.71928571,
-44.22644716,
-54.22539859,
-65.94810183,
-79.66102345,
    -95.70661095,
-114.4980595,
-136.5895354,
-162.693691,
-193.8151306,
-231.3914014,
-277.6754313,
-336.5191712,
-415.6610186,
-536.5034235,
-763.8268297,
-804.7677106];
X = [0,
50.002,
99.999,
149.999,
199.997,
249.997,
300.007,
349.993,
400.003,
449.997,
499.994,
550.005,
600.002,
649.993,
700.003,
749.995,
800.004,
849.995,
900.004,
949.999,
1000.006,
1049.997,
1100.004,
1150.001,
1200.005,
1250.002,
1300,
1349.999,
1399.998,
    1449.998,
1490,
1492];
i = 0;
while x > X[i]:
x0=X[i];
y0=Y[i];
x1=X[i+1];
y1=Y[i+1];
i=i+1;
r=y1-y0;
d=r/(x1-x0);
y=y0+(d*(x-x0));
return y
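# A hand-worked pass through the loop above: for x = 25 the loop stops with
# (x0, y0) = (0, 0) and (x1, y1) = (50.002, 0.010373807), so
#     d = (0.010373807 - 0) / (50.002 - 0)
#     y = 0 + d * (25 - 0) ~= 0.005187
# i.e. plain linear interpolation between neighbouring table entries.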
# Function to compute the target position and move the motor
def Calcula(ObjSer,Nm,LastPos):
Er=Error(Nm);
NmyEr = Nm - Er;
uS = NmyEr * SxN;
dif = uS - int(uS);
if dif > 0.5:
uS = int(uS) + 1;
else:
uS = int(uS);
Mover = uS - LastPos;
print "La diferencia a mover es: %d" % Mover;
Mueve(ObjSer,Mover);
LastPos = uS;
return LastPos
# Function to read the echo from the serial object
def echo(ObjSer):
line = ObjSer.readline()
print line
# Function to move the motor
def Mueve(ObjSer, Mover):
    # move in full steps when given microsteps as the parameter
MoverFS = ((Mover-3) / 5);
ObjSer.flushOutput();
ObjSer.write(unicode("0U0\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0V1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0T1000\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0M%d\r\n" % MoverFS));
echo(ObjSer);
ObjSer.write(unicode("0U1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0V1\r\n"));
echo(ObjSer);
ObjSer.write(unicode("0T400\r\n"));
echo(ObjSer);
    # last 3 microsteps for a smoother approach.
    ObjSer.write(unicode("0M3\r\n"));
echo(ObjSer);
'''
if __name__ == "__main__":
N = 0;
LastPos = 0;
init(0);
while 1:
while type(N)!= float:
try:
                N = raw_input("Enter nanometers or quit to close:");
if N == "quit":
ObjSer.close();
sys.exit(0);
                N = float(N);
except (ValueError, TypeError):
print "error, el valor debe ObjSer entero o flotante";
LastPos = Calcula(N,LastPos);
print "los microspasos totales son: %d" % LastPos;
N=0
'''
|
lgarest/django_snippets_api
|
snippets/v3_0/urls.py
|
Python
|
gpl-2.0
| 353 | 0.002833 |
from django.conf.urls import patterns, url
from rest_framework.urlpatterns import format_suffix_patterns
from snippets.v3_0 import views
urlpatterns = patterns('',
    url(r'^v3_0/snippets/$', views.SnippetList.as_view()),
url(r'^v3_0/snippets/(?P<pk>[0-9]+)/$', views.SnippetDetail.as_view()),
)
urlpatterns = format_suffix_patterns(urlpatterns)
|
gloaec/trifle
|
src/rdflib/plugins/serializers/turtle.py
|
Python
|
gpl-3.0
| 12,175 | 0.000903 |
"""
Turtle RDF graph serializer for RDFLib.
See <http://www.w3.org/TeamSubmission/turtle/> for syntax specification.
"""
from collections import defaultdict
from rdflib.term import BNode, Literal, URIRef
from rdflib.exceptions import Error
from rdflib.serializer import Serializer
from rdflib.namespace import RDF, RDFS
__all__ = ['RecursiveSerializer', 'TurtleSerializer']
class RecursiveSerializer(Serializer):
topClasses = [RDFS.Class]
predicateOrder = [RDF.type, RDFS.label]
maxDepth = 10
indentString = u" "
def __init__(self, store):
super(RecursiveSerializer, self).__init__(store)
self.stream = None
self.reset()
def addNamespace(self, prefix, uri):
self.namespaces[prefix] = uri
def checkSubject(self, subject):
"""Check to see if the subject should be serialized yet"""
if ((self.isDone(subject))
or (subject not in self._subjects)
or ((subject in self._topLevels) and (self.depth > 1))
or (isinstance(subject, URIRef)
and (self.depth >= self.maxDepth))):
return False
return True
def isDone(self, subject):
"""Return true if subject is serialized"""
return subject in self._serialized
def orderSubjects(self):
seen = {}
subjects = []
for classURI in self.topClasses:
members = list(self.store.subjects(RDF.type, classURI))
members.sort()
for member in members:
subjects.append(member)
self._topLevels[member] = True
seen[member] = True
recursable = [
(isinstance(subject, BNode),
self._references[subject], subject)
for subject in self._subjects if subject not in seen]
recursable.sort()
subjects.extend([subject for (isbnode, refs, subject) in recursable])
return subjects
def preprocess(self):
for triple in self.store.triples((None, None, None)):
self.preprocessTriple(triple)
def preprocessTriple(self, (s, p, o)):
self._references[o]+=1
self._subjects[s] = True
def reset(self):
self.depth = 0
self.lists = {}
self.namespaces = {}
self._references = defaultdict(int)
self._serialized = {}
self._subjects = {}
self._topLevels = {}
for prefix, ns in self.store.namespaces():
self.addNamespace(prefix, ns)
def buildPredicateHash(self, subject):
"""
Build a hash key by predicate to a list of objects for the given
subject
"""
properties = {}
for s, p, o in self.store.triples((subject, None, None)):
oList = properties.get(p, [])
oList.append(o)
properties[p] = oList
return properties
def sortProperties(self, properties):
"""Take a hash from predicate uris to lists of values.
Sort the lists of values. Return a sorted list of properties."""
# Sort object lists
for prop, objects in properties.items():
objects.sort()
# Make sorted list of properties
propList = []
seen = {}
for prop in self.predicateOrder:
if (prop in properties) and (prop not in seen):
propList.append(prop)
seen[prop] = True
props = properties.keys()
props.sort()
for prop in props:
if prop not in seen:
propList.append(prop)
seen[prop] = True
return propList
def subjectDone(self, subject):
"""Mark a subject as done."""
self._serialized[subject] = True
def indent(self, modifier=0):
"""Returns indent string multiplied by the depth"""
return (self.depth + modifier) * self.indentString
def write(self, text):
"""Write text in given encoding."""
self.stream.write(text.encode(self.encoding, 'replace'))
SUBJECT = 0
VERB = 1
OBJECT = 2
_GEN_QNAME_FOR_DT = False
_SPACIOUS_OUTPUT = False
class TurtleSerializer(RecursiveSerializer):
short_name = "turtle"
indentString = ' '
def __init__(self, store):
self._ns_rewrite = {}
super(TurtleSerializer, self).__init__(store)
self.keywords = {
RDF.type: 'a'
}
self.reset()
self.stream = None
self._spacious = _SPACIOUS_OUTPUT
def addNamespace(self, prefix, namespace):
# Turtle does not support prefix that start with _
# if they occur in the graph, rewrite to p_blah
# this is more complicated since we need to make sure p_blah
# does not already exist. And we register namespaces as we go, i.e.
# we may first see a triple with prefix _9 - rewrite it to p_9
# and then later find a triple with a "real" p_9 prefix
# so we need to keep track of ns rewrites we made so far.
if (prefix > '' and prefix[0] == '_') \
or self.namespaces.get(prefix, namespace) != namespace:
if prefix not in self._ns_rewrite:
p = "p" + prefix
while p in self.namespaces:
p = "p" + p
self._ns_rewrite[prefix] = p
prefix = self._ns_rewrite.get(prefix, prefix)
super(TurtleSerializer, self).addNamespace(prefix, namespace)
return prefix
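        # Example of the rewrite chain above (a sketch): a graph prefix
        # '_9' is registered as 'p_9'; if a genuine 'p_9' bound to a
        # different namespace shows up later, it is rewritten to 'pp_9',
        # and so on until a free name is found.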
def reset(self):
super(TurtleSerializer, self).reset()
self._shortNames = {}
self._started = False
self._ns_rewrite = {}
def serialize(self, stream, base=None, encoding=None,
spacious=None, **args):
self.reset()
self.stream = stream
self.base = base
if spacious is not None:
self._spacious = spacious
self.preprocess()
subjects_list = self.orderSubjects()
self.startDocument()
        firstTime = True
for subject in subjects_list:
if self.isDone(subject):
continue
if firstTime:
firstTime = False
if self.statement(subject) and not firstTime:
self.write('\n')
self.endDocument()
stream.write(u"\n".encode('ascii'))
def preprocessTriple(self, triple):
super(TurtleSerializer, self).preprocessTriple(triple)
for i, node in enumerate(triple):
if node in self.keywords:
continue
# Don't use generated prefixes for subjects and objects
self.getQName(node, gen_prefix=(i == VERB))
if isinstance(node, Literal) and node.datatype:
self.getQName(node.datatype, gen_prefix=_GEN_QNAME_FOR_DT)
p = triple[1]
if isinstance(p, BNode): # hmm - when is P ever a bnode?
self._references[p]+=1
def getQName(self, uri, gen_prefix=True):
if not isinstance(uri, URIRef):
return None
parts = None
try:
parts = self.store.compute_qname(uri, generate=gen_prefix)
except:
# is the uri a namespace in itself?
pfx = self.store.store.prefix(uri)
if pfx is not None:
parts = (pfx, uri, '')
else:
# nothing worked
return None
prefix, namespace, local = parts
# QName cannot end with .
if local.endswith("."): return None
prefix = self.addNamespace(prefix, namespace)
return u'%s:%s' % (prefix, local)
def startDocument(self):
self._started = True
ns_list = sorted(self.namespaces.items())
for prefix, uri in ns_list:
self.write(self.indent() + '@prefix %s: <%s> .\n' % (prefix, uri))
if ns_list and self._spacious:
self.write('\n')
def endDocument(self):
if self._spacious:
self.write('\n')
def statement(self, subject):
self.subjectDone(subject)
return self.s_squared(subject) or self.s_default(subject)
def s_default(self, subject):
|
lycheng/leetcode
|
tests/test_linked_list.py
|
Python
|
mit
| 2,549 | 0 |
# -*- coding: utf-8 -*-
import unittest
from linked_list import (delete_node, list_cycle, remove_elements,
reverse_list)
from public import ListNode
class TestLinkedList(unittest.TestCase):
def test_delete_node(self):
so = delete_node.Solution()
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
head.next.next.next = ListNode(4)
so.deleteNode(head.next)
self.assertEqual(head.next.val, 3)
def test_has_cycle(self):
so = list_cycle.Solution()
self.assertFalse(so.hasCycle(None))
head = ListNode(1)
self.assertFalse(so.hasCycle(head))
head.next = head
self.assertTrue(so.hasCycle(head))
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertFalse(so.hasCycle(head))
head.next.next.next = head
self.assertTrue(so.hasCycle(head))
def test_detect_cycle(self):
so = list_cycle.Solution()
head = ListNode(1)
self.assertFalse(so.detectCycle(head))
self.assertFalse(so.detectCycle(None))
head.next = ListNode(2)
self.assertFalse(so.detectCycle(head))
cross = ListNode(3)
head.next.next = cross
head.next.next.next = ListNode(4)
head.next.next.next.next = ListNode(5)
head.next.next.next.next.next = cross
self.assertEqual(so.detectCycle(head), cross)
def test_remove_elements(self):
so = remove_elements.Solution()
self.assertFalse(so.removeElements(None, 0))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(2)
head.next.next.next = ListNode(3)
head.next.next.next.next = ListNode(4)
head = so.removeElements(head, 1)
self.assertEqual(head.val, 2)
head = so.removeElements(head, 2)
self.assertEqual(head.val, 3)
head = so.removeElements(head, 4)
self.assertFalse(head.next)
def test_reverse_linked_list(self):
        so = reverse_list.Solution()
self.assertFalse(so.reverseList_iteratively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_iteratively(head).val, 3)
self.assertFalse(so.reverseList_recursively(None))
head = ListNode(1)
head.next = ListNode(2)
        head.next.next = ListNode(3)
self.assertEqual(so.reverseList_recursively(head).val, 3)
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/urbansim/gridcell/total_improvement_value_per_residential_unit_within_walking_distance.py
|
Python
|
gpl-2.0
| 2,680 | 0.011194 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
from numpy import ma
from numpy import float32
class total_improvement_value_per_residential_unit_within_walking_distance(Variable):
total_improvement_value_within_walking_distance = "total_improvement_value_within_walking_distance"
residential_units_within_walking_distance = "residential_units_within_walking_distance"
def dependencies(self):
return [my_attribute_label(self.total_improvement_value_within_walking_distance),
my_attribute_label(self.residential_units_within_walking_distance)]
def compute(self, dataset_pool):
units_wwd = self.get_dataset().get_attribute(self.residential_units_within_walking_distance)
return self.get_dataset().get_attribute(self.total_improvement_value_within_walking_distance) /\
ma.masked_where(units_wwd == 0, units_wwd.astype(float32))
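        # Note on the masked division above (a sketch of numpy.ma
        # semantics): gridcells with zero residential units come back
        # masked instead of raising a divide-by-zero. For example,
        #     ma.masked_where(array([0, 2]) == 0, array([0., 2.]))
        # masks the first entry, so dividing by it yields [--, value/2.0].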
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
'gridcell':{
'grid_id': array([1,2,3,4]),
'relative_x': array([1,2,1,2]),
'relative_y': array([1,1,2,2]),
'total_residential_value': array([100, 500, 1000, 1500]),
'governmental_improvement_value': array([100, 500, 1000, 1500]),
# The four items below are 'throw-away' items to allow this Variable to test -
# they can be anything and not affect the outcome of this variable.
'commercial_improvement_value': array([0, 0, 0, 0]),
'industrial_improvement_value': array([0, 0, 0, 0]),
'residential_improvement_value': array([0, 0, 0, 0]),
'residential_units': array([0, 0, 0, 0]),
},
'urbansim_constant':{
"walking_distance_circle_radius": array([150]),
                    'cell_size': array([150]),
}
}
)
should_be = array([1800, 3100, 4600, 6000])
tester.test_is_equal_for_variable_defined_by_this_module(self, should_be)
if __name__=='__main__':
opus_unittest.main()
|
slx-dev/digital-kanban
|
src/klopfer.py
|
Python
|
mit
| 1,083 | 0 |
import directory
import scanner
import mapper
import board
import os
class Klopfer(object):
def __init__(self, import_dir, export_dir):
self.import_dir = import_dir
self.export_dir = export_dir
print "Klopfer class"
def run(self):
# open dir and get oldest file with the given extension
dir = directory.Directory(os, self.import_dir, ['jpg', 'jpeg'])
self.imagefile = dir.get_oldest_file()
# open image
scan = scanner.Scanner(self.imagefile.name)
self.remove_image()
informations = scan.scan()
# load board_id and cards
mapping = mapper.Mapper(informations)
board_id = mapping.board_id
cards = mapping.get_cards()
# create board
current_board = board.Board(board_id, cards)
# write board to json
current_board.export_json(self.export_dir)
# remove old image
def remove_image(self):
# Uncomment in production version when multiple input files are present
# os.remove(self.imagefile.name)
pass
|
bashu/django-easy-seo
|
seo/south_migrations/0004_auto__del_url__del_unique_url_url_site.py
|
Python
|
gpl-3.0
| 2,904 | 0.00792 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'Url', fields ['url', 'site']
db.delete_unique('seo_url', ['url', 'site_id'])
# Deleting model 'Url'
db.delete_table('seo_url')
def backwards(self, orm):
# Adding model 'Url'
db.create_table('seo_url', (
('url', self.gf('django.db.models.fields.CharField')(default='/', max_length=200)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'], null=True, blank=True)),
))
db.send_create_signal('seo', ['Url'])
# Adding unique constraint on 'Url', fields ['url', 'site']
db.create_unique('seo_url', ['url', 'site_id'])
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
|
},
'seo.seo': {
'Meta': {'unique_together': "(('content_t
|
ype', 'object_id'),)", 'object_name': 'Seo'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['seo']
|
ebukoz/thrive
|
erpnext/demo/user/manufacturing.py
|
Python
|
gpl-3.0
| 4,593 | 0.027433 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, random, erpnext
from datetime import timedelta
from frappe.utils.make_random import how_many
from frappe.desk import query_report
from erpnext.manufacturing.doctype.workstation.workstation import WorkstationHolidayError
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
def work():
if random.random() < 0.3: return
frappe.set_user(frappe.db.get_global('demo_manufacturing_user'))
if not frappe.get_all('Sales Order'): return
from erpnext.projects.doctype.timesheet.timesheet import OverlapError
ppt = frappe.new_doc("Production Plan")
ppt.company = erpnext.get_default_company()
# ppt.use_multi_level_bom = 1 #refactored
ppt.get_items_from = "Sales Order"
# ppt.purchase_request_for_warehouse = "Stores - WPL" # refactored
ppt.run_method("get_open_sales_orders")
if not ppt.get("sales_orders"): return
ppt.run_method("get_items")
ppt.run_method("raise
|
_materia
|
l_requests")
ppt.save()
ppt.submit()
ppt.run_method("raise_work_orders")
frappe.db.commit()
# submit work orders
for pro in frappe.db.get_values("Work Order", {"docstatus": 0}, "name"):
b = frappe.get_doc("Work Order", pro[0])
b.wip_warehouse = "Work in Progress - WPL"
b.submit()
frappe.db.commit()
# submit material requests
for pro in frappe.db.get_values("Material Request", {"docstatus": 0}, "name"):
b = frappe.get_doc("Material Request", pro[0])
b.submit()
frappe.db.commit()
# stores -> wip
if random.random() < 0.4:
for pro in query_report.run("Open Work Orders")["result"][:how_many("Stock Entry for WIP")]:
make_stock_entry_from_pro(pro[0], "Material Transfer for Manufacture")
# wip -> fg
if random.random() < 0.4:
for pro in query_report.run("Work Orders in Progress")["result"][:how_many("Stock Entry for FG")]:
make_stock_entry_from_pro(pro[0], "Manufacture")
for bom in frappe.get_all('BOM', fields=['item'], filters = {'with_operations': 1}):
pro_order = make_wo_order_test_record(item=bom.item, qty=2,
source_warehouse="Stores - WPL", wip_warehouse = "Work in Progress - WPL",
fg_warehouse = "Stores - WPL", company = erpnext.get_default_company(),
stock_uom = frappe.db.get_value('Item', bom.item, 'stock_uom'),
planned_start_date = frappe.flags.current_date)
# submit job card
if random.random() < 0.4:
submit_job_cards()
def make_stock_entry_from_pro(pro_id, purpose):
from erpnext.manufacturing.doctype.work_order.work_order import make_stock_entry
from erpnext.stock.stock_ledger import NegativeStockError
from erpnext.stock.doctype.stock_entry.stock_entry import IncorrectValuationRateError, \
DuplicateEntryForWorkOrderError, OperationsNotCompleteError
try:
st = frappe.get_doc(make_stock_entry(pro_id, purpose))
st.posting_date = frappe.flags.current_date
st.fiscal_year = str(frappe.flags.current_date.year)
for d in st.get("items"):
d.cost_center = "Main - " + frappe.get_cached_value('Company', st.company, 'abbr')
st.insert()
frappe.db.commit()
st.submit()
frappe.db.commit()
except (NegativeStockError, IncorrectValuationRateError, DuplicateEntryForWorkOrderError,
OperationsNotCompleteError):
frappe.db.rollback()
def submit_job_cards():
work_orders = frappe.get_all("Work Order", ["name", "creation"], {"docstatus": 1, "status": "Not Started"})
work_order = random.choice(work_orders)
# for work_order in work_orders:
start_date = work_order.creation
work_order = frappe.get_doc("Work Order", work_order.name)
job = frappe.get_all("Job Card", ["name", "operation", "work_order"],
{"docstatus": 0, "work_order": work_order.name})
if not job: return
job_map = {}
for d in job:
job_map[d.operation] = frappe.get_doc("Job Card", d.name)
for operation in work_order.operations:
job = job_map[operation.operation]
job_time_log = frappe.new_doc("Job Card Time Log")
job_time_log.from_time = start_date
minutes = operation.get("time_in_mins")
job_time_log.time_in_mins = random.randint(int(minutes/2), minutes)
job_time_log.to_time = job_time_log.from_time + \
timedelta(minutes=job_time_log.time_in_mins)
job_time_log.parent = job.name
job_time_log.parenttype = 'Job Card'
job_time_log.parentfield = 'time_logs'
job_time_log.completed_qty = work_order.qty
job_time_log.save(ignore_permissions=True)
job.time_logs.append(job_time_log)
job.save(ignore_permissions=True)
job.submit()
start_date = job_time_log.to_time
|
alexkolar/pyvmomi
|
tests/test_iso8601.py
|
Python
|
apache-2.0
| 4,117 | 0 |
# VMware vSphere Python SDK
# Copyright (c) 2008-2015 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from datetime import timedelta
import tests
import vcr
from pyVim import connect
from pyVmomi.Iso8601 import TZManager
class Iso8601Tests(tests.VCRTestBase):
@vcr.use_cassette('test_vm_config_iso8601.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once')
def test_vm_config_iso8601(self):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "5001ad1b-c78d-179e-ecd7-1cc0e1cf1b96"
vm = search_index.FindByUuid(None, uuid, True, True)
boot_time = vm.runtime.bootTime
# NOTE (hartsock): assertIsNone does not work in Python 2.6
self.assertTrue(boot_time is not None)
# 2014-08-05T17:50:20.594958Z
expected_time = datetime(2014, 8, 5, 17, 50, 20, 594958,
boot_time.tzinfo)
self.assertEqual(expected_time, boot_time)
def test_iso8601_set_datetime(self):
# NOTE (hartsock): This test is an example of how to register
# a fixture based test to compare the XML document that pyVmomi
# is transmitting. We needed to invent a set of tools to effectively
# compare logical XML documents to each other. In this case we are
# only interested in the 'soapenv:Body' tag and its children.
now_string = "2014-08-19T04:29:36.070918-04:00"
# NOTE (hartsock): the strptime formatter has a bug in python 2.x
# http://bugs.python.org/issue6641 so we're building the date time
# using the constructor arguments instead of parsing it.
now = datetime(2014, 8, 19, 4, 29, 36, 70918,
TZManager.GetTZInfo(
tzname='EDT',
utcOffset=timedelta(hours=-4, minutes=0)))
def has_tag(doc):
if doc is None:
return False
return '<dateTime>' in doc
def correct_time_string(doc):
return '<dateTime>{0}</dateTime>'.format(now_string) in doc
def check_date_time_value(r1, r2):
for r in [r1, r2]:
if has_tag(r.body):
if not correct_time_string(r.body):
return False
return True
my_vcr = vcr.VCR()
my_vcr.register_matcher('document', check_date_time_value)
# NOTE (hartsock): the `match_on` option is altered to use the
# look at the XML body sent to the server
with my_vcr.use_cassette('iso8601_set_datetime.yaml',
                                 cassette_library_dir=tests.fixtures_path,
record_mode='once',
match_on=['method', 'scheme', 'host', 'port',
'path', 'query', 'document']):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "4c4c4544-0043-4d10-8056-b1c04f4c5331"
host = search_index.FindByUuid(None, uuid, False)
date_time_system = host.configManager.dateTimeSystem
# NOTE (hartsock): sending the date time 'now' to host.
date_time_system.UpdateDateTime(now)
|
jansohn/pyload
|
module/plugins/hoster/OboomCom.py
|
Python
|
gpl-3.0
| 4,627 | 0.004322 |
# -*- coding: utf-8 -*-
#
# Test links:
# https://www.oboom.com/B7CYZIEB/10Mio.dat
import re
from module.common.json_layer import json_loads
from module.plugins.internal.Hoster import Hoster
from module.plugins.captcha.ReCaptcha import ReCaptcha
class OboomCom(Hoster):
__name__ = "OboomCom"
__type__ = "hoster"
__version__ = "0.38"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?oboom\.com/(?:#(?:id=|/)?)?(?P<ID>\w{8})'
__description__ = """Oboom.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("stanley", "stanley.foerster@gmail.com")]
RECAPTCHA_KEY = "6LdqpO0SAAAAAJGHXo63HyalP7H4qlRs_vff0kJX"
def setup(self):
self.chunk_limit = 1
self.multiDL = self.resume_download = self.premium
def process(self, pyfile):
        self.pyfile.url = self.pyfile.url.replace(".com/#id=", ".com/#")
        self.pyfile.url = self.pyfile.url.replace(".com/#/", ".com/#")
self.html = self.load(pyfile.url)
self.get_file_id(self.pyfile.url)
self.get_session_token()
self.get_fileInfo(self.session_token, self.file_id)
self.pyfile.name = self.file_name
self.pyfile.size = self.file_size
if not self.premium:
self.solve_captcha()
self.get_download_ticket()
self.download("http://%s/1.0/dlh" % self.download_domain, get={'ticket': self.download_ticket, 'http_errors': 0})
def load_url(self, url, get=None):
if get is None:
get = {}
return json_loads(self.load(url, get))
def get_file_id(self, url):
self.file_id = re.match(OboomCom.__pattern__, url).group('ID')
def get_session_token(self):
if self.premium:
accountInfo = self.account.get_data()
if "session" in accountInfo:
self.session_token = accountInfo['session']
else:
self.fail(_("Could not retrieve premium session"))
else:
apiUrl = "http://www.oboom.com/1.0/guestsession"
            result = self.load_url(apiUrl)
if result[0] == 200:
self.session_token = result[1]
else:
self.fail(_("Could not retrieve token for guest session. Error code: %s") % result[0])
    def solve_captcha(self):
recaptcha = ReCaptcha(self)
response, challenge = recaptcha.challenge(self.RECAPTCHA_KEY)
apiUrl = "http://www.oboom.com/1.0/download/ticket"
params = {'recaptcha_challenge_field': challenge,
'recaptcha_response_field': response,
'download_id': self.file_id,
'token': self.session_token}
result = self.load_url(apiUrl, params)
if result[0] == 200:
self.download_token = result[1]
self.download_auth = result[2]
self.captcha.correct()
self.wait(30)
else:
if result[0] == 403:
if result[1] == -1: #: Another download is running
self.set_wait(15 * 60)
else:
self.set_wait(result[1], True)
self.wait()
self.retry(5)
elif result[0] == 400 and result[1] == "forbidden":
self.retry(5, 15 * 60, _("Service unavailable"))
self.retry_captcha()
def get_fileInfo(self, token, fileId):
apiUrl = "http://api.oboom.com/1.0/info"
params = {'token': token, 'items': fileId, 'http_errors': 0}
result = self.load_url(apiUrl, params)
if result[0] == 200:
item = result[1][0]
if item['state'] == "online":
self.file_size = item['size']
self.file_name = item['name']
else:
self.offline()
else:
self.fail(_("Could not retrieve file info. Error code %s: %s") % (result[0], result[1]))
def get_download_ticket(self):
apiUrl = "http://api.oboom.com/1/dl"
params = {'item': self.file_id, 'http_errors': 0}
if self.premium:
params['token'] = self.session_token
else:
params['token'] = self.download_token
params['auth'] = self.download_auth
result = self.load_url(apiUrl, params)
if result[0] == 200:
self.download_domain = result[1]
self.download_ticket = result[2]
elif result[0] == 421:
self.retry(wait=result[2] + 60, msg=_("Connection limit exceeded"))
else:
self.fail(_("Could not retrieve download ticket. Error code: %s") % result[0])
|
sbidoul/buildbot
|
worker/buildbot_worker/monkeypatches/testcase_assert.py
|
Python
|
gpl-2.0
| 2,243 | 0.001337 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.utils import string_types
import re
import unittest
def _assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj, *args, **kwds):
"""
Asserts that the message in a raised exception matches a regexp.
This is a simple clone of unittest.TestCase.assertRaisesRegexp() method
introduced in python 2.7. The goal for this function is to behave exactly
as assertRaisesRegexp() in standard library.
"""
exception = None
try:
callable_obj(*args, **kwds)
except expected_exception as ex: # let unexpected exceptions pass through
exception = ex
if exception is None:
self.fail("%s not raised" % str(expected_exception.__name__))
if isinstance(expected_regexp, string_types):
expected_regex
|
p = re.compile(expected_regexp)
if not expected_regexp.search(str(exception)):
self.fail('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exception)))
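# A usage sketch (inside any TestCase once patch() below has run):
#
#     self.assertRaisesRegexp(ValueError, "invalid literal",
#                             int, "not-a-number")
#
# passes, since int("not-a-number") raises a ValueError whose message
# matches the regexp.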
def patch():
hasAssertRaisesRegexp = getattr(unittest.TestCase, "assertRaisesRegexp", None)
hasAssertRaisesRegex = getattr(unittest.TestCase, "assertRaisesRegex", None)
    if not hasAssertRaisesRegexp:
# Python 2.6
unittest.TestCase.assertRaisesRegexp = _assertRaisesRegexp
if not hasAssertRaisesRegex:
# Python 2.6 and Python 2.7
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
|
china-x-orion/infoeye
|
tools/netstat.py
|
Python
|
mit
| 966 | 0.024845 |
#!/usr/bin/python
"""
Author: rockylinux
E-Mail: Jingzheng.W@gmail.com
"""
import commands
# display the live connections
# return a list containing the live connections
class netstat:
def __init__(self):
self.__name = 'netstat'
def getData(self):
        (status, output) = commands.getstatusoutput('netstat -ntu | /usr/bin/awk \'NR>2 {sub(/:[^:]+$/, ""); print $5}\' | sort | uniq -c')
#return output.split('\n')
rst = [i.strip().split() for i in output.split("\n")]
if len(rst[0]) == 0:
print [["", ""]]
else:
print rst
#[i.strip().split() for i in output.split("\n")]return [i.strip() for i in output.split("\n")]
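    # What the pipeline in getData produces (a sketch): `netstat -ntu`
    # lists TCP/UDP connections, the awk program strips the port from the
    # foreign address (column 5), and `sort | uniq -c` counts duplicates,
    # so each output line looks like "  3 10.0.0.5" and parses to
    # ['3', '10.0.0.5'].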
def testGetData(self,test):
if type(test) == type([]):
for i in test:
                print i
else:
print test
if __name__ == '__main__':
a = netstat()
test = a.getData()
#a.testGetData(test)
|
cbrafter/TRB18_GPSVA
|
codes/sumoAPI/HybridVAControl_PROFILED.py
|
Python
|
mit
| 14,996 | 0.005802 |
#!/usr/bin/env python
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
def __init__(self, junctionData, minGreenTime=10, maxGreenTime=60, scanRange=250, packetRate=0.2):
super(HybridVAControl, self).__init__()
self.junctionData = junctionData
self.firstCalled = self.getCurrentSUMOtime()
self.lastCalled = self.getCurrentSUMOtime()
self.lastStageIndex = 0
traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString)
self.packetRate = int(1000*packetRate)
self.transition = False
self.CAMactive = False
# dict[vehID] = [position, heading, velocity, Tdetect]
self.newVehicleInfo = {}
self.oldVehicleInfo = {}
self.scanRange = scanRange
self.jcnCtrlRegion = self._getJncCtrlRegion()
# print(self.junctionData.id)
# print(self.jcnCtrlRegion)
self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
# dict[laneID] = [heading, shape]
self.laneDetectionInfo = self._getIncomingLaneInfo()
self.stageTime = 0.0
self.minGreenTime = minGreenTime
self.maxGreenTime = maxGreenTime
self.secondsPerMeterTraffic = 0.45
self.nearVehicleCatchDistance = 25
self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
self.laneInductors = self._getLaneInductors()
def process(self):
# Packets sent on this step
# packet delay + only get packets towards the end of the second
if (not self.getCurrentSUMOtime() % self.packetRate) and (self.getCurrentSUMOtime() % 1000 > 500):
self.CAMactive = True
self._getCAMinfo()
else:
self.CAMactive = False
# Update stage decisions
# If there's no ITS enabled vehicles present use VA ctrl
if len(self.oldVehicleInfo) < 1 and not self.getCurrentSUMOtime() % 1000:
detectTimePerLane = self._getLaneDetectTime()
#print(detectTimePerLane)
# Set adaptive time limit
#print(detectTimePerLane < 3)
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If active and on the second, or transition then make stage descision
elif (self.CAMactive and not self.getCurrentSUMOtime() % 1000) or self.transition:
oncomingVeh = self._getOncomingVehicles()
# If new stage get furthest from stop line whose velocity < 5% speed
# limit and determine queue length
if self.transition:
furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
if furthestVeh[0] != '':
meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
self.stageTime = max(self.minGreenTime, meteredTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If we're in this state this should never happen but just in case
else:
self.stageTime = self.minGreenTime
# If currently staging then extend time if there are vehicles close
# to the stop line
else:
nearestVeh = self._getNearestVehicle(oncomingVeh)
# If a vehicle detected
if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
else:
meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
elapsedTime = 0.001*(self.getCurrentSUMOtime() - self.lastCalled)
Tremaining = self.stageTime - elapsedTime
self.stageTime = elapsedTime + max(meteredTime, Tremaining)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# no detectable near vehicle try inductive loop info
                elif nearestVeh == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
detectTimePerLane = self._getLaneDetectTime()
print('Loops2')
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
else:
pass
# process stage as normal
else:
pass
# print(self.stageTime)
self.transition = False
if self.transitionObject.active:
# If the transition object is active i.e. processing a transition
pass
elif (self.getCurrentSUMOtime() - self.firstCalled) < (self.junctionData.offset*1000):
# Process offset first
pass
        elif (self.getCurrentSUMOtime() - self.lastCalled) < self.stageTime*1000:
# Before the period of the next stage
pass
else:
            # Not active, not in offset, stage time elapsed: start the next stage
if len(self.junctionData.stages) != (self.lastStageIndex)+1:
                # Proceed to the next stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[self.lastStageIndex+1].controlString)
self.lastStageIndex += 1
else:
                # Loop from the final stage back to the first stage
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[0].controlString)
self.lastStageIndex = 0
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.lastCalled = self.getCurrentSUMOtime()
self.transition = True
self.stageTime = 0.0
super(HybridVAControl, self).process()
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
heading = 450 - heading
return heading
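    # Quick sanity check of the mapping above (hypothetical coordinates):
    #   _getHeading((0, 1), (0, 0)) -> 0.0   (due north maps to SUMO heading 0)
    #   _getHeading((1, 0), (0, 0)) -> 90.0  (due east maps to SUMO heading 90)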
def _getJncCtrlRegion(self):
jncPosition = traci.junction.getPosition(self.junctionData.id)
otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
ctrlRegion = {'N':jncPosition[1]+self.scanRange, 'S':jncPosition[1]-self.scanRange,
'E':jncPosition[0]+self.scanRange, 'W':jncPosition[0]-self.scanRange}
TOL = 10 # Exclusion region around junction boundary
if otherJuncPos != []:
for pos in otherJuncPos:
|
Silvian/Reminders-App
|
reminders/urls.py
|
Python
|
gpl-3.0
| 1,236 | 0.000809 |
"""reminders URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from . import views
urlpatterns = [
    url(r'^admin/', admin.site.urls),
url(r'^login/', views.display_login, name='login'),
url(r'^logout/', views.logout_view, name='logout'),
url(r'^api/authenticate', views.authenticate_user, name='authenticate_user'),
url(r'^api/reminders', views.get_reminders, name='get_reminders'),
url(r'^api/add', views.add_reminder, name='add_reminder'),
    url(r'^api/remove', views.remove_reminder, name='remove_reminder'),
url(r'^$', views.display_index, name='index'),
]
|
buchwj/xvector
|
client/xVClient/StartupScreen.py
|
Python
|
gpl-3.0
| 14,622 | 0.004514 |
# xVector Engine Client
# Copyright (c) 2011 James Buchwald
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
Screen with the title menu, metaserver, and automatic updater.
The startup screen can be thought of as a central hub for the client. Its
primary role is to allow the player to select a server to connect to, then
prepare the local files for that connection. Its final step is to hand
off control to the Game screen, the first state of which is the login screen.
'''
import os.path
import logging
import traceback
import sys
import time
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import SIGNAL
from . import ClientPaths, ClientGlobals, Metaserver, ClientNetworking
from .ui.TitleWidgetUI import Ui_TitleWidget
from .ui.PrivateServerWidgetUI import Ui_PrivateServerWidget
mainlog = logging.getLogger("Client.Main")
class TitleMenu(QtGui.QWidget):
'''Widget that displays the title menu.'''
def __init__(self, parent=None):
'''
Sets up the title menu widget.
@type parent: QtGui.QWidget
@param parent: Parent widget.
'''
# Inherit base class behavior.
super(TitleMenu, self).__init__(parent=parent)
# Set up our UI.
self.ui = Ui_TitleWidget()
'''Automatically-generated user interface object.'''
self.ui.setupUi(self)
# Connect buttons.
self.connect(self.ui.btnPublic, SIGNAL("clicked()"),
self.OnPublicServer)
self.connect(self.ui.btnPrivate, SIGNAL("clicked()"),
self.OnPrivateServer)
self.connect(self.ui.btnSettings, SIGNAL("clicked()"),
self.OnSettings)
self.connect(self.ui.btnExit, SIGNAL("clicked()"),
self.OnExit)
def OnPublicServer(self):
'''Called when the "Public Servers" button is clicked.'''
# Notify the main widget.
self.parent().OnPublicServer()
def OnPrivateServer(self):
'''Called when the "Private Servers" button is clicked.'''
# Notify the main widget.
self.parent().OnPrivateServer()
def OnSettings(self):
'''Called when the "Settings" button is clicked.'''
pass # TODO: Implement
def OnExit(self):
'''Called when the "Exit" button is clicked.'''
self.parent().OnClose()
def paintEvent(self, event):
'''
Called from Qt when the widget is redrawn.
@type event: QtGui.QPaintEvent
@param event: Paint event.
'''
# Enable stylesheets on this widget.
opt = QtGui.QStyleOption()
opt.init(self)
painter = QtGui.QPainter(self)
self.style().drawPrimitive(QtGui.QStyle.PE_Widget, opt,
painter, self)
class PrivateServerWidget(QtGui.QWidget):
'''
Widget that allows the user to connect to a private server by address.
'''
def __init__(self, parent=None):
'''
Creates a new private server widget.
'''
# Inherit base class behavior.
super(PrivateServerWidget, self).__init__(parent)
# Set up UI.
self.ui = Ui_PrivateServerWidget()
'''Automatically-generated user interface object.'''
self.ui.setupUi(self)
# Create our validators.
self.PortValidator = QtGui.QIntValidator(1, 65535, self)
'''Port number validator.'''
self.ui.PortEdit.setValidator(self.PortValidator)
# Connect buttons to their callbacks.
self.connect(self.ui.ConnectButton, SIGNAL("clicked()"),
self.OnConnect)
self.connect(self.ui.BackButton, SIGNAL("clicked()"), self.OnBack)
def OnConnect(self):
'''Called when the Connect button is clicked.'''
# Validate user input.
host = self.ui.HostEdit.text()
if host == "":
msg = "Host must not be empty."
mainlog.error(msg)
return
try:
port = int(self.ui.PortEdit.text())
if port < 1 or port > 65535: raise Exception
except:
# Port must be an integer.
msg = "Port must be a number between 1 and 65535."
mainlog.error(msg)
return
# Connect.
address = (host, port)
self.parent().ConnectToServer(address)
def OnBack(self):
'''Called when the Back button is clicked.'''
self.parent().BackToMain()
def paintEvent(self, event):
'''
Called from Qt when the widget is redrawn.
@type event: QtGui.QPaintEvent
@param event: Paint event.
'''
# Enable stylesheets on this widget.
opt = QtGui.QStyleOption()
opt.init(self)
painter = QtGui.QPainter(self)
self.style().drawPrimitive(QtGui.QStyle.PE_Widget, opt,
painter, self)
class StartupScreen(QtGui.QWidget):
'''
Game startup screen; handles the title menu, metaserver, and auto-updater.
'''
##
## Startup state constants
##
StartupState_None = 0
'''This state shows nothing.'''
StartupState_Title = 1
'''This state shows the title menu.'''
StartupState_Metaserver = 2
    '''This state allows a player to choose a server from the list.'''
StartupState_PrivateServer = 3
'''This state allows a player to connect to a server by address.'''
StartupState_Settings = 4
'''This state allows a player to change the local settings.'''
StartupState_Updater = 5
'''This state retrieves updated files from the server.'''
##
## Control constants
##
FramesPerSecond = 30
'''Framecap for the startup screen.'''
FadeTime = 1.0
'''Time, in seconds, of the fade effect.'''
BackgroundFile = os.path.join("ui", "backgrounds", "startup.png")
'''Path (relative to the master resource root) of the background.'''
def __init__(self, parent=None):
'''
Creates a new startup screen object.
@type parent: QtGui.QWidget
@param parent: Parent object of the screen (usually the main window)
'''
# Inherit base class behavior.
super(StartupScreen,self).__init__(parent)
App = ClientGlobals.Application
# Declare attributes which will hold our widgets.
self.TitleMenu = None
'''The TitleMenu widget to display.'''
self.PublicServersMenu = None
'''The public servers menu to display.'''
self.PrivateServersMenu = None
'''The private servers menu to display.'''
self.SettingsScreen = None
'''The settings screen to display.'''
# Create our layout.
self.Layout = QtGui.QVBoxLayout()
'''Main layout of the startup screen.'''
self.setLayout(self.Layout)
# Set our initial state.
self.State = self.StartupState_Title
'''Current state of the startup screen.'''
self.FadeIn = True
'''Whether or not we are fading in.'''
self.FadeOut = False
'''Whether or not we are fading out.'''
self.FadeAlpha = 0.0
'''Alpha of the widget; used for fading effects.'''
self.FadeBrush = QtGui.QBrush(QtCore.Qt.black)
'''Brush used to draw the fade effect.'''
self.OnFadeComplete = None
'''Call
|
shackra/thomas-aquinas
|
summa/audio/system.py
|
Python
|
bsd-3-clause
| 776 | 0 |
# coding: utf-8
# This file is part of Thomas Aquinas.
#
# Thomas Aquinas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Thomas Aquinas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Thomas Aquinas. If not, see <http://www.gnu.org/licenses/>.
#
# veni, Sancte Spiritus.
import ctypes
import logging
|
amolenaar/gaphor
|
packaging/make-script.py
|
Python
|
lgpl-2.1
| 2,017 | 0.000496 |
import os
import subprocess
from pathlib import Path
import pyinstaller_versionfile
import tomli
packaging_path = Path(__file__).resolve().parent
def get_version() -> str:
project_dir = Path(__file__).resolve().parent.parent
f = project_dir / "pyproject.toml"
return str(tomli.loads(f.read_text())["tool"]["poetry"]["version"])
def make_gaphor_script():
pyproject_toml = packaging_path.parent / "pyproject.toml"
with open(pyproject_toml, "rb") as f:
toml = tomli.load(f)
gaphor_script = packaging_path / "gaphor-script.py"
with open(gaphor_script, "w") as file:
# https://github.com/pyinstaller/pyinstaller/issues/6100
        # On one Windows computer, PyInstaller was adding a ; to
# end of the path, this removes it if it exists
file.write("import os\n")
file.write("if os.environ['PATH'][-1] == ';':\n")
file.write(" os.environ['PATH'] = os.environ['PATH'][:-1]\n")
# Check for and remove two semicolons in path
file.write("os.environ['PATH'] = os.environ['PATH'].replace(';;', ';')\n")
plugins = toml["tool"]["poetry"]["plugins"]
for cat in plugins.values():
for entrypoint in cat.values():
file.write(f"import {entrypoint.split(':')[0]}\n")
file.write("from gaphor.ui import main\n")
file.write("import sys\n")
file.write("main(sys.argv)\n")
def make_file_version_info():
win_packaging_path = packaging_path / "windows"
metadata = win_packaging_path / "versionfile_metadata.yml"
file_version_out = win_packaging_path / "file_version_info.txt"
version = get_version()
if "dev" in version:
version = version[: version.rfind(".dev")]
pyinstaller_versionfile.create_versionfile_from_input_file(
output_file=file_version_out,
input_file=metadata,
version=version,
)
def make_pyinstaller():
os.chdir(packaging_path)
subprocess.run(["pyinstaller", "-y", "gaphor.spec"])
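if __name__ == "__main__":
    # Hypothetical entry point (an assumption -- the original driver is not shown):
    make_gaphor_script()
    make_file_version_info()
    make_pyinstaller()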
|
jlopezbi/rhinoUnfolder
|
rhino_unwrapper/cutSelection/autoCuts.py
|
Python
|
gpl-3.0
| 3,550 | 0.006197 |
def auto_fill_cuts(myMesh,user_cuts,weight_function):
'''
fill in user_cut list (or if empty create new one) which
prefers edges with larger weight, given by the weight_function
NOTE: currently sets naked edges as cuts
'''
sorted_edges = get_edge_weights(myMesh,user_cuts,weight_function)
fold_list = getSpanningKruskal(sorted_edges,myMesh.mesh)
cuts = getCutList(myMesh,fold_list)
myMesh.set_cuts(cuts)
return cuts
def get_edge_weights(myMesh, userCuts,weight_function):
edges_with_weights= []
for i in xrange(myMesh.mesh.TopologyEdges.Count):
if userCuts:
if i not in userCuts:
edges_with_weights.append((i,weight_function(myMesh, i)))
else:
edges_with_weights.append((i, float('inf')))
else:
edges_with_weights.append((i,weight_function(myMesh, i)))
return edges_with_weights
def getSpanningKruskal(edges_with_weights, mesh):
'''
this section of the code should be updated to use the union-find trick.
input:
edges_with_weights = list of tuples (edgeIdx, weight)
mesh = Rhino.Geometry mesh
output:
foldList = list of edgeIdx's that are to be folded
'''
    # sorted from smallest to greatest; user cuts, which get inf weight, have low likelihood of becoming fold edges
sorted_edges = sorted(edges_with_weights, key=lambda tup: tup[1], reverse=False)
treeSets = []
foldList = []
for tupEdge in sorted_edges:
edgeIdx = tupEdge[0]
arrConnFaces = mesh.TopologyEdges.GetConnectedFaces(edgeIdx)
if(len(arrConnFaces) > 1): # this avoids problems with naked edges
setConnFaces = set(
[arrConnFaces.GetValue(0), arrConnFaces.GetValue(1)])
parentSets = []
# print"edgeSet:"
# print setConnFaces
isLegal = True
for i, treeSet in enumerate(treeSets):
if setConnFaces.issubset(treeSet):
# print"--was illegal"
isLegal = False
break
elif not setConnFaces.isdisjoint(treeSet):
# print"overlapped"
parentSets.append(i)
if isLegal == True:
#do not save edge as a fold if the user set it as a cut
if tupEdge[1] != float('inf'):
                    foldList.append(edgeIdx)
if len(parentSets) == 0:
treeSets.append(setConnFaces)
elif len(parentSets) == 1:
treeSets[parentSets[0]].update(setConnFaces)
elif len(parentSets) == 2:
treeSets[parentSets[0]].update(treeSets[parentSets[1]])
treeSets.pop(parentSets[1])
elif len(parentSets) > 2:
print"Error in m.s.t: more than two sets overlapped with edgeSet!"
print "len parentSets: %d\n" % len(parentSets)
print(treeSets)
print(parentSets)
print(setConnFaces)
# wow there must be a cleaner way of doing this!!! some set tricks
    # also the if statements could be cleaned up probs.
return foldList
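# The docstring above mentions the union-find trick; a minimal sketch of a
# disjoint-set structure that could replace the treeSets bookkeeping follows
# (illustrative only -- it is not wired into getSpanningKruskal):
class _DisjointSet(object):
    def __init__(self):
        self.parent = {}
    def find(self, x):
        # find the root representative for x, compressing paths as we go
        self.parent.setdefault(x, x)
        if self.parent[x] != x:
            self.parent[x] = self.find(self.parent[x])
        return self.parent[x]
    def union(self, a, b):
        # merge the sets containing a and b; returns False if they were
        # already connected, i.e. adding the edge would close a cycle
        rootA, rootB = self.find(a), self.find(b)
        if rootA == rootB:
            return False
        self.parent[rootB] = rootA
        return True
# With this, an edge is a fold edge exactly when union(face0, face1) succeeds.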
def getCutList(myMesh, foldList):
all_edges = myMesh.get_set_of_edges()
cut_set = all_edges.difference(set(foldList))
cut_list = []
for edge in cut_set:
if not myMesh.is_naked_edge(edge):
cut_list.append(edge)
return cut_list
|
rcbops/horizon-buildpackage
|
horizon/middleware.py
|
Python
|
apache-2.0
| 2,838 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Middleware provided and used by Horizon.
"""
import logging
from django import shortcuts
from django.contrib import messages
from django.utils.translation import ugettext as _
from horizon import api
from horizon import exceptions
from horizon import users
LOG = logging.getLogger(__name__)
class HorizonMiddleware(object):
""" The main Horizon middleware class. Required for use of Horizon. """
def process_request(self, request):
""" Adds data necessary for Horizon to function to the request.
Adds the current "active" :class:`~horizon.Dashboard` and
:class:`~horizon.Panel` to ``request.horizon``.
Adds a :class:`~horizon.users.User` object to ``request.user``.
"""
request.__class__.user = users.LazyUser()
request.horizon = {'dashboard': None, 'panel': None}
if request.user.is_authenticated() and \
request.user.authorized_tenants is None:
try:
authd = api.tenant_list_for_token(request,
request.user.token,
endpoint_type='internalURL')
except Exception, e:
authd = []
LOG.exception('Could not retrieve tenant list.')
if hasattr(request.user, 'message_set'):
messages.error(request,
_("Unable to retrieve tenant list."))
request.user.authorized_tenants = authd
def process_exception(self, request, exception):
""" Catch NotAuthorized and Http302 and handle them gracefully. """
if isinstance(exception, exceptions.NotAuthorized):
messages.error(request, unicode(exception))
return shortcuts.redirect('/auth/login')
if isinstance(exception, exceptions.Http302):
if exception.message:
messages.error(request, exception.message)
return shortcuts.redirect(exception.location)
|
PhilLidar-DAD/geonode
|
geonode/geoserver/helpers.py
|
Python
|
gpl-3.0
| 61,811 | 0.001052 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
import sys
import os
import urllib
import logging
import re
import time
import errno
import uuid
import datetime
from bs4 import BeautifulSoup
import geoserver
import httplib2
from urlparse import urlparse
from urlparse import urlsplit
from threading import local
from collections import namedtuple
from itertools import cycle, izip
from lxml import etree
import xml.etree.ElementTree as ET
from decimal import Decimal
from owslib.wcs import WebCoverageService
from owslib.util import http_post
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import pre_delete
from django.template.loader import render_to_string
from django.conf import settings
from django.utils.translation import ugettext as _
from dialogos.models import Comment
from agon_ratings.models import OverallRating
from gsimporter import Client
from owslib.wms import WebMapService
from geoserver.store import CoverageStore, DataStore, datastore_from_index,\
coveragestore_from_index, wmsstore_from_index
from geoserver.workspace import Workspace
from geoserver.catalog import Catalog
from geoserver.catalog import FailedRequestError, UploadError
from geoserver.catalog import ConflictingDataError
from geoserver.resource import FeatureType, Coverage
from geoserver.support import DimensionInfo
from geonode import GeoNodeException
from geonode.layers.utils import layer_type, get_files
from geonode.layers.models import Layer, Attribute, Style
from geonode.layers.enumerations import LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES
logger = logging.getLogger(__name__)
if not hasattr(settings, 'OGC_SERVER'):
msg = (
'Please configure OGC_SERVER when enabling geonode.geoserver.'
' More info can be found at '
'http://docs.geonode.org/en/master/reference/developers/settings.html#ogc-server')
raise ImproperlyConfigured(msg)
def check_geoserver_is_up():
"""Verifies all geoserver is running,
this is needed to be able to upload.
"""
url = "%sweb/" % ogc_server_settings.LOCATION
resp, content = http_client.request(url, "GET")
msg = ('Cannot connect to the GeoServer at %s\nPlease make sure you '
'have started it.' % ogc_server_settings.LOCATION)
assert resp['status'] == '200', msg
def _add_sld_boilerplate(symbolizer):
"""
Wrap an XML snippet representing a single symbolizer in the appropriate
elements to make it a valid SLD which applies that symbolizer to all features,
including format strings to allow interpolating a "name" variable in.
"""
return """
<StyledLayerDescriptor version="1.0.0" xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd">
<NamedLayer>
<Name>%(name)s</Name>
<UserStyle>
<Name>%(name)s</Name>
<Title>%(name)s</Title>
<FeatureTypeStyle>
<Rule>
""" + symbolizer + """
</Rule>
</FeatureTypeStyle>
</UserStyle>
</NamedLayer>
</StyledLayerDescriptor>
"""
_raster_template = """
<RasterSymbolizer>
<Opacity>1.0</Opacity>
</RasterSymbolizer>
"""
_polygon_template = """
<PolygonSymbolizer>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
<CssParameter name="stroke-width">0.7</CssParameter>
</Stroke>
</PolygonSymbolizer>
"""
_line_template = """
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(bg)s</CssParameter>
<CssParameter name="stroke-width">3</CssParameter>
</Stroke>
</LineSymbolizer>
</Rule>
</FeatureTypeStyle>
<FeatureTypeStyle>
<Rule>
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</LineSymbolizer>
"""
_point_template = """
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>%(mark)s</WellKnownName>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</Mark>
<Size>10</Size>
</Graphic>
</PointSymbolizer>
"""
_style_templates = dict(
raster=_add_sld_boilerplate(_raster_template),
polygon=_add_sld_boilerplate(_polygon_template),
line=_add_sld_boilerplate(_line_template),
point=_add_sld_boilerplate(_point_template)
)
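# For example (hypothetical values), a point SLD for a layer named "roads":
#   sld = _style_templates['point'] % dict(name='roads', fg='#880000',
#                                          bg='#ffcc00', mark='circle')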
def _style_name(resource):
return _punc.sub("_", resource.store.workspace.name + ":" + resource.name)
def get_sld_for(layer):
# FIXME: GeoServer sometimes fails to associate a style with the data, so
# for now we default to using a point style.(it works for lines and
# polygons, hope this doesn't happen for rasters though)
name = layer.default_style.name if layer.default_style is not None else "point"
# FIXME: When gsconfig.py exposes the default geometry type for vector
# layers we should use that rather than guessing based on the auto-detected
# style.
if name in _style_templates:
fg, bg, mark = _style_contexts.next()
return _style_templates[name] % dict(
name=layer.name,
fg=fg,
bg=bg,
mark=mark)
else:
return None
def fixup_style(cat, resource, style):
logger.debug("Creating styles for layers associated with [%s]", resource)
layers = cat.get_layers(resource=resource)
logger.info("Found %d layers associated with [%s]", len(layers), resource)
for lyr in layers:
if lyr.default_style.name in _style_templates:
logger.info("%s uses a defau
|
lt style, generating a new one", lyr)
name = _style_name(resource)
if style is None:
sld = get_sld_for(lyr)
else:
sld = style.read()
logger.info("Creating style [%s]", name)
style = cat.create_style(name, sld)
lyr.default_style = cat.get_style(name)
logger.info("Saving changes to %s", lyr)
cat.save(lyr)
logger.info("Successfully updated %s", lyr)
def cascading_delete(cat, layer_name):
resource = None
try:
if layer_name.find(':') != -1:
workspace, name = layer_name.split(':')
ws = cat.get_workspace(workspace)
try:
store = get_store(cat, name, workspace=ws)
except FailedRequestError:
if ogc_server_settings.DATASTORE:
try:
store = get_store(cat, ogc_server_settings.DATASTORE, workspace=ws)
except FailedRequestError:
logger.debug(
'the store was not found in geoserver')
return
else:
logger.debug(
'the store was not found in geoserver')
return
if ws is None:
logger.debug(
'cascading delete was called on a layer where the workspace was not found')
return
resource = cat.get_resource(name, store=store, workspace=workspace)
else:
|
silasb/flatcam
|
FlatCAMTool.py
|
Python
|
mit
| 8,497 | 0.002942 |
from PyQt4 import QtGui, QtCore
from shapely.geometry import Point
from shapely import affinity
from math import sqrt
import FlatCAMApp
from GUIElements import *
from FlatCAMObj import FlatCAMGerber, FlatCAMExcellon
class FlatCAMTool(QtGui.QWidget):
toolName = "FlatCAM Generic Tool"
def __init__(self, app, parent=None):
"""
:param app: The application this tool will run in.
:type app: App
:param parent: Qt Parent
:return: FlatCAMTool
"""
QtGui.QWidget.__init__(self, parent)
# self.setSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
self.layout = QtGui.QVBoxLayout()
self.setLayout(self.layout)
self.app = app
self.menuAction = None
def install(self):
self.menuAction = self.app.ui.menutool.addAction(self.toolName)
self.menuAction.triggered.connect(self.run)
def run(self):
# Remove anything else in the GUI
self.app.ui.tool_scroll_area.takeWidget()
# Put ourself in the GUI
self.app.ui.tool_scroll_area.setWidget(self)
# Switch notebook to tool page
self.app.ui.notebook.setCurrentWidget(self.app.ui.tool_tab)
self.show()
class DblSidedTool(FlatCAMTool):
toolName = "Double-Sided PCB Tool"
def __init__(self, app):
FlatCAMTool.__init__(self, app)
## Title
title_label = QtGui.QLabel("<font size=4><b>%s</b></font>" % self.toolName)
self.layout.addWidget(title_label)
## Form Layout
form_layout = QtGui.QFormLayout()
self.layout.addLayout(form_layout)
## Layer to mirror
self.object_combo = QtGui.QComboBox()
self.object_combo.setModel(self.app.collection)
form_layout.addRow("Bottom Layer:", self.object_combo)
## Axis
self.mirror_axis = RadioSet([{'label': 'X', 'value': 'X'},
{'label': 'Y', 'value': 'Y'}])
form_layout.addRow("Mirror Axis:", self.mirror_axis)
## Axis Location
self.axis_location = RadioSet([{'label': 'Point', 'value': 'point'},
{'label': 'Box', 'value': 'box'}])
form_layout.addRow("Axis Location:", self.axis_location)
## Point/Box
self.point_box_container = QtGui.QVBoxLayout()
form_layout.addRow("Point/Box:", self.point_box_container)
self.point = EvalEntry()
self.point_box_container.addWidget(self.point)
self.box_combo = QtGui.QComboBox()
self.box_combo.setModel(self.app.collection)
self.point_box_container.addWidget(self.box_combo)
self.box_combo.hide()
## Alignment holes
self.alignment_holes = EvalEntry()
form_layout.addRow("Alignment Holes:", self.alignment_holes)
## Drill diameter for alignment holes
self.drill_dia = LengthEntry()
form_layout.addRow("Drill diam.:", self.drill_dia)
## Buttons
hlay = QtGui.QHBoxLayout()
self.layout.addLayout(hlay)
hlay.addStretch()
self.create_alignment_hole_button = QtGui.QPushButton("Create Alignment Drill")
self.mirror_object_button = QtGui.QPushButton("Mirror Object")
hlay.addWidget(self.create_alignment_hole_button)
hlay.addWidget(self.mirror_object_button)
self.layout.addStretch()
## Signals
self.create_alignment_hole_button.clicked.connect(self.on_create_alignment_holes)
self.mirror_object_button.clicked.connect(self.on_mirror)
self.axis_location.group_toggle_fn = self.on_toggle_pointbox
## Initialize form
self.mirror_axis.set_value('X')
self.axis_location.set_value('point')
def on_create_alignment_holes(self):
axis = self.mirror_axis.get_value()
mode = self.axis_location.get_value()
if mode == "point":
px, py = self.point.get_value()
else:
selection_index = self.box_combo.currentIndex()
bb_obj = self.app.collection.object_list[selection_index] # TODO: Direct access??
xmin, ymin, xmax, ymax = bb_obj.bounds()
px = 0.5*(xmin+xmax)
py = 0.5*(ymin+ymax)
xscale, yscale = {"X": (1.0, -1.0), "Y": (-1.0, 1.0)}[axis]
dia = self.drill_dia.get_value()
tools = {"1": {"C": dia}}
holes = self.alignment_holes.get_value()
drills = []
for hole in holes:
point = Point(hole)
point_mirror = affinity.scale(point, xscale, yscale, origin=(px, py))
drills.append({"point": point, "tool": "1"})
drills.append({"point": point_mirror, "tool": "1"})
def obj_init(obj_inst, app_inst):
obj_inst.tools = tools
obj_inst.drills = drills
obj_inst.create_geometry()
self.app.new_object("excellon", "Alignment Drills", obj_init)
def on_mirror(self):
selection_index = self.object_combo.currentIndex()
fcobj = self.app.collection.object_list[selection_index]
# For now, lets limit to Gerbers and Excellons.
# assert isinstance(gerb, FlatCAMGerber)
if not isinstance(fcobj, FlatCAMGerber) and not isinstance(fcobj, FlatCAMExcellon):
self.info("ERROR: Only Gerber and Excellon objects can be mirrored.")
return
axis = self.mirror_axis.get_value()
mode = self.axis_location.get_value()
if mode == "point":
px, py = self.point.get_value()
else:
selection_index = self.box_combo.currentIndex()
bb_obj = self.app.collection.object_list[selection_index] # TODO: Direct access??
xmin, ymin, xmax, ymax = bb_obj.bounds()
px = 0.5*(xmin+xmax)
py = 0.5*(ymin+ymax)
fcobj.mirror(axis, [px, py])
fcobj.plot()
def on_toggle_pointbox(self):
if self.axis_location.get_value() == "point":
self.point.show()
self.box_combo.hide()
else:
self.point.hide()
self.box_combo.show()
class Measurement(FlatCAMTool):
toolName = "Measurement Tool"
def __init__(self, app):
FlatCAMTool.__init__(self, app)
# self.setContentsMargins(0, 0, 0, 0)
self.layout.setMargin(0)
self.layout.setContentsMargins(0, 0, 3, 0)
self.setSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Maximum)
self.point1 = None
self.point2 = None
self.label = QtGui.QLabel("Click on a reference point ...")
self.label.setFrameStyle(QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain)
self.label.setMargin(3)
self.layout.addWidget(self.label)
# self.layout.setMargin(0)
self.setVisible(False)
self.click_subscription = None
self.move_subscription = None
def install(self):
FlatCAMTool.install(self)
self.app.ui.right_layout.addWidget(self)
self.app.plotcanvas.mpl_connect('key_press_event', self.on_key_press)
def run(self):
self.toggle()
def on_click(self, event):
if self.point1 is None:
self.point1 = (event.xdata, event.ydata)
else:
self.point2 = copy(self.point1)
self.point1 = (event.xdata, event.ydata)
self.on_move(event)
def on_key_press(self, event):
if event.key == 'm':
self.toggle()
def toggle(self):
if self.isVisible():
self.setVisible(False)
self.app.plotcanvas.mpl_disconnect(self.move_subscription)
self.app.plotcanvas.mpl_disconnect(self.click_subscription)
else:
self.setVisible(True)
self.move_subscription = self.app.plotcanvas.mpl_connect('motion_notify_event', self.on_move)
self.click_subscription = self.app.plotcanvas.mpl_connect('button_press_event', self.on_click)
def on_move(self, event):
if self.point1 is None:
self.label.setText("Click on a reference point...")
else:
try:
dx = event
|
blamed-cloud/PythonGames
|
fractoe.py
|
Python
|
mit
| 10,693 | 0.041242 |
#!/usr/bin/env python
#fractoe.py
from AISuite.game import Game as Game
import AISuite.player as player
from AISuite.alphabeta import UPPER_BOUND, LOWER_BOUND, shallowest_first
import AISuite.recorder as recorder
import AISuite.PythonLibraries.prgm_lib as prgm_lib
import fractoe_tictactoe as tictactoe
Tictactoe = tictactoe.Tictactoe
from fractoe_heuristics import fractoe_heuristic
BOARD_SIZE = 3
def coor_split(num):
col = num % BOARD_SIZE
row = (num - col) / BOARD_SIZE
return [row,col]
def coor_splice(row,col):
return row*BOARD_SIZE + col
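# e.g. on the 3x3 board: coor_split(7) == [2, 1] and coor_splice(2, 1) == 7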
class Fractoe(Game):
def __init__(self, player1, player2, be_quiet = False, show_game = False):
super(self.__class__, self).__init__(player1, player2, be_quiet)
self.grid = [[Tictactoe(), Tictactoe(), Tictactoe()], [Tictactoe(), Tictactoe(), Tictactoe()], [Tictactoe(), Tictactoe(), Tictactoe()]]
self.rows = BOARD_SIZE
self.cols = BOARD_SIZE
self.show_board = show_game
self.current_box = -1
self.current_row = -1
self.current_col = -1
self.thinking = False
self.boards_won = [-1,-1,-1,-1,-1,-1,-1,-1,-1]
self.player_token = [" ","X","O"]
self.last_moves = [ [[-1,-1],[-1,-1]], [[-1,-1], [-1,-1]] ]
def load_state_from_string(self, state_string):
class_data = state_string.split(";")
self.boards_won = [-1,-1,-1,-1,-1,-1,-1,-1,-1]
for num in range(9):
col = num % 3
row = (num - (num % 3))/3
self.grid[row][col].load(class_data[num])
self.boards_won[num] = self.grid[row][col].get_winner()
self.turn = int(class_data[9])
self.current_box = int(class_data[10])
if self.current_box != -1:
x = self.current_box
self.current_col = x % 3
self.current_row = (x - (x % 3))/3
else:
self.current_row = -1
self.current_col = -1
self.check_for_winner()
def __str__(self):
value = ""
for row in range(3):
for col in range(3):
value += str(self.grid[row][col]) + ';'
value += str(self.turn) + ';'
value += str(self.current_box)
return value
@staticmethod
def parse_state(game_state):
split_list = game_state.split(';')
        split_list = split_list[:-2] + [split_list[-1]]
return ';'.join(split_list)
def get_child_states(self):
root = str(self)
moves = self.get_child_moves()
states = []
for m in moves:
self.current_box = int(str(m)[0])
self.current_col = self.current_box % 3
self.current_row = (self.current_box - self.current_col)/3
num = int(str(m)[1])
self.try_placing_square(num)
self.turn += 1
states += [str(self)]
self.load_state_from_string(root)
return states
def get_child_moves(self):
children = []
if self.current_box == -1:
for box in range(9):
if self.boards_won[box] == -1:
for x in range(9):
out_c = box % 3
out_r = (box - out_c)/3
in_c = x % 3
in_r = (x - in_c)/3
if self.grid[out_r][out_c].get_square(in_r,in_c) == " ":
children += [str(box) + str(x)]
else:
for x in range(9):
out_c = self.current_box % 3
out_r = (self.current_box - out_c)/3
in_c = x % 3
in_r = (x - in_c)/3
if self.grid[out_r][out_c].get_square(in_r,in_c) == " ":
children += [str(self.current_box) + str(x)]
return children
def do_turn(self):
human = self.is_human_turn()
if human or self.show_board:
self.opg()
if not human:
if not self.quiet and not self.thinking:
print "Player" + str(
|
self.get_player_num()) + " (the computer) is thinking..."
|
self.thinking = True
finished_playing = False
valid_moves = self.get_child_moves()
while not finished_playing:
if human:
if self.current_box != -1:
print "Current board is " + str(self.current_box)
self.grid[self.current_row][self.current_col].opg()
print "Player" + str(self.get_player_num()) + ", it is your turn to play."
print "Please enter a valid move string."
print "a valid move string is two numbers, such as 34"
print "this indicates the 4-th square on the 3-rd board (both 0-indexed)"
move = self.current_player().choose_move(self)
if human and move in self.escapes:
self.handle_escape(move)
elif str(move) in valid_moves:
self.current_box = int(str(move)[0])
self.current_col = self.current_box % 3
self.current_row = (self.current_box - self.current_col)/3
num = int(str(move)[1])
inner_col = num % 3
inner_row = (num - (num % 3))/3
turn_descriptor = [[self.current_row,self.current_col], [inner_row, inner_col]]
self.try_placing_square(num)
self.turn += 1
finished_playing = True
self.thinking = False
self.last_moves[self.get_player_num()-1] = turn_descriptor
else:
if human:
print 'That wasn\'t a valid move.'
print 'Valid moves look like: 08 or 27'
self.opg()
self.check_winner()
def make_new_instance(self):
return Fractoe(player.Player(), player.Player())
def opg(self):
prgm_lib.cls(100)
for x in range(len(self.grid)):
size = 0
string0 = ''
for z in range(3):
string1 = ''
string2 = ''
for y in range(len(self.grid[x])):
special = self.get_last_moves_in(x,y,z)
string3 = self.grid[x][y].get_row(z,special)
for var in range(len(string3) - 9 * len(special)):
string2 += "-"
string1 += string3 + " || "
string2 += " || "
print string1[:-4]
if z != 2:
print string2[:-4]
size = len(string2)-4
for var in range(size):
string0 += "="
if x != 2:
print string0
print
def check_for_winner(self):
for x in range(3):
if self.boards_won[3*x] == self.boards_won[3*x+1] == self.boards_won[3*x+2] > 0:
self.winner = self.boards_won[3*x]
if self.boards_won[x] == self.boards_won[x+3] == self.boards_won[x+6] > 0:
self.winner = self.boards_won[x]
if self.boards_won[0] == self.boards_won[4] == self.boards_won[8] > 0:
self.winner = self.boards_won[4]
if self.boards_won[2] == self.boards_won[4] == self.boards_won[6] > 0:
self.winner = self.boards_won[4]
if self.winner == -1 and self.check_full():
self.winner = 0
return self.winner
def check_full(self):
full = True
for x in self.boards_won:
if x == -1:
full = False
return full
def is_board_won(self, board):
return self.boards_won[board]
def get_current_box(self):
return self.current_box
def get_board_string(self,row,col):
return str(self.grid[row][col])
def get_last_moves_in(self,x,y,z):
special = []
if self.last_moves[0][0][0] == x and self.last_moves[0][0][1] == y and self.last_moves[0][1][0] == z:
special += [self.last_moves[0][1][1]]
if self.last_moves[1][0][0] == x and self.last_moves[1][0][1] == y and self.last_moves[1][1][0] == z:
special += [self.last_moves[1][1][1]]
return special
def try_placing_square(self, num):
inner_col = num % 3
inner_row = (num - (num % 3))/3
value = False
if self.grid[self.current_row][self.current_col].get_square(inner_row,inner_col) == " ":
token = self.player_token[self.get_player_num()]
self.grid[self.current_row][self.current_col].set_square(inner_row,inner_col,token)
if self.grid[self.current_row][self.current_col].is_finished():
box_winner = self.grid[self.current_row][self.current_col].get_winner()
self.boards_won[self.current_box] = box_winner
self.check_for_winner()
if not self.grid[inner_row][inner_col].is_finished():
self.current_box = num
self.current_row = inner_row
self.current_col = inner_col
else:
self.current_box = -1
self.current_row = -1
self.current_col = -1
value = True
return value
if __name__ == "__main__":
option = "simulate_d2_end"
filename = "fr_game_data_d2.txt"
num_games = 0
win_counts = [0,0,0]
if option == "simulate_all":
filename = "fr_game_data_all.txt"
num_games = 10000
FILE = open(filename, 'a')
for x in range(num_games):
g = Fractoe(player.RandomAI(),player.RandomAI(),True)
w = g.play()
g.record_history_to_file(FILE)
if x % 100 == 0:
print x
win_counts[w] += 1
FILE.close()
elif option == "simulate_end":
filename = "fr_game_data.txt"
num_games = 50000
FILE = open(filename, 'a')
for x in range(num_games):
g = Fractoe(player.RandomAI(),player.RandomAI
|
barche/k3d
|
tests/bitmap/bitmap.modifier.BitmapAdd.py
|
Python
|
gpl-2.0
| 350 | 0.011429 |
#python
import k3d
import testing
setup = testing.setup_bitmap_modifier_test("BitmapReader", "BitmapAdd")
setup.source.file = k3d.filesystem.generic_path(testing.source_path() + "/bitmaps/" + "test_rgb_8.png")
setup.modifier.value = 0.5
testing.require_similar_bitmap(setup.document, setup.modifier.get_property("output_bitmap"), "BitmapAdd", 0)
|
xiangke/pycopia
|
mibs/pycopia/mibs/HPR_IP_MIB_OID.py
|
Python
|
lgpl-2.1
| 1,140 | 0.015789 |
# python
# This file is generated by a program (mib2py).
import HPR_IP_MIB
OIDMAP = {
'1.3.6.1.2.1.34.6.1.5': HPR_IP_MIB.hprIp,
'1.3.6.1.2.1.34.6.1.5.1.1.1': HPR_IP_MIB.hprIpActiveLsLsName,
'1.3.6.1.2.1.34.6.1.5.1.1.2': HPR_IP_MIB.hprIpActiveLsAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.1.1.3': HPR_IP_MIB.hprIpActiveLsUdpPackets,
'1.3.6.1.2.1.34.6.1.5.2.1.1': HPR_IP_MIB.hprIpAppnPortName,
'1.3.6.1.2.1.34.6.1.5.2.1.2': HPR_IP_MIB.hprIpAppnPortAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.2.1.3': HPR_IP_MIB.hprIpAppnPortTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.3.1.1': HPR_IP_MIB.hprIpLsLsName,
'1.3.6.1.2.1.34.6.1.5.3.1.2': HPR_IP_MIB.hprIpLsAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.3.1.3': HPR_IP_MIB.hprIpLsTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.3.1.4': HPR_IP_MIB.hprIpLsRowStatus,
'1.3.6.1.2.1.34.6.1.5.4.1.1': HPR_IP_MIB.hprIpCnVrnName,
'1.3.6.1.2.1.34.6.1.5.4.1.2': HPR_IP_MIB.hprIpCnAppnTrafficType,
'1.3.6.1.2.1.34.6.1.5.4.1.3': HPR_IP_MIB.hprIpCnTOSPrecedence,
'1.3.6.1.2.1.34.6.1.5.4.1.4': HPR_IP_MIB.hprIpCnRowStatus,
'1.3.6.1.2.1.34.6.2.2.5': HPR_IP_MIB.hprIpMonitoringGroup,
'1.3.6.1.2.1.34.6.2.2.6': HPR_IP_MIB.hprIpConfigurationGroup,
}
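# Hypothetical usage: resolve a dotted OID string back to its MIB node object.
#   node = OIDMAP.get('1.3.6.1.2.1.34.6.1.5.3.1.4')  # -> HPR_IP_MIB.hprIpLsRowStatus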
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_express_route_circuit_connections_operations.py
|
Python
|
mit
| 19,491 | 0.005541 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitConnectionsOperations:
"""ExpressRouteCircuitConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified Express Route Circuit Connection from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
connection_name=connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'} # type: ignore
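    # Hypothetical usage (client construction elided; names below are illustrative):
    #   poller = await client.express_route_circuit_connections.begin_delete(
    #       "my-rg", "my-circuit", "AzurePrivatePeering", "my-connection")
    #   await poller.result()  # returns once the connection is deleted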
async
|
tbarrongh/cosc-learning-labs
|
src/learning_lab/05_acl_list.py
|
Python
|
apache-2.0
| 1,742 | 0.006889 |
#!/usr/bin/env python2.7
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
''' Sample usage of function 'acl_list'.
Print the function's documentation.
Apply the function to a network device.
Print the function output.
If no ACLs are found then retry with a different network device.
'''
from __future__ import print_function as _print_function
from pydoc import plain
from pydoc import render_doc as doc
from basics.context import sys_exit, EX_OK, EX_TEMPFAIL
from basics.acl import acl_list,inventory_acl
def demonstrate(device_name):
    ''' Apply function 'acl_list' to the specified device.
Return True if an ACL was found.
'''
print('acl_list(' + device_name, end=')\n')
result = acl_list(device_name)
print(result)
return bool(result)
def main():
''' Select a device and demonstrate.'''
print(plain(doc(acl_list)))
inventory = inventory_acl()
if not inventory:
print('There are no ACL capable devices to examine. Demonstration cancelled.')
else:
for device_name in inventory:
if demonstrate(device_name):
return EX_OK
return EX_TEMPFAIL
if __name__ == "__main__":
sys_exit(main())
|
tananaev/traccar
|
tools/test-trips.py
|
Python
|
apache-2.0
| 1,319 | 0.003033 |
#!/usr/bin/python
import urllib
import httplib
import time
import datetime
id = '123456789012345'
server = 'localhost:5055'
points = [
('2017-01-01 00:00:00', 59.93211887, 30.33050537, 0.0),
('2017-01-01 00:05:00', 59.93266715, 30.33190012, 50.0),
('2017-01-01 00:10:00', 59.93329069, 30.33333778, 50.0),
('2017-01-01 00:15:00', 59.93390346, 30.33468962, 0.0),
('2017-01-01 00:20:00', 59.93390346, 30.33468962, 0.0),
('2017-01-01 00:25:00', 59.93416146, 30.33580542, 50.0),
('2017-01-01 00:30:00', 59.93389271, 30.33790827, 50.0),
('2017-01-01 00:35:00', 59.93357020, 30.34033298, 50.0),
('2017-01-01 00:40:00', 59.93330144, 30.34252167, 0.0),
('2017-01-01 00:44:00', 59.93355945, 30.34413099, 50.0),
('2017-01-01 00:50:00', 59.93458072, 30.34458160, 0.0),
('2017-01-01 00:55:00', 59.93458072, 30.34458160, 0.0),
]
def send(conn, time, lat, lon, speed):
params = (('id', id), ('timestamp', int(time)), ('lat', lat), ('lon', lon), ('speed', speed))
conn.request('POST', '?' + urllib.urlencode(params))
conn.getresponse().read()
conn = httplib.HTTPConnection(server)
for i in range(0, len(points)):
(moment, lat, lon, speed) = points[i]
send(conn, time.mktime(datetime.datetime.strptime(moment, "%Y-%m-%d %H:%M:%S").timetuple()), lat, lon, speed)
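# Each call above issues a request of the form (hypothetical timestamp value):
#   POST /?id=123456789012345&timestamp=1483228800&lat=59.93211887&lon=30.33050537&speed=0.0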
|
chrisvans/roastdoge
|
config/wsgi.py
|
Python
|
mit
| 422 | 0 |
"""
WSGI config for roastdog project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.base")
application = Cling(get_wsgi_application())
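# Cling (from dj_static) wraps the WSGI application so static files are served
# straight from the app process -- no separate static file server is required.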
|
wanghuafeng/spider_tools
|
decorator.py
|
Python
|
mit
| 1,524 | 0.004076 |
#!-*- coding:utf-8 -*-
import time
def retries(times=3, timeout=1):
"""对未捕获异常进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
try:
return func(*args, **kw)
except:
att += timeout
if retry < times:
time.sleep(att)
return _wrapper
return decorator
def empty_content_retries(times=3, timeout=2):
"""响应为空的进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
ret = func(*args, **kw)
if ret:
return ret
att += timeout
time.sleep(att)
return _wrapper
return decorator
def use_logging(level):
"""带参数的装饰器"""
def decorator(func):
print func.__name__
def wrapper(*args, **kwargs):
if level == "warn":
print ("level:%s, %s is running" % (level, func.__name__))
elif level == "info":
print ("level:%s, %s is running" % (level, f
|
unc.__name__))
return func(*args, **kwargs)
return wrapper
return decorator
if __name__ == "__main__":
@use_logging(level="warn")
def foo(name='foo'):
print("i am %s" % name)
foo()
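    # Hypothetical usage of the retry decorators (illustrative only):
    @retries(times=2, timeout=1)
    def flaky(name='flaky'):
        print("i am %s" % name)
    flaky()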
|
MyersGer/django-doubleoptin-contactform
|
doptincf/urls.py
|
Python
|
gpl-3.0
| 1,171 | 0.005978 |
# This file is part of django-doubleoptin-contactform.
# django-doubleoptin-contactform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# django-doubleoptin-contactform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with Foobar. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns('',
(r'^$', 'doptincf.views.contact'),
(r'^received/$', direct_to_template, {'template': 'contact/received.html'}),
(r'^(?P<contact_id>\d+)/verify/$', 'doptincf.views.verify'),
    (r'^verified/$', direct_to_template, {'template': 'contact/verified.html'}),
)
|
futurecolors/raven-harakiri
|
test_harakiri.py
|
Python
|
mit
| 10,635 | 0.00489 |
# coding: utf-8
example_traceback = """*** HARAKIRI ON WORKER 16 (pid: 2259, try: 1) ***
*** uWSGI Python tracebacker output ***
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = Thread-2 filename = /home/project/envs/project_prod/lib/python2.6/site-packages/raven/transport/threaded.py lineno = 79 function = _target line = record = self._queue.get()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/Queue.py lineno = 168 function = get line = self.not_empty.wait()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 239 function = wait line = waiter.acquire()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = NR-Harvest-Thread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/core/agent.py lineno = 511 function = _harvest_loop line = self._harvest_shutdown.wait(delay)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 395 function = wait line = self.__cond.wait(timeout)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 258 function = wait line = _sleep(delay)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 828 function = __call__ line = result = application(environ, _start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/function_trace.py lineno = 93 function = literal_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 717 function = __call__ line = return self._nr_next_object(environ, start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/wsgi.py lineno = 241 function = __call__ line = response = self.get_response(request)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/base.py lineno = 111 function = get_response line = response = callback(request, *callback_args, **callback_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/framework_django.py lineno = 475 function = wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/views/decorators/csrf.py lineno = 77 function = wrapped_view line = return view_func(*args, **kwargs)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/views.py lineno = 74 function = complete line = return backend.complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/vkontakte.py lineno = 59 function = complete line = redirect = super(VkontakteBackend, self).complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 175 function = complete line = self.fill_extra_fields(request, extra)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 114 function = fill_extra_fields line = form = str_to_class(settings.EXTRA_FORM)(data)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/forms.py lineno = 43 function = __init__ line = files = {'avatar': ContentFile(requests.get(url).content, name=str(uuid.uuid4()))}
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 55 function = get line = return request('get', url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 44 function = request line = return session.request(method=method, url=url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 335 function = request line = resp = self.send(prep, **send_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 438 function = send line = r = adapter.send(request, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/adapters.py lineno = 292 function = send line = timeout=timeout
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 428 function = urlopen line = body=body, headers=headers)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 280 function = _make_request line = conn.request(method, url, **httplib_request_kw)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 914 function = request line = self._send_request(method, url, body, headers)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 951 function = _send_request line = self.endheaders()
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/en
|
bcl/blivet
|
blivet/devices/md.py
|
Python
|
gpl-2.0
| 23,663 | 0.000887 |
# devices/md.py
#
# Copyright (C) 2009-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <dlehman@redhat.com>
#
import os
import six
from gi.repository import BlockDev as blockdev
from ..devicelibs import mdraid, raid
from .. import errors
from .. import util
from ..flags import flags
from ..storage_log import log_method_call
from .. import udev
from ..size import Size
from ..tasks import availability
import logging
log = logging.getLogger("blivet")
from .storage import StorageDevice
from .container import ContainerDevice
from .raid import RaidDevice
class MDRaidArrayDevice(ContainerDevice, RaidDevice):
""" An mdraid (Linux RAID) device. """
_type = "mdarray"
_packages = ["mdadm"]
_devDir = "/dev/md"
_formatClassName = property(lambda s: "mdmember")
_formatUUIDAttr = property(lambda s: "mdUuid")
_external_dependencies = [availability.BLOCKDEV_MDRAID_PLUGIN]
def __init__(self, name, level=None, major=None, minor=None, size=None,
memberDevices=None, totalDevices=None,
uuid=None, fmt=None, exists=False, metadataVersion=None,
parents=None, sysfsPath=''):
"""
:param name: the device name (generally a device node's basename)
:type name: str
:keyword exists: does this device exist?
:type exists: bool
:keyword size: the device's size
:type size: :class:`~.size.Size`
:keyword parents: a list of parent devices
:type parents: list of :class:`StorageDevice`
:keyword fmt: this device's formatting
:type fmt: :class:`~.formats.DeviceFormat` or a subclass of it
:keyword sysfsPath: sysfs device path
:type sysfsPath: str
:keyword uuid: the device UUID
:type uuid: str
:keyword level: the device's RAID level
:type level: any valid RAID level descriptor
:keyword int memberDevices: the number of active member devices
:keyword int totalDevices: the total number of member devices
:keyword metadataVersion: the version of the device's md metadata
:type metadataVersion: str (eg: "0.90")
:keyword minor: the device minor (obsolete?)
:type minor: int
"""
# pylint: disable=unused-argument
# These attributes are used by _addParent, so they must be initialized
# prior to instantiating the superclass.
self._memberDevices = 0 # the number of active (non-spare) members
self._totalDevices = 0 # the total number of members
# avoid attribute-defined-outside-init pylint warning
self._level = None
super(MDRaidArrayDevice, self).__init__(name, fmt=fmt, uuid=uuid,
exists=exists, size=size,
                                                parents=parents,
                                                sysfsPath=sysfsPath)
try:
self.level = level
except errors.DeviceError as e:
# Could not set the level, so set loose the parents that were
# added in superclass constructor.
            for dev in self.parents:
dev.removeChild()
raise e
self.uuid = uuid
self._totalDevices = util.numeric_type(totalDevices)
self.memberDevices = util.numeric_type(memberDevices)
self.chunkSize = mdraid.MD_CHUNK_SIZE
if not self.exists and not isinstance(metadataVersion, str):
self.metadataVersion = "default"
else:
self.metadataVersion = metadataVersion
if self.parents and self.parents[0].type == "mdcontainer" and self.type != "mdbiosraidarray":
raise errors.DeviceError("A device with mdcontainer member must be mdbiosraidarray.")
if self.exists and self.mdadmFormatUUID and not flags.testing:
# this is a hack to work around mdadm's insistence on giving
# really high minors to arrays it has no config entry for
with open("/etc/mdadm.conf", "a") as c:
c.write("ARRAY %s UUID=%s\n" % (self.path, self.mdadmFormatUUID))
@property
def mdadmFormatUUID(self):
""" This array's UUID, formatted for external use.
:returns: the array's UUID in mdadm format, if available
:rtype: str or NoneType
"""
formatted_uuid = None
if self.uuid is not None:
try:
formatted_uuid = blockdev.md.get_md_uuid(self.uuid)
except blockdev.MDRaidError:
pass
return formatted_uuid
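    # Illustration (informal; the exact formatting comes from libblockdev):
    # get_md_uuid() re-groups a canonical UUID such as
    # "3386ff85-f501-2621-4a43-5f061eb47236" into mdadm's colon-separated
    # form along the lines of "3386ff85:f5012621:4a435f06:1eb47236".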
@property
def level(self):
""" Return the raid level
:returns: raid level value
:rtype: an object that represents a RAID level
"""
return self._level
@property
def _levels(self):
""" Allowed RAID level for this type of device."""
return mdraid.RAID_levels
@level.setter
def level(self, value):
""" Set the RAID level and enforce restrictions based on it.
:param value: new raid level
:param type: object
:raises :class:`~.errors.DeviceError`: if value does not describe
a valid RAID level
:returns: None
"""
try:
level = self._getLevel(value, self._levels)
except ValueError as e:
raise errors.DeviceError(e)
self._level = level
@property
def createBitmap(self):
""" Whether or not a bitmap should be created on the array.
        If the array is sufficiently small, a bitmap yields no benefit.
If the array has no redundancy, a bitmap is just pointless.
"""
try:
return self.level.has_redundancy() and self.size >= Size(1000) and self.format.type != "swap"
except errors.RaidError:
# If has_redundancy() raises an exception then this device has
# a level for which the redundancy question is meaningless. In
# that case, creating a write-intent bitmap would be a meaningless
# action.
return False
def getSuperBlockSize(self, raw_array_size):
"""Estimate the superblock size for a member of an array,
given the total available memory for this array and raid level.
:param raw_array_size: total available for this array and level
:type raw_array_size: :class:`~.size.Size`
:returns: estimated superblock size
:rtype: :class:`~.size.Size`
"""
return blockdev.md.get_superblock_size(raw_array_size,
version=self.metadataVersion)
@property
def size(self):
"""Returns the actual or estimated size depending on whether or
not the array exists.
"""
if not self.exists or not self.mediaPresent:
try:
size = self.level.get_size([d.size for d in self.devices],
self.memberDevices,
self.chunkSize,
self.getSuperBlockSize)
|
pinax/pinax-forums
|
pinax/forums/tests/urls.py
|
Python
|
mit
| 132 | 0 |
from django.conf.urls import include, url
urlpatterns = (
    url(r"^", include("pinax.forums.urls", namespace="pinax_forums")),
)
|
LeeKamentsky/CellProfiler
|
cellprofiler/gui/tests/__init__.py
|
Python
|
gpl-2.0
| 421 | 0 |
"""cellprofiler.gui.tests.__init__.py
CellProfiler is distributed under the GNU General Public License.
See the accompanying file LICENSE for details.
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2015 Broad Institute
All rights reserved.
Please see the AUTHORS file for credits.
Website: http://www.cellprofiler.org
"""
if __name__ == "__main__":
    import nose
    nose.main()
|
zencore-dobetter/zencore-redtask
|
src/scripts/zrts.py
|
Python
|
mit
| 1,207 | 0.002486 |
#!/usr/bin/env python
"""
Task message queue consumer server.
"""
import click
import logging
from zencore.conf import Settings
from zencore.redtask import server
from zencore.utils.debug import setup_simple_logger
CONFIG = Settings()
logger = logging.getLogger(__name__)
@click.group()
@click.option("-c", "--config", help="Config file path, default etc/config.yaml")
def zrts(config):
"""Task message queue consumer server.
"""
CONFIG.setup(config)
setup_simple_logger(CONFIG.get("logging"))
logger.debug("Logger setup done.")
@zrts.command()
def start():
"""Start zencore-redtask consumer server.
"""
logger.debug("Server starting...")
server.start(CONFIG)
@zrts.command()
def stop():
"""Stop zencore-redtask consumer server.
"""
logger.debug("Server stopping...")
server.stop(CONFIG)
@zrts.command()
def reload():
"""Re
|
load zencore-redtask consumer server.
"""
logger.debug("Server reloading...")
server.reload(CONFIG)
@zrts.command()
def status():
"""Get zencore-redtask consumer server's status.
"""
logger.debug("Server get status...")
server.status(CONFIG)
if __name__ == "__main__":
zrts()
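# Informal usage sketch (the config path below is illustrative only):
#   python zrts.py --config etc/config.yaml start
#   python zrts.py --config etc/config.yaml status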
|
after12am/expecto
|
machine_learning/kiki/kiki/settings.py
|
Python
|
mit
| 2,210 | 0 |
"""
Django settings for kiki project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l99imh-s+-4ijwo+!gejon7!xp@$hmun43gck7t($1(g-rdtsu'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
'wiki',
'docclass'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'kiki.urls'
WSGI_APPLICATION = 'kiki.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': 3306
}
}
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
|
openstates/openstates
|
openstates/ne/bills.py
|
Python
|
gpl-3.0
| 10,305 | 0.001456 |
import pytz
import urllib
from datetime import datetime
from pupa.scrape import Scraper, Bill, VoteEvent
from openstates.utils import LXMLMixin
TIMEZONE = pytz.timezone("US/Central")
VOTE_TYPE_MAP = {"yes": "yes", "no": "no"}
class NEBillScraper(Scraper, LXMLMixin):
def scrape(self, session=None):
if session is None:
session = self.jurisdiction.legislative_sessions[-1]
self.info("no session specified, using %s", session["identifier"])
start_year = datetime.strptime(session["start_date"], "%Y-%m-%d").year
end_year = datetime.strptime(session["end_date"], "%Y-%m-%d").year
yield from self.scrape_year(session["identifier"], start_year)
if start_year != end_year:
yield from self.scrape_year(session["identifier"], end_year)
def scrape_year(self, session, year):
main_url = (
"https://nebraskalegislature.gov/bills/search_by_date.php?"
"SessionDay={}".format(year)
)
page = self.lxmlize(main_url)
document_links = self.get_nodes(
page,
'//div[@class="main-content"]//div[@class="table-responsive"]//'
'table[@class="table"]/tbody/tr/td[1]/a',
)
for document_link in document_links:
# bill_number = document_link.text
bill_link = document_link.attrib["href"]
# POST request for search form
# post_dict = {'DocumentNumber': bill_number, 'Legislature': session}
# headers = urllib.urlencode(post_dict)
# bill_resp = self.post('http://nebraskalegislature.gov/bills/'
# 'search_by_number.php', data=post_dict)
# bill_link = bill_resp.url
# bill_page = bill_resp.text
yield from self.bill_info(bill_link, session, main_url)
def bill_info(self, bill_link, session, main_url):
bill_page = self.lxmlize(bill_link)
long_title = self.get_node(
bill_page, '//div[@class="main-content"]//h2'
).text.split()
bill_number = long_title[0]
title = ""
for x in range(2, len(long_title)):
title += long_title[x] + " "
title = title[0:-1]
if not title:
self.error("no title, skipping %s", bill_number)
return
bill_type = "resolution" if "LR" in bill_number else "bill"
bill = Bill(bill_number, session, title, classification=bill_type)
bill.add_source(main_url)
bill.add_source(bill_link)
introduced_by = self.get_node(
bill_page,
"//body/div[3]/div[2]/div[2]/div/div[3]/div[1]/ul/li[1]/a[1]/text()",
)
if not introduced_by:
introduced_by = self.get_node(
bill_page,
"//body/div[3]/div[2]/div[2]/div/div[2]/div[1]/ul/li[1]/text()",
)
introduced_by = introduced_by.split("Introduced By:")[1].strip()
introduced_by = introduced_by.strip()
bill.add_sponsorship(
name=introduced_by,
entity_type="person",
primary=True,
classification="primary",
)
action_nodes = self.get_nodes(
bill_page, '//div[@class="main-content"]/div[5]//table/tbody/tr'
)
for action_node in action_nodes:
date = self.get_node(action_node, "./td[1]").text
date = datetime.strptime(date, "%b %d, %Y")
# The action node may have an anchor element within it, so
# we grab all the text within.
action = self.get_node(action_node, "./td[2]").text_content()
if "Governor" in action:
actor = "executive"
elif "Speaker" in action:
actor = "legislature"
else:
actor = "legislature"
action_type = self.action_types(action)
bill.add_action(
action,
date.strftime("%Y-%m-%d"),
chamber=actor,
classification=action_type,
)
# Grabs bill version documents.
version_links = self.get_nodes(
bill_page, "/html/body/div[3]/div[2]/div[2]/div/" "div[3]/div[2]/ul/li/a"
)
for version_link in version_links:
version_name = version_link.text
version_url = version_link.attrib["href"]
# replace Current w/ session number
            version_url = version_url.replace("Current", session)
bill.add_version_link(
version_name, version_url, media_type="application/pdf"
)
        soi = self.get_nodes(bill_page, ".//a[contains(text(), 'Statement of Intent')]")
if soi:
bill.add_document_link(
"Statement of Intent", soi[0].get("href"), media_type="application/pdf"
)
comstmt = self.get_nodes(
bill_page, ".//a[contains(text(), 'Committee Statement')]"
)
if comstmt:
bill.add_document_link(
"Committee Statement",
comstmt[0].get("href"),
media_type="application/pdf",
)
fn = self.get_nodes(bill_page, ".//a[contains(text(), 'Fiscal Note')]")
if fn:
bill.add_document_link(
"Fiscal Note", fn[0].get("href"), media_type="application/pdf"
)
# Adds any documents related to amendments.
amendment_links = self.get_nodes(
bill_page, ".//div[contains(@class, 'amend-link')]/a"
)
for amendment_link in amendment_links:
amendment_name = amendment_link.text
amendment_url = amendment_link.attrib["href"]
# skip over transcripts
if "/AM/" not in amendment_url:
continue
bill.add_document_link(
amendment_name, amendment_url, media_type="application/pdf"
)
yield bill
yield from self.scrape_votes(bill, bill_page, actor)
def scrape_amendments(self, bill, bill_page):
amd_xpath = '//div[contains(@class,"amends") and not(contains(@class,"mb-3"))]'
for row in bill_page.xpath(amd_xpath):
status = row.xpath("string(./div[2])").strip()
if "adopted" in status.lower():
version_url = row.xpath("./div[1]/a/@href")[0]
version_name = row.xpath("./div[1]/a/text()")[0]
bill.add_version_link(
version_name,
version_url,
media_type="application/pdf",
on_duplicate="ignore",
)
def scrape_votes(self, bill, bill_page, chamber):
vote_links = bill_page.xpath(
'//table[contains(@class,"history")]//a[contains(@href, "view_votes")]'
)
for vote_link in vote_links:
vote_url = vote_link.attrib["href"]
date_td, motion_td, *_ = vote_link.xpath("ancestor::tr/td")
date = datetime.strptime(date_td.text, "%b %d, %Y")
motion_text = motion_td.text_content()
vote_page = self.lxmlize(vote_url)
passed = "Passed" in motion_text or "Advanced" in motion_text
cells = vote_page.xpath(
'//div[contains(@class,"table-responsive")]/table//td'
)
vote = VoteEvent(
bill=bill,
chamber=chamber,
start_date=TIMEZONE.localize(date),
motion_text=motion_text,
classification="passage",
result="pass" if passed else "fail",
)
yes_count = self.process_count(vote_page, "Yes:")
no_count = self.process_count(vote_page, "No:")
exc_count = self.process_count(vote_page, "Excused - Not Voting:")
absent_count = self.process_count(vote_page, "Absent - Not Voting:")
present_count = self.process_count(vote_page, "Present - Not Voting:")
vote.set_count("yes", yes_count)
vote.set_count("no", no_count)
|
citrix-openstack-build/ceilometer
|
tests/test_service.py
|
Python
|
apache-2.0
| 5,711 | 0.001576 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
import subprocess
import os
import shutil
import signal
import time
import threading
from ceilometer import service
from ceilometer.tests import base
class ServiceTestCase(base.TestCase):
def test_prepare_service(self):
service.prepare_service([])
#NOTE(Fengqian): I have to set up a thread to parse the output of
#subprocess.Popen, because readline() may block the process in
#some conditions.
class ParseOutput(threading.Thread):
def __init__(self, input_stream, str_flag):
super(ParseOutput, self).__init__()
self.input_stream = input_stream
self.str_flag = str_flag
self.ret_stream = None
self.ret = False
self.thread_stop = False
def run(self):
while not self.thread_stop:
next_line = self.input_stream.readline()
if next_line == '':
break
if self.str_flag in next_line:
self.ret = True
self.ret_stream = next_line[(next_line.find(self.str_flag) +
len(self.str_flag)):]
self.stop()
def stop(self):
self.thread_stop = True
class ServiceRestartTest(base.TestCase):
    def setUp(self):
super(ServiceRestartTest, self).setUp()
self.tempfile = self.temp_config_file_path()
self.pipeline_cfg_file = self.temp_config_file_path(name=
'pipeline.yaml')
        shutil.copy(self.path_get('etc/ceilometer/pipeline.yaml'),
self.pipeline_cfg_file)
self.pipelinecfg_read_from_file()
policy_file = self.path_get('tests/policy.json')
with open(self.tempfile, 'w') as tmp:
tmp.write("[DEFAULT]\n")
tmp.write(
"rpc_backend=ceilometer.openstack.common.rpc.impl_fake\n")
tmp.write(
"auth_strategy=noauth\n")
tmp.write(
"debug=true\n")
tmp.write(
"pipeline_cfg_file=%s\n" % self.pipeline_cfg_file)
tmp.write(
"policy_file=%s\n" % policy_file)
tmp.write("[database]\n")
tmp.write("connection=log://localhost\n")
def _modify_pipeline_file(self):
with open(self.pipeline_cfg_file, 'w') as pipe_fd:
pipe_fd.truncate()
pipe_fd.write(yaml.safe_dump(self.pipeline_cfg[1]))
def pipelinecfg_read_from_file(self):
with open(self.pipeline_cfg_file) as fd:
data = fd.read()
self.pipeline_cfg = yaml.safe_load(data)
def tearDown(self):
super(ServiceRestartTest, self).tearDown()
self.sub.kill()
self.sub.wait()
@staticmethod
def _check_process_alive(pid):
try:
with open("/proc/%d/status" % pid) as fd_proc:
for line in fd_proc.readlines():
if line.startswith("State:"):
state = line.split(":", 1)[1].strip().split(' ')[0]
return state not in ['Z', 'T', 'Z+']
except IOError:
return False
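    # Informal note: the State line in /proc/<pid>/status looks like
    # "State:\tS (sleeping)"; the token parsed above is "S", and states
    # "Z" (zombie) or "T" (stopped) are treated as not alive.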
def check_process_alive(self):
cond = lambda: self._check_process_alive(self.sub.pid)
return self._wait(cond, 60)
def parse_output(self, str_flag, timeout=3):
parse = ParseOutput(self.sub.stderr, str_flag)
parse.start()
parse.join(timeout)
parse.stop()
return parse
@staticmethod
def _wait(cond, timeout):
start = time.time()
while not cond():
if time.time() - start > timeout:
break
time.sleep(.1)
return cond()
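    # e.g. self._wait(lambda: os.path.exists(path), 5) polls the condition
    # every 0.1 s for up to ~5 s and returns its final truth value
    # (path here is purely illustrative).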
def _spawn_service(self, cmd, conf_file=None):
if conf_file is None:
conf_file = self.tempfile
self.sub = subprocess.Popen([cmd, '--config-file=%s' % conf_file],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
#NOTE(Fengqian): Parse the output to see if the service started
self.assertTrue(self.parse_output("Starting").ret)
self.check_process_alive()
def _service_restart(self, cmd):
self._spawn_service(cmd)
self.assertTrue(self.sub.pid)
        #NOTE(Fengqian): Modify the pipeline configuration file to see
        #if the file is reloaded correctly.
self._modify_pipeline_file()
self.pipelinecfg_read_from_file()
os.kill(self.sub.pid, signal.SIGHUP)
self.assertTrue(self.check_process_alive())
self.assertTrue(self.parse_output("Caught SIGHUP").ret)
self.assertEqual(self.pipeline_cfg,
yaml.safe_load(
self.parse_output("Pipeline config: ").ret_stream))
def test_compute_service_restart(self):
self._service_restart('ceilometer-agent-compute')
def test_central_service_restart(self):
self._service_restart('ceilometer-agent-central')
|
mtb0/flightmodel
|
src/drivers/AnalysisDriver.py
|
Python
|
mit
| 1,592 | 0.011935 |
#!/usr/bin/env python
"""Plot scheduled flight times for AA flights between JFK and LAX.
For a given year and month, visualize dist vs sch time, run a regression,
and look at error. Filter based on whether the destination is in the Pacific,
and study the regression and error for each group."""
import os
import sys
from analysis.filter import get_jetstream, get_pacific
from analysis.plot import plot_schtime, plot_regression, plot_error, plot_regression_coef
from analysis.regression import regression
def main():
year = 2015
month = 1
os.system('mkdir -p graphs') #Create directory to place graphs, if it doesn't exist.
plot_schtime(12478, 12892, 'AA') #Plot sch flight time from JFK to LAX
plot_schtime(12892, 12478, 'AA') #Plot sch flight time from LAX to JFK
flights = get_jetstream(year, month) #Get flight info.
#Get info whether destination is in the Pacific and filter.
df_pac = get_pacific(flights)
overseas = df_pac[df_pac.DestOverseas]
not_overseas = df_pac[~df_pac.DestOverseas]
analysislist = [[flights, 'Regression Error'],
[overseas, 'Pacific Regression Error'],
[not_overseas, 'US Regression Error']]
#Plot dist vs sch time, regression, and error for filtered flight data.
for i, [df, title] in enumerate(analysislist):
plot_regression(year, month, df)
print regression(year, month, df)
plot_error(year, month, df, title)
plot_regression_coef() #Plot monthly US and Pacific regression coefficients over time.
if __name__ == '__main__':
sys.exit(main())
|
nomed/ebetl
|
ebetl/controllers/marketing.py
|
Python
|
artistic-2.0
| 12,560 | 0.017596 |
# -*- coding: utf-8 -*-
"""Main Controller"""
import pylons
from tg import expose, flash, require, url, lurl, request, redirect, tmpl_context
from tg.i18n import ugettext as _, lazy_ugettext as l_
from tg import predicates
from ebetl import model
from ebetl.controllers.secure import SecureController
from ebetl.model import DBSession, metadata
from tgext.admin.tgadminconfig import TGAdminConfig
from tgext.admin.controller import AdminController
from ebetl.lib.base import BaseController
from ebetl.controllers.error import ErrorController
from ebetl.model import *
from ebetl.model.zerobi import FACT_B2B,FACT_B2B_PRICE
import json
from ebetl.lib import views
from ebetl.lib.views import get_latest_cogs as gcogs
from webhelpers import paginate
from babel.numbers import format_currency, format_decimal
from decimal import Decimal
from sqlalchemy.sql import label
from sqlalchemy import func
try:
from collections import OrderedDict
except:
from ordereddict import OrderedDict
from tg.predicates import has_permission
import datetime
#from tgext.asyncjob import asyncjob_perform
def testme(arg):
print "====================== TESTME"
__all__ = ['MarketingController']
MEASURES = [
label('qta', func.sum(Movimentir.qtamovimento)),
label('net_total', func.sum(Movimentir.totalenetto)),
label('gross_total', func.sum(Movimentir.totale)),
#label('vat_total', func.sum(vat_total)),
#label('gross_total', func.sum(gross_total)),
#label('lis_ct', func.sum(lis_ct)),
#label('disc', func.sum(
# Factb2b.b2b_net_total - lis_ct
#))
]
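# Informal note: each label() above names an aggregate column in the emitted
# SQL (e.g. SUM(...) AS qta), so rows of the grouped query expose .qta,
# .net_total and .gross_total attributes.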
class RepartiController(BaseController):
"""
Location Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Reparti).all()
return dict(data=data)
class TipologieController(BaseController):
"""
Location Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Tipologieprodotti).all()
return dict(data=data)
class ProduttoriController(BaseController):
"""
Location Controller
"""
@expose('json')
def index(self, *kw, **args):
"""Handle the front-page."""
data = DBSession.query(Produttori).all()
return dict(data=data)
class MarketingController(BaseController):
"""
"""
# The predicate that must be met for all the actions in this controller:
#allow_only = has_permission('manage',
# msg=l_('Only for people with the "manage" permission'))
@expose('ebetl.templates.marketing')
def index(self, *args, **kw):
"""Handle the front-page."""
print kw
group_by = [Clientifid.codiceclientefid, Clientifid.nome, Clientifid.cognome, Clientifid.email]
fltr = group_by + MEASURES
ret = DBSession.query(*fltr)
start = kw.get('start')
if start:
start_obj = datetime.datetime.strptime(start, "%Y/%m/%d")
else:
start_obj = datetime.datetime.now()
kw['start'] = start_obj.strftime("%Y/%m/%d")
print start_obj
ret=ret.filter(Movimentit.datadocumento>=start_obj)
end = kw.get('end')
if end:
end_obj = datetime.datetime.strptime(end, "%Y/%m/%d")
else:
end_obj = datetime.datetime.now()
kw['end'] = end_obj.strftime("%Y/%m/%d")
ret=ret.filter(Movimentit.datadocumento<=end_obj)
produttori = kw.get('produttori')
if produttori:
produttori = json.loads(produttori)
prdlist = []
if produttori:
for p in produttori:
prdlist.append(Produttori.numeroproduttore==p)
if prdlist:
ret=ret.filter(or_(*prdlist))
reparti = kw.get('reparti')
if reparti:
reparti = json.loads(reparti)
replist = []
if reparti:
for r in reparti:
replist.append(Prodotti.numeroreparto==r)
if replist:
ret=ret.filter(or_(*replist))
# join on document header table
join_condition=Clientifid.numeroclientefid==Ricevutet.numeroclientefid
ret = ret.join(Ricevutet, join_condition)
join_condition=Ricevutet.numeromovimento==Movimentit.numeromovimento
ret = ret.join(Movimentit, join_condition)
join_condition=Movimentit.numeromovimento==Movimentir.numeromovimento
ret = ret.join(Movimentir, join_condition)
join_condition=Prodotti.numeroprodotto==Movimentir.idprodotto
ret = ret.join(Prodotti, join_condition)
join_condition=Produttori.numeroproduttore==Prodotti.numeroproduttore
ret = ret.outerjoin(Produttori, join_condition)
results = ret.group_by(*group_by).all()
#results=[]
#join_condition=Movimentit.numeromovimento==Movimentir.numeromovimento
#ret = ret.join(Movimentir, join_condition)
columns = [l.key for l in fltr]
return dict(page='marketing', columns=columns, results=results, kw=kw)
def create(self):
"""POST /stocks: Create a new item"""
# url('stocks')
def new(self, format='html'):
"""GET /stocks/new: Form to create a new item"""
# url('new_stock')
@expose()
def export(self, id, *args, **kw):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
#DBSession.query(Inputb2b).filter(
# Inputb2b.b2b_id==id).update(dict(exported=1
|
))
from ebetl.lib.etl.b2b import B2bObj
b2bobj=B2bObj(config)
#b2bobj.write_out()
#if self.options.export:
print asyncjob_perform(testme, 2)
return redirect(url('/b2b/show/%s'%id))
@expose()
def book(self, id, *args, **kw):
"""PUT /stoc
|
ks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
DBSession.query(Inputb2b).filter(
Inputb2b.b2b_id==id).update(dict(booked=1))
DBSession.query(Factb2b).filter_by(inputb2b_id=id).update(dict(booked=1))
return redirect(url('/b2b/show/%s'%id))
@expose()
def update(self, id):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
ret = DBSession.query(Inputb2b).filter(
Inputb2b.b2b_id==id).one()
from ebetl.lib.etl.filconad import FilconadObj
fobj = FilconadObj(config, ret.record)
fobj.write_out(inputb2b_id=id)
return redirect(url('/b2b/show/%s'%id))
@expose()
def updatedoc(self, id, doc_num, *args, **kw):
"""PUT /stocks/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('stock', id=ID),
# method='put')
# url('stock', id=ID)
print [args]
print [kw]
DBSession.query(Factb2b).filter_by(inputb2b_id=id,doc_num=doc_num ).update(dict(validated=1))
redirect(url('/b2b/showdoc/%s/%s'%(id,doc_num)))
def delete(self, id):
"""DE
|
dawran6/zulip
|
zerver/webhooks/freshdesk/view.py
|
Python
|
apache-2.0
| 5,886 | 0.001019 |
"""Webhooks for external integrations."""
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.models import get_client, UserProfile
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.lib.notifications import convert_html_to_markdown
from zerver.decorator import REQ, has_request_variables, authenticated_rest_api_view
import logging
import ujson
from typing import Any, Dict, List, Optional, Tuple, Union, Text
class TicketDict(dict):
"""
A helper class to turn a dictionary with ticket information into
an object where each of the keys is an attribute for easy access.
"""
def __getattr__(self, field):
# type: (str) -> Any
if "_" in field:
return self.get(field)
else:
return self.get("ticket_" + field)
def property_name(property, index):
# type: (str, int) -> str
"""The Freshdesk API is currently pretty broken: statuses are customizable
but the API will only tell you the number associated with the status, not
the name. While we engage the Freshdesk developers about exposing this
information through the API, since only FlightCar uses this integration,
hardcode their statuses.
"""
statuses = ["", "", "Open", "Pending", "Resolved", "Closed",
"Waiting on Customer", "Job Application", "Monthly"]
priorities = ["", "Low", "Medium", "High", "Urgent"]
if property == "status":
return statuses[index] if index < len(statuses) else str(index)
elif property == "priority":
return priorities[index] if index < len(priorities) else str(index)
else:
raise ValueError("Unknown property")
def parse_freshdesk_event(event_string):
# type: (str) -> List[str]
"""These are always of the form "{ticket_action:created}" or
"{status:{from:4,to:6}}". Note the lack of string quoting: this isn't
valid JSON so we have to parse it ourselves.
"""
data = event_string.replace("{", "").replace("}", "").replace(",", ":").split(":")
if len(data) == 2:
# This is a simple ticket action event, like
# {ticket_action:created}.
return data
    else:
        # This is a property change event, like {status:{from:4,to:6}}. Pull out
        # the property, from, and to states.
        property, _, from_state, _, to_state = data
return [property, property_name(property, int(from_state)),
property_name(property, int(to_state))]
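# Illustrative behavior (informal, derived from the parsing above):
#   parse_freshdesk_event("{ticket_action:created}")
#       -> ["ticket_action", "created"]
#   parse_freshdesk_event("{status:{from:4,to:6}}")
#       -> ["status", "Resolved", "Waiting on Customer"]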
def format_freshdesk_note_message(ticket, event_info):
# type: (TicketDict, List[str]) -> str
"""There are public (visible to customers) and private note types."""
note_type = event_info[1]
content = "%s <%s> added a %s note to [ticket #%s](%s)." % (
ticket.requester_name, ticket.requester_email, note_type,
ticket.id, ticket.url)
return content
def format_freshdesk_property_change_message(ticket, event_info):
# type: (TicketDict, List[str]) -> str
"""Freshdesk will only tell us the first event to match our webhook
configuration, so if we change multiple properties, we only get the before
and after data for the first one.
"""
content = "%s <%s> updated [ticket #%s](%s):\n\n" % (
ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)
# Why not `"%s %s %s" % event_info`? Because the linter doesn't like it.
content += "%s: **%s** => **%s**" % (
event_info[0].capitalize(), event_info[1], event_info[2])
return content
def format_freshdesk_ticket_creation_message(ticket):
# type: (TicketDict) -> str
"""They send us the description as HTML."""
cleaned_description = convert_html_to_markdown(ticket.description)
content = "%s <%s> created [ticket #%s](%s):\n\n" % (
ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)
content += """~~~ quote
%s
~~~\n
""" % (cleaned_description,)
content += "Type: **%s**\nPriority: **%s**\nStatus: **%s**" % (
ticket.type, ticket.priority, ticket.status)
return content
@authenticated_rest_api_view(is_webhook=True)
@has_request_variables
def api_freshdesk_webhook(request, user_profile, payload=REQ(argument_type='body'),
stream=REQ(default='freshdesk')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text) -> HttpResponse
ticket_data = payload["freshdesk_webhook"]
required_keys = [
"triggered_event", "ticket_id", "ticket_url", "ticket_type",
"ticket_subject", "ticket_description", "ticket_status",
"ticket_priority", "requester_name", "requester_email",
]
for key in required_keys:
if ticket_data.get(key) is None:
logging.warning("Freshdesk webhook error. Payload was:")
logging.warning(request.body)
return json_error(_("Missing key %s in JSON") % (key,))
ticket = TicketDict(ticket_data)
subject = "#%s: %s" % (ticket.id, ticket.subject)
try:
event_info = parse_freshdesk_event(ticket.triggered_event)
except ValueError:
return json_error(_("Malformed event %s") % (ticket.triggered_event,))
if event_info[1] == "created":
content = format_freshdesk_ticket_creation_message(ticket)
elif event_info[0] == "note_type":
content = format_freshdesk_note_message(ticket, event_info)
elif event_info[0] in ("status", "priority"):
content = format_freshdesk_property_change_message(ticket, event_info)
else:
        # Not an event we know how to handle; do nothing.
return json_success()
check_send_message(user_profile, get_client("ZulipFreshdeskWebhook"), "stream",
[stream], subject, content)
return json_success()
|
hacknashvillereview/review_application
|
review_app/review_app/urls.py
|
Python
|
mit
| 1,606 | 0.004981 |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
import api, foxycart
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^review/', TemplateView.as_view(template_name='base.html')),
url(r'^feedback/', TemplateView.as_view(template_name='feedback.html')),
# Examples:
# url(r'^$', 'review_app.views.home', name='home'),
    # url(r'^review_app/', include('review_app.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
#url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
url(r'^api/', include('api.urls')),
url(r"^foxycart/", include('foxycart.urls')),
url(r"^foxycart/checkout", TemplateView.as_view(template_name='foxycart_checkout_template.html')),
url(r'^accounts/login/$', 'django.contrib.auth.views.login', {"template_name": "login.html"}),
url(r'^accounts/logout/$', 'django.contrib.auth.views.logout',
{"template_name": "base.html", "next_page": "/"}),
url(r"^protected/", 'review_app.views.protected_method', name="protected"),
url(r"^packages/", 'review_app.views.packages_method', name="packages"),
url(r'^package/(?P<package_id>[0-9]+)/$', 'review_app.views.package_method', name="package"),
)
|
rec/echomesh
|
code/python/experiments/ossaudiodev/GetFormats.py
|
Python
|
mit
| 429 | 0.025641 |
from __future__ import absolute_import, division, print_function, unicode_literals
import ossaudiodev
def print_fmts(rw):
print(rw == 'r' and 'read' or 'write')
sound = ossaudiodev.open(rw)
fmts = sound.getfmts()
for name in dir(ossaudiodev):
if name.startswith('AFMT'):
            attr = getattr(ossaudiodev, name)
if attr & fmts:
print(name)
print()
sound.close()
print_fmts('w')
print_fmts('r')
|
surligas/gnuradio
|
gnuradio-runtime/python/gnuradio/gr/qa_random.py
|
Python
|
gpl-3.0
| 2,171 | 0.00783 |
#!/usr/bin/env python
#
# Copyright 2006,2007,2010,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import numpy as np
class test_random(gr_unittest.TestCase):
# NOTE: For tests on the output distribution of the random numbers, see gnuradio-runtime/apps/evaluation_random_numbers.py.
# Check for range [0,1) of uniform distributed random numbers
def test_1(self):
num_tests = 10000
values = np.zeros(num_tests)
rndm = gr.random()
for k in range(num_tests):
values[k] = rndm.ran1()
for value in values:
self.assertLess(value, 1)
self.assertGreaterEqual(value, 0)
# Check reseed method (init with time and seed as fix number)
def test_2(self):
num = 5
        rndm0 = gr.random(42); # init with fix seed
rndm1 = gr.random(42); # init with fix seed
for k in range(num):
x = rndm0.ran1();
y = rndm1.ran1();
self.assertEqual(x,y)
x = np.zeros(num)
y = np.zeros(num)
rndm0 = gr.random(42); # init with fix seed 1
for k in range(num):
x[k] = rndm0.ran1();
rndm1.reseed(43); # init with fix seed 2
for k in range(num):
            y[k] = rndm1.ran1();
for k in range(num):
self.assertNotEqual(x[k],y[k])
if __name__ == '__main__':
gr_unittest.run(test_random, "test_random.xml")
|
Southpaw-TACTIC/TACTIC
|
src/pyasm/prod/checkin/maya_checkin_test.py
|
Python
|
epl-1.0
| 1,267 | 0.008682 |
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
import os,unittest
from pyasm.security import Batch
from pyasm.command import Command
from pyasm.prod.biz import Asset
from pyasm.prod.maya import *
from maya_checkin import *
class MayaCheckinTest(unittest.TestCase):
def setUp(self):
batch = Batch()
def test_all(self):
# create a scene that will be checked in
asset_code = "prp101"
sid = "12345"
# create an asset
mel('sphere -n sphere1')
mel('circle -n circle1')
mel('group -n |%s |circle1 |sphere1' % asset_code )
# convert node into a maya asset
node = MayaNode("|%s" % asset_code )
asset_node = MayaAssetNode.add_sid( node, sid )
# checkin the asset
checkin = MayaAssetNodeCheckin(asset_node)
Command.execute_cmd(checkin)
# create a file from this node
asset_node.export()
if __name__ == '__main__':
unittest.main()
|
luiscberrocal/homeworkpal
|
homeworkpal_project/project_admin/migrations/0021_projectgoal_fiscal_year.py
|
Python
|
mit
| 568 | 0.001761 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('project_admin', '0020_project_fiscal_year'),
]
operations = [
migrations.AddField(
model_name='projectgoal',
name='fiscal_year',
            field=models.CharField(max_length=4, validators=[django.core.validators.RegexValidator(regex='^AF\\d{2}$')], default='AF16'),
preserve_default=False,
),
]
|
gnott/bot-lax-adaptor
|
src/tests/test_validate.py
|
Python
|
gpl-3.0
| 1,558 | 0.001284 |
from StringIO import StringIO
#import sys
import json
from os.path import join
from .base import BaseCase
import validate
import jsonschema
class TestArticleValidate(BaseCase):
def setUp(self):
self.doc_json = join(self.fixtures_dir, 'elife-09560-v1.xml.json')
def tearDown(self):
pass
def test_main_bootstrap(self):
"valid output is returned"
valid, results = validate.main(open(self.doc_json, 'r'))
self.assertTrue(isinstance(results, dict))
self.assertTrue(isinstance(valid, bool))
def test_main_bootstrap_fails(self):
"invalid output raises a validation error"
data = json.load(open(self.doc_json, 'r'))
data['article']['type'] = 'unknown type that will cause a failure'
strbuffer = StringIO(json.dumps(data))
strbuffer.name = self.doc_json
self.assertRaises(jsonschema.ValidationError, validate.main, strbuffer)
def test_add_placeholders_for_validation(self):
        article = {'article': {'id': 12345, 'version': 2}}
expected = {
'article': {
                '-patched': True,
'id': 12345,
'version': 2,
'stage': 'published',
'versionDate': '2099-01-01T00:00:00Z',
'statusDate': '2099-01-01T00:00:00Z',
}}
validate.add_placeholders_for_validation(article)
self.assertEqual(article, expected)
def test_is_poa_not_poa(self):
# For test coverage
self.assertFalse(validate.is_poa({}))
|
luiscape/hdxscraper-violation-documentation-center-syria
|
tests/unit/test_setup.py
|
Python
|
mit
| 1,648 | 0.018204 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# system
import os
import sys
dir = os.path.split(os.path.split(os.path.split(os.path.realpath(__file__))[0])[0])[0]
sys.path.append(os.path.join(dir, 'scripts'))
# testing
import mock
import unittest
from mock import patch
# program
import setup.load as Config
import setup.database as DB
#
# Global variables.
#
TEST_DATA = 'test_flood_portal_output.json'
class CheckConfigurationStructure(unittest.TestCase):
    '''Unit tests for the configuration files.'''
def test_that_load_config_fails_gracefully(self):
assert Config.LoadConfig('xxx.json') == False
## Object type tests.
def test_config_is_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d) is dict
def test_config_returns_a_table_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d['database']) is list
def test_config_checks_api_key(self):
Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert Config.LoadConfig(os.path.join(dir, 'tests', 'data', 'test_config.json')) == False
class CheckDatabaseCreation(unittest.TestCase):
'''Unit tests for the setting up the database.'''
## Structural tests.
def test_wrapper_database_function_works(self):
assert DB.Main() != False
## Failed config file.
def test_database_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_database_fail.json')) == False
def test_that_odd_table_names_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_fail_column_names.json')) == False
|
johnrbnsn/Adafruit_Python_MAX31856
|
Adafruit_MAX31856/test_MAX31856.py
|
Python
|
mit
| 8,880 | 0.00732 |
"""
Copyright (c) 2019 John Robinson
Author: John Robinson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# Global Imports
import logging
import unittest
import RPi.GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
# Local Imports
from max31856 import MAX31856 as MAX31856
logging.basicConfig(
filename='test_MAX31856.log',
level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
_logger = logging.getLogger(__name__)
class Adafruit_MAX31856(unittest.TestCase):
def tearDown(self):
GPIO.cleanup()
#def test_software_spi_initialize(self):
#"""Checks to see if the sensor can initialize on the software SPI interface.
#Will fail if it cannot find the MAX31856 library or any dependencies.
#Test only checks to see that the sensor can be initialized in Software, does not check the
#hardware connection.
#"""
#_logger.debug('test_software_SPI_initialize()')
## Raspberry Pi software SPI configuration.
#software_spi = {"clk": 25, "cs": 8, "do": 9, "di": 10}
#sensor = MAX31856(software_spi=software_spi)
#if sensor:
#self.assertTrue(True)
#else:
#self.assertTrue(False)
def test_hardware_spi_initialize(self):
"""
Checks to see if the sensor can initialize on the hardware SPI interface.
Will fail if it cannot find the MAX31856 library or any dependencies.
Test only checks to see that the sensor can be initialized in Software, does not check the
hardware connection.
"""
_logger.debug('test_hardware_SPI_initialize()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
if sensor:
self.assertTrue(True)
else:
self.assertTrue(False)
def test_get_register_reading(self):
"""
Checks to see if we can read a register from the device. Good test for correct
connectivity.
"""
_logger.debug('test_get_register_reading()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
value = sensor._read_register(MAX31856.MAX31856_REG_READ_CR0)
for ii in range(0x00, 0x10):
# Read all of the registers, will store data to log
sensor._read_register(ii) # pylint: disable-msg=protected-access
if value:
self.assertTrue(True)
else:
self.assertTrue(False)
    #def test_get_temperature_reading_software_spi(self):
#"""Checks to see if we can read a temperature from the board, using software SPI
#"""
#_logger.debug('test_get_temperature_reading_software_spi')
## Raspberry Pi software SPI configuration.
#software_spi = {"clk": 25, "cs": 8, "do": 9, "di": 10}
#sensor = MAX31856(software_spi=software_spi)
#temp = sensor.read_temp_c()
#if temp:
#self.assertTrue(True)
#else:
#self.assertTrue(False)
    def test_get_temperature_reading(self):
        """
        Checks to see if we can read a temperature from the board, using Hardware SPI
        """
        _logger.debug('test_get_temperature_reading')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
temp = sensor.read_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
    def test_get_internal_temperature_reading(self):
"""
Checks to see if we can read a temperature from the board, using Hardware SPI
"""
_logger.debug('test_get_internal_temperature_reading()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device))
temp = sensor.read_internal_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
    def test_get_internal_temperature_reading_k_type(self):
        """
        Checks to see if we can read a temperature from the board, using Hardware SPI, and K type thermocouple
        """
        _logger.debug('test_get_internal_temperature_reading_k_type()')
# Raspberry Pi hardware SPI configuration.
spi_port = 0
spi_device = 0
sensor = MAX31856(hardware_spi=SPI.SpiDev(spi_port, spi_device), tc_type=MAX31856.MAX31856_K_TYPE)
temp = sensor.read_internal_temp_c()
if temp:
self.assertTrue(True)
else:
self.assertTrue(False)
def test_temperature_byte_conversions(self):
"""
Checks the byte conversion for various known temperature byte values.
"""
_logger.debug('test_temperature_byte_conversions()')
#-------------------------------------------#
# Test Thermocouple Temperature Conversions #
byte2 = 0x01
byte1 = 0x70
byte0 = 0x20
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 23.0078125)
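        # Worked example (informal, per the MAX31856 data format): the three
        # bytes form a 19-bit signed value with 2**-7 degC per LSB, so
        # 0x017020 >> 5 == 2945 and 2945 * 2**-7 == 23.0078125.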
# Check a couple values from the datasheet
byte2 = 0b00000001
byte1 = 0b10010000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 25.0)
byte2 = 0b00000000
byte1 = 0b00000000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, 0.0)
byte2 = 0b11111111
byte1 = 0b11110000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, -1.0)
byte2 = 0b11110000
byte1 = 0b01100000
byte0 = 0b00000000
decimal_temp = MAX31856._thermocouple_temp_from_bytes(byte0, byte1, byte2) # pylint: disable-msg=protected-access
self.assertEqual(decimal_temp, -250.0)
#---------------------------------#
# Test CJ Temperature Conversions #
msb = 0x1C
lsb = 0x64
        decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb)  # pylint: disable-msg=protected-access
        self.assertEqual(decimal_cj_temp, 28.390625)
# Check a couple values from the datasheet
msb = 0b01111111
lsb = 0b11111100
        decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb)  # pylint: disable-msg=protected-access
self.assertEqual(decimal_cj_temp, 127.984375)
msb = 0b00011001
lsb = 0b00000000
        decimal_cj_temp = MAX31856._cj_temp_from_bytes(msb, lsb)  # pylint: disable-msg=protected-access
        self.assertEqual(decimal_cj_temp, 25.0)
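        #----------------------------------------------------------------#
        # Editorial illustration, not part of the library under test:    #
        # re-derive two of the expected values above from first          #
        # principles. The assertions are consistent with a 19-bit        #
        # two's-complement thermocouple field at 0.0078125 C/LSB and a   #
        # 14-bit two's-complement cold-junction field at 0.015625 C/LSB. #
        raw_tc = ((0x01 << 16) | (0x70 << 8) | 0x20) >> 5  # drop the 5 unused low bits
        if raw_tc & 0x40000:  # 19-bit sign bit set
            raw_tc -= 0x80000  # two's-complement correction
        self.assertEqual(raw_tc * 0.0078125, 23.0078125)
        raw_cj = ((0x1C << 8) | 0x64) >> 2  # drop the 2 unused low bits
        if raw_cj & 0x2000:  # 14-bit sign bit set
            raw_cj -= 0x4000  # two's-complement correction
        self.assertEqual(raw_cj * 0.015625, 28.390625)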
|
simar7/build-mozharness
|
mozharness/mozilla/testing/unittest.py
|
Python
|
mpl-2.0
| 11,491 | 0.001653 |
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import os
import re
from mozharness.mozilla.testing.errors import TinderBoxPrintRe
from mozharness.base.log import OutputParser, WARNING, INFO, CRITICAL
from mozharness.mozilla.buildbot import TBPL_WARNING, TBPL_FAILURE, TBPL_RETRY
from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
SUITE_CATEGORIES = ['mochitest', 'reftest', 'xpcshell']
def tbox_print_summary(pass_count, fail_count, known_fail_count=None,
crashed=False, leaked=False):
    emphasize_fail_text = '<em class="testfail">%s</em>'
if pass_count < 0 or fail_count < 0 or \
(known_fail_count is not None and known_fail_count < 0):
        summary = emphasize_fail_text % 'T-FAIL'
elif pass_count == 0 and fail_count == 0 and \
(known_fail_count == 0 or known_fail_count is None):
summary = emphasize_fail_text % 'T-FAIL'
else:
str_fail_count = str(fail_count)
if fail_count > 0:
str_fail_count = emphasize_fail_text % str_fail_count
summary = "%d/%s" % (pass_count, str_fail_count)
if known_fail_count is not None:
summary += "/%d" % known_fail_count
# Format the crash status.
if crashed:
summary += " %s" % emphasize_fail_text % "CRASH"
# Format the leak status.
if leaked is not False:
summary += " %s" % emphasize_fail_text % (
(leaked and "LEAK") or "L-FAIL")
return summary
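
# Editorial usage sketch (hypothetical values, not part of the module):
#     tbox_print_summary(95, 2, known_fail_count=1, crashed=True)
# returns
#     '95/<em class="testfail">2</em>/1 <em class="testfail">CRASH</em>'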
class TestSummaryOutputParserHelper(OutputParser):
def __init__(self, regex=re.compile(r'(passed|failed|todo): (\d+)'), **kwargs):
self.regex = regex
self.failed = 0
self.passed = 0
self.todo = 0
self.last_line = None
super(TestSummaryOutputParserHelper, self).__init__(**kwargs)
def parse_single_line(self, line):
super(TestSummaryOutputParserHelper, self).parse_single_line(line)
self.last_line = line
m = self.regex.search(line)
if m:
try:
setattr(self, m.group(1), int(m.group(2)))
except ValueError:
# ignore bad values
pass
def evaluate_parser(self):
# generate the TinderboxPrint line for TBPL
emphasize_fail_text = '<em class="testfail">%s</em>'
failed = "0"
if self.passed == 0 and self.failed == 0:
self.tsummary = emphasize_fail_text % "T-FAIL"
else:
if self.failed > 0:
failed = emphasize_fail_text % str(self.failed)
self.tsummary = "%d/%s/%d" % (self.passed, failed, self.todo)
def print_summary(self, suite_name):
self.evaluate_parser()
self.info("TinderboxPrint: %s: %s\n" % (suite_name, self.tsummary))
class DesktopUnittestOutputParser(OutputParser):
"""
A class that extends OutputParser such that it can parse the number of
passed/failed/todo tests from the output.
"""
def __init__(self, suite_category, **kwargs):
# worst_log_level defined already in DesktopUnittestOutputParser
# but is here to make pylint happy
self.worst_log_level = INFO
super(DesktopUnittestOutputParser, self).__init__(**kwargs)
self.summary_suite_re = TinderBoxPrintRe.get('%s_summary' % suite_category, {})
self.harness_error_re = TinderBoxPrintRe['harness_error']['minimum_regex']
self.full_harness_error_re = TinderBoxPrintRe['harness_error']['full_regex']
self.harness_retry_re = TinderBoxPrintRe['harness_error']['retry_regex']
self.fail_count = -1
self.pass_count = -1
# known_fail_count does not exist for some suites
self.known_fail_count = self.summary_suite_re.get('known_fail_group') and -1
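        # (the 'x and -1' idiom yields None when the suite has no known-fail
        # group, and -1, meaning "not yet parsed", when it does)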
self.crashed, self.leaked = False, False
self.tbpl_status = TBPL_SUCCESS
def parse_single_line(self, line):
if self.summary_suite_re:
summary_m = self.summary_suite_re['regex'].match(line) # pass/fail/todo
if summary_m:
message = ' %s' % line
log_level = INFO
                # remove all the None values in groups() so this will work
# with all suites including mochitest browser-chrome
summary_match_list = [group for group in summary_m.groups()
if group is not None]
r = summary_match_list[0]
if self.summary_suite_re['pass_group'] in r:
if len(summary_match_list) > 1:
self.pass_count = int(summary_match_list[-1])
else:
# This handles suites that either pass or report
# number of failures. We need to set both
# pass and fail count in the pass case.
self.pass_count = 1
self.fail_count = 0
elif self.summary_suite_re['fail_group'] in r:
self.fail_count = int(summary_match_list[-1])
if self.fail_count > 0:
message += '\n One or more unittests failed.'
log_level = WARNING
# If self.summary_suite_re['known_fail_group'] == None,
                # then r should not match it, so this test is fine as is.
elif self.summary_suite_re['known_fail_group'] in r:
self.known_fail_count = int(summary_match_list[-1])
self.log(message, log_level)
return # skip harness check and base parse_single_line
harness_match = self.harness_error_re.match(line)
if harness_match:
self.warning(' %s' % line)
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
full_harness_match = self.full_harness_error_re.match(line)
if full_harness_match:
r = full_harness_match.group(1)
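            # self.leaked is tri-state: False means no leak, True means a
            # leak was detected, and None means the leak summary line was
            # missing from the log entirely.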
if r == "application crashed":
self.crashed = True
elif r == "missing output line for total leaks!":
self.leaked = None
else:
self.leaked = True
return # skip base parse_single_line
if self.harness_retry_re.search(line):
self.critical(' %s' % line)
self.worst_log_level = self.worst_level(CRITICAL, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_RETRY, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
return # skip base parse_single_line
super(DesktopUnittestOutputParser, self).parse_single_line(line)
def evaluate_parser(self, return_code, success_codes=None):
success_codes = success_codes or [0]
if self.num_errors: # mozharness ran into a script error
self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# I have to put this outside of parse_single_line because this checks not
# only if fail_count was more then 0 but also if fail_count is still -1
# (no fail summary line was found)
if self.fail_count != 0:
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# Account for the possibility that no test summary was output.
if self.pass_count <= 0 and self.fail_count <= 0 and \
           (self.known_fail_count is None or self.known_fail_count <= 0):
            self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
            self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
                                                levels=TBPL_WORST_LEVEL_TUPLE)
|
ronaldahmed/robot-navigation
|
neural-navigation-with-lstm/MARCO/nltk_contrib/unimelb/tacohn/classifier/__init__.py
|
Python
|
mit
| 21,922 | 0.003421 |
## Automatically adapted for numpy.oldnumeric May 17, 2011 by -c
# Natural Language Toolkit: Classifiers
#
# Copyright (C) 2001 University of Pennsylvania
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
# $Id: __init__.py,v 1.2 2003/10/27 04:41:28 trevorcohn1 Exp $
# To do:
# - make sure variable names are used consistantly (fd_list, etc.)
# - remove any confusions about the type of labels (string vs
# immutable)
"""
Classes and interfaces used to classify texts into categories. A
X{category} is a coherent group of texts. This module focuses on
X{single-category text classification}, in which:
    - The set of categories is known.
- The number of categories is finite.
- Each text belongs to exactly one category.
A X{classifier} chooses the most likely category for a given text.
Classifiers can also be used to estimate the probability that a given
text belongs to a category. This module defines the C{ClassifierI}
interface for creating classifiers. Note that classifiers can operate
on any kind of text. For example, classifiers can be used:
- to group documents by topic
- to group words by part of speech
- to group acoustic signals by which phoneme they represent
- to group sentences by their author
Each category is uniquely defined by a X{label}, such as C{'sports'}
or C{'news'}. Labels are typically C{string}s or C{integer}s, but can
be any immutable type. Classified texts are represented by C{Tokens}
whose types are C{LabeledText} objects. A C{LabeledText} consists of
a label and a text.
C{ClassifierTrainerI} is a general interface for classes that build
classifiers from training data.
C{accuracy} and C{log_likelihood} provide simple metrics for
evaluating the performance of a classifier.
@group Data Types: LabeledText
@group Interfaces: ClassifierI, ClassifierTrainerI
@group Evaulation: accuracy, log_likelihood, ConfusionMatrix
@sort: ClassifierI, ClassifierTrainerI
"""
from nltk.token import Token
from nltk.chktype import chktype as _chktype
from nltk.chktype import classeq as _classeq
import math, numpy.oldnumeric as Numeric, types, operator
##//////////////////////////////////////////////////////
## Texts and Labels
##//////////////////////////////////////////////////////
# A text can be any object. Texts are required to be immutable, since
# they are used as the type of a token.
# A label can be any immutable object. Typically, labels are either
# integers or strings.
##//////////////////////////////////////////////////////
## LabeledTexts
##//////////////////////////////////////////////////////
class LabeledText:
"""
A type consisting of a text and a label. A typical example would
be a document labeled with a category, such as \"sports\".
The text and the label are both required to be immutable. Labels
    are usually short strings or integers.
@type _text: (immutable)
@ivar _text: The C{LabeledText}'s text.
@type _label: (immutable)
@ivar _label: The text type's label. This specifies which
category the text belongs to.
"""
def __init__(self, text, label):
"""
        Construct a new C{LabeledText}.
        @param text: The new C{LabeledText}'s text.
        @type text: (immutable)
        @param label: The new C{LabeledText}'s label. This specifies
which category the text belongs to.
@type label: (immutable)
"""
self._text = text
self._label = label
def text(self):
"""
        @return: this C{LabeledText}'s text.
@rtype: (immutable)
"""
return self._text
def label(self):
"""
        @return: this C{LabeledText}'s label.
@rtype: (immutable)
"""
return self._label
def __lt__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __le__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __gt__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __ge__(self, other):
"""
Raise a C{TypeError}, since C{LabeledText} is not an ordered
type.
@raise TypeError: C{LabeledText} is not an ordered type.
"""
raise TypeError("LabeledText is not an ordered type")
def __cmp__(self, other):
"""
        @return: 0 if this C{LabeledText} is equal to C{other}. In
            particular, return 0 iff C{other} is a C{LabeledText},
C{self.text()==other.text()}, and
C{self.label()==other.label()}; return a nonzero number
otherwise.
@rtype: C{int}
@param other: The C{LabeledText} to compare this
C{LabeledText} with.
@type other: C{LabeledText}
"""
        if not _classeq(self, other): return -1
return not (self._text == other._text and
self._label == other._label)
def __hash__(self):
return hash( (self._text, self._label) )
def __repr__(self):
"""
@return: a string representation of this labeled text.
@rtype: C{string}
"""
return "%r/%r" % (self._text, self._label)
##//////////////////////////////////////////////////////
## Classiifer Interface
##//////////////////////////////////////////////////////
class ClassifierI:
"""
A processing interface for categorizing texts. The set of
categories used by a classifier must be fixed, and finite. Each
category is uniquely defined by a X{label}, such as C{'sports'} or
C{'news'}. Labels are typically C{string}s or C{integer}s, but
can be any immutable type. Classified texts are represented by
C{Tokens} whose types are C{LabeledText} objects.
    Classifiers are required to implement two methods:
- C{classify}: determines which label is most appropriate for a
given text token, and returns a labeled text token with that
label.
    - C{labels}: returns the list of category labels that are used
      by this classifier.
    Classifiers are also encouraged to implement the following
methods:
- C{distribution}: return a probability distribution that
specifies M{P(label|text)} for a given text token.
- C{prob}: returns M{P(label|text)} for a given labeled text
token.
- C{distribution_dictionary}: Return a dictionary that maps from
labels to probabilities.
- C{distribution_list}: Return a sequence, specifying the
probability of each label.
Classes implementing the ClassifierI interface may choose to only
support certain classes of tokens for input. If a method is
unable to return a correct result because it is given an
unsupported class of token, then it should raise a
NotImplementedError.
    Typically, classifier classes encode specific classifier models,
but do not include the algorithms for training the classifiers.
Instead, C{ClassifierTrainer}s are used to generate classifiers
from training data.
@see: C{ClassifierTrainerI}
"""
def labels(self):
"""
@return: the list of category labels used by this classifier.
@rtype: C{list} of (immutable)
"""
raise AssertionError()
def classify(self, unlabeled_token):
"""
Determine which label is most appropriate for the given text
token, and return a C{LabeledText} token constructed from the
        given text token.
        """
        raise AssertionError()