text stringlengths 6-947k | repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 |
---|---|---|---|---|---|---|
# -*- python -*-
# Copyright (C) 2009-2015 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/home/build/work/GCC-5-0-build/install-native/share/gcc-arm-none-eabi'
libdir = '/home/build/work/GCC-5-0-build/install-native/arm-none-eabi/lib/armv7e-m/softfp/fpv5-sp-d16'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
    if dir_ not in sys.path:
sys.path.insert(0, dir_)
# Call a function as a plain import would not execute body of the included file
# on repeated reloads of this object file.
from libstdcxx.v6 import register_libstdcxx_printers
register_libstdcxx_printers(gdb.current_objfile())
| FabianKnapp/nexmon | buildtools/gcc-arm-none-eabi-5_4-2016q2-linux-x86/arm-none-eabi/lib/armv7e-m/softfp/fpv5-sp-d16/libstdc++.a-gdb.py | Python | gpl-3.0 | 2,501 | 0.006397 |
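The path-relocation logic in the gdb auto-load script above is easy to exercise standalone; a minimal sketch with hypothetical install-tree paths standing in for the hard-coded ones:

```python
import os

# Hypothetical paths; the real script embeds its build-tree locations.
pythondir = '/opt/gcc/share/gcc-arm-none-eabi'
libdir = '/opt/gcc/arm-none-eabi/lib/armv7e-m'
objfile = '/somewhere/else/arm-none-eabi/lib/armv7e-m/libstdc++.a'

prefix = os.path.commonprefix([libdir, pythondir])
if prefix[-1] != '/':                     # the match landed mid-component
    prefix = os.path.dirname(prefix) + '/'
rel_python = pythondir[len(prefix):]      # 'share/gcc-arm-none-eabi'
rel_lib = libdir[len(prefix):]            # 'arm-none-eabi/lib/armv7e-m'
dotdots = ('..' + os.sep) * len(rel_lib.split(os.sep))
# The printer directory, relative to wherever the objfile actually lives:
print(os.path.join(os.path.dirname(objfile), dotdots, rel_python))
```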
# -*- coding: utf-8 -*-
import sys
import string
from datetime import datetime,timedelta
import calendar
import csv
import re
# Open the files (fp: the log file passed as an argument, wfp: the CSV file to write to)
fp = open(sys.argv[1],'r')
# If the log rotates during the day, both /var/log/kern.log and /var/log/kern.log.1 may need to be read
wfp = open('/path/to/program/csv_data/formatted.csv', 'a')
writer = csv.writer(wfp, lineterminator='\n')
# Compute yesterday's date
yesterday = datetime.now() + timedelta(days=-1)
print "下記の日時のログ整形データをformatted.csvに書き出します"
print yesterday.strftime('%Y %b %d %H:%M:%S')
# Create the id counter and the list used for output rows
i = 0
w = [0] * 7
# Build the CSV header
#w[0] = "id"
#w[1] = "weekday"
#w[2] = "hour"
#w[3] = "smacaddr"
#w[4] = "dipaddr"
#w[5] = "proto"
#w[6] = "spt"
# Write one row to the file
#writer.writerow(w)
# Read the log file up to EOF
for line in fp.readlines():
    # Select forwarding packets that leave the internal network
if line.find("FORWARD_F IN=eth1") >= 0:
        # If a space follows the '[' of the kernel: timestamp, the later split cuts incorrectly, so remove the '['
line = line.replace('[','')
line = line.replace(' DF ',' ')
        # Split the line and keep only non-empty tokens
l = filter(lambda x: len(x)>0, re.split(r" ", line))
        # Output only log entries matching yesterday's date
if l[0] == yesterday.strftime('%b') and int(l[1], 10) == int(yesterday.strftime('%d'), 10):
# print l
# id
w[0] = i
            # Yesterday's weekday (Mon:0, Tue:1, Wed:2, Thu:3, Fri:4, Sat:5, Sun:6)
w[1] = yesterday.weekday()
            # Time (hour only)
w[2] = int(l[2][:2], 10)
            # Source MAC address
w[3] = l[9][4:]
            # Destination IP address
w[4] = l[11][4:]
            # Protocol
w[5] = l[17][6:]
            # Destination port number
            # If the protocol is ICMP, set the destination port to 0
if l[17][6:] == "ICMP":
l[19] = 0
w[6] = l[19]
else:
w[6] = l[19][4:]
i += 1
            # Write one row to the file
writer.writerow(w)
# Close the files
fp.close()
wfp.close()
| High-Hill/bachelor_dap_gw | program/log_formatting.py | Python | mit | 2,502 | 0.008947 |
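For reference, the field indices the script relies on (l[9], l[11], l[17], l[19]) line up with a typical iptables LOG entry; a sketch against a hypothetical kern.log line (real lines depend on the firewall's LOG rules):

```python
# -*- coding: utf-8 -*-
import re

# Hypothetical kern.log line; the exact layout depends on the LOG prefix.
line = ('Mar 14 12:34:56 gw kernel: [12345.678901] FORWARD_F IN=eth1 OUT=eth0 '
        'MAC=aa:bb:cc:dd:ee:ff:11:22:33:44:55:66:08:00 SRC=192.168.1.10 DST=8.8.8.8 '
        'LEN=60 TOS=0x00 PREC=0x00 TTL=63 ID=54321 PROTO=TCP SPT=51515 DPT=443')
line = line.replace('[', '')
l = filter(lambda x: len(x) > 0, re.split(r" ", line))
print l[0], l[1], l[2][:2]   # month, day, hour -> Mar 14 12
print l[9][4:]               # source MAC address
print l[11][4:]              # destination IP -> 8.8.8.8
print l[17][6:], l[19][4:]   # protocol and destination port -> TCP 443
```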
#!/usr/bin/env python
# encoding: utf-8
"""
views.py
Created by Christophe VAN FRACKEM on 2014/05/25.
Copyright (c) 2014 Tiss'Page. All rights reserved.
"""
__author__ = 'Christophe VAN FRACKEM <contact@tisspage.fr>'
__version__ = '0.0.1'
__copyright__ = '© 2014 Tiss\'Page'
from django.shortcuts import render_to_response
from django.views.generic import View, TemplateView, FormView, UpdateView
from django.http import HttpResponseRedirect, HttpResponse
from django.core.mail import send_mail, BadHeaderError
from django.core.mail import EmailMultiAlternatives
from django.contrib import messages
from website.forms import ContactForm
class ContactFormView(FormView):
form_class=ContactForm
success_url = '/'
def get_context_data(self, **kwargs):
context = super(ContactFormView, self).get_context_data(**kwargs)
        context.update(form=ContactForm())
return context
def form_valid(self, form):
subject = u'Contact via le site tisspage.fr'
from_email = 'contact@tisspage.fr'
to = 'contact@tisspage.fr'
text_content = 'Un internaute vient de vous contacter via le formulaire de contact de votre site Internet.'
html_content = 'Un internaute vient de vous contacter via le formulaire de contact de votre site Internet.<br><br>'
html_content += u'<strong>Son email :</strong><a href="mailto:{email}"> {email}</a><br>'.format(email=form.cleaned_data.get('email'))
html_content += u'<strong>Son message :</strong> <br>{message}'.format(message=form.cleaned_data.get('message'))
msg = EmailMultiAlternatives(subject, text_content, from_email, [to])
msg.attach_alternative(html_content, "text/html")
msg.send()
messages.add_message(self.request, messages.INFO, 'Votre message a bien été envoyé. Je vous contacterai dans les plus brefs délais.')
return HttpResponseRedirect(self.get_success_url())
class PageView(ContactFormView, FormView):
template_name="index.html"
class MentionsView(ContactFormView, FormView):
template_name="mentions.html"
| tisspage/resume-website | website/views.py | Python | gpl-3.0 | 2,019 | 0.021351 |
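A minimal sketch of how these class-based views might be wired up in a `urls.py` (URL patterns and names are assumptions, not taken from the project; `patterns` matches the Django versions of this era):

```python
from django.conf.urls import patterns, url
from website.views import PageView, MentionsView

urlpatterns = patterns('',
    url(r'^$', PageView.as_view(), name='home'),
    url(r'^mentions/$', MentionsView.as_view(), name='mentions'),
)
```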
"""Publishing native (typically pickled) objects.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from traitlets.config import Configurable
from ipykernel.inprocess.socket import SocketABC
from traitlets import Instance, Dict, CBytes
from ipykernel.jsonutil import json_clean
from ipykernel.serialize import serialize_object
from jupyter_client.session import Session, extract_header
class ZMQDataPublisher(Configurable):
    topic = CBytes(b'datapub')
session = Instance(Session, allow_none=True)
pub_socket = Instance(SocketABC, allow_none=True)
parent_header = Dict({})
def set_parent(self, parent):
"""Set the parent for outbound messages."""
self.parent_header = extract_header(parent)
def publish_data(self, data):
"""publish a data_message on the IOPub channel
Parameters
----------
data : dict
The data to be published. Think of it as a namespace.
"""
session = self.session
buffers = serialize_object(data,
buffer_threshold=session.buffer_threshold,
item_threshold=session.item_threshold,
)
content = json_clean(dict(keys=list(data.keys())))
session.send(self.pub_socket, 'data_message', content=content,
parent=self.parent_header,
buffers=buffers,
ident=self.topic,
)
def publish_data(data):
"""publish a data_message on the IOPub channel
Parameters
----------
data : dict
The data to be published. Think of it as a namespace.
"""
from ipykernel.zmqshell import ZMQInteractiveShell
ZMQInteractiveShell.instance().data_pub.publish_data(data)
| bdh1011/wau | venv/lib/python2.7/site-packages/ipykernel/datapub.py | Python | mit | 1,761 | 0.003975 |
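The module-level helper is the intended entry point; a usage sketch from code running inside an IPython kernel (in a plain interpreter there is no IOPub socket to publish on):

```python
from ipykernel.datapub import publish_data

# Sends a 'data_message' over IOPub; listening frontends receive the
# dict keys in the message content and the serialized values as buffers.
publish_data({'answer': 42, 'samples': [1, 2, 3]})
```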
class Action:
label = ""
selectable = 0
    def __init__(self, label="", s=0):
        self.label = label
        self.selectable = s
    def getLabel(self):
        return self.label
    def do(self):
        pass
    def canSelect(self):
        return self.selectable
| tbdale/crystalfontz-lcd-ui | python/Action.py | Python | mit | 289 | 0.020761 |
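Action reads as a base class for menu entries; a hypothetical subclass sketch (names invented) showing the intended override point:

```python
class ShutdownAction(Action):
    """Hypothetical concrete action: a selectable entry that overrides do()."""
    def __init__(self):
        Action.__init__(self, label="Shutdown", s=1)
    def do(self):
        print "shutting down..."   # replace with real work
```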
# -*- coding: utf-8 -*-
import os
import subprocess
import sys
import threading
import time
import signal
from thriftpy.protocol import TBinaryProtocolFactory
from thriftpy.server import TThreadedServer
from thriftpy.thrift import TProcessor
from thriftpy.transport import TServerSocket, TBufferedTransportFactory
from .logger import log
_EXIT_RELOADER = 3
def restart_with_reloader(host, port):
"""Spawn a new Python interpreter with the same arguments as this one,
but running the reloader thread.
"""
log('info', 'Running on {}:{} in DEBUG mode'.format(host, port))
while True:
args = [sys.executable] + sys.argv
new_environ = os.environ.copy()
new_environ['archer.reload_loop'] = 'true'
exit_code = subprocess.call(args, env=new_environ)
if exit_code != _EXIT_RELOADER:
return exit_code
def reloader_loop(extra_files=None, interval=1):
"""When this function is run from the main thread, it will force other
threads to exit when any modules currently loaded change.
:param extra_files: a list of additional files it should watch.
:param interval: reload loop interval
"""
from itertools import chain
mtimes = {}
while True:
for filename in chain(_iter_module_files(), extra_files or ()):
try:
mtime = os.stat(filename).st_mtime
except OSError:
continue
old_time = mtimes.get(filename)
if old_time is None:
mtimes[filename] = mtime
continue
elif mtime > old_time:
log(
'info', ' * Detected change in %r, reloading' % filename)
log('info', ' * Restarting with reloader')
sys.exit(_EXIT_RELOADER)
time.sleep(interval)
def _iter_module_files():
for module in list(sys.modules.values()):
filename = getattr(module, '__file__', None)
if filename:
old = None
while not os.path.isfile(filename):
old = filename
filename = os.path.dirname(filename)
if filename == old:
break
else:
if filename[-4:] in ('.pyc', '.pyo'):
filename = filename[:-1]
yield filename
def _make_server(app, host, port, daemon=True):
processor = TProcessor(app.service, app)
server_socket = TServerSocket(host=host, port=port)
server = TThreadedServer(processor, server_socket,
iprot_factory=TBinaryProtocolFactory(),
itrans_factory=TBufferedTransportFactory(),
daemon=daemon)
return server
def run_simple(host, port, app, extra_files=None, interval=1,
use_reloader=True):
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
server = _make_server(app, host, port)
if not use_reloader:
log('info', 'server starting at {}:{}'.format(host, port))
server.serve()
if os.environ.get('archer.reload_loop') == 'true':
t = threading.Thread(target=server.serve, args=())
t.setDaemon(True)
t.start()
try:
reloader_loop(extra_files, interval)
except KeyboardInterrupt:
return
try:
sys.exit(restart_with_reloader(host, port))
except KeyboardInterrupt:
pass
| eleme/archer | archer/_server.py | Python | mit | 3,460 | 0 |
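The restart-on-change mechanism above is a generic two-process dance; a condensed standalone sketch reusing the module's own environment-variable guard and exit code:

```python
import os
import subprocess
import sys

_EXIT_RELOADER = 3

if os.environ.get('archer.reload_loop') != 'true':
    # Parent: respawn the child until it exits with a non-reload code.
    while True:
        env = os.environ.copy()
        env['archer.reload_loop'] = 'true'
        code = subprocess.call([sys.executable] + sys.argv, env=env)
        if code != _EXIT_RELOADER:
            sys.exit(code)
else:
    # Child: do the real work; call sys.exit(_EXIT_RELOADER) when a
    # watched file's mtime changes, which makes the parent respawn us.
    pass
```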
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Follow'
db.create_table('actstream_follow', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('actstream', ['Follow'])
# Adding unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.create_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Adding model 'Action'
db.create_table('actstream_action', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('actor_content_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='actor', to=orm['contenttypes.ContentType'])),
('actor_object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('verb', self.gf('django.db.models.fields.CharField')(max_length=255)),
('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('target_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='target', null=True, to=orm['contenttypes.ContentType'])),
('target_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('action_object_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='action_object', null=True, to=orm['contenttypes.ContentType'])),
('action_object_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('public', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('actstream', ['Action'])
def backwards(self, orm):
# Removing unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.delete_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Deleting model 'Follow'
db.delete_table('actstream_follow')
# Deleting model 'Action'
db.delete_table('actstream_action')
models = {
'actstream.action': {
'Meta': {'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'actstream.follow': {
'Meta': {'unique_together': "(('user', 'content_type', 'object_id'),)", 'object_name': 'Follow'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['actstream']
| hzlf/openbroadcast | website/apps/actstream/migrations/0001_initial.py | Python | gpl-3.0 | 7,890 | 0.008365 |
__author__ = 'Evtushenko Georgy'
from setuptools import setup, find_packages
setup(
name="metanet",
version="0.1",
description="Free portable library for meta neural network research",
license="GPL3",
packages=['metanet', 'metanet.datasets', 'metanet.networks', 'metanet.networks.nodes', 'metanet.networks.artificial_networks', 'metanet.networks.nodes', 'metanet.networks.groups', 'metanet.networks.connections'],
install_requires=['numpy', 'networkx'],
)
| senior-zero/metanet | setup.py | Python | gpl-3.0 | 482 | 0.002075 |
'''tzinfo timezone information for Africa/Asmera.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Asmera(DstTzInfo):
'''Africa/Asmera timezone definition. See datetime.tzinfo for details'''
zone = 'Africa/Asmera'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1936,5,4,21,24,40),
]
_transition_info = [
i(9300,0,'ADMT'),
i(10800,0,'EAT'),
]
Asmera = Asmera()
| newvem/pytz | pytz/zoneinfo/Africa/Asmera.py | Python | mit | 483 | 0.043478 |
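Consumers never touch these generated transition tables directly; a minimal usage sketch through the public pytz API:

```python
from datetime import datetime
import pytz

asmara = pytz.timezone('Africa/Asmera')
local = asmara.localize(datetime(2008, 6, 1, 12, 0))
print(local.strftime('%Y-%m-%d %H:%M %Z%z'))   # 2008-06-01 12:00 EAT+0300
```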
import pytest
class TestTail:
@pytest.mark.complete("tail --", require_longopt=True)
def test_1(self, completion):
assert completion
| algorythmic/bash-completion | test/t/test_tail.py | Python | gpl-2.0 | 151 | 0 |
import gettext
_ = gettext.gettext
from html5lib.constants import voidElements, spaceCharacters
spaceCharacters = u"".join(spaceCharacters)
class TreeWalker(object):
def __init__(self, tree):
self.tree = tree
def __iter__(self):
raise NotImplementedError
def error(self, msg):
return {"type": "SerializeError", "data": msg}
def normalizeAttrs(self, attrs):
if not attrs:
attrs = []
elif hasattr(attrs, 'items'):
attrs = attrs.items()
return [(unicode(name),unicode(value)) for name,value in attrs]
def emptyTag(self, namespace, name, attrs, hasChildren=False):
yield {"type": "EmptyTag", "name": unicode(name),
"namespace":unicode(namespace),
"data": self.normalizeAttrs(attrs)}
if hasChildren:
yield self.error(_("Void element has children"))
def startTag(self, namespace, name, attrs):
return {"type": "StartTag",
"name": unicode(name),
"namespace":unicode(namespace),
"data": self.normalizeAttrs(attrs)}
def endTag(self, namespace, name):
return {"type": "EndTag",
"name": unicode(name),
"namespace":unicode(namespace),
"data": []}
def text(self, data):
data = unicode(data)
middle = data.lstrip(spaceCharacters)
left = data[:len(data)-len(middle)]
if left:
yield {"type": "SpaceCharacters", "data": left}
data = middle
middle = data.rstrip(spaceCharacters)
right = data[len(middle):]
if middle:
yield {"type": "Characters", "data": middle}
if right:
yield {"type": "SpaceCharacters", "data": right}
def comment(self, data):
return {"type": "Comment", "data": unicode(data)}
def doctype(self, name, publicId=None, systemId=None, correct=True):
return {"type": "Doctype",
"name": name is not None and unicode(name) or u"",
"publicId": publicId, "systemId": systemId,
"correct": correct}
def unknown(self, nodeType):
return self.error(_("Unknown node type: ") + nodeType)
class RecursiveTreeWalker(TreeWalker):
def walkChildren(self, node):
        raise NotImplementedError
def element(self, node, namespace, name, attrs, hasChildren):
if name in voidElements:
for token in self.emptyTag(namespace, name, attrs, hasChildren):
yield token
else:
yield self.startTag(name, attrs)
if hasChildren:
for token in self.walkChildren(node):
yield token
yield self.endTag(name)
from xml.dom import Node
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
UNKNOWN = "<#UNKNOWN#>"
class NonRecursiveTreeWalker(TreeWalker):
def getNodeDetails(self, node):
raise NotImplementedError
def getFirstChild(self, node):
raise NotImplementedError
def getNextSibling(self, node):
raise NotImplementedError
def getParentNode(self, node):
raise NotImplementedError
def __iter__(self):
currentNode = self.tree
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
hasChildren = False
endTag = None
if type == DOCTYPE:
yield self.doctype(*details)
elif type == TEXT:
for token in self.text(*details):
yield token
elif type == ELEMENT:
namespace, name, attributes, hasChildren = details
if name in voidElements:
for token in self.emptyTag(namespace, name, attributes, hasChildren):
yield token
hasChildren = False
else:
endTag = name
yield self.startTag(namespace, name, attributes)
elif type == COMMENT:
yield self.comment(details[0])
elif type == DOCUMENT:
hasChildren = True
else:
yield self.unknown(details[0])
if hasChildren:
firstChild = self.getFirstChild(currentNode)
else:
firstChild = None
if firstChild is not None:
currentNode = firstChild
else:
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
if type == ELEMENT:
namespace, name, attributes, hasChildren = details
if name not in voidElements:
yield self.endTag(namespace, name)
nextSibling = self.getNextSibling(currentNode)
if nextSibling is not None:
currentNode = nextSibling
break
if self.tree is currentNode:
currentNode = None
else:
currentNode = self.getParentNode(currentNode)
| naokits/adminkun_viewer_old | Server/gaeo/html5lib/treewalkers/_base.py | Python | mit | 5,461 | 0.003662 |
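A hypothetical concrete walker over a minimal dict-based tree, showing what the four abstract hooks of `NonRecursiveTreeWalker` must return (the node layout is invented for this sketch):

```python
class DictTreeWalker(NonRecursiveTreeWalker):
    """Nodes are dicts: {'type', 'name', 'data', 'attrs', 'children', 'parent'}."""
    def getNodeDetails(self, node):
        if node['type'] == 'document':
            return (DOCUMENT,)
        elif node['type'] == 'text':
            return (TEXT, node['data'])
        else:
            # (namespace, name, attrs, hasChildren), as __iter__ unpacks it
            return (ELEMENT, None, node['name'],
                    node.get('attrs'), bool(node.get('children')))
    def getFirstChild(self, node):
        return node['children'][0]
    def getNextSibling(self, node):
        parent = node.get('parent')
        if parent is None:
            return None
        siblings = parent['children']
        i = siblings.index(node)
        return siblings[i + 1] if i + 1 < len(siblings) else None
    def getParentNode(self, node):
        return node.get('parent')
```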
#! /usr/bin/env python
# import os
import pygame
import random
from highscore import is_high_score_and_save
# It is better to have an extra variable than an extremely long line.
player_img_path = 'player.png'
pill_img_path = 'pill.png'
boost_img_path = 'boost.png'
background = pygame.image.load("space.jpg")
# create a group for all the pills
pillsGroup = pygame.sprite.Group()
class Pill(pygame.sprite.Sprite):
is_boost = False
def __init__(self, boost):
pygame.sprite.Sprite.__init__(self, pillsGroup)
self.is_boost = boost
if self.is_boost:
self.image = pygame.image.load(boost_img_path)
else:
self.image = pygame.image.load(pill_img_path)
self.rect = self.image.get_rect()
self.warp()
self.alive = True
def draw(self, surface):
global myfont
""" Draw on surface """
# blit yourself at your current position
surface.blit(self.image, (self.rect.x, self.rect.y))
    def kill(self):
        """ When eaten, warp """
        global time_remaining
if self.is_boost:
time_remaining = time_remaining + 1
self.warp()
def warp(self):
""" Random position of pill """
self.rect.x = random.randrange(100, 540)
self.rect.y = random.randrange(100, 300)
class Player(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load(player_img_path)
self.rect = self.image.get_rect()
def handle_keys(self):
""" Handles Keys """
key = pygame.key.get_pressed()
dist = 4 # distance moved in 1 frame, try changing it
if key[pygame.K_DOWN]: # down key
self.rect.y += dist # move down
elif key[pygame.K_UP]: # up key
self.rect.y -= dist # move up
if key[pygame.K_RIGHT]: # right key
self.rect.x += dist # move right
elif key[pygame.K_LEFT]: # left key
self.rect.x -= dist # move left
    def update(self):
        """ Update the player """
        global score
        self.handle_keys()  # handle the keys
if pygame.sprite.spritecollide(self, pillsGroup, True):
score = score + 1
print ("Player ate Pill Score", score)
def draw(self, surface):
""" Draw on surface """
# blit yourself at your current position
surface.blit(self.image, (self.rect.x, self.rect.y))
pygame.init()
clock = pygame.time.Clock()
score = 0
is_game_over = False
is_high_score = False
time_remaining = 10
pygame.time.set_timer(pygame.USEREVENT + 1, 1000)
pygame.font.init()
myfont = pygame.font.SysFont("Serif", 30)
screen = pygame.display.set_mode((640, 400))
# Create pills
for num in range(0, 15):
pill = Pill(False) # Create a pill
for num in range(0, 2):
pill = Pill(True) # Create a pill
player = Player() # create an instance of player
running = True
while running:
# handle every event since the last frame.
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit() # quit the screen
running = False
elif event.type == pygame.USEREVENT + 1:
print("Tick", time_remaining)
if not is_game_over:
time_remaining = time_remaining - 1
if time_remaining == 0:
is_game_over = True
is_high_score = is_high_score_and_save(score)
screen.fill((255, 255, 255)) # fill the screen with white
screen.blit(background, (0, 0))
# Draw score
scoreText = "Score: " + str(score)
textsurface = myfont.render(scoreText, True, (255, 255, 255))
screen.blit(textsurface, (5, 5))
# Draw time
timeText = "Time: " + str(time_remaining)
textsurface = myfont.render(timeText, True, (0, 255, 255))
screen.blit(textsurface, (5, 25))
if is_game_over:
if is_high_score:
high_score_text = "High Score " + str(score)
high_score_surface = myfont.render(
high_score_text, True, (255, 255, 0))
screen.blit(high_score_surface, (250, 350))
game_over_surface = myfont.render("Game Over", True, (255, 0, 0))
screen.blit(game_over_surface, (250, 160))
else:
player.update()
player.draw(screen) # draw the player to the screen
pillsGroup.update()
pillsGroup.draw(screen)
pygame.display.update() # update the screen
clock.tick(40) # Limit the game to running at 40 frames per second
| dojojon/pygame | week5/spawn5.py | Python | mit | 4,613 | 0.000434 |
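`is_high_score_and_save` is imported from a `highscore` module that is not part of this file; a plausible minimal sketch (file name and storage format are assumptions):

```python
# highscore.py -- hypothetical companion module
import os

SCORE_FILE = 'highscore.txt'

def is_high_score_and_save(score):
    """Return True and persist the score if it beats the stored best."""
    best = 0
    if os.path.exists(SCORE_FILE):
        with open(SCORE_FILE) as f:
            try:
                best = int(f.read().strip() or 0)
            except ValueError:
                best = 0
    if score > best:
        with open(SCORE_FILE, 'w') as f:
            f.write(str(score))
        return True
    return False
```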
import logging
import os
from twilio.rest import Client
class TwilioClient(object):
def __init__(self):
self.logger = logging.getLogger("botosan.logger")
self.account_sid = os.environ["TWILIO_SID"]
self.account_token = os.environ["TWILIO_TOKEN"]
self.client = Client(self.account_sid, self.account_token)
def get_mcc_and_mnc(self, phone_number):
"""
Gets the Mobile Country Code and Mobile Network code for a given Twilio Number
:param phone_number: The phone number, containing the +CC Number, ex: +12345678901 for the US.
:return: a tuple containing the mcc and mnc
"""
number = self.client.lookups.phone_numbers(phone_number).fetch(type="carrier")
self.logger.info(number.carrier['mobile_country_code'])
self.logger.info(number.carrier['mobile_network_code'])
return number.carrier['mobile_country_code'], number.carrier['mobile_network_code']
def get_available_numbers(self):
numbers = self.client.available_phone_numbers("GB").local.list(exclude_local_address_required=True)
print(numbers.count())
phone_numbers = []
for number in numbers:
phone_numbers.append(number.phone_number)
return phone_numbers
| FredLoh/BotoSan | twilio-mnc-mcc-getter.py | Python | mit | 1,280 | 0.003906 |
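A usage sketch, assuming valid Twilio credentials are exported in the environment and the phone number is an E.164-formatted placeholder:

```python
import logging

logging.basicConfig(level=logging.INFO)

# Assumes TWILIO_SID and TWILIO_TOKEN are set in the environment.
client = TwilioClient()
mcc, mnc = client.get_mcc_and_mnc('+12345678901')
print('MCC=%s MNC=%s' % (mcc, mnc))
```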
__author__ = 'besta'
class BestaPlayer:
def __init__(self, fichier, player):
self.fichier = fichier
self.grille = self.getFirstGrid()
self.best_hit = 0
self.players = player
def getFirstGrid(self):
"""
Implements function to get the first grid.
:return: the grid.
"""
li = []
with open(self.fichier, 'r') as fi:
for line in fi.readlines():
li.append(line)
return li
def updateGrid(self):
"""
Implements function to update the grid to alter n-1
round values
"""
with open(self.fichier, 'r') as fi:
for line in fi.readlines():
i = 0
for car in line:
j = 0
if car != '\n':
self.grille[i][j] = car
j += 1
i += 1
def grilleEmpty(self):
"""
Implement function to check if the grid is empty.
"""
for line in self.grille:
for car in line[:len(line) - 1]:
if car != '0':
return False
return True
def checkLines(self, player, inARow):
"""
Implements function to check the current lines setup to evaluate best combinaison.
:param player: check for your numbers (your player number) or those of your opponent.
:param inARow: how many tokens in a row (3 or 2).
:return: true or false
"""
count = 0
flag = False
for line_number, line in enumerate(self.grille):
count = 0
for car_pos, car in enumerate(line[:len(line) - 1]):
if int(car) == player and not flag:
count = 1
flag = True
elif int(car) == player and flag:
count += 1
if count == inARow:
if car_pos - inARow >= 0 and self.canPlayLine(line_number, car_pos - inARow):
return True, car_pos - inARow
if car_pos + 1 <= 6 and self.canPlayLine(line_number, car_pos + 1):
return True, car_pos + 1
else:
count = 0
return False, 0
def canPlayLine(self, line, col):
"""
Function to check if we can fill the line with a token.
:param line: which line
:param col: which column
:return: true or false
"""
if line == 5:
return self.grille[line][col] == '0'
else:
return self.grille[line][col] == '0' and self.grille[line + 1][col] != '0'
def changeColumnInLines(self):
"""
Implements function to transform columns in lines to make tests eaiser.
:return: a reverse matrice
"""
column = []
for x in xrange(7):
col = ''
for y in xrange(6):
col += self.grille[y][x]
column.append(col)
return column
def checkColumns(self, player, inARow):
"""
Implements function to check the current columns setup to evaluate best combinaison.
:param player: check for your numbers (your player number) or those of your opponent.
:param inARow: how many tokens in a row (3 or 2).
:return: true or false
"""
column = self.changeColumnInLines()
count = 0
flag = False
for col_number, line in enumerate(column):
count = 0
for car_pos, car in enumerate(line):
if int(car) == player and not flag:
count = 1
flag = True
elif int(car) == player and flag:
count += 1
if count == inARow and car_pos - inARow >= 0 and self.grille[car_pos - inARow][col_number] == '0':
return True, col_number
else:
count = 0
return False, 0
def checkDiagonalLeftToRight(self, player, inARow):
"""
Implements function to check the current diagonal to evaluate best combinaison.
:param player: check for your numbers or opponent ones.
:param inARow: how many tokens in a row (3 or 2).
:return:
"""
x = 3
flag = False
while x < 6:
count = 0
x_int = x
y_int = 0
while x_int >= 0:
if int(self.grille[x_int][y_int]) == player and not flag:
count = 1
flag = True
elif int(self.grille[x_int][y_int]) == player and flag:
count += 1
if count == inARow and y_int + 1 <= 6 and x_int - 1 >= 0 and self.grille[x_int][y_int + 1] != '0':
return True, y_int + 1
else:
count = 0
flag = False
x_int -= 1
y_int += 1
x += 1
y = 1
flag = False
while y <= 3:
count = 0
x_int = 5
y_int = y
while y_int <= 6 and x_int >= 0:
if int(self.grille[x_int][y_int]) == player and not flag:
count = 1
flag = True
elif int(self.grille[x_int][y_int]) == player and flag:
count += 1
                    if count == inARow and y_int + 1 <= 6 and x_int - 1 >= 0 and self.grille[x_int][y_int + 1] != '0':
return True, y_int + 1
else:
count = 0
                    flag = False
x_int -= 1
y_int += 1
y += 1
return False, 0
def checkDiagonalRightToLeft(self, player, inARow):
"""
Implements function to check the current diagonal to evaluate best combinaison.
:param player: check for your numbers or opponent ones.
:param inARow: how many tokens in a row (3 or 2).
:return:
"""
x = 3
flag = False
while x < 6:
count = 0
x_int = x
y_int = 6
while x_int >= 0:
if int(self.grille[x_int][y_int]) == player and not flag:
count = 1
flag = True
elif int(self.grille[x_int][y_int]) == player and flag:
count += 1
if count == inARow and y_int - 1 >= 0 and x_int - 1 >= 0 and self.grille[x_int][y_int - 1] != '0':
return True, y_int - 1
else:
count = 0
flag = False
x_int -= 1
y_int -= 1
x += 1
y = 5
flag = False
        while y >= 3:
count = 0
x_int = 5
y_int = y
while y_int >= 3 and x_int >= 0:
if int(self.grille[x_int][y_int]) == player and not flag:
count = 1
flag = True
elif int(self.grille[x_int][y_int]) == player and flag:
count += 1
                    if count == inARow and y_int - 1 >= 0 and x_int - 1 >= 0 and self.grille[x_int][y_int - 1] != '0':
return True, y_int - 1
else:
count = 0
                    flag = False
x_int -= 1
y_int -= 1
y -= 1
return False, 0
def checkDiagonals(self, player, inARow):
"""
Calls two diagonal functional.
:return: an int, representing the column where to play or 0 and False if there is no pattern search.
"""
check = self.checkDiagonalLeftToRight(player, inARow)
if check[0]:
return check
else:
return self.checkDiagonalRightToLeft(player, inARow)
def playSomeColumn(self, player, inARow):
"""
Call all function for a player and a number of tokens given.
:param player: which player
:param inARow: how many token
:return: true or false (col number if true)
"""
methods = {'checklines': self.checkLines, 'checkcolumn': self.checkColumns, 'checkdiagonal': self.checkDiagonals}
for key, function in methods.items():
which_col = function(player, inARow)
if which_col[0]:
return which_col
return False, 0
def findFirstColumnEmpty(self):
"""
Implements function to get the first column where a slot remain.
:return: the column
"""
for col in xrange(7):
if self.grille[0][col] == '0':
return col
return -1
def decideColumn(self):
"""
Implements main function : to decide what is the better hit to do.
:return: an int, representing the column where we play
"""
if self.grilleEmpty():
return 3
li_sequence = [3, 2, 1]
li_players = [self.players[0], self.players[1]]
for sequence in li_sequence:
for player in li_players:
choosen_col = self.playSomeColumn(player, sequence)
if choosen_col[0]:
return choosen_col[1]
return self.findFirstColumnEmpty()
| KeserOner/puissance4 | bestaplayer.py | Python | mit | 9,518 | 0.001681 |
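A usage sketch, assuming the grid file format implied by `getFirstGrid`: six lines of seven digits plus a trailing newline, where '0' is empty and '1'/'2' are player tokens:

```python
# Hypothetical 7x6 board file, top row first (row 5 is the bottom).
with open('grid.txt', 'w') as f:
    f.write('0000000\n' * 5)
    f.write('0012000\n')

bot = BestaPlayer('grid.txt', (1, 2))   # (own player number, opponent's)
print(bot.decideColumn())               # column index 0..6 to play next
```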
# *****************************************************************************
# Copyright (c) 2016 TechBubble Technologies and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Adam Milton-Barker - TechBubble Technologies Limited
# Irene Naya - TechBubble Technologies Limited
# *****************************************************************************
import inspect
import json
import os
import paho.mqtt.client as mqtt
import sys
import time
class JumpWayPythonMQTTDeviceConnection():
def __init__(self, configs):
self._configs = configs
self.mqttClient = None
self.mqttTLS = os.path.dirname(os.path.abspath(__file__)) + "/ca.pem"
self.mqttHost = 'iot.techbubbletechnologies.com'
self.mqttPort = 8883
self.deviceStatusCallback = None
self.deviceCommandsCallback = None
self.deviceKeysCallback = None
self.deviceSSLsCallback = None
self.devicePackageCallback = None
self.deviceAITrainingCallback = None
self.deviceAITrainingDataCallback = None
if self._configs['locationID'] == None:
raise ConfigurationException("locationID property is required")
if self._configs['zoneID'] == None:
raise ConfigurationException("zoneID property is required")
elif self._configs['deviceId'] == None:
raise ConfigurationException("deviceId property is required")
elif self._configs['deviceName'] == None:
raise ConfigurationException("deviceName property is required")
elif self._configs['username'] == None:
raise ConfigurationException("username property is required")
elif self._configs['password'] == None:
raise ConfigurationException("password property is required")
def connectToDevice(self):
self.mqttClient = mqtt.Client(client_id=self._configs['deviceName'], clean_session=False)
deviceStatusTopic = '%s/Devices/%s/%s/Status' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'])
self.mqttClient.will_set(deviceStatusTopic, "OFFLINE", 0, False)
self.mqttClient.tls_set(self.mqttTLS, certfile=None, keyfile=None)
self.mqttClient.on_connect = self.on_connect
self.mqttClient.on_message = self.on_message
self.mqttClient.on_publish = self.on_publish
self.mqttClient.on_subscribe = self.on_subscribe
self.mqttClient.username_pw_set(str(self._configs['username']),str(self._configs['password']))
self.mqttClient.connect(self.mqttHost,self.mqttPort,10)
self.mqttClient.loop_start()
def on_connect(self, client, obj, flags, rc):
self.publishToDeviceStatus("ONLINE")
print("rc: "+str(rc))
def on_subscribe(self, client, obj, mid, granted_qos):
print("Subscribed: "+str(self._configs['deviceName']))
def on_message(self, client, obj, msg):
splitTopic=msg.topic.split("/")
if splitTopic[4]=='Commands':
if self.deviceCommandsCallback == None:
print("No deviceCommandsCallback set")
else:
self.deviceCommandsCallback(msg.topic,msg.payload)
elif splitTopic[4]=='Keys':
if self.deviceKeysCallback == None:
print("No deviceKeysCallback set")
else:
self.deviceKeysCallback(msg.topic,msg.payload)
elif splitTopic[4]=='SSLs':
if self.deviceSSLsCallback == None:
print("No deviceSSLsCallback set")
else:
self.deviceSSLsCallback(msg.topic,msg.payload)
elif splitTopic[4]=='Package':
if self.devicePackageCallback == None:
print("No devicePackageCallback set")
else:
self.devicePackageCallback(msg.topic,msg.payload)
def subscribeToDeviceChannel(self, channel, qos=0):
if self._configs['locationID'] == None:
print("locationID is required!")
return False
elif self._configs['zoneID'] == None:
print("zoneID is required!")
return False
elif self._configs['deviceId'] == None:
print("deviceId is required!")
return False
else:
deviceChannel = '%s/Devices/%s/%s/%s' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'], channel)
self.mqttClient.subscribe(deviceChannel, qos=qos)
print("Subscribed to Device "+channel+" Channel")
return True
def publishToDeviceStatus(self, data):
if self._configs['locationID'] == None:
print("locationID is required!")
return False
elif self._configs['zoneID'] == None:
print("zoneID is required!")
return False
elif self._configs['deviceId'] == None:
print("deviceId is required!")
return False
else:
deviceStatusTopic = '%s/Devices/%s/%s/Status' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'])
self.mqttClient.publish(deviceStatusTopic,data)
print("Published to Device Status ")
def publishToDeviceChannel(self, channel, data):
if self._configs['locationID'] == None:
print("locationID is required!")
return False
elif self._configs['zoneID'] == None:
print("zoneID is required!")
return False
elif self._configs['deviceId'] == None:
print("deviceId is required!")
return False
else:
deviceChannel = '%s/Devices/%s/%s/%s' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'], channel)
self.mqttClient.publish(deviceChannel,json.dumps(data))
print("Published to Device "+channel+" Channel")
def publishToTassActivityRecognitionChannel(self, byteArray, userID):
if self._configs['locationID'] == None:
print("locationID is required!")
return False
elif self._configs['zoneID'] == None:
print("zoneID is required!")
return False
elif self._configs['deviceId'] == None:
print("deviceId is required!")
return False
else:
ActivityRecognitionTopic = '%s/Devices/%s/%s/ActivityRecognition/%s' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'], userID)
self.mqttClient.publish(ActivityRecognitionTopic,byteArray,0)
print("Published to Device Tass Activity Recognition Channel ")
def publishToTassActivityIntruderChannel(self, byteArray):
if self._configs['locationID'] == None:
print("locationID is required!")
return False
elif self._configs['zoneID'] == None:
print("zoneID is required!")
return False
elif self._configs['deviceId'] == None:
print("deviceId is required!")
return False
else:
ActivityIntruderTopic = '%s/Devices/%s/%s/ActivityIntruder' % (self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'])
self.mqttClient.publish(ActivityIntruderTopic,byteArray,0)
print("Published to Device Tass Activity Intruder Channel ")
def on_publish(self, client, obj, mid):
print("Published: "+str(mid))
def on_log(self, client, obj, level, string):
print(string)
def disconnectFromDevice(self):
self.publishToDeviceStatus("OFFLINE")
self.mqttClient.disconnect()
self.mqttClient.loop_stop()
| AdamMiltonBarker/TechBubble-Iot-JumpWay-Python-MQTT | src/techbubbleiotjumpwaymqtt/device.py | Python | epl-1.0 | 6,911 | 0.03328 |
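A usage sketch; the configuration values below are hypothetical placeholders for the IDs and credentials issued by the IoT JumpWay console:

```python
configs = {
    'locationID': 1,
    'zoneID': 2,
    'deviceId': 3,
    'deviceName': 'MyDevice',
    'username': 'device-mqtt-user',
    'password': 'device-mqtt-pass',
}
connection = JumpWayPythonMQTTDeviceConnection(configs)
connection.connectToDevice()
connection.subscribeToDeviceChannel('Commands')
connection.publishToDeviceChannel('Sensors', {'temperature': 21.5})
```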
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015. Tšili Lauri Johannes
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from dateutil.relativedelta import relativedelta
def get_time_range(start_date, end_date, step):
"""Get generator of time values within given times
Method is used to construct generator of time values within given interval.
The end values are both inclusive.
Parameters
----------
start_date, end_date : datetime, date or time
start_date and end_date are the time values between witch the list is
constructed.
step : dict
Dictionary describing the increment between included time values.
May hold negative values.
Yields
----------
datetime, date or time
The type of the yielded object depends on the input variables.
See Also
----------
dateutil.relativedelta : Refer here for possible values for step.
Examples
----------
>>> start_date = datetime.date(2010, 1, 1)
>>> end_date = datetime.date(2010, 3, 1)
    >>> list(get_time_range(start_date, end_date, {'months': 1}))
    [datetime.date(2010, 1, 1), datetime.date(2010, 2, 1), datetime.date(2010, 3, 1)]
"""
start_date = start_date - relativedelta(**step)
current_step = step.copy()
while start_date + relativedelta(**step) < end_date +\
relativedelta(**current_step):
yield start_date + relativedelta(**step)
for key in step.keys():
step[key] = step[key] + current_step[key]
def format_month(month, out_format="digit"):
"""Format month data to desired format
Method formats month's name to zero-padded digit string.
Parameters
----------
month : str
The month to be converted.
out_format : str, optional
Specifies the format into the month needs to be formated.
Default value is digit.
Returns
----------
month : str
The formated data is returned in a string even when the data represents
an integer.
Notes
----------
Currently the only supported formating is from a name into a
zero-padded string digit.
The input is not case sensitive.
Examples
----------
>>> format_month('feb')
'02'
    >>> format_month('aUG')
    '08'
"""
if out_format is "digit":
month = month.upper()[:3]
if month == 'JAN':
month = "01"
elif month == 'FEB':
month = "02"
elif month == 'MAR':
month = "03"
elif month == 'APR':
month = "04"
elif month == 'MAY':
month = "05"
elif month == 'JUN':
month = "06"
elif month == 'JUL':
month = "07"
elif month == 'AUG':
month = "08"
elif month == 'SEP':
month = "09"
elif month == 'OCT':
month = "10"
elif month == 'NOV':
month = "11"
elif month == 'DEC':
month = "12"
else:
return None
return month
def format_season(season, out_format="digit"):
"""Format season data to desired format
Method formats seasons's name to zero-padded digit string.
Parameters
----------
season : str
The season to be converted.
out_format : str, optional
Specifies the format into the season needs to be formated.
Default value is digit.
Returns
----------
season : str
The formated data is returned in a string even when the data represents
an integer.
Notes
----------
Currently the only supported formating is from a name into a
zero-padded string digit.
The input is not case sensitive.
Examples
----------
>>> format_season('spR')
'03'
>>> format_season('WintE')
'12'
"""
if out_format is "digit":
season = season.upper()[:3]
if season == 'SPR':
season = "03"
elif season == 'SUM':
season = "06"
elif season == 'AUT':
season = "09"
elif season == 'WIN':
season = "12"
else:
return None
return season
| tsili/datpy | datpy/time_operations.py | Python | agpl-3.0 | 4,806 | 0 |
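Combining the helpers, e.g. to enumerate month boundaries; note that `get_time_range` mutates the step dict it receives, so pass a fresh literal each call:

```python
import datetime

start = datetime.date(2010, 1, 1)
end = datetime.date(2010, 6, 1)
for d in get_time_range(start, end, {'months': 1}):
    print(d.strftime('%Y-%m'))   # 2010-01 ... 2010-06, both ends inclusive
```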
#!/usr/bin/python2
# Copyright 2012 Anton Beloglazov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import random
import shutil
import time
from datetime import datetime
from db import init_db
if len(sys.argv) < 5:
print 'You must specify 4 arguments:'
print '1. The MySQL DB user name'
print '2. The MySQL DB password'
print '3. The start datetime in the format: %Y-%m-%d %H:%M:%S'
print '4. The finish datetime in the format: %Y-%m-%d %H:%M:%S'
sys.exit(1)
db = init_db('mysql://' + sys.argv[1] + ':' + sys.argv[2] + '@localhost/spe')
start_time = datetime.fromtimestamp(
time.mktime(time.strptime(sys.argv[3], '%Y-%m-%d %H:%M:%S')))
finish_time = datetime.fromtimestamp(
time.mktime(time.strptime(sys.argv[4], '%Y-%m-%d %H:%M:%S')))
#print "Start time: " + str(start_time)
#print "Finish time: " + str(finish_time)
def total_seconds(delta):
return (delta.microseconds +
(delta.seconds + delta.days * 24 * 3600) * 1000000) / 1000000
total_time = 0
total_idle_time = 0
for hostname, host_id in db.select_host_ids().items():
prev_timestamp = start_time
prev_state = 1
states = {0: [], 1: []}
for timestamp, state in db.select_host_states(host_id, start_time, finish_time):
if prev_timestamp:
states[prev_state].append(total_seconds(timestamp - prev_timestamp))
prev_timestamp = timestamp
prev_state = state
states[prev_state].append(total_seconds(finish_time - prev_timestamp))
#print states
off_time = sum(states[0])
on_time = sum(states[1])
total_time += off_time + on_time
total_idle_time += off_time
print "Total time: " + str(total_time)
print "Total idle time: " + str(total_idle_time)
print "Idle time fraction: " + str(float(total_idle_time) / total_time)
| beloglazov/openstack-neat | utils/idle-time-fraction.py | Python | apache-2.0 | 2,317 | 0.003453 |
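The hand-rolled `total_seconds` above mirrors `timedelta.total_seconds()`, which only appeared in Python 2.7; a quick sanity check reusing the helper defined in the script:

```python
from datetime import timedelta

delta = timedelta(days=1, hours=2, seconds=30)
# 1*24*3600 + 2*3600 + 30 = 93630
print total_seconds(delta)         # 93630
print int(delta.total_seconds())   # the same on Python >= 2.7
```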
# pylint:disable=R0201
"""docstring"""
__revision__ = ''
class Interface(object):
"""base class for interfaces"""
class IMachin(Interface):
"""docstring"""
def truc(self):
"""docstring"""
def troc(self, argument):
"""docstring"""
class Correct1(object):
"""docstring"""
__implements__ = IMachin
def __init__(self):
pass
def truc(self):
"""docstring"""
pass
def troc(self, argument):
"""docstring"""
pass
class Correct2(object):
"""docstring"""
__implements__ = (IMachin,)
def __init__(self):
pass
def truc(self):
"""docstring"""
pass
def troc(self, argument):
"""docstring"""
print argument
class MissingMethod(object):
"""docstring"""
__implements__ = IMachin,
def __init__(self):
pass
def troc(self, argument):
"""docstring"""
print argument
def other(self):
"""docstring"""
class BadArgument(object):
"""docstring"""
__implements__ = (IMachin,)
def __init__(self):
pass
def truc(self):
"""docstring"""
pass
def troc(self):
"""docstring"""
pass
class InterfaceCantBeFound(object):
"""docstring"""
__implements__ = undefined
def __init__(self):
"""only to make pylint happier"""
def please(self):
"""public method 1/2"""
def besilent(self):
"""public method 2/2"""
class InterfaceCanNowBeFound(object):
"""docstring"""
__implements__ = BadArgument.__implements__ + Correct2.__implements__
def __init__(self):
"""only to make pylint happier"""
def please(self):
"""public method 1/2"""
def besilent(self):
"""public method 2/2"""
class EmptyImplements(object):
"""no pb"""
__implements__ = ()
def __init__(self):
"""only to make pylint happier"""
def please(self):
"""public method 1/2"""
def besilent(self):
"""public method 2/2"""
| godfryd/pylint | test/input/func_interfaces.py | Python | gpl-2.0 | 2,062 | 0.00485 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from ..._input_field import InputField
class BoolInput(InputField):
"""Simple input that controls a boolean variable.
GUI indications
----------------
It can be implemented as a switch or a checkbox, for example.
"""
_false_strings = ("f", "false")
_true_strings = ("t", "true")
dtype = bool
_type = 'bool'
_default = {}
def parse(self, val):
if val is None:
pass
elif isinstance(val, str):
val = val.lower()
if val in self._true_strings:
val = True
elif val in self._false_strings:
val = False
else:
raise ValueError(f"String '{val}' is not understood by {self.__class__.__name__}")
elif not isinstance(val, bool):
self._raise_type_error(val)
return val
| zerothi/sisl | sisl/viz/input_fields/basic/bool.py | Python | mpl-2.0 | 1,059 | 0.000944 |
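The accepted spellings are easiest to see laid out; a standalone sketch mirroring the parse rules (without constructing the class, whose initializer lives in the base `InputField`):

```python
# Mirrors BoolInput's string handling: case-insensitive t/true/f/false.
_TRUE, _FALSE = ("t", "true"), ("f", "false")

def parse_bool(val):
    if isinstance(val, str):
        low = val.lower()
        if low in _TRUE:
            return True
        if low in _FALSE:
            return False
        raise ValueError(f"String '{val}' is not understood")
    if val is None or isinstance(val, bool):
        return val
    raise TypeError(f"unsupported type: {type(val)}")

assert parse_bool("TRUE") is True and parse_bool("f") is False
```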
"""
file: Calculate_Jump-ratio.py
author: Michael Entrup b. Epping (michael.entrup@wwu.de)
version: 20170306
info: A script that calculates the Jump-Ratio of two images.
The second image is devided by the first one.
A drift correction is performed. The first image is shiftet towards to the second one.
"""
# pylint: disable-msg=C0103
# pylint: enable-msg=C0103
from __future__ import with_statement, division
# pylint: disable-msg=E0401
from EFTEMj_pyLib import CorrectDrift as drift
from EFTEMj_pyLib import Tools as tools
from ij import IJ, WindowManager
from ij.gui import GenericDialog
from ij.plugin import ImageCalculator
from ij.process import ImageStatistics as Stats
# pylint: enable-msg=E0401
def get_setup():
""" Returns the drift correction mode and two image."""
options = drift.get_options()
modes = drift.get_modes()
dialog = GenericDialog('Jump-ratio setup')
dialog.addMessage('Select the mode for drift correction\n' +
'and the images to process.')
dialog.addChoice('Mode:', options, options[0])
image_ids = WindowManager.getIDList()
if not image_ids or len(image_ids) < 2:
return [None] * 3
image_titles = [WindowManager.getImage(id).getTitle() for id in image_ids]
dialog.addMessage('Post-edge is divided by the pre-edge.')
dialog.addChoice('Pre-edge', image_titles, image_titles[0])
dialog.addChoice('Post-edge', image_titles, image_titles[1])
dialog.showDialog()
if dialog.wasCanceled():
return [None] * 3
mode = modes[dialog.getNextChoiceIndex()]
img1 = WindowManager.getImage(image_ids[dialog.getNextChoiceIndex()])
img2 = WindowManager.getImage(image_ids[dialog.getNextChoiceIndex()])
return mode, img1, img2
def run_script():
"""Function to be run when this file is used as a script"""
selected_mode, img1_in, img2_in = get_setup()
if not selected_mode:
return
corrected_stack = drift.get_corrected_stack(
(img1_in, img2_in), mode=selected_mode)
img1, img2 = tools.stack_to_list_of_imp(corrected_stack)
img_ratio = ImageCalculator().run('Divide create', img2, img1)
img_ratio.setTitle('Jump-ratio [%s divided by %s]' % (img2.getShortTitle(),
img1.getShortTitle()
)
)
img_ratio.changes = True
img_ratio.copyScale(img1_in)
img_ratio.show()
IJ.run(img_ratio, 'Enhance Contrast', 'saturated=0.35')
    # We want to optimise the lower display limit:
minimum = img_ratio.getProcessor().getMin()
maximum = img_ratio.getProcessor().getMax()
stat = img_ratio.getStatistics(Stats.MEAN + Stats.STD_DEV)
mean = stat.mean
stdv = stat.stdDev
if minimum < mean - stdv:
if mean - stdv >= 0:
img_ratio.getProcessor().setMinAndMax(mean - stdv, maximum)
else:
img_ratio.getProcessor().setMinAndMax(0, maximum)
img_ratio.updateAndDraw()
if __name__ == '__main__':
run_script()
| m-entrup/EFTEMj | EFTEMj-pyScripts/src/main/resources/scripts/Plugins/EFTEMj/ESI/Calculate_Jump-ratio.py | Python | bsd-2-clause | 3,118 | 0.000962 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('chef_buddy', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='ingredientflavorcompound',
name='score',
field=models.FloatField(default=1.0),
preserve_default=False,
),
]
| chef-buddy/chef-buddy-django | chef_buddy/migrations/0002_ingredientflavorcompound_score.py | Python | mit | 444 | 0 |
from plow.gui.manifest import QtCore, QtGui
from plow.gui.util import formatDateTime, formatDuration
__all__ = [
"Text",
"Number",
"Decimal",
"DateTime",
"PillWidget",
"Checkbox"
]
class FormWidget(QtGui.QWidget):
"""
The base class for all form widgets.
"""
__LOCKED_PIX = None
def __init__(self, value, parent=None):
QtGui.QWidget.__init__(self, parent)
layout = QtGui.QGridLayout(self)
layout.setSpacing(0)
layout.setContentsMargins(0, 0, 0, 0)
self._widget = None
self.__status = QtGui.QLabel(self)
self.__status.setContentsMargins(5, 0, 0, 0)
layout.addWidget(self.__status, 0, 2)
if not FormWidget.__LOCKED_PIX:
FormWidget.__LOCKED_PIX = QtGui.QPixmap(":/images/locked.png")
FormWidget.__LOCKED_PIX = FormWidget.__LOCKED_PIX.scaled(
QtCore.QSize(12, 12), QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
def setReadOnly(self, value):
self._setReadOnly(value)
if value:
self.__status.setPixmap(FormWidget.__LOCKED_PIX)
else:
self.__status.setText("")
def setSuffix(self, value):
self._setSuffix(value)
def _setSuffix(self, value):
self.layout().addWidget(QtGui.QLabel(value), 0, 1)
def _setReadOnly(self, value):
pass
def setWidget(self, widget):
self._widget = widget
self.layout().addWidget(widget, 0, 0)
class Text(FormWidget):
def __init__(self, text, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLineEdit(text, self))
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
self._widget.setCursorPosition(1)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
class Number(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
widget = QtGui.QSpinBox(self)
widget.setMinimum(0)
widget.setMaximum(1000000)
widget.setMinimumWidth(100)
widget.setValue(value)
self.setWidget(widget)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
self._widget.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
def _setSuffix(self, value):
self._widget.setSuffix(value)
class Decimal(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
widget = QtGui.QDoubleSpinBox(self)
widget.setValue(value)
self.setWidget(widget)
widget.setMinimumWidth(100)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
self._widget.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
def _setSuffix(self, value):
self._widget.setSuffix(value)
class DateTime(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLabel(formatDateTime(value), self))
class Duration(FormWidget):
def __init__(self, times, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLabel(formatDuration(times[0], times[1]), self))
class PillWidget(FormWidget):
def __init__(self, value, parent):
FormWidget.__init__(self, parent)
data, color = value
self.label = QtGui.QLabel(data, self)
self.label.setStyleSheet("border: 1px solid #222222; background-color: %s; border-radius: 6px;" % color)
self.label.setMinimumWidth(100)
self.setWidget(self.label)
class Checkbox(FormWidget):
def __init__(self, bvalue, parent=None):
        FormWidget.__init__(self, bvalue, parent)
self.setWidget(QtGui.QCheckBox(self))
self._widget.setCheckState(QtCore.Qt.Checked if bvalue else QtCore.Qt.Unchecked)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
    def _setReadOnly(self, value):
        # QCheckBox has no setReadOnly(); grey the box out instead so the
        # locked state is still honoured.
        self._widget.setEnabled(not value)
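# --- Usage sketch (illustrative; not part of the original module) ---
# A hedged example of composing these form widgets. It assumes the Qt4-style
# bindings re-exported by plow.gui.manifest (QtGui.QApplication etc.), a
# display to show the panel on, and that the resource file providing
# ":/images/locked.png" has been registered. The field values are invented.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    panel = QtGui.QWidget()
    vbox = QtGui.QVBoxLayout(panel)
    name = Text("render_job_042", panel)
    cores = Number(8, panel)
    cores.setSuffix(" cores")
    done = Checkbox(True, panel)
    done.setReadOnly(True)  # shows the locked pixmap and greys the box out
    for w in (name, cores, done):
        vbox.addWidget(w)
    panel.show()
    sys.exit(app.exec_())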
| chadmv/plow | lib/python/plow/gui/form/fwidgets.py | Python | apache-2.0 | 4,081 | 0.00294 |
import time
from twisted.internet import defer, reactor, protocol
from twisted.python import log
from twisted.enterprise import adbapi
class SideloaderDB(object):
def __init__(self):
self.p = adbapi.ConnectionPool('psycopg2',
database='sideloader',
host='localhost',
user='postgres',
)
def _fetchOneTxn(self, txn, *a, **kw):
" Transaction callback for self.fetchOne "
txn.execute(*a)
r = txn.fetchall()
if r:
return r[0]
else:
return None
def fetchOne(self, *a, **kw):
" Fetch one row only with this query "
return self.p.runInteraction(self._fetchOneTxn, *a, **kw)
def runInsert(self, table, keys):
" Builds a boring INSERT statement and runs it "
        # Unzip the items tuple into matched-order key and value sequences
keys, values = zip(*keys.items())
st = "INSERT INTO %s (%s) VALUES (%s) RETURNING id" % (
table,
','.join(keys),
            ','.join(['%s']*len(keys)) # one "%s" placeholder per column
)
return self.fetchOne(st, values)
@defer.inlineCallbacks
def select(self, table, fields, **kw):
q = []
args = []
for k, v in kw.items():
            q.append('%s=%%%%s' % k)  # yields e.g. "name=%%s"; the later "query %" step turns it into "name=%s"
args.append(v)
query = "SELECT %s FROM %s"
if q:
query += " WHERE " + ' and '.join(q)
results = yield self.p.runQuery(query % (
','.join(fields),
table,
), tuple(args))
res = []
for r in results:
obj = {}
for i, col in enumerate(r):
obj[fields[i]] = col
res.append(obj)
defer.returnValue(res)
# Project queries
@defer.inlineCallbacks
def getProject(self, id):
r = yield self.select('sideloader_project',
['id', 'name', 'github_url', 'branch', 'deploy_file', 'idhash',
'notifications', 'slack_channel', 'created_by_user_id',
'release_stream_id', 'build_script', 'package_name',
'postinstall_script', 'package_manager', 'deploy_type'], id=id)
defer.returnValue(r[0])
def updateBuildLog(self, id, log):
return self.p.runOperation('UPDATE sideloader_build SET log=%s WHERE id=%s', (log, id))
@defer.inlineCallbacks
def getProjectNotificationSettings(self, id):
q = yield self.p.runQuery(
'SELECT name, notifications, slack_channel FROM sideloader_project'
' WHERE id=%s', (id,))
defer.returnValue(q[0])
# Build queries
@defer.inlineCallbacks
def getBuild(self, id):
r = yield self.select('sideloader_build', ['id', 'build_time',
'task_id', 'log', 'project_id', 'state', 'build_file'], id=id)
defer.returnValue(r[0])
@defer.inlineCallbacks
def getBuildNumber(self, repo):
q = yield self.p.runQuery('SELECT build_num FROM sideloader_buildnumbers WHERE package=%s', (repo,))
if q:
defer.returnValue(q[0][0])
else:
defer.returnValue(0)
def setBuildNumber(self, repo, num):
return self.p.runOperation('UPDATE sideloader_buildnumbers SET build_num=%s WHERE package=%s', (num, repo))
def setBuildState(self, id, state):
return self.p.runOperation('UPDATE sideloader_build SET state=%s WHERE id=%s', (state, id))
def setBuildFile(self, id, f):
return self.p.runOperation('UPDATE sideloader_build SET build_file=%s WHERE id=%s', (f, id))
# Release queries
def createRelease(self, release):
return self.runInsert('sideloader_release', release)
def checkReleaseSchedule(self, release):
if not release['scheduled']:
return True
t = int(time.mktime(release['scheduled'].timetuple()))
if (time.time() - t) > 0:
return True
return False
@defer.inlineCallbacks
def releaseSignoffCount(self, release_id):
q = yield self.p.runQuery(
            'SELECT COUNT(*) FROM sideloader_releasesignoff WHERE release_id=%s AND signed=true', (release_id,))
defer.returnValue(q[0][0])
@defer.inlineCallbacks
def signoff_remaining(self, release_id, flow):
q = flow['quorum']
count = yield self.releaseSignoffCount(release_id)
email_list = self.getFlowSignoffList(flow)
if q == 0:
defer.returnValue(len(email_list) - count)
defer.returnValue(q - count)
@defer.inlineCallbacks
def checkReleaseSignoff(self, release_id, flow):
if not flow['require_signoff']:
defer.returnValue(True)
rem = yield self.signoff_remaining(release_id, flow)
if rem > 0:
defer.returnValue(False)
defer.returnValue(True)
@defer.inlineCallbacks
def countReleases(self, id, waiting=False, lock=False):
q = yield self.p.runQuery('SELECT count(*) FROM sideloader_release'
' WHERE flow_id=%s AND waiting=%s AND lock=%s', (id, waiting, lock)
)
defer.returnValue(q[0][0])
def getReleases(self, flowid=None, waiting=None, lock=None):
q = {}
if flowid is not None:
            q['flow_id'] = flowid
if waiting is not None:
q['waiting'] = waiting
if lock is not None:
q['lock'] = lock
return self.select('sideloader_release',
['id', 'release_date', 'scheduled', 'waiting', 'lock', 'build_id', 'flow_id'], **q)
@defer.inlineCallbacks
def getRelease(self, id):
r = yield self.select('sideloader_release',
['id', 'release_date', 'scheduled', 'waiting', 'lock', 'build_id', 'flow_id'], id=id)
defer.returnValue(r[0])
@defer.inlineCallbacks
def getReleaseStream(self, id):
r = yield self.select('sideloader_releasestream',
['id', 'name', 'push_command'], id=id)
defer.returnValue(r[0])
def updateReleaseLocks(self, id, lock):
return self.p.runOperation('UPDATE sideloader_release SET lock=%s WHERE id=%s', (lock, id))
def updateReleaseState(self, id, lock=False, waiting=False):
return self.p.runOperation('UPDATE sideloader_release SET lock=%s, waiting=%s WHERE id=%s', (lock, waiting, id))
# Flow queries
@defer.inlineCallbacks
def getFlow(self, id):
r = yield self.select('sideloader_releaseflow', [
'id', 'name', 'stream_mode', 'require_signoff', 'signoff_list',
'quorum', 'service_restart', 'service_pre_stop', 'puppet_run',
'auto_release', 'project_id', 'stream_id', 'notify', 'notify_list'
], id=id)
if r:
defer.returnValue(r[0])
else:
defer.returnValue(None)
def getFlowSignoffList(self, flow):
return flow['signoff_list'].replace('\r', ' ').replace(
'\n', ' ').replace(',', ' ').strip().split()
def getFlowNotifyList(self, flow):
if flow['notify']:
return flow['notify_list'].replace('\r', ' ').replace(
'\n', ' ').replace(',', ' ').strip().split()
else:
return []
def getAutoFlows(self, project):
return self.select('sideloader_releaseflow', [
'id', 'name', 'stream_mode', 'require_signoff', 'signoff_list',
'quorum', 'service_restart', 'service_pre_stop', 'puppet_run',
'auto_release', 'project_id', 'stream_id', 'notify', 'notify_list'
], project_id=project, auto_release=True)
@defer.inlineCallbacks
def getNextFlowRelease(self, flow_id):
q = yield self.p.runQuery('SELECT id FROM sideloader_release'
' WHERE flow_id=%s AND waiting=true ORDER BY release_date DESC LIMIT 1', (flow_id,)
)
if q:
release = yield self.getRelease(q[0][0])
else:
release = None
defer.returnValue(release)
@defer.inlineCallbacks
def getLastFlowRelease(self, flow_id):
q = yield self.p.runQuery('SELECT id FROM sideloader_release'
' WHERE flow_id=%s AND waiting=false ORDER BY release_date DESC LIMIT 1', (flow_id,)
)
if q:
release = yield self.getRelease(q[0][0])
else:
release = None
defer.returnValue(release)
# Targets
def getFlowTargets(self, flow_id):
return self.select('sideloader_target', ['id', 'deploy_state',
'log', 'current_build_id', 'release_id', 'server_id'],
release_id = flow_id)
@defer.inlineCallbacks
def getServer(self, id):
s = yield self.select('sideloader_server', ['id', 'name',
'last_checkin', 'last_puppet_run', 'status', 'change',
'specter_status'], id=id)
if s:
defer.returnValue(s[0])
else:
defer.returnValue(None)
def updateTargetState(self, id, state):
return self.p.runOperation('UPDATE sideloader_target SET deploy_state=%s WHERE id=%s', (state, id))
def updateTargetLog(self, id, log):
return self.p.runOperation('UPDATE sideloader_target SET log=%s WHERE id=%s', (log, id))
def updateTargetBuild(self, id, build):
return self.p.runOperation('UPDATE sideloader_target SET current_build_id=%s WHERE id=%s', (build, id))
def updateServerStatus(self, id, status):
return self.p.runOperation('UPDATE sideloader_server SET status=%s WHERE id=%s', (status, id))
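# --- Usage sketch (illustrative; not part of the original module) ---
# A hedged example of driving this layer from a Twisted reactor. It assumes
# a local PostgreSQL with the sideloader schema and the credentials used in
# SideloaderDB.__init__ above; the project id 1 is invented for the demo.
@defer.inlineCallbacks
def _demo():
    db = SideloaderDB()
    project = yield db.getProject(1)
    log.msg("fetched project: %r" % (project,))
    reactor.stop()
if __name__ == '__main__':
    reactor.callWhenRunning(_demo)
    reactor.run()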
| praekelt/sideloader2 | sideloader.worker/sideloader/worker/task_db.py | Python | mit | 9,546 | 0.005133 |
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import integer, positive_integer, network_port, boolean
class AliasTarget(AWSProperty):
props = {
'HostedZoneId': (basestring, True),
'DNSName': (basestring, True),
'EvaluateTargetHealth': (boolean, False)
}
def __init__(self,
hostedzoneid=None,
dnsname=None,
evaluatetargethealth=None,
**kwargs):
# provided for backward compatibility
if hostedzoneid is not None:
kwargs['HostedZoneId'] = hostedzoneid
if dnsname is not None:
kwargs['DNSName'] = dnsname
if evaluatetargethealth is not None:
kwargs['EvaluateTargetHealth'] = evaluatetargethealth
super(AliasTarget, self).__init__(**kwargs)
class GeoLocation(AWSProperty):
props = {
'ContinentCode': (basestring, False),
'CountryCode': (basestring, False),
'SubdivisionCode': (basestring, False),
}
class BaseRecordSet(object):
props = {
'AliasTarget': (AliasTarget, False),
'Comment': (basestring, False),
'Failover': (basestring, False),
'GeoLocation': (GeoLocation, False),
'HealthCheckId': (basestring, False),
'HostedZoneId': (basestring, False),
'HostedZoneName': (basestring, False),
'MultiValueAnswer': (boolean, False),
'Name': (basestring, True),
'Region': (basestring, False),
'ResourceRecords': (list, False),
'SetIdentifier': (basestring, False),
'TTL': (integer, False),
'Type': (basestring, True),
'Weight': (integer, False),
}
class RecordSetType(AWSObject, BaseRecordSet):
# This is a top-level resource
resource_type = "AWS::Route53::RecordSet"
class RecordSet(AWSProperty, BaseRecordSet):
# This is for use in a list with RecordSetGroup (below)
pass
class RecordSetGroup(AWSObject):
resource_type = "AWS::Route53::RecordSetGroup"
props = {
'HostedZoneId': (basestring, False),
'HostedZoneName': (basestring, False),
'RecordSets': (list, False),
'Comment': (basestring, False),
}
class AlarmIdentifier(AWSProperty):
props = {
'Name': (basestring, True),
'Region': (basestring, True),
}
class HealthCheckConfiguration(AWSProperty):
props = {
'AlarmIdentifier': (AlarmIdentifier, False),
'ChildHealthChecks': ([basestring], False),
'EnableSNI': (boolean, False),
'FailureThreshold': (positive_integer, False),
'FullyQualifiedDomainName': (basestring, False),
'HealthThreshold': (positive_integer, False),
'InsufficientDataHealthStatus': (basestring, False),
'Inverted': (boolean, False),
'IPAddress': (basestring, False),
'MeasureLatency': (boolean, False),
'Port': (network_port, False),
'Regions': ([basestring], False),
'RequestInterval': (positive_integer, False),
'ResourcePath': (basestring, False),
'SearchString': (basestring, False),
'Type': (basestring, True),
}
class HealthCheck(AWSObject):
resource_type = "AWS::Route53::HealthCheck"
props = {
'HealthCheckConfig': (HealthCheckConfiguration, True),
'HealthCheckTags': (Tags, False),
}
class HostedZoneConfiguration(AWSProperty):
props = {
'Comment': (basestring, False),
}
class HostedZoneVPCs(AWSProperty):
props = {
'VPCId': (basestring, True),
'VPCRegion': (basestring, True),
}
class QueryLoggingConfig(AWSProperty):
props = {
'CloudWatchLogsLogGroupArn': (basestring, True),
}
class HostedZone(AWSObject):
resource_type = "AWS::Route53::HostedZone"
props = {
'HostedZoneConfig': (HostedZoneConfiguration, False),
'HostedZoneTags': (Tags, False),
'Name': (basestring, True),
'QueryLoggingConfig': (QueryLoggingConfig, False),
'VPCs': ([HostedZoneVPCs], False),
}
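# --- Usage sketch (illustrative; not part of the original module) ---
# In user code these classes are combined with a troposphere Template,
# roughly as below; the zone name and address are invented for the demo.
def _example_template():
    from troposphere import Template
    template = Template()
    template.add_resource(RecordSetType(
        "ExampleDnsRecord",
        HostedZoneName="example.com.",
        Name="www.example.com.",
        Type="A",
        TTL="300",
        ResourceRecords=["203.0.113.10"],
    ))
    return template.to_json()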
| pas256/troposphere | troposphere/route53.py | Python | bsd-2-clause | 4,197 | 0 |
'''
Created on Mar 18, 2013
@author: Gooch
'''
import unittest
from Pipeline.PipelineSampleData import SampleData
class SampleDataTest(unittest.TestCase):
def setUp(self):
#might not need this, runs before each test
pass
def tearDown(self):
#might not need this, runs after each test
pass
def testSingleManyColumns(self):
pass
def testSingleFewColumns(self):
pass
def testPairedManyColumns(self):
pass
def testPairedFewColumns(self):
pass
def testEmptyFile(self):
pass
def testIgnoresBlankLines(self):
pass
def testDuplicateEntrySingle(self):
pass
def testDuplicateEntryPaired(self):
pass
def testDuplicateEntryMixed1(self):
pass
def testDuplicateEntryMixed2(self):
pass
def testValidSingle(self):
pass
def testValidPaired(self):
pass
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | kotoroshinoto/Cluster_SimpleJob_Generator | pybin/Test/PipelineSampleDataTest.py | Python | unlicense | 1,017 | 0.019666 |
# python
from distutils.core import setup
setup(
name = 'zplot',
packages = ['zplot'],
version = '1.41',
description = 'A simple graph-creation library',
author = 'Remzi H. Arpaci-Dusseau',
author_email = 'remzi.arpacidusseau@gmail.com',
url = 'https://github.com/z-plot/z-plot',
download_url = 'https://github.com/z-plot/z-plot/tarball/1.4',
keywords = ['plotting', 'graphing', 'postscript', 'svg'],
classifiers = [],
)
| z-plot/z-plot | setup.py | Python | bsd-3-clause | 472 | 0.055085 |
# (c) 2011, 2012 Georgia Tech Research Corporation
# This source code is released under the New BSD license. Please see
# http://wiki.quantsoftware.org/index.php?title=QSTK_License
# for license details.
#
# Created on October <day>, 2011
#
# @author: Vishal Shekhar
# @contact: mailvishalshekhar@gmail.com
# @summary: Utility script to create list of symbols for study.
import qstkutil.DataAccess as da
import qstkutil.qsdateutil as du
import datetime as dt
dataobj = da.DataAccess('Norgate')
delistSymbols = set(dataobj.get_symbols_in_sublist('/US/Delisted Securities'))
allSymbols = set(dataobj.get_all_symbols()) #by default Alive symbols only
aliveSymbols = list(allSymbols - delistSymbols) # set difference is smart
startday = dt.datetime(2008,1,1)
endday = dt.datetime(2009,12,31)
timeofday=dt.timedelta(hours=16)
timestamps = du.getNYSEdays(startday,endday,timeofday)
#Actual Close Prices of aliveSymbols and allSymbols
aliveSymbsclose = dataobj.get_data(timestamps, aliveSymbols, 'actual_close')
allSymbsclose = dataobj.get_data(timestamps, allSymbols, 'actual_close')
# keep symbols that dipped below $1 at least once but were not always below it
# (renamed from "file" to avoid shadowing the Python 2 builtin)
out = open('aliveSymbols2','w')
for symbol in aliveSymbols:
    belowdollar = len(aliveSymbsclose[symbol][aliveSymbsclose[symbol]<1.0])
    if belowdollar and (len(aliveSymbsclose[symbol]) > belowdollar):
        out.write(str(symbol)+'\n')
out.close()
out = open('allSymbols2','w')
for symbol in allSymbols:
    belowdollar = len(allSymbsclose[symbol][allSymbsclose[symbol]<1.0])
    if belowdollar and (len(allSymbsclose[symbol]) > belowdollar):
        out.write(str(symbol)+'\n')
out.close()
| grahesh/Stock-Market-Event-Analysis | qstkstudy/stockListGen.py | Python | bsd-3-clause | 1,592 | 0.013819 |
#!/usr/bin/env python
"""Implementation of a router class that does no ACL checks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from typing import Optional
from grr_response_server import access_control
from grr_response_server.gui import api_call_router
from grr_response_server.gui.api_plugins import artifact as api_artifact
from grr_response_server.gui.api_plugins import client as api_client
from grr_response_server.gui.api_plugins import config as api_config
from grr_response_server.gui.api_plugins import cron as api_cron
from grr_response_server.gui.api_plugins import flow as api_flow
from grr_response_server.gui.api_plugins import hunt as api_hunt
from grr_response_server.gui.api_plugins import output_plugin as api_output_plugin
from grr_response_server.gui.api_plugins import reflection as api_reflection
from grr_response_server.gui.api_plugins import stats as api_stats
from grr_response_server.gui.api_plugins import timeline as api_timeline
from grr_response_server.gui.api_plugins import user as api_user
from grr_response_server.gui.api_plugins import vfs as api_vfs
from grr_response_server.gui.api_plugins import yara as api_yara
class ApiCallRouterWithoutChecks(api_call_router.ApiCallRouterStub):
"""Router that does no ACL checks whatsoever."""
# Artifacts methods.
# =================
#
def ListArtifacts(self, args, token=None):
return api_artifact.ApiListArtifactsHandler()
def UploadArtifact(self, args, token=None):
return api_artifact.ApiUploadArtifactHandler()
def DeleteArtifacts(self, args, token=None):
return api_artifact.ApiDeleteArtifactsHandler()
# Clients methods.
# ===============
#
def SearchClients(self, args, token=None):
return api_client.ApiSearchClientsHandler()
def VerifyAccess(self, args, token=None):
return api_client.ApiVerifyAccessHandler()
def GetClient(self, args, token=None):
return api_client.ApiGetClientHandler()
def GetClientVersions(self, args, token=None):
return api_client.ApiGetClientVersionsHandler()
def GetClientVersionTimes(self, args, token=None):
return api_client.ApiGetClientVersionTimesHandler()
def InterrogateClient(self, args, token=None):
return api_client.ApiInterrogateClientHandler()
def GetInterrogateOperationState(self, args, token=None):
return api_client.ApiGetInterrogateOperationStateHandler()
def GetLastClientIPAddress(self, args, token=None):
return api_client.ApiGetLastClientIPAddressHandler()
def ListClientCrashes(self, args, token=None):
return api_client.ApiListClientCrashesHandler()
def ListClientActionRequests(self, args, token=None):
return api_client.ApiListClientActionRequestsHandler()
def GetClientLoadStats(self, args, token=None):
return api_client.ApiGetClientLoadStatsHandler()
# Virtual file system methods.
# ============================
#
def ListFiles(self, args, token=None):
return api_vfs.ApiListFilesHandler()
def GetVfsFilesArchive(self, args, token=None):
return api_vfs.ApiGetVfsFilesArchiveHandler()
def GetFileDetails(self, args, token=None):
return api_vfs.ApiGetFileDetailsHandler()
def GetFileText(self, args, token=None):
return api_vfs.ApiGetFileTextHandler()
def GetFileBlob(self, args, token=None):
return api_vfs.ApiGetFileBlobHandler()
def GetFileVersionTimes(self, args, token=None):
return api_vfs.ApiGetFileVersionTimesHandler()
def GetFileDownloadCommand(self, args, token=None):
return api_vfs.ApiGetFileDownloadCommandHandler()
def CreateVfsRefreshOperation(self, args, token=None):
return api_vfs.ApiCreateVfsRefreshOperationHandler()
def GetVfsRefreshOperationState(self, args, token=None):
return api_vfs.ApiGetVfsRefreshOperationStateHandler()
def GetVfsTimeline(self, args, token=None):
return api_vfs.ApiGetVfsTimelineHandler()
def GetVfsTimelineAsCsv(self, args, token=None):
return api_vfs.ApiGetVfsTimelineAsCsvHandler()
def UpdateVfsFileContent(self, args, token=None):
return api_vfs.ApiUpdateVfsFileContentHandler()
def GetVfsFileContentUpdateState(self, args, token=None):
return api_vfs.ApiGetVfsFileContentUpdateStateHandler()
def GetFileDecoders(self, args, token=None):
return api_vfs.ApiGetFileDecodersHandler()
def GetDecodedFileBlob(self, args, token=None):
return api_vfs.ApiGetDecodedFileHandler()
# Clients labels methods.
# ======================
#
def ListClientsLabels(self, args, token=None):
return api_client.ApiListClientsLabelsHandler()
def AddClientsLabels(self, args, token=None):
return api_client.ApiAddClientsLabelsHandler()
def RemoveClientsLabels(self, args, token=None):
return api_client.ApiRemoveClientsLabelsHandler()
# Clients flows methods.
# =====================
#
def ListFlows(self, args, token=None):
return api_flow.ApiListFlowsHandler()
def GetFlow(self, args, token=None):
return api_flow.ApiGetFlowHandler()
def CreateFlow(self, args, token=None):
return api_flow.ApiCreateFlowHandler()
def CancelFlow(self, args, token=None):
return api_flow.ApiCancelFlowHandler()
def ListFlowRequests(self, args, token=None):
return api_flow.ApiListFlowRequestsHandler()
def ListFlowResults(self, args, token=None):
return api_flow.ApiListFlowResultsHandler()
def GetExportedFlowResults(self, args, token=None):
return api_flow.ApiGetExportedFlowResultsHandler()
def GetFlowResultsExportCommand(self, args, token=None):
return api_flow.ApiGetFlowResultsExportCommandHandler()
def GetFlowFilesArchive(self, args, token=None):
return api_flow.ApiGetFlowFilesArchiveHandler()
def ListFlowOutputPlugins(self, args, token=None):
return api_flow.ApiListFlowOutputPluginsHandler()
def ListFlowOutputPluginLogs(self, args, token=None):
return api_flow.ApiListFlowOutputPluginLogsHandler()
def ListFlowOutputPluginErrors(self, args, token=None):
return api_flow.ApiListFlowOutputPluginErrorsHandler()
def ListFlowLogs(self, args, token=None):
return api_flow.ApiListFlowLogsHandler()
def GetCollectedTimeline(self, args, token=None):
return api_timeline.ApiGetCollectedTimelineHandler()
  def UploadYaraSignature(self, args, token=None):
del args, token # Unused.
return api_yara.ApiUploadYaraSignatureHandler()
# Cron jobs methods.
# =================
#
def ListCronJobs(self, args, token=None):
return api_cron.ApiListCronJobsHandler()
def CreateCronJob(self, args, token=None):
return api_cron.ApiCreateCronJobHandler()
def GetCronJob(self, args, token=None):
return api_cron.ApiGetCronJobHandler()
def ForceRunCronJob(self, args, token=None):
return api_cron.ApiForceRunCronJobHandler()
def ModifyCronJob(self, args, token=None):
return api_cron.ApiModifyCronJobHandler()
def ListCronJobRuns(self, args, token=None):
return api_cron.ApiListCronJobRunsHandler()
def GetCronJobRun(self, args, token=None):
return api_cron.ApiGetCronJobRunHandler()
def DeleteCronJob(self, args, token=None):
return api_cron.ApiDeleteCronJobHandler()
# Hunts methods.
# =============
#
def ListHunts(self, args, token=None):
return api_hunt.ApiListHuntsHandler()
def GetHunt(self, args, token=None):
return api_hunt.ApiGetHuntHandler()
def ListHuntErrors(self, args, token=None):
return api_hunt.ApiListHuntErrorsHandler()
def ListHuntLogs(self, args, token=None):
return api_hunt.ApiListHuntLogsHandler()
def ListHuntResults(self, args, token=None):
return api_hunt.ApiListHuntResultsHandler()
def GetExportedHuntResults(self, args, token=None):
return api_hunt.ApiGetExportedHuntResultsHandler()
def GetHuntResultsExportCommand(self, args, token=None):
return api_hunt.ApiGetHuntResultsExportCommandHandler()
def ListHuntOutputPlugins(self, args, token=None):
return api_hunt.ApiListHuntOutputPluginsHandler()
def ListHuntOutputPluginLogs(self, args, token=None):
return api_hunt.ApiListHuntOutputPluginLogsHandler()
def ListHuntOutputPluginErrors(self, args, token=None):
return api_hunt.ApiListHuntOutputPluginErrorsHandler()
def ListHuntCrashes(self, args, token=None):
return api_hunt.ApiListHuntCrashesHandler()
def GetHuntClientCompletionStats(self, args, token=None):
return api_hunt.ApiGetHuntClientCompletionStatsHandler()
def GetHuntStats(self, args, token=None):
return api_hunt.ApiGetHuntStatsHandler()
def ListHuntClients(self, args, token=None):
return api_hunt.ApiListHuntClientsHandler()
def GetHuntContext(self, args, token=None):
return api_hunt.ApiGetHuntContextHandler()
def CreateHunt(self, args, token=None):
return api_hunt.ApiCreateHuntHandler()
def ModifyHunt(self, args, token=None):
return api_hunt.ApiModifyHuntHandler()
def DeleteHunt(self, args, token=None):
return api_hunt.ApiDeleteHuntHandler()
def GetHuntFilesArchive(self, args, token=None):
return api_hunt.ApiGetHuntFilesArchiveHandler()
def GetHuntFile(self, args, token=None):
return api_hunt.ApiGetHuntFileHandler()
# Stats metrics methods.
# =====================
#
def ListReports(self, args, token=None):
return api_stats.ApiListReportsHandler()
def GetReport(self, args, token=None):
return api_stats.ApiGetReportHandler()
# Approvals methods.
# =================
#
def CreateClientApproval(self, args, token=None):
return api_user.ApiCreateClientApprovalHandler()
def GetClientApproval(self, args, token=None):
return api_user.ApiGetClientApprovalHandler()
def GrantClientApproval(self, args, token=None):
return api_user.ApiGrantClientApprovalHandler()
def ListClientApprovals(self, args, token=None):
return api_user.ApiListClientApprovalsHandler()
def CreateHuntApproval(self, args, token=None):
return api_user.ApiCreateHuntApprovalHandler()
def GetHuntApproval(self, args, token=None):
return api_user.ApiGetHuntApprovalHandler()
def GrantHuntApproval(self, args, token=None):
return api_user.ApiGrantHuntApprovalHandler()
def ListHuntApprovals(self, args, token=None):
return api_user.ApiListHuntApprovalsHandler()
def CreateCronJobApproval(self, args, token=None):
return api_user.ApiCreateCronJobApprovalHandler()
def GetCronJobApproval(self, args, token=None):
return api_user.ApiGetCronJobApprovalHandler()
def GrantCronJobApproval(self, args, token=None):
return api_user.ApiGrantCronJobApprovalHandler()
def ListCronJobApprovals(self, args, token=None):
return api_user.ApiListCronJobApprovalsHandler()
def ListApproverSuggestions(self, args, token=None):
return api_user.ApiListApproverSuggestionsHandler()
# User settings methods.
# =====================
#
def GetPendingUserNotificationsCount(self, args, token=None):
return api_user.ApiGetPendingUserNotificationsCountHandler()
def ListPendingUserNotifications(self, args, token=None):
return api_user.ApiListPendingUserNotificationsHandler()
def DeletePendingUserNotification(self, args, token=None):
return api_user.ApiDeletePendingUserNotificationHandler()
def ListAndResetUserNotifications(self, args, token=None):
return api_user.ApiListAndResetUserNotificationsHandler()
def GetGrrUser(self, args, token=None):
return api_user.ApiGetOwnGrrUserHandler(
interface_traits=api_user.ApiGrrUserInterfaceTraits().EnableAll())
def UpdateGrrUser(self, args, token=None):
return api_user.ApiUpdateGrrUserHandler()
# Config methods.
# ==============
#
def GetConfig(self, args, token=None):
return api_config.ApiGetConfigHandler()
def GetConfigOption(self, args, token=None):
return api_config.ApiGetConfigOptionHandler()
def ListGrrBinaries(self, args, token=None):
return api_config.ApiListGrrBinariesHandler()
def GetGrrBinary(self, args, token=None):
return api_config.ApiGetGrrBinaryHandler()
def GetGrrBinaryBlob(self, args, token=None):
return api_config.ApiGetGrrBinaryBlobHandler()
# Reflection methods.
# ==================
#
def ListKbFields(self, args, token=None):
return api_client.ApiListKbFieldsHandler()
def ListFlowDescriptors(self, args, token=None):
# TODO(user): move to reflection.py
return api_flow.ApiListFlowDescriptorsHandler()
def GetRDFValueDescriptor(self, args, token=None):
return api_reflection.ApiGetRDFValueDescriptorHandler()
def ListRDFValuesDescriptors(self, args, token=None):
return api_reflection.ApiListRDFValuesDescriptorsHandler()
def ListOutputPluginDescriptors(self, args, token=None):
return api_output_plugin.ApiListOutputPluginDescriptorsHandler()
def ListKnownEncodings(self, args, token=None):
return api_vfs.ApiListKnownEncodingsHandler()
def ListApiMethods(self, args, token=None):
return api_reflection.ApiListApiMethodsHandler(self)
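# --- Dispatch sketch (illustrative; not part of GRR) ---
# Every method above simply maps an API call onto a stateless handler
# instance; the HTTP layer resolves the handler and then invokes it
# (conventionally via its Handle() method). Only the lookup is shown here,
# since actually running a handler needs a full GRR server setup.
if __name__ == "__main__":
    router = ApiCallRouterWithoutChecks()
    handler = router.ListApiMethods(None)
    print(type(handler).__name__)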
| dunkhong/grr | grr/server/grr_response_server/gui/api_call_router_without_checks.py | Python | apache-2.0 | 13,048 | 0.011343 |
import sys, os, os.path
from distutils.core import Extension
from distutils.errors import DistutilsOptionError
from versioninfo import get_base_dir, split_version
try:
from Cython.Distutils import build_ext as build_pyx
import Cython.Compiler.Version
CYTHON_INSTALLED = True
except ImportError:
CYTHON_INSTALLED = False
EXT_MODULES = ["lxml.etree", "lxml.objectify"]
PACKAGE_PATH = "src/lxml/"
if sys.version_info[0] >= 3:
_system_encoding = sys.getdefaultencoding()
if _system_encoding is None:
_system_encoding = "iso-8859-1" # :-)
def decode_input(data):
if isinstance(data, str):
return data
return data.decode(_system_encoding)
else:
def decode_input(data):
return data
def env_var(name):
value = os.getenv(name)
if value:
value = decode_input(value)
if sys.platform == 'win32' and ';' in value:
return value.split(';')
else:
return value.split()
else:
return []
def ext_modules(static_include_dirs, static_library_dirs,
static_cflags, static_binaries):
global XML2_CONFIG, XSLT_CONFIG
if OPTION_BUILD_LIBXML2XSLT:
from buildlibxml import build_libxml2xslt, get_prebuilt_libxml2xslt
if sys.platform.startswith('win'):
get_prebuilt_libxml2xslt(
'libs', static_include_dirs, static_library_dirs)
else:
XML2_CONFIG, XSLT_CONFIG = build_libxml2xslt(
'libs', 'build/tmp',
static_include_dirs, static_library_dirs,
static_cflags, static_binaries,
libiconv_version=OPTION_LIBICONV_VERSION,
libxml2_version=OPTION_LIBXML2_VERSION,
libxslt_version=OPTION_LIBXSLT_VERSION,
multicore=OPTION_MULTICORE)
if CYTHON_INSTALLED:
source_extension = ".pyx"
print("Building with Cython %s." % Cython.Compiler.Version.version)
from Cython.Compiler import Options
Options.generate_cleanup_code = 3
else:
source_extension = ".c"
if not os.path.exists(PACKAGE_PATH + 'lxml.etree.c'):
print ("WARNING: Trying to build without Cython, but pre-generated "
"'%slxml.etree.c' does not seem to be available." % PACKAGE_PATH)
else:
print ("Building without Cython.")
if OPTION_WITHOUT_OBJECTIFY:
modules = [ entry for entry in EXT_MODULES
if 'objectify' not in entry ]
else:
modules = EXT_MODULES
lib_versions = get_library_versions()
if lib_versions[0]:
print("Using build configuration of libxml2 %s and libxslt %s" %
lib_versions)
else:
print("Using build configuration of libxslt %s" %
lib_versions[1])
_include_dirs = include_dirs(static_include_dirs)
_library_dirs = library_dirs(static_library_dirs)
_cflags = cflags(static_cflags)
_define_macros = define_macros()
_libraries = libraries()
if _library_dirs:
message = "Building against libxml2/libxslt in "
if len(_library_dirs) > 1:
print(message + "one of the following directories:")
for dir in _library_dirs:
print(" " + dir)
else:
print(message + "the following directory: " +
_library_dirs[0])
if OPTION_AUTO_RPATH:
runtime_library_dirs = _library_dirs
else:
runtime_library_dirs = []
if not OPTION_SHOW_WARNINGS:
_cflags = ['-w'] + _cflags
result = []
for module in modules:
main_module_source = PACKAGE_PATH + module + source_extension
result.append(
Extension(
module,
sources = [main_module_source],
depends = find_dependencies(module),
extra_compile_args = _cflags,
extra_objects = static_binaries,
define_macros = _define_macros,
include_dirs = _include_dirs,
library_dirs = _library_dirs,
runtime_library_dirs = runtime_library_dirs,
libraries = _libraries,
))
return result
def find_dependencies(module):
if not CYTHON_INSTALLED:
return []
package_dir = os.path.join(get_base_dir(), PACKAGE_PATH)
files = os.listdir(package_dir)
pxd_files = [ os.path.join(PACKAGE_PATH, filename) for filename in files
if filename.endswith('.pxd') ]
if 'etree' in module:
pxi_files = [ os.path.join(PACKAGE_PATH, filename)
for filename in files
if filename.endswith('.pxi')
and 'objectpath' not in filename ]
pxd_files = [ filename for filename in pxd_files
if 'etreepublic' not in filename ]
elif 'objectify' in module:
pxi_files = [ os.path.join(PACKAGE_PATH, 'objectpath.pxi') ]
else:
pxi_files = []
return pxd_files + pxi_files
def extra_setup_args():
result = {}
if CYTHON_INSTALLED:
result['cmdclass'] = {'build_ext': build_pyx}
return result
def libraries():
if sys.platform in ('win32',):
libs = ['libxslt', 'libexslt', 'libxml2', 'iconv']
if OPTION_STATIC:
libs = ['%s_a' % lib for lib in libs]
libs.extend(['zlib', 'WS2_32'])
elif OPTION_STATIC:
libs = ['z', 'm']
else:
libs = ['xslt', 'exslt', 'xml2', 'z', 'm']
return libs
def library_dirs(static_library_dirs):
if OPTION_STATIC:
if not static_library_dirs:
static_library_dirs = env_var('LIBRARY')
assert static_library_dirs, "Static build not configured, see doc/build.txt"
return static_library_dirs
# filter them from xslt-config --libs
result = []
possible_library_dirs = flags('libs')
for possible_library_dir in possible_library_dirs:
if possible_library_dir.startswith('-L'):
result.append(possible_library_dir[2:])
return result
def include_dirs(static_include_dirs):
if OPTION_STATIC:
if not static_include_dirs:
static_include_dirs = env_var('INCLUDE')
return static_include_dirs
# filter them from xslt-config --cflags
result = []
possible_include_dirs = flags('cflags')
for possible_include_dir in possible_include_dirs:
if possible_include_dir.startswith('-I'):
result.append(possible_include_dir[2:])
return result
def cflags(static_cflags):
result = []
if OPTION_DEBUG_GCC:
result.append('-g2')
if OPTION_STATIC:
if not static_cflags:
static_cflags = env_var('CFLAGS')
result.extend(static_cflags)
else:
# anything from xslt-config --cflags that doesn't start with -I
possible_cflags = flags('cflags')
for possible_cflag in possible_cflags:
if not possible_cflag.startswith('-I'):
result.append(possible_cflag)
if sys.platform in ('darwin',):
for opt in result:
if 'flat_namespace' in opt:
break
else:
result.append('-flat_namespace')
return result
def define_macros():
macros = []
if OPTION_WITHOUT_ASSERT:
macros.append(('PYREX_WITHOUT_ASSERTIONS', None))
if OPTION_WITHOUT_THREADING:
macros.append(('WITHOUT_THREADING', None))
if OPTION_WITH_REFNANNY:
macros.append(('CYTHON_REFNANNY', None))
return macros
_ERROR_PRINTED = False
def run_command(cmd, *args):
if not cmd:
return ''
if args:
cmd = ' '.join((cmd,) + args)
try:
import subprocess
except ImportError:
# Python 2.3
sf, rf, ef = os.popen3(cmd)
sf.close()
errors = ef.read()
stdout_data = rf.read()
else:
# Python 2.4+
p = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_data, errors = p.communicate()
global _ERROR_PRINTED
if errors and not _ERROR_PRINTED:
_ERROR_PRINTED = True
print("ERROR: %s" % errors)
print("** make sure the development packages of libxml2 and libxslt are installed **\n")
return decode_input(stdout_data).strip()
def get_library_versions():
xml2_version = run_command(find_xml2_config(), "--version")
xslt_version = run_command(find_xslt_config(), "--version")
return xml2_version, xslt_version
def flags(option):
xml2_flags = run_command(find_xml2_config(), "--%s" % option)
xslt_flags = run_command(find_xslt_config(), "--%s" % option)
flag_list = xml2_flags.split()
for flag in xslt_flags.split():
if flag not in flag_list:
flag_list.append(flag)
return flag_list
XSLT_CONFIG = None
XML2_CONFIG = None
def find_xml2_config():
global XML2_CONFIG
if XML2_CONFIG:
return XML2_CONFIG
option = '--with-xml2-config='
for arg in sys.argv:
if arg.startswith(option):
sys.argv.remove(arg)
XML2_CONFIG = arg[len(option):]
return XML2_CONFIG
else:
# default: do nothing, rely only on xslt-config
XML2_CONFIG = os.getenv('XML2_CONFIG', '')
return XML2_CONFIG
def find_xslt_config():
global XSLT_CONFIG
if XSLT_CONFIG:
return XSLT_CONFIG
option = '--with-xslt-config='
for arg in sys.argv:
if arg.startswith(option):
sys.argv.remove(arg)
XSLT_CONFIG = arg[len(option):]
return XSLT_CONFIG
else:
XSLT_CONFIG = os.getenv('XSLT_CONFIG', 'xslt-config')
return XSLT_CONFIG
## Option handling:
def has_option(name):
try:
sys.argv.remove('--%s' % name)
return True
except ValueError:
pass
# allow passing all cmd line options also as environment variables
env_val = os.getenv(name.upper().replace('-', '_'), 'false').lower()
if env_val == "true":
return True
return False
def option_value(name):
for index, option in enumerate(sys.argv):
if option == '--' + name:
if index+1 >= len(sys.argv):
raise DistutilsOptionError(
'The option %s requires a value' % option)
value = sys.argv[index+1]
sys.argv[index:index+2] = []
return value
if option.startswith('--' + name + '='):
value = option[len(name)+3:]
sys.argv[index:index+1] = []
return value
env_val = os.getenv(name.upper().replace('-', '_'))
return env_val
staticbuild = bool(os.environ.get('STATICBUILD', ''))
# pick up any commandline options and/or env variables
OPTION_WITHOUT_OBJECTIFY = has_option('without-objectify')
OPTION_WITHOUT_ASSERT = has_option('without-assert')
OPTION_WITHOUT_THREADING = has_option('without-threading')
OPTION_WITHOUT_CYTHON = has_option('without-cython')
OPTION_WITH_REFNANNY = has_option('with-refnanny')
if OPTION_WITHOUT_CYTHON:
CYTHON_INSTALLED = False
OPTION_STATIC = staticbuild or has_option('static')
OPTION_DEBUG_GCC = has_option('debug-gcc')
OPTION_SHOW_WARNINGS = has_option('warnings')
OPTION_AUTO_RPATH = has_option('auto-rpath')
OPTION_BUILD_LIBXML2XSLT = staticbuild or has_option('static-deps')
if OPTION_BUILD_LIBXML2XSLT:
OPTION_STATIC = True
OPTION_LIBXML2_VERSION = option_value('libxml2-version')
OPTION_LIBXSLT_VERSION = option_value('libxslt-version')
OPTION_LIBICONV_VERSION = option_value('libiconv-version')
OPTION_MULTICORE = option_value('multicore')
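# --- Usage sketch (illustrative; not part of the original module) ---
# setup.py consumes this module roughly as follows. The empty lists stand in
# for the static-build arguments setup.py collects from the command line,
# and running this requires xslt-config to be available on PATH.
if __name__ == '__main__':
    extensions = ext_modules([], [], [], [])
    print([ext.name for ext in extensions])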
| rajendrakrp/GeoMicroFormat | build/lxml/setupinfo.py | Python | bsd-3-clause | 11,648 | 0.00601 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import json
import cgi
import urllib2
# Get this machine's public (WAN) IP address
myip = urllib2.urlopen('http://members.3322.org/dyndns/getip').read()
myip=myip.strip()
# Load the SSR user database (JSON)
f = file("/usr/local/shadowsocksr/mudb.json");
json = json.load(f);
# Receive the submitted form data
form = cgi.FieldStorage()
# Parse the submitted fields
getport = form['port'].value
getpasswd = form['passwd'].value
# Flags: was the port found, and was the password correct?
portexist=0
passwdcorrect=0
# Scan the user list for the requested port
for x in json:
    # The port counts as found when it matches this JSON entry
if(str(x[u"port"]) == str(getport)):
portexist=1
if(str(x[u"passwd"]) == str(getpasswd)):
passwdcorrect=1
jsonmethod=str(x[u"method"])
jsonobfs=str(x[u"obfs"])
jsonprotocol=str(x[u"protocol"])
break
if(portexist==0):
    getport = "Port not found - please check your input!"
myip = ""
getpasswd = ""
jsonmethod = ""
jsonprotocol = ""
jsonobfs = ""
if(portexist!=0 and passwdcorrect==0):
    getport = "Wrong connection password, please try again"
myip = ""
getpasswd = ""
jsonmethod = ""
jsonprotocol = ""
jsonobfs = ""
header = '''
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta content="IE=edge" http-equiv="X-UA-Compatible">
<meta content="initial-scale=1.0, width=device-width" name="viewport">
    <title>Connection Info</title>
<!-- css -->
<link href="../css/base.min.css" rel="stylesheet">
<!-- favicon -->
<!-- ... -->
<!-- ie -->
<!--[if lt IE 9]>
<script src="../js/html5shiv.js" type="text/javascript"></script>
<script src="../js/respond.js" type="text/javascript"></script>
<![endif]-->
</head>
<body>
<div class="content">
<div class="content-heading">
<div class="container">
            <h1 class="heading"> Connection Info</h1>
</div>
</div>
<div class="content-inner">
<div class="container">
'''
footer = '''
</div>
</div>
</div>
<footer class="footer">
<div class="container">
<p>Hello</p>
</div>
</footer>
<script src="../js/base.min.js" type="text/javascript"></script>
</body>
</html>
'''
# Render the response page
print header
formhtml = '''
<div class="card-wrap">
<div class="row">
<div class="col-lg-4 col-sm-6">
<div class="card card-green">
                <a class="card-side" href="/"><span class="card-heading">Connection Info</span></a>
<div class="card-main">
<div class="card-inner">
<p>
                        <strong>Server address:</strong> %s <br/><br/>
                        <strong>Port:</strong> %s <br/><br/>
                        <strong>Password:</strong> %s <br/><br/>
                        <strong>Encryption:</strong> %s <br/><br/>
                        <strong>Protocol:</strong> <br/>%s <br/><br/>
                        <strong>Obfuscation:</strong> <br/>%s
</p>
</div>
<div class="card-action">
<ul class="nav nav-list pull-left">
<li>
                                <a href="../index.html"><span class="icon icon-check"></span> Back</a>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
'''
print formhtml % (myip,getport,getpasswd,jsonmethod,jsonprotocol,jsonobfs)
print footer
f.close();
| zhaifangzhi/1 | www/cgi-bin/show_info.py | Python | gpl-3.0 | 3,348 | 0.034528 |
extension = None
#httpDirectory = None
omlCitation = None
| OsirisSPS/osiris-sps | client/data/extensions/5B1D133CA24D2B5B93B675279CB60C9CB7E47502/scripts/globalvars.py | Python | gpl-3.0 | 57 | 0.035088 |
#!/bin/python
# -*- coding: utf-8 -*-
# ####################################################################
# gofed-ng - Golang system
# Copyright (C) 2016 Fridolin Pokorny, fpokorny@redhat.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
import sys
import traceback
from tqdm import tqdm
from common.helpers.output import log
from scenario import Scenario, SwitchAttr, Flag
class GolangDepsUpdate(Scenario):
''' update dependencies of Golang projects packaged in Fedora '''
max_depth = SwitchAttr("--max-depth", int, default=None,
help="specify commit depth limit")
since_date = SwitchAttr("--since-date", str, default=None,
help="specify since date")
skip_errors = Flag("--skip-errors",
help="errors will be reported, but the computation will not be interrupted")
def main(self):
with self.get_system() as system:
golang_pkgs = system.async_call.goland_package_listing()
stored_projects = system.async_call.deps_project_listing()
for pkg in golang_pkgs.result:
if not pkg['name'].startswith('golang-github-'):
log.warning("Skipping %s" % pkg['name'])
# TODO: remove once support for mercurial and full package->upstream translation will be available
continue
try:
                    print("Inspecting '%s'" % pkg['name'])
upstream_url = system.async_call.golang_package2upstream(pkg['name'])
if pkg['name'] in stored_projects.result:
stored_commits = system.async_call.deps_project_commit_listing(pkg['name'])
else:
stored_commits = None
scm_log = system.async_call.scm_log(upstream_url.result,
max_depth=self.max_depth,
since_date=self.since_date)
for commit in tqdm(scm_log.result):
log.debug("Commit %s project %s" % (commit['hash'], pkg['name']))
if not stored_commits or commit not in stored_commits.result:
file_id = system.async_call.scm_store(upstream_url.result, commit['hash'])
deps = system.async_call.deps_analysis(file_id.result)
system.async_call.deps_store_project(pkg['name'], commit['hash'], commit['time'],
deps.result, deps.meta)
                except Exception:
                    if self.skip_errors:
                        # keep processing the remaining packages, but record
                        # the full traceback
                        log.error(traceback.format_exc())
                    else:
                        raise
if __name__ == '__main__':
sys.exit(1)
| gofed/gofed-ng | scenarios/golangDepsUpdate.py | Python | gpl-3.0 | 3,711 | 0.004042 |
import copy
import os
import re
import subprocess
from conans.client import tools
from conans.client.build.visual_environment import (VisualStudioBuildEnvironment,
vs_build_type_flags, vs_std_cpp)
from conans.client.tools.oss import cpu_count
from conans.client.tools.win import vcvars_command
from conans.errors import ConanException
from conans.model.conan_file import ConanFile
from conans.model.version import Version
from conans.tools import vcvars_command as tools_vcvars_command
from conans.util.env_reader import get_env
from conans.util.files import decode_text, save
class MSBuild(object):
def __init__(self, conanfile):
if isinstance(conanfile, ConanFile):
self._conanfile = conanfile
self._settings = self._conanfile.settings
self._output = self._conanfile.output
self.build_env = VisualStudioBuildEnvironment(self._conanfile,
with_build_type_flags=False)
else: # backwards compatible with build_sln_command
self._settings = conanfile
self.build_env = None
def build(self, project_file, targets=None, upgrade_project=True, build_type=None, arch=None,
parallel=True, force_vcvars=False, toolset=None, platforms=None, use_env=True,
vcvars_ver=None, winsdk_version=None, properties=None, output_binary_log=None,
property_file_name=None, verbosity=None, definitions=None):
"""
:param project_file: Path to the .sln file.
:param targets: List of targets to build.
:param upgrade_project: Will call devenv to upgrade the solution to your
current Visual Studio.
:param build_type: Use a custom build type instead of the default settings.build_type one.
:param arch: Use a custom architecture name instead of the settings.arch one.
It will be used to build the /p:Configuration= parameter of MSBuild.
It can be used as the key of the platforms parameter.
E.g. arch="x86", platforms={"x86": "i386"}
:param parallel: Will use the configured number of cores in the conan.conf file or
tools.cpu_count():
In the solution: Building the solution with the projects in parallel. (/m: parameter).
CL compiler: Building the sources in parallel. (/MP: compiler flag)
:param force_vcvars: Will ignore if the environment is already set for a different
Visual Studio version.
:param toolset: Specify a toolset. Will append a /p:PlatformToolset option.
:param platforms: Dictionary with the mapping of archs/platforms from Conan naming to another
one. It is useful for Visual Studio solutions that have a different naming in architectures.
Example: platforms={"x86":"Win32"} (Visual solution uses "Win32" instead of "x86").
This dictionary will update the default one:
msvc_arch = {'x86': 'x86', 'x86_64': 'x64', 'armv7': 'ARM', 'armv8': 'ARM64'}
:param use_env: Applies the argument /p:UseEnv=true to the MSBuild call.
:param vcvars_ver: Specifies the Visual Studio compiler toolset to use.
:param winsdk_version: Specifies the version of the Windows SDK to use.
:param properties: Dictionary with new properties, for each element in the dictionary
{name: value} it will append a /p:name="value" option.
:param output_binary_log: If set to True then MSBuild will output a binary log file
called msbuild.binlog in the working directory. It can also be used to set the name of
log file like this output_binary_log="my_log.binlog".
This parameter is only supported starting from MSBuild version 15.3 and onwards.
:param property_file_name: When None it will generate a file named conan_build.props.
You can specify a different name for the generated properties file.
:param verbosity: Specifies verbosity level (/verbosity: parameter)
:param definitions: Dictionary with additional compiler definitions to be applied during
the build. Use value of None to set compiler definition with no value.
:return: status code of the MSBuild command invocation
"""
property_file_name = property_file_name or "conan_build.props"
self.build_env.parallel = parallel
with tools.environment_append(self.build_env.vars):
# Path for custom properties file
props_file_contents = self._get_props_file_contents(definitions)
property_file_name = os.path.abspath(property_file_name)
save(property_file_name, props_file_contents)
vcvars = vcvars_command(self._conanfile.settings, force=force_vcvars,
vcvars_ver=vcvars_ver, winsdk_version=winsdk_version,
output=self._output)
command = self.get_command(project_file, property_file_name,
targets=targets, upgrade_project=upgrade_project,
build_type=build_type, arch=arch, parallel=parallel,
toolset=toolset, platforms=platforms,
use_env=use_env, properties=properties,
output_binary_log=output_binary_log,
verbosity=verbosity)
command = "%s && %s" % (vcvars, command)
return self._conanfile.run(command)
def get_command(self, project_file, props_file_path=None, targets=None, upgrade_project=True,
build_type=None, arch=None, parallel=True, toolset=None, platforms=None,
use_env=False, properties=None, output_binary_log=None, verbosity=None):
targets = targets or []
properties = properties or {}
command = []
if upgrade_project and not get_env("CONAN_SKIP_VS_PROJECTS_UPGRADE", False):
command.append('devenv "%s" /upgrade &&' % project_file)
else:
self._output.info("Skipped sln project upgrade")
build_type = build_type or self._settings.get_safe("build_type")
arch = arch or self._settings.get_safe("arch")
if toolset is None: # False value to skip adjusting
toolset = tools.msvs_toolset(self._settings)
verbosity = os.getenv("CONAN_MSBUILD_VERBOSITY") or verbosity or "minimal"
if not build_type:
raise ConanException("Cannot build_sln_command, build_type not defined")
if not arch:
raise ConanException("Cannot build_sln_command, arch not defined")
command.append('msbuild "%s" /p:Configuration="%s"' % (project_file, build_type))
msvc_arch = {'x86': 'x86',
'x86_64': 'x64',
'armv7': 'ARM',
'armv8': 'ARM64'}
if platforms:
msvc_arch.update(platforms)
msvc_arch = msvc_arch.get(str(arch))
if self._settings.get_safe("os") == "WindowsCE":
msvc_arch = self._settings.get_safe("os.platform")
try:
sln = tools.load(project_file)
pattern = re.compile(r"GlobalSection\(SolutionConfigurationPlatforms\)"
r"(.*?)EndGlobalSection", re.DOTALL)
solution_global = pattern.search(sln).group(1)
lines = solution_global.splitlines()
lines = [s.split("=")[0].strip() for s in lines]
except Exception:
pass # TODO: !!! what are we catching here? tools.load? .group(1)? .splitlines?
else:
config = "%s|%s" % (build_type, msvc_arch)
if config not in "".join(lines):
self._output.warn("***** The configuration %s does not exist in this solution *****"
% config)
self._output.warn("Use 'platforms' argument to define your architectures")
if output_binary_log:
msbuild_version = MSBuild.get_version(self._settings)
if msbuild_version >= "15.3": # http://msbuildlog.com/
command.append('/bl' if isinstance(output_binary_log, bool)
else '/bl:"%s"' % output_binary_log)
else:
raise ConanException("MSBuild version detected (%s) does not support "
"'output_binary_log' ('/bl')" % msbuild_version)
if use_env:
command.append('/p:UseEnv=true')
if msvc_arch:
command.append('/p:Platform="%s"' % msvc_arch)
if parallel:
command.append('/m:%s' % cpu_count(output=self._output))
if targets:
command.append("/target:%s" % ";".join(targets))
if toolset:
command.append('/p:PlatformToolset="%s"' % toolset)
if verbosity:
command.append('/verbosity:%s' % verbosity)
if props_file_path:
command.append('/p:ForceImportBeforeCppTargets="%s"'
% os.path.abspath(props_file_path))
for name, value in properties.items():
command.append('/p:%s="%s"' % (name, value))
return " ".join(command)
def _get_props_file_contents(self, definitions=None):
def format_macro(name, value):
return "%s=%s" % (name, value) if value else name
# how to specify runtime in command line:
# https://stackoverflow.com/questions/38840332/msbuild-overrides-properties-while-building-vc-project
runtime_library = {"MT": "MultiThreaded",
"MTd": "MultiThreadedDebug",
"MD": "MultiThreadedDLL",
"MDd": "MultiThreadedDebugDLL"}.get(
self._settings.get_safe("compiler.runtime"), "")
if self.build_env:
# Take the flags from the build env, the user was able to alter them if needed
flags = copy.copy(self.build_env.flags)
flags.append(self.build_env.std)
else: # To be removed when build_sln_command is deprecated
flags = vs_build_type_flags(self._settings, with_flags=False)
flags.append(vs_std_cpp(self._settings))
if definitions:
definitions = ";".join([format_macro(name, definitions[name]) for name in definitions])
flags_str = " ".join(list(filter(None, flags))) # Removes empty and None elements
additional_node = "<AdditionalOptions>" \
"{} %(AdditionalOptions)" \
"</AdditionalOptions>".format(flags_str) if flags_str else ""
runtime_node = "<RuntimeLibrary>" \
"{}" \
"</RuntimeLibrary>".format(runtime_library) if runtime_library else ""
definitions_node = "<PreprocessorDefinitions>" \
"{};%(PreprocessorDefinitions)" \
"</PreprocessorDefinitions>".format(definitions) if definitions else ""
template = """<?xml version="1.0" encoding="utf-8"?>
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemDefinitionGroup>
<ClCompile>
{runtime_node}
{additional_node}
{definitions_node}
</ClCompile>
</ItemDefinitionGroup>
</Project>""".format(**{"runtime_node": runtime_node,
"additional_node": additional_node,
"definitions_node": definitions_node})
return template
@staticmethod
def get_version(settings):
msbuild_cmd = "msbuild -version"
vcvars = tools_vcvars_command(settings)
command = "%s && %s" % (vcvars, msbuild_cmd)
try:
out, _ = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True).communicate()
version_line = decode_text(out).split("\n")[-1]
            prog = re.compile(r"(\d+\.){2,3}\d+")
result = prog.match(version_line).group()
return Version(result)
except Exception as e:
raise ConanException("Error retrieving MSBuild version: '{}'".format(e))
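# --- Usage sketch (illustrative; not part of the original module) ---
# A hedged example of how a Conan recipe typically drives this helper; the
# solution and target names are invented, and the class exists only to show
# the call shape.
class _ExampleConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    def build(self):
        msbuild = MSBuild(self)
        # Map Conan's 'x86' arch onto the 'Win32' platform name used inside
        # the .sln file, and skip the devenv upgrade step.
        msbuild.build("MyProject.sln", targets=["MyApp"],
                      platforms={"x86": "Win32"}, upgrade_project=False)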
| memsharded/conan | conans/client/build/msbuild.py | Python | mit | 12,275 | 0.004236 |
# CODE FESTIVAL 2016 qual A, problem A: print the first 4 characters and the
# rest of the input separated by a space (e.g. "CODEFESTIVAL" -> "CODE FESTIVAL").
s = input().rstrip()
print(s[:4],s[4:])
| utgw/programming-contest | codefestival/2016/qualA/a.py | Python | mit | 40 | 0.025 |
from unittest import TestCase
import pyb
class TestGPIO (TestCase):
flagInCallback=False
def test_1(self):
flagOk=False
try:
p = pyb.Pin(0) #GPIO0
p.init(pyb.Pin.IN,pyb.Pin.PULL_NONE)
flagOk=True
except:
pass
self.assertEqual(flagOk,True,"Enable GPIO0 IN")
def test_2(self):
flagOk=False
try:
p = pyb.Pin(8) #GPIO8
p.init(pyb.Pin.IN,pyb.Pin.PULL_NONE)
flagOk=True
except:
pass
self.assertEqual(flagOk,True,"Enable GPIO8 IN")
def test_3(self):
flagOk=False
try:
p = pyb.Pin(9) #GPIO9 - invalid
p.init(pyb.Pin.IN,pyb.Pin.PULL_NONE)
flagOk=True
except:
pass
self.assertEqual(flagOk,False,"Enable invalid GPIO9 IN")
def test_4(self):
flagOk=False
try:
p = pyb.Pin(0) #GPIO0
p.init(99,pyb.Pin.PULL_NONE) #invalid mode
flagOk=True
except:
pass
self.assertEqual(flagOk,False,"Enable invalid mode GPIO0 IN")
def test_5(self):
flagOk=False
try:
p = pyb.Pin(0) #GPIO0
p.init(pyb.Pin.IN,99) #invalid pullup mode
flagOk=True
except:
pass
self.assertEqual(flagOk,False,"Enable invalid pullup GPIO0 IN")
def test_6(self):
p = pyb.Pin(0)
p.init(pyb.Pin.IN,pyb.Pin.PULL_UP)
pyb.delay(10)
v = p.value()
self.assertEqual(v,1,"Pull Up")
def test_7(self):
p = pyb.Pin(0)
p.init(pyb.Pin.IN,pyb.Pin.PULL_DOWN)
pyb.delay(10)
v = p.value()
self.assertEqual(v,0,"Pull Down")
def __writeSetup(self):
self.p7 = pyb.Pin(7)
self.p7.init(pyb.Pin.OUT_PP,pyb.Pin.PULL_NONE)
pyb.delay(1)
self.p8 = pyb.Pin(8)
self.p8.init(pyb.Pin.IN,pyb.Pin.PULL_NONE)
pyb.delay(1)
def test_8(self):
self.__writeSetup()
self.p7.low()
pyb.delay(10)
v = self.p8.value()
self.assertEqual(v,0,"Write Low test")
def test_9(self):
self.__writeSetup()
self.p7.high()
pyb.delay(10)
v = self.p8.value()
self.assertEqual(v,1,"Write High test")
def callbackTest(self,line):
TestGPIO.flagInCallback=True
    def test_10(self):
        self.__writeSetup()
        ext = pyb.ExtInt(self.p8,pyb.ExtInt.IRQ_RISING,pyb.Pin.PULL_NONE,self.callbackTest)
        ext.disable()
        pyb.delay(10)
        self.p7.low()
        ext.enable()
        # generate interrupt
        TestGPIO.flagInCallback=False
        self.p7.high()
        pyb.delay(10)
        self.assertEqual(TestGPIO.flagInCallback,True,"Rising edge interrupt")
    def test_11(self):
        flagOk=False
        try:
            ext = pyb.ExtInt(None,pyb.ExtInt.IRQ_RISING,pyb.Pin.PULL_NONE,self.callbackTest)
            flagOk=True
        except:
            pass
        self.assertEqual(flagOk,False,"Interrupt. Invalid Pin obj")
    def test_12(self):
        # needs a valid pin so the failure comes from the invalid edge value
        self.__writeSetup()
        flagOk=False
        try:
            ext = pyb.ExtInt(self.p8,99,pyb.Pin.PULL_NONE,self.callbackTest)
            flagOk=True
        except:
            pass
        self.assertEqual(flagOk,False,"Interrupt. Invalid edge")
    def test_13(self):
        # needs a valid pin so the failure comes from the invalid pull value
        self.__writeSetup()
        flagOk=False
        try:
            ext = pyb.ExtInt(self.p8,pyb.ExtInt.IRQ_RISING,99,self.callbackTest)
            flagOk=True
        except:
            pass
        self.assertEqual(flagOk,False,"Interrupt. Invalid pull")
| martinribelotta/micropython | ciaa-nxp/frozen/testing/TestGPIO.py | Python | mit | 3,678 | 0.028276 |
# Derived from keras-rl
import opensim as osim
import numpy as np
import sys
from keras.models import Sequential, Model
from keras.layers import Dense, Activation, Flatten, Input, concatenate
from keras.optimizers import Adam
from rl.agents import DDPGAgent
from rl.memory import SequentialMemory
from rl.random import OrnsteinUhlenbeckProcess
from osim.env import *
from osim.http.client import Client
from keras.optimizers import RMSprop
import argparse
import math
# Command line parameters
parser = argparse.ArgumentParser(description='Train or test neural net motor controller')
parser.add_argument('--train', dest='train', action='store_true', default=True)
parser.add_argument('--test', dest='train', action='store_false', default=True)
parser.add_argument('--steps', dest='steps', action='store', default=10000, type=int)
parser.add_argument('--visualize', dest='visualize', action='store_true', default=False)
parser.add_argument('--model', dest='model', action='store', default="example.h5f")
parser.add_argument('--token', dest='token', action='store', required=False)
args = parser.parse_args()
# Load walking environment
env = RunEnv(args.visualize)
env.reset()
nb_actions = env.action_space.shape[0]
# Total number of steps in training
nallsteps = args.steps
# Create networks for DDPG
# Next, we build a very simple model.
actor = Sequential()
actor.add(Flatten(input_shape=(1,) + env.observation_space.shape))
actor.add(Dense(32))
actor.add(Activation('relu'))
actor.add(Dense(32))
actor.add(Activation('relu'))
actor.add(Dense(32))
actor.add(Activation('relu'))
actor.add(Dense(nb_actions))
actor.add(Activation('sigmoid'))
print(actor.summary())
action_input = Input(shape=(nb_actions,), name='action_input')
observation_input = Input(shape=(1,) + env.observation_space.shape, name='observation_input')
flattened_observation = Flatten()(observation_input)
x = concatenate([action_input, flattened_observation])
x = Dense(64)(x)
x = Activation('relu')(x)
x = Dense(64)(x)
x = Activation('relu')(x)
x = Dense(64)(x)
x = Activation('relu')(x)
x = Dense(1)(x)
x = Activation('linear')(x)
critic = Model(inputs=[action_input, observation_input], outputs=x)
print(critic.summary())
# Set up the agent for training
memory = SequentialMemory(limit=100000, window_length=1)
random_process = OrnsteinUhlenbeckProcess(theta=.15, mu=0., sigma=.2, size=env.noutput)
agent = DDPGAgent(nb_actions=nb_actions, actor=actor, critic=critic, critic_action_input=action_input,
memory=memory, nb_steps_warmup_critic=100, nb_steps_warmup_actor=100,
random_process=random_process, gamma=.99, target_model_update=1e-3,
delta_clip=1.)
# agent = ContinuousDQNAgent(nb_actions=env.noutput, V_model=V_model, L_model=L_model, mu_model=mu_model,
# memory=memory, nb_steps_warmup=1000, random_process=random_process,
# gamma=.99, target_model_update=0.1)
agent.compile(Adam(lr=.001, clipnorm=1.), metrics=['mae'])
# Okay, now it's time to learn something! Visualization slows training down
# quite a lot, so it is disabled during fit(); pass --visualize to render the
# environment itself. You can always safely abort training early with Ctrl + C.
if args.train:
agent.fit(env, nb_steps=nallsteps, visualize=False, verbose=1, nb_max_episode_steps=env.timestep_limit, log_interval=10000)
# After training is done, we save the final weights.
agent.save_weights(args.model, overwrite=True)
# If TEST and TOKEN, submit to crowdAI
if not args.train and args.token:
agent.load_weights(args.model)
# Settings
remote_base = 'http://grader.crowdai.org:1729'
client = Client(remote_base)
# Create environment
observation = client.env_create(args.token)
# Run a single step
# The grader runs 3 simulations of at most 1000 steps each. We stop after the last one
while True:
v = np.array(observation).reshape((env.observation_space.shape[0]))
action = agent.forward(v)
[observation, reward, done, info] = client.env_step(action.tolist())
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
# If TEST and no TOKEN, run some test experiments
if not args.train and not args.token:
agent.load_weights(args.model)
# Finally, evaluate our algorithm for 1 episode.
agent.test(env, nb_episodes=1, visualize=False, nb_max_episode_steps=500)
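# Example invocations, assuming this script is saved as example.py (the flag
# names come from the argparse definitions above):
#
#   python example.py --train --steps 10000 --model sample.h5f
#   python example.py --test --model sample.h5f
#   python example.py --test --model sample.h5f --token YOUR_CROWDAI_TOKEN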
| stanfordnmbl/osim-rl | examples/legacy/example.py | Python | mit | 4,483 | 0.003346 |
#!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def __init__(self, *args, **kwargs):
return object.__init__(self, *args, **kwargs)
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
                if len(args) > 1:
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
# Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
    ## Efficiently filter Javascript-like (// and /* */) and shell-like (#) inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297
# @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
inCommentSingle = True
t.append(s[fromIndex:i])
i += 1
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t)
| lrq3000/author-detector | authordetector/configparser.py | Python | gpl-3.0 | 7,707 | 0.004801 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import apache_beam as beam
from apache_beam import pvalue
class SplitInputsDoFn(beam.DoFn):
"""ParDo function to split reports data into each particular table."""
  def process(self, line):
    element = line.split(',')
    # split() never yields None, so test for a non-empty second field instead.
    if len(element) > 1 and element[1]:
      yield pvalue.TaggedOutput(element[1], element)
    else:
      yield element
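

# A minimal usage sketch (assumptions: 'lines' is a PCollection of
# comma-separated records whose second field names a known table, e.g.
# "123,hotels,..."; tag names must be declared up front):
#
#   results = lines | beam.ParDo(SplitInputsDoFn()).with_outputs(
#       'hotels', main='untagged')
#   hotels_rows = results.hotels
#   untagged_rows = results.untagged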
| google/hotel-ads-etl-tool | hotel_ads_beam_utils/hotel_ads_beam_utils/do_fn.py | Python | apache-2.0 | 1,021 | 0.003918 |
#!/usr/bin/env python
import sys
import os.path
import datetime
import subprocess
import time
"""
Run snapstream reader script for several files and out a table of match counts.
Example:
$ python run_on_dates.py eg01china.c 2014-01-01 2014-07-01
"""
def validate(date_string):
try:
datetime.datetime.strptime(date_string,"%Y-%m-%d")
except ValueError as e:
raise ValueError("Incorrect date format, should be YYYY-MM-DD")
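
# For example, validate("2014-01-01") returns silently, while
# validate("01/01/2014") raises ValueError with the message above.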
if __name__ == "__main__":
start_time = time.time()
if len(sys.argv) != 4:
print("3 arguments are needed, file, begin date and end date")
exit(-1)
file_name = sys.argv[1]
begin_date = sys.argv[2]
end_date = sys.argv[3]
if os.path.isfile(file_name) is False:
print(file_name + " does not exist or is an invalid file")
exit(-1)
validate(begin_date)
validate(end_date)
print("Running %s from %s to %s..." % (file_name, begin_date, end_date))
data_files = ["Data/" + f for f in os.listdir("Data") if f >= begin_date and f < end_date]
data_files.sort()
full_output = open("full_output.txt","w")
print("\t".join(["dt","total_matches_cnt","matching_programs_cnt","total_programs_cnt","selected_programs_cnt"]))
os.system("gcc " + file_name)
for f in data_files:
date_string = f[5:15]
full_output.write(f + "\n" + "====================\n\n")
proc = subprocess.Popen(["./a.out",f], stdout=subprocess.PIPE)
proc_out = proc.communicate()[0].decode('utf-8')
full_output.write(proc_out)
proc_out = proc_out.split('\n')
print("\t".join([date_string]) + '\t' + proc_out[-2])
full_output.close()
print(str(time.time() - start_time) + " seconds taken")
| ppham27/snapstream-reader | legacy/run_on_dates.py | Python | mit | 1,762 | 0.008513 |
from cosrlib.document.html import HTMLDocument
import pytest
def _links(html, url=None):
return HTMLDocument(html, url=url).parse().get_hyperlinks()
def test_get_hyperlinks():
links = _links("""<html><head><title>Test title</title></head><body>x</body></html>""")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a name="x">Y</a>
</body></html>""")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="">Y</a>
</body></html>""")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="ftp://test.com">Y</a>
</body></html>""")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://sub.test.com/page1?q=2&a=b#xxx">Y </a>
</body></html>""")
assert len(links) == 1
assert links[0]["href"].url == "http://sub.test.com/page1?q=2&a=b#xxx"
assert links[0]["text"] == "Y"
links = _links("""<html><head><title>Test title</title></head><body>
<a href="/page1?q=2&a=b#xxx">Y x</a>
</body></html>""", url="http://sub.test.com/page2")
assert len(links) == 1
assert links[0]["href"].url == "http://sub.test.com/page1?q=2&a=b#xxx"
assert links[0]["text"] == "Y x"
links = _links("""<html><head><title>Test title</title></head><body>
<a href="../page1?q=2&a=b#xxx">Y_</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 1
assert links[0]["href"].url == "http://sub.test.com/page1?q=2&a=b#xxx"
assert links[0]["text"] == "Y_"
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://UPPER.CASE.coM/PATH?QUERY=V">*Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 1
assert links[0]["href"].url == "http://upper.case.com/PATH?QUERY=V"
assert links[0]["text"] == "*Y"
links = _links("""<html><head><title>Test title</title></head><body>
<a href="//UPPER.CASE.coM/PATH?QUERY=V">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 1
assert links[0]["href"].url == "http://upper.case.com/PATH?QUERY=V"
assert links[0]["text"] == "Y"
# We do not index links behind any kind of auth
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://user@domain.com">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
# Looks like a forgotten mailto:, don't index
links = _links("""<html><head><title>Test title</title></head><body>
<a href="user@domain.com">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
# Invalid URL should be filtered
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://www.[wsj-ticker ticker=">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="<object width=">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://<object width=">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
# We don't index TLDs either
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://com/x">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
links = _links("""<html><head><title>Test title</title></head><body>
<a href="http://newunknowntldxx/x">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 0
def test_get_hyperlinks_base_tag():
links = _links("""<html><head><base href="https://example.com/d1/d2/" /><title>Test title</title></head><body>
<a href="../page1?q=2&a=b#xxx">Y</a>
</body></html>""", url="http://sub.test.com/page2/x.html")
assert len(links) == 1
assert links[0]["href"].url == "https://example.com/d1/page1?q=2&a=b#xxx"
assert links[0]["text"] == "Y"
| commonsearch/cosr-back | tests/cosrlibtests/document/html/test_hyperlinks.py | Python | apache-2.0 | 4,389 | 0.000456 |
"""
SkCode text align tag definitions code.
"""
from ..etree import TreeNode
class TextAlignBaseTreeNode(TreeNode):
""" Base class for all text alignment tag class. """
# HTML template for rendering
html_render_template = '<p class="text-{text_alignment}">{inner_html}</p>\n'
# Default text alignment
text_alignment = ''
def render_html(self, inner_html, **kwargs):
"""
Callback function for rendering HTML.
:param inner_html: The inner HTML of this tree node.
:param kwargs: Extra keyword arguments for rendering.
:return The rendered HTML of this node.
"""
return self.html_render_template.format(text_alignment=self.text_alignment, inner_html=inner_html)
def render_text(self, inner_text, **kwargs):
"""
Callback function for rendering text.
:param inner_text: The inner text of this tree node.
:param kwargs: Extra keyword arguments for rendering.
:return The rendered text of this node.
"""
return inner_text
class CenterTextTreeNode(TextAlignBaseTreeNode):
""" Center align text tree node class. """
canonical_tag_name = 'center'
alias_tag_names = ()
text_alignment = 'center'
class LeftTextTreeNode(TextAlignBaseTreeNode):
""" Left align text tree node class. """
canonical_tag_name = 'left'
alias_tag_names = ()
text_alignment = 'left'
class RightTextTreeNode(TextAlignBaseTreeNode):
""" Right align text tree node class. """
canonical_tag_name = 'right'
alias_tag_names = ()
text_alignment = 'right'
class JustifyTextTreeNode(TextAlignBaseTreeNode):
""" Justify align text tree node class. """
canonical_tag_name = 'justify'
alias_tag_names = ()
text_alignment = 'justify'
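

# A quick sketch of the rendered output, formatting the class template
# directly rather than building a full document tree:
#
#   CenterTextTreeNode.html_render_template.format(
#       text_alignment=CenterTextTreeNode.text_alignment, inner_html='Hello')
#   # -> '<p class="text-center">Hello</p>\n'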
| TamiaLab/PySkCode | skcode/tags/textalign.py | Python | agpl-3.0 | 1,809 | 0.001106 |
import StringIO
import traceback
from java.lang import StringBuffer #@UnresolvedImport
from java.lang import String #@UnresolvedImport
import java.lang #@UnresolvedImport
import sys
from _pydev_tipper_common import DoFind
try:
False
True
except NameError: # version < 2.3 -- didn't have the True/False builtins
import __builtin__
setattr(__builtin__, 'True', 1)
setattr(__builtin__, 'False', 0)
from org.python.core import PyReflectedFunction #@UnresolvedImport
from org.python import core #@UnresolvedImport
from org.python.core import PyClass #@UnresolvedImport
#completion types.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
return _imp(sub)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
raise RuntimeError(s)
def Find(name):
f = None
if name.startswith('__builtin__'):
if name == '__builtin__.str':
name = 'org.python.core.PyString'
elif name == '__builtin__.dict':
name = 'org.python.core.PyDictionary'
mod = _imp(name)
parent = mod
foundAs = ''
if hasattr(mod, '__file__'):
f = mod.__file__
components = name.split('.')
old_comp = None
for comp in components[1:]:
try:
#this happens in the following case:
#we have mx.DateTime.mxDateTime.mxDateTime.pyd
#but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
mod = getattr(mod, comp)
except AttributeError:
if old_comp != comp:
raise
if hasattr(mod, '__file__'):
f = mod.__file__
else:
if len(foundAs) > 0:
foundAs = foundAs + '.'
foundAs = foundAs + comp
old_comp = comp
return f, mod, parent, foundAs
def formatParamClassName(paramClassName):
if paramClassName.startswith('['):
if paramClassName == '[C':
paramClassName = 'char[]'
elif paramClassName == '[B':
paramClassName = 'byte[]'
elif paramClassName == '[I':
paramClassName = 'int[]'
elif paramClassName.startswith('[L') and paramClassName.endswith(';'):
paramClassName = paramClassName[2:-1]
paramClassName += '[]'
return paramClassName
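
# For example (a quick sketch of the JVM descriptor translation above):
#   formatParamClassName('[C')                  -> 'char[]'
#   formatParamClassName('[Ljava.lang.String;') -> 'java.lang.String[]'
#   formatParamClassName('int')                 -> 'int' (unchanged)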
def GenerateTip(data, log=None):
data = data.replace('\n', '')
if data.endswith('.'):
data = data.rstrip('.')
f, mod, parent, foundAs = Find(data)
tips = GenerateImportsTipForModule(mod)
return f, tips
#=======================================================================================================================
# Info
#=======================================================================================================================
class Info:
def __init__(self, name, **kwargs):
self.name = name
self.doc = kwargs.get('doc', None)
self.args = kwargs.get('args', ()) #tuple of strings
self.varargs = kwargs.get('varargs', None) #string
self.kwargs = kwargs.get('kwargs', None) #string
self.ret = kwargs.get('ret', None) #string
def basicAsStr(self):
'''@returns this class information as a string (just basic format)
'''
s = 'function:%s args=%s, varargs=%s, kwargs=%s, docs:%s' % \
(str(self.name), str(self.args), str(self.varargs), str(self.kwargs), str(self.doc))
return s
def getAsDoc(self):
s = str(self.name)
if self.doc:
s += '\n@doc %s\n' % str(self.doc)
if self.args:
s += '\n@params '
for arg in self.args:
s += str(formatParamClassName(arg))
s += ' '
if self.varargs:
s += '\n@varargs '
s += str(self.varargs)
if self.kwargs:
s += '\n@kwargs '
s += str(self.kwargs)
if self.ret:
s += '\n@return '
s += str(formatParamClassName(str(self.ret)))
return str(s)
def isclass(cls):
return isinstance(cls, core.PyClass)
def ismethod(func):
'''this function should return the information gathered on a function
@param func: this is the function we want to get info on
@return a tuple where:
0 = indicates whether the parameter passed is a method or not
1 = a list of classes 'Info', with the info gathered from the function
this is a list because when we have methods from java with the same name and different signatures,
we actually have many methods, each with its own set of arguments
'''
try:
if isinstance(func, core.PyFunction):
#ok, this is from python, created by jython
#print_ ' PyFunction'
def getargs(func_code):
"""Get information about the arguments accepted by a code object.
Three things are returned: (args, varargs, varkw), where 'args' is
a list of argument names (possibly containing nested lists), and
'varargs' and 'varkw' are the names of the * and ** arguments or None."""
nargs = func_code.co_argcount
names = func_code.co_varnames
args = list(names[:nargs])
varargs = None
if func_code.co_flags & func_code.CO_VARARGS:
varargs = func_code.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if func_code.co_flags & func_code.CO_VARKEYWORDS:
varkw = func_code.co_varnames[nargs]
return args, varargs, varkw
args = getargs(func.func_code)
return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]
if isinstance(func, core.PyMethod):
#this is something from java itself, and jython just wrapped it...
#things to play in func:
#['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
#'im_func', 'im_self', 'toString']
#print_ ' PyMethod'
#that's the PyReflectedFunction... keep going to get it
func = func.im_func
if isinstance(func, PyReflectedFunction):
#this is something from java itself, and jython just wrapped it...
#print_ ' PyReflectedFunction'
infos = []
for i in range(len(func.argslist)):
#things to play in func.argslist[i]:
#'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
#'flags', 'isStatic', 'matches', 'precedence']
#print_ ' ', func.argslist[i].data.__class__
#func.argslist[i].data.__class__ == java.lang.reflect.Method
if func.argslist[i]:
met = func.argslist[i].data
name = met.getName()
try:
ret = met.getReturnType()
except AttributeError:
ret = ''
parameterTypes = met.getParameterTypes()
args = []
for j in range(len(parameterTypes)):
paramTypesClass = parameterTypes[j]
try:
try:
paramClassName = paramTypesClass.getName()
except:
paramClassName = paramTypesClass.getName(paramTypesClass)
except AttributeError:
try:
paramClassName = repr(paramTypesClass) #should be something like <type 'object'>
paramClassName = paramClassName.split('\'')[1]
except:
paramClassName = repr(paramTypesClass) #just in case something else happens... it will at least be visible
#if the parameter equals [C, it means it it a char array, so, let's change it
a = formatParamClassName(paramClassName)
#a = a.replace('[]','Array')
#a = a.replace('Object', 'obj')
#a = a.replace('String', 's')
#a = a.replace('Integer', 'i')
#a = a.replace('Char', 'c')
#a = a.replace('Double', 'd')
args.append(a) #so we don't leave invalid code
info = Info(name, args=args, ret=ret)
#print_ info.basicAsStr()
infos.append(info)
return 1, infos
except Exception:
s = StringIO.StringIO()
traceback.print_exc(file=s)
return 1, [Info(str('ERROR'), doc=s.getvalue())]
return 0, None
def ismodule(mod):
#java modules... do we have other way to know that?
if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
and hasattr(mod, '__name__'):
return 1
return isinstance(mod, core.PyModule)
def dirObj(obj):
ret = []
found = java.util.HashMap()
original = obj
if hasattr(obj, '__class__'):
if obj.__class__ == java.lang.Class:
#get info about superclasses
classes = []
classes.append(obj)
try:
c = obj.getSuperclass()
except TypeError:
#may happen on jython when getting the java.lang.Class class
c = obj.getSuperclass(obj)
while c != None:
classes.append(c)
c = c.getSuperclass()
#get info about interfaces
interfs = []
for obj in classes:
try:
interfs.extend(obj.getInterfaces())
except TypeError:
interfs.extend(obj.getInterfaces(obj))
classes.extend(interfs)
#now is the time when we actually get info on the declared methods and fields
for obj in classes:
try:
declaredMethods = obj.getDeclaredMethods()
except TypeError:
declaredMethods = obj.getDeclaredMethods(obj)
try:
declaredFields = obj.getDeclaredFields()
except TypeError:
declaredFields = obj.getDeclaredFields(obj)
for i in range(len(declaredMethods)):
name = declaredMethods[i].getName()
ret.append(name)
found.put(name, 1)
for i in range(len(declaredFields)):
name = declaredFields[i].getName()
ret.append(name)
found.put(name, 1)
elif isclass(obj.__class__):
d = dir(obj.__class__)
for name in d:
ret.append(name)
found.put(name, 1)
#this simple dir does not always get all the info, that's why we have the part before
#(e.g.: if we do a dir on String, some methods that are from other interfaces such as
#charAt don't appear)
d = dir(original)
for name in d:
if found.get(name) != 1:
ret.append(name)
return ret
def formatArg(arg):
'''formats an argument to be shown
'''
s = str(arg)
dot = s.rfind('.')
if dot >= 0:
s = s[dot + 1:]
s = s.replace(';', '')
s = s.replace('[]', 'Array')
if len(s) > 0:
c = s[0].lower()
s = c + s[1:]
return s
def Search(data):
'''@return file, line, col
'''
data = data.replace('\n', '')
if data.endswith('.'):
data = data.rstrip('.')
f, mod, parent, foundAs = Find(data)
try:
return DoFind(f, mod), foundAs
except:
return DoFind(f, parent), foundAs
def GenerateImportsTipForModule(obj_to_complete, dirComps=None, getattr=getattr, filter=lambda name:True):
'''
@param obj_to_complete: the object from where we should get the completions
@param dirComps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
@param getattr: the way to get a given object from the obj_to_complete (used for the completer)
@param filter: a callable that receives the name and decides if it should be appended or not to the results
@return: list of tuples, so that each tuple represents a completion with:
name, doc, args, type (from the TYPE_* constants)
'''
ret = []
if dirComps is None:
dirComps = dirObj(obj_to_complete)
for d in dirComps:
if d is None:
continue
if not filter(d):
continue
args = ''
doc = ''
retType = TYPE_BUILTIN
try:
obj = getattr(obj_to_complete, d)
except (AttributeError, java.lang.NoClassDefFoundError):
#jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
#we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
#for that we need a custom classloader... we have references from it in the below places:
#
#http://mindprod.com/jgloss/classloader.html
#http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
#http://freshmeat.net/articles/view/1643/
#
#note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
#before the run, everything goes fine.
#
            #The code below illustrates what I mean...
#
#import sys
#sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
#
#import junit.framework
#print_ dir(junit.framework) #shows the TestCase class here
#
#import junit.framework.TestCase
#
#raises the error:
#Traceback (innermost last):
# File "<console>", line 1, in ?
#ImportError: No module named TestCase
#
#whereas if we had added the jar to the classpath before, everything would be fine by now...
ret.append((d, '', '', retType))
#that's ok, private things cannot be gotten...
continue
else:
isMet = ismethod(obj)
if isMet[0]:
info = isMet[1][0]
try:
args, vargs, kwargs = info.args, info.varargs, info.kwargs
doc = info.getAsDoc()
r = ''
for a in (args):
if len(r) > 0:
r += ', '
r += formatArg(a)
args = '(%s)' % (r)
except TypeError:
traceback.print_exc()
args = '()'
retType = TYPE_FUNCTION
elif isclass(obj):
retType = TYPE_CLASS
elif ismodule(obj):
retType = TYPE_IMPORT
#add token and doc to return - assure only strings.
ret.append((d, doc, args, retType))
return ret
if __name__ == "__main__":
sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar')
sys.stdout.write('%s\n' % Find('junit.framework.TestCase'))
| AMOboxTV/AMOBox.LegoBuild | script.module.pydevd/lib/_pydev_jy_imports_tipper.py | Python | gpl-2.0 | 16,814 | 0.012727 |
import sys, complete
from getpass import getpass
from argparse import ArgumentParser
from config import config
from file import load, save
from hooks import post_add
def add(conf):
parser = ArgumentParser(usage="%(prog)s add arguments")
parser.add_argument("-n", required=True, dest="name", help="password name")
parser.add_argument("-u", dest="user", help="user name")
parser.add_argument("-e", dest="email", help="email")
parser.add_argument("-o", nargs=2, action="append", dest="other", metavar=("NAME", "VALUE"), help="other informations")
parser.add_argument("-t", nargs="+", dest="tags", help="password tags")
complete.complete_options(parser, sys.argv[2])
args = parser.parse_args(sys.argv[2:])
if args.other != None:
other = dict()
for o in args.other:
other[o[0]] = o[1]
data = load(conf)
    pssw = getpass("enter password: ")
nrec = dict()
nrec["name"] = args.name
nrec["pass"] = pssw
if args.user != None:
nrec["user"] = args.user
if args.email != None:
nrec["mail"] = args.email
if args.other != None:
nrec["othr"] = other
if args.tags != None:
nrec["tags"] = args.tags
data.append(nrec)
save(conf, data)
post_add(conf, nrec["name"])
def get(conf):
parser = ArgumentParser(usage="%(prog)s get search terms")
parser.add_argument("search", nargs="+", help="search terms")
parser.add_argument("-p", dest="password", action="store_true", help="only password without ending new-line")
complete.complete_options(parser, sys.argv[2])
complete.complete_names_and_tags(conf, sys.argv[2])
args = parser.parse_args(sys.argv[2:])
def search(it):
name = it["name"] in args.search
tags = reduce(lambda b, s: b and "tags" in it and it["tags"].count(s) > 0, args.search, True)
return name or tags
for i, r in enumerate(filter(search, load(conf))):
if i > 0: print
if args.password:
sys.stdout.write(r["pass"])
else:
print "name:\t{0}".format(r["name"])
print "pass:\t{0}".format(r["pass"])
if "user" in r: print "user:\t{0}".format(r["user"])
if "mail" in r: print "mail:\t{0}".format(r["mail"])
for o in r["othr"] if "othr" in r else []:
print "{0}:\t{1}".format(o, r["othr"][o])
def main():
valid_actions = ["add", "get"]
actions_parser = ArgumentParser()
actions_parser.add_argument("action", choices=valid_actions, help="action to take")
actions = actions_parser.parse_args(sys.argv[1:2])
globals()[actions.action](config())
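
# Example invocations (a sketch; assumes an entry point that calls main()):
#
#   passwdk add -n github -u alice -e alice@example.com -t work code
#   passwdk get github
#   passwdk get work -p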
| lkrotowski/passwdk | src/passwdk/main.py | Python | gpl-3.0 | 2,401 | 0.03082 |
#!/usr/bin/env python
"""Test faster version of sematic similarity"""
from __future__ import print_function
# Computing basic semantic similarities between GO terms
# Adapted from book chapter written by _Alex Warwick Vesztrocy and Christophe Dessimoz_
# How to compute semantic similarity between GO terms.
# First we need to write a function that calculates the minimum number
# of branches connecting two GO terms.
import os
import timeit
from collections import Counter
## from goatools.base import get_godag
## from goatools.associations import dnld_assc
## from goatools.semantic import semantic_similarity
## from goatools.semantic import TermCounts
## from goatools.semantic import get_info_content
## from goatools.semantic import deepest_common_ancestor
## from goatools.semantic import resnik_sim
## from goatools.semantic import lin_sim
## from goatools.godag.consts import NS2GO
from goatools.anno.gpad_reader import GpadReader
from goatools.semantic import TermCounts
from tests.utils import get_godag
from tests.utils import get_anno_fullname
from tests.utils import prt_hms
REPO = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
def test_semantic_similarity():
"""Test faster version of sematic similarity"""
godag_r0 = get_godag('go-basic.obo')
## godag_r1 = get_godag('go-basic.obo', optional_attrs=['relationship'])
annoobj = GpadReader(get_anno_fullname('goa_human.gpad'), godag=godag_r0)
ns2assoc = annoobj.get_ns2assc()
assoc = annoobj.get_id2gos('all')
# Get TermCounts for each namespace and for all namespaces
ns2tcnt = {ns:TermCounts(godag_r0, ns2assoc[ns]) for ns in ['BP', 'MF', 'CC']}
tic = timeit.default_timer()
tcntobj = TermCounts(godag_r0, assoc)
prt_hms(tic, 'CUR ACTUAL {N:,} TermCounts initialized'.format(N=len(tcntobj.gocnts)))
# Compare various TermCount counts
for nspc in ['BP', 'MF', 'CC']:
for goid, cnt in ns2tcnt[nspc].gocnts.items():
assert tcntobj.gocnts[goid] == cnt
# Compare old and new count
tic = timeit.default_timer()
gocnts_old = _old_init_count_terms(godag_r0, assoc.values())
assert gocnts_old
prt_hms(tic, 'OLD EXPECTED {N:,} TermCounts initialized'.format(N=len(gocnts_old)))
for goid, cnt_old in gocnts_old.items():
assert cnt_old == tcntobj.gocnts[goid]
def _old_init_count_terms(go2obj, annots_values):
'''
Fills in the counts and overall aspect counts.
'''
gocnts = Counter()
gonotindag = set()
# Fill gocnts with GO IDs in annotations and their corresponding counts
for terms in annots_values: # key is 'gene'
# Make a union of all the terms for a gene, if term parents are
# propagated but they won't get double-counted for the gene
allterms = set()
for go_id in terms:
goobj = go2obj.get(go_id, None)
if goobj is not None:
allterms.add(go_id)
allterms |= goobj.get_all_parents()
else:
gonotindag.add(go_id)
# Add 1 for each GO annotated to this gene product
for parent in allterms:
gocnts[parent] += 1
if gonotindag:
print("{N} Assc. GO IDs not found in the GODag\n".format(N=len(gonotindag)))
return gocnts
if __name__ == '__main__':
test_semantic_similarity()
| tanghaibao/goatools | tests/test_semantic_faster.py | Python | bsd-2-clause | 3,361 | 0.005653 |
class Solution(object):
    def count_bits(self, n):
        # HAKMEM-style popcount: subtracting shifted copies leaves the bit
        # count of each 3-bit group (octal masks 0o3333... and 0o1111...);
        # adjacent groups are then summed and reduced modulo 63.
        c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
        return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
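
# A quick self-check sketch (hypothetical driver, not part of the judged
# solution): compare against a naive popcount for small inputs.
#
#   s = Solution()
#   assert s.countBits(16) == [bin(i).count('1') for i in xrange(17)]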
| ckclark/leetcode | py/counting-bits.py | Python | apache-2.0 | 336 | 0.002976 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_host
short_description: Manages hosts on Apache CloudStack based clouds.
description:
- Create, update and remove hosts.
version_added: "2.3"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the host.
required: true
aliases: [ 'ip_address' ]
url:
description:
- Url of the host used to create a host.
- If not provided, C(http://) and param C(name) is used as url.
- Only considered if C(state=present) and host does not yet exist.
username:
description:
- Username for the host.
- Required if C(state=present) and host does not yet exist.
password:
description:
- Password for the host.
- Required if C(state=present) and host does not yet exist.
pod:
description:
- Name of the pod.
- Required if C(state=present) and host does not yet exist.
cluster:
description:
- Name of the cluster.
hypervisor:
description:
      - Name of the hypervisor.
- Required if C(state=present) and host does not yet exist.
choices: [ 'KVM', 'VMware', 'BareMetal', 'XenServer', 'LXC', 'HyperV', 'UCS', 'OVM', 'Simulator' ]
allocation_state:
description:
- Allocation state of the host.
    choices: [ 'enabled', 'disabled', 'maintenance' ]
host_tags:
description:
- Tags of the host.
aliases: [ host_tag ]
state:
description:
- State of the host.
default: 'present'
choices: [ 'present', 'absent' ]
zone:
description:
- Name of the zone in which the host should be deployed.
- If not set, default zone is used.
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure a host is present but disabled
local_action:
module: cs_host
name: ix-pod01-esx01.example.com
cluster: vcenter.example.com/ch-zrh-ix/pod01-cluster01
pod: pod01
zone: ch-zrh-ix-01
hypervisor: VMware
allocation_state: disabled
host_tags:
- perf
- gpu
- name: Ensure an existing host is disabled
local_action:
module: cs_host
name: ix-pod01-esx01.example.com
zone: ch-zrh-ix-01
allocation_state: disabled
- name: Ensure an existing host is enabled
local_action:
module: cs_host
name: ix-pod01-esx01.example.com
zone: ch-zrh-ix-01
allocation_state: enabled
- name: Ensure a host is absent
local_action:
module: cs_host
name: ix-pod01-esx01.example.com
zone: ch-zrh-ix-01
state: absent
'''
RETURN = '''
---
capabilities:
description: Capabilities of the host.
returned: success
type: string
sample: hvm
cluster:
description: Cluster of the host.
returned: success
type: string
sample: vcenter.example.com/zone/cluster01
cluster_type:
description: Type of the cluster of the host.
returned: success
type: string
sample: ExternalManaged
cpu_allocated:
description: Amount in percent of the host's CPU currently allocated.
returned: success
type: string
sample: 166.25%
cpu_number:
description: Number of CPUs of the host.
returned: success
type: string
sample: 24
cpu_sockets:
description: Number of CPU sockets of the host.
returned: success
type: int
sample: 2
cpu_speed:
description: CPU speed in Mhz
returned: success
type: int
sample: 1999
cpu_used:
description: Amount of the host's CPU currently used.
returned: success
type: string
sample: 33.6%
cpu_with_overprovisioning:
description: Amount of the host's CPU after applying the cpu.overprovisioning.factor.
returned: success
type: string
sample: 959520.0
created:
description: Date when the host was created.
returned: success
type: string
sample: 2015-05-03T15:05:51+0200
disconnected:
description: Date when the host was disconnected.
returned: success
type: string
sample: 2015-05-03T15:05:51+0200
disk_size_allocated:
description: Host's currently allocated disk size.
returned: success
type: int
sample: 2593
disk_size_total:
description: Total disk size of the host
returned: success
type: int
sample: 259300
events:
description: Events available for the host
returned: success
type: string
sample: "Ping; HostDown; AgentConnected; AgentDisconnected; PingTimeout; ShutdownRequested; Remove; StartAgentRebalance; ManagementServerDown"
ha_host:
description: Whether the host is a HA host.
returned: success
type: bool
sample: false
has_enough_capacity:
description: Whether the host has enough CPU and RAM capacity to migrate a VM to it.
returned: success
type: bool
sample: true
host_tags:
description: Comma-separated list of tags for the host.
returned: success
type: string
sample: "perf"
hypervisor:
description: Host's hypervisor.
returned: success
type: string
sample: VMware
hypervisor_version:
description: Hypervisor version.
returned: success
type: string
sample: 5.1
ip_address:
description: IP address of the host
returned: success
type: string
sample: 10.10.10.1
is_local_storage_active:
description: Whether the local storage is available or not.
returned: success
type: bool
sample: false
last_pinged:
description: Date and time the host was last pinged.
returned: success
type: string
sample: "1970-01-17T17:27:32+0100"
management_server_id:
description: Management server ID of the host.
returned: success
type: int
sample: 345050593418
memory_allocated:
description: Amount of the host's memory currently allocated.
returned: success
type: int
sample: 69793218560
memory_total:
description: Total of memory of the host.
returned: success
type: int
sample: 206085263360
memory_used:
description: Amount of the host's memory currently used.
returned: success
type: int
sample: 65504776192
name:
description: Name of the host.
returned: success
type: string
sample: esx32.example.com
network_kbs_read:
description: Incoming network traffic on the host.
returned: success
type: int
sample: 0
network_kbs_write:
description: Outgoing network traffic on the host.
returned: success
type: int
sample: 0
os_category:
description: OS category name of the host.
returned: success
type: string
sample: ...
out_of_band_management:
description: Host out-of-band management information.
returned: success
type: string
sample: ...
pod:
description: Pod name of the host.
returned: success
type: string
sample: Pod01
removed:
description: Date and time the host was removed.
returned: success
type: string
sample: "1970-01-17T17:27:32+0100"
resource_state:
description: Resource state of the host.
returned: success
type: string
sample: Enabled
allocation_state:
description: Allocation state of the host.
returned: success
type: string
sample: enabled
state:
description: State of the host.
returned: success
type: string
sample: Up
suitable_for_migration:
description: Whether this host is suitable (has enough capacity and satisfies all conditions like hosttags, max guests VM limit, etc) to migrate a VM
to it or not.
returned: success
type: string
sample: true
host_type:
description: Type of the host.
returned: success
type: string
sample: Routing
host_version:
description: Version of the host.
returned: success
type: string
sample: 4.5.2
gpu_group:
description: GPU cards present in the host.
returned: success
type: list
sample: []
zone:
description: Zone of the host.
returned: success
type: string
sample: zone01
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
CS_HYPERVISORS
)
import time
class AnsibleCloudStackHost(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackHost, self).__init__(module)
self.returns = {
'averageload': 'average_load',
'capabilities': 'capabilities',
'clustername': 'cluster',
'clustertype': 'cluster_type',
'cpuallocated': 'cpu_allocated',
'cpunumber': 'cpu_number',
'cpusockets': 'cpu_sockets',
'cpuspeed': 'cpu_speed',
'cpuused': 'cpu_used',
'cpuwithoverprovisioning': 'cpu_with_overprovisioning',
'disconnected': 'disconnected',
'details': 'details',
'disksizeallocated': 'disk_size_allocated',
'disksizetotal': 'disk_size_total',
'events': 'events',
'hahost': 'ha_host',
'hasenoughcapacity': 'has_enough_capacity',
'hypervisor': 'hypervisor',
'hypervisorversion': 'hypervisor_version',
'ipaddress': 'ip_address',
'islocalstorageactive': 'is_local_storage_active',
'lastpinged': 'last_pinged',
'managementserverid': 'management_server_id',
'memoryallocated': 'memory_allocated',
'memorytotal': 'memory_total',
'memoryused': 'memory_used',
'networkkbsread': 'network_kbs_read',
'networkkbswrite': 'network_kbs_write',
'oscategoryname': 'os_category',
'outofbandmanagement': 'out_of_band_management',
'podname': 'pod',
'removed': 'removed',
'resourcestate': 'resource_state',
'suitableformigration': 'suitable_for_migration',
'type': 'host_type',
'version': 'host_version',
'gpugroup': 'gpu_group',
}
# States only usable by the updateHost API
self.allocation_states_for_update = {
'enabled': 'Enable',
'disabled': 'Disable',
}
self.host = None
def get_pod(self, key=None):
pod_name = self.module.params.get('pod')
if not pod_name:
return None
args = {
'name': pod_name,
'zoneid': self.get_zone(key='id'),
}
pods = self.query_api('listPods', **args)
if pods:
return self._get_by_key(key, pods['pod'][0])
self.module.fail_json(msg="Pod %s not found" % pod_name)
def get_cluster(self, key=None):
cluster_name = self.module.params.get('cluster')
if not cluster_name:
return None
args = {
'name': cluster_name,
'zoneid': self.get_zone(key='id'),
}
clusters = self.query_api('listClusters', **args)
if clusters:
return self._get_by_key(key, clusters['cluster'][0])
self.module.fail_json(msg="Cluster %s not found" % cluster_name)
def get_host_tags(self):
host_tags = self.module.params.get('host_tags')
if host_tags is None:
return None
return ','.join(host_tags)
def get_host(self, refresh=False):
if self.host is not None and not refresh:
return self.host
name = self.module.params.get('name')
args = {
'zoneid': self.get_zone(key='id'),
'fetch_list': True,
}
res = self.query_api('listHosts', **args)
if res:
for h in res:
if name in [h['ipaddress'], h['name']]:
self.host = h
return self.host
def _handle_allocation_state(self, host):
allocation_state = self.module.params.get('allocation_state')
if not allocation_state:
return host
host = self._set_host_allocation_state(host)
        # If the host is already in maintenance and the target state is maintenance, there is nothing to do
if host['allocationstate'].lower() == allocation_state and allocation_state == 'maintenance':
return host
# Cancel maintenance if target state is enabled/disabled
elif allocation_state in list(self.allocation_states_for_update.keys()):
host = self.disable_maintenance(host)
host = self._update_host(host, self.allocation_states_for_update[allocation_state])
# Only an enabled host can put in maintenance
elif allocation_state == 'maintenance':
host = self._update_host(host, 'Enable')
host = self.enable_maintenance(host)
return host
    def _set_host_allocation_state(self, host):
        if host is None:
            # Guard against subscripting None; treat a missing host as enabled.
            host = {'allocationstate': 'Enable'}
        # Set host allocationstate to be disabled/enabled
        elif host['resourcestate'].lower() in list(self.allocation_states_for_update.keys()):
            host['allocationstate'] = self.allocation_states_for_update[host['resourcestate'].lower()]
        else:
            host['allocationstate'] = host['resourcestate']
        return host
def present_host(self):
host = self.get_host()
if not host:
host = self._create_host(host)
else:
host = self._update_host(host)
if host:
host = self._handle_allocation_state(host)
return host
def _get_url(self):
url = self.module.params.get('url')
if url:
return url
else:
return "http://%s" % self.module.params.get('name')
def _create_host(self, host):
required_params = [
'password',
'username',
'hypervisor',
'pod',
]
self.module.fail_on_missing_params(required_params=required_params)
self.result['changed'] = True
args = {
'hypervisor': self.module.params.get('hypervisor'),
'url': self._get_url(),
'username': self.module.params.get('username'),
'password': self.module.params.get('password'),
'podid': self.get_pod(key='id'),
'zoneid': self.get_zone(key='id'),
'clusterid': self.get_cluster(key='id'),
'hosttags': self.get_host_tags(),
}
if not self.module.check_mode:
host = self.query_api('addHost', **args)
host = host['host'][0]
return host
def _update_host(self, host, allocation_state=None):
args = {
'id': host['id'],
'hosttags': self.get_host_tags(),
'allocationstate': allocation_state,
}
if allocation_state is not None:
host = self._set_host_allocation_state(host)
if self.has_changed(args, host):
self.result['changed'] = True
if not self.module.check_mode:
host = self.query_api('updateHost', **args)
host = host['host']
return host
def absent_host(self):
host = self.get_host()
if host:
self.result['changed'] = True
args = {
'id': host['id'],
}
if not self.module.check_mode:
res = self.enable_maintenance(host)
if res:
res = self.query_api('deleteHost', **args)
return host
def enable_maintenance(self, host):
if host['resourcestate'] not in ['PrepareForMaintenance', 'Maintenance']:
self.result['changed'] = True
args = {
'id': host['id'],
}
if not self.module.check_mode:
res = self.query_api('prepareHostForMaintenance', **args)
self.poll_job(res, 'host')
host = self._poll_for_maintenance()
return host
def disable_maintenance(self, host):
if host['resourcestate'] in ['PrepareForMaintenance', 'Maintenance']:
self.result['changed'] = True
args = {
'id': host['id'],
}
if not self.module.check_mode:
res = self.query_api('cancelHostMaintenance', **args)
host = self.poll_job(res, 'host')
return host
def _poll_for_maintenance(self):
for i in range(0, 300):
time.sleep(2)
host = self.get_host(refresh=True)
if not host:
return None
elif host['resourcestate'] != 'PrepareForMaintenance':
return host
self.fail_json(msg="Polling for maintenance timed out")
def get_result(self, host):
super(AnsibleCloudStackHost, self).get_result(host)
if host:
self.result['allocation_state'] = host['resourcestate'].lower()
self.result['host_tags'] = host['hosttags'].split(',') if host.get('hosttags') else []
return self.result
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True, aliases=['ip_address']),
url=dict(),
password=dict(no_log=True),
username=dict(),
hypervisor=dict(choices=CS_HYPERVISORS),
allocation_state=dict(choices=['enabled', 'disabled', 'maintenance']),
pod=dict(),
cluster=dict(),
host_tags=dict(type='list', aliases=['host_tag']),
zone=dict(),
state=dict(choices=['present', 'absent'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_host = AnsibleCloudStackHost(module)
state = module.params.get('state')
if state == 'absent':
host = acs_host.absent_host()
else:
host = acs_host.present_host()
result = acs_host.get_result(host)
module.exit_json(**result)
if __name__ == '__main__':
main()
| hryamzik/ansible | lib/ansible/modules/cloud/cloudstack/cs_host.py | Python | gpl-3.0 | 18,456 | 0.000813 |
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.browser import user_agent
from telemetry.unittest_util import tab_test_case
class MobileUserAgentTest(tab_test_case.TabTestCase):
@classmethod
def CustomizeBrowserOptions(cls, options):
options.browser_user_agent_type = 'mobile'
@decorators.Disabled('chromeos') # crbug.com/483212
def testUserAgent(self):
ua = self._tab.EvaluateJavaScript('window.navigator.userAgent')
self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['mobile'])
class TabletUserAgentTest(tab_test_case.TabTestCase):
@classmethod
def CustomizeBrowserOptions(cls, options):
options.browser_user_agent_type = 'tablet'
@decorators.Disabled('chromeos') # crbug.com/483212
def testUserAgent(self):
ua = self._tab.EvaluateJavaScript('window.navigator.userAgent')
self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['tablet'])
class DesktopUserAgentTest(tab_test_case.TabTestCase):
@classmethod
def CustomizeBrowserOptions(cls, options):
options.browser_user_agent_type = 'desktop'
@decorators.Disabled('chromeos') # crbug.com/483212
def testUserAgent(self):
ua = self._tab.EvaluateJavaScript('window.navigator.userAgent')
self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['desktop'])
| SaschaMester/delicium | tools/telemetry/telemetry/internal/browser/user_agent_unittest.py | Python | bsd-3-clause | 1,431 | 0.009783 |
from PyQt4.QtGui import *
import pypipe.formats
import pypipe.basefile
from pypipe.core import pipeline
from widgets.combobox import ComboBox
class AddFileDialog(QDialog):
def __init__(self, parent=None):
super(AddFileDialog, self).__init__(parent)
self.formats_combo = ComboBox()
self.filename_edit = QLineEdit()
self.open_button = QPushButton('Open')
self.ok_button = QPushButton('&OK')
self.cancel_button = QPushButton('&Cancel')
self.setWindowTitle('Add file')
top_layout = QVBoxLayout()
top_layout.addWidget(QLabel('<b>File format:</b>'))
top_layout.addWidget(self.formats_combo)
top_layout.addWidget(QLabel('<b>File Name:</b>'))
center_layout = QHBoxLayout()
center_layout.addWidget(self.filename_edit)
center_layout.addWidget(self.open_button)
bottom_layout = QHBoxLayout()
bottom_layout.addWidget(self.ok_button)
bottom_layout.addWidget(self.cancel_button)
layout = QVBoxLayout()
layout.addLayout(top_layout)
layout.addLayout(center_layout)
layout.addLayout(bottom_layout)
self.setLayout(layout)
self.formats_combo.add_classes_from_module(pypipe.formats)
self.connect_all()
def connect_all(self):
self.cancel_button.clicked.connect(self.reject)
self.filename_edit.textChanged.connect(self.turn_ok_button)
self.formats_combo.currentIndexChanged.connect(self.turn_ok_button)
self.ok_button.clicked.connect(self.accept)
self.open_button.clicked.connect(self.open_file)
    def turn_ok_button(self):
        try:
            f = self.get_file()
        except pypipe.basefile.FileNotExistsError:
            self.ok_button.setEnabled(False)
            return
        self.ok_button.setEnabled(pipeline.can_add_file(f))
def open_file(self):
file_name = QFileDialog.getOpenFileName(self, 'Open file')
self.filename_edit.setText(file_name)
def get_file(self):
init = self.formats_combo.get_current_item()
path = str(self.filename_edit.text())
return init(path)
def exec_(self):
self.turn_ok_button()
super(AddFileDialog, self).exec_()
| ctlab/pypipe | pypipe-gui/windows/addfiledialog.py | Python | mit | 2,392 | 0 |
"""
Browser set up for acceptance tests.
"""
# pylint: disable=no-member
# pylint: disable=unused-argument
from base64 import encodestring
from json import dumps
from logging import getLogger
import requests
from django.conf import settings
from django.core.management import call_command
from lettuce import after, before, world
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from splinter.browser import Browser
import xmodule.modulestore.django
from xmodule.contentstore.django import _CONTENTSTORE
LOGGER = getLogger(__name__)
LOGGER.info("Loading the lettuce acceptance testing terrain file...")
MAX_VALID_BROWSER_ATTEMPTS = 20
GLOBAL_SCRIPT_TIMEOUT = 60
def get_saucelabs_username_and_key():
"""
Returns the Sauce Labs username and access ID as set by environment variables
"""
return {"username": settings.SAUCE.get('USERNAME'), "access-key": settings.SAUCE.get('ACCESS_ID')}
def set_saucelabs_job_status(jobid, passed=True):
"""
Sets the job status on sauce labs
"""
config = get_saucelabs_username_and_key()
url = 'http://saucelabs.com/rest/v1/{}/jobs/{}'.format(config['username'], world.jobid)
body_content = dumps({"passed": passed})
base64string = encodestring('{}:{}'.format(config['username'], config['access-key']))[:-1]
headers = {"Authorization": "Basic {}".format(base64string)}
result = requests.put(url, data=body_content, headers=headers)
return result.status_code == 200
def make_saucelabs_desired_capabilities():
"""
Returns a DesiredCapabilities object corresponding to the environment sauce parameters
"""
desired_capabilities = settings.SAUCE.get('BROWSER', DesiredCapabilities.CHROME)
desired_capabilities['platform'] = settings.SAUCE.get('PLATFORM')
desired_capabilities['version'] = settings.SAUCE.get('VERSION')
desired_capabilities['device-type'] = settings.SAUCE.get('DEVICE')
desired_capabilities['name'] = settings.SAUCE.get('SESSION')
desired_capabilities['build'] = settings.SAUCE.get('BUILD')
desired_capabilities['video-upload-on-pass'] = False
desired_capabilities['sauce-advisor'] = False
desired_capabilities['capture-html'] = True
desired_capabilities['record-screenshots'] = True
desired_capabilities['selenium-version'] = "2.34.0"
desired_capabilities['max-duration'] = 3600
desired_capabilities['public'] = 'public restricted'
return desired_capabilities
@before.harvest
def initial_setup(server):
"""
Launch the browser once before executing the tests.
"""
world.absorb(settings.LETTUCE_SELENIUM_CLIENT, 'LETTUCE_SELENIUM_CLIENT')
if world.LETTUCE_SELENIUM_CLIENT == 'local':
browser_driver = getattr(settings, 'LETTUCE_BROWSER', 'chrome')
if browser_driver == 'chrome':
desired_capabilities = DesiredCapabilities.CHROME
desired_capabilities['loggingPrefs'] = {
'browser': 'ALL',
}
else:
desired_capabilities = {}
# There is an issue with ChromeDriver2 r195627 on Ubuntu
# in which we sometimes get an invalid browser session.
# This is a work-around to ensure that we get a valid session.
success = False
num_attempts = 0
while (not success) and num_attempts < MAX_VALID_BROWSER_ATTEMPTS:
# Load the browser and try to visit the main page
# If the browser couldn't be reached or
# the browser session is invalid, this will
# raise a WebDriverException
try:
if browser_driver == 'firefox':
# Lettuce initializes differently for firefox, and sending
# desired_capabilities will not work. So initialize without
# sending desired_capabilities.
world.browser = Browser(browser_driver)
else:
world.browser = Browser(browser_driver, desired_capabilities=desired_capabilities)
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
world.visit('/')
except WebDriverException:
LOGGER.warn("Error acquiring %s browser, retrying", browser_driver, exc_info=True)
if hasattr(world, 'browser'):
world.browser.quit()
num_attempts += 1
else:
success = True
# If we were unable to get a valid session within the limit of attempts,
# then we cannot run the tests.
if not success:
raise IOError("Could not acquire valid {driver} browser session.".format(driver=browser_driver))
world.absorb(0, 'IMPLICIT_WAIT')
world.browser.driver.set_window_size(1280, 1024)
elif world.LETTUCE_SELENIUM_CLIENT == 'saucelabs':
config = get_saucelabs_username_and_key()
world.browser = Browser(
'remote',
url="http://{}:{}@ondemand.saucelabs.com:80/wd/hub".format(config['username'], config['access-key']),
**make_saucelabs_desired_capabilities()
)
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
elif world.LETTUCE_SELENIUM_CLIENT == 'grid':
world.browser = Browser(
'remote',
url=settings.SELENIUM_GRID.get('URL'),
browser=settings.SELENIUM_GRID.get('BROWSER'),
)
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
else:
raise Exception("Unknown selenium client '{}'".format(world.LETTUCE_SELENIUM_CLIENT))
world.browser.driver.implicitly_wait(world.IMPLICIT_WAIT)
world.absorb(world.browser.driver.session_id, 'jobid')
@before.each_scenario
def reset_data(scenario):
"""
Clean out the django test database defined in the
envs/acceptance.py file: edx-platform/db/test_edx.db
"""
LOGGER.debug("Flushing the test database...")
call_command('flush', interactive=False, verbosity=0)
world.absorb({}, 'scenario_dict')
@before.each_scenario
def configure_screenshots(scenario):
"""
Before each scenario, turn off automatic screenshots.
    Args:
        scenario: the current scenario.
"""
world.auto_capture_screenshots = False
@after.each_scenario
def clear_data(scenario):
world.spew('scenario_dict')
@after.each_scenario
def reset_databases(scenario):
    """
    After each scenario, all databases are cleared/dropped. Contentstore data are stored in unique databases
    whereas modulestore data is in unique collection names. This data is created implicitly during the scenarios.
    If no data is created during the test, these lines effectively do nothing.
    """
xmodule.modulestore.django.modulestore()._drop_database() # pylint: disable=protected-access
xmodule.modulestore.django.clear_existing_modulestores()
_CONTENTSTORE.clear()
@world.absorb
def capture_screenshot(image_name):
"""
Capture a screenshot outputting it to a defined directory.
This function expects only the name of the file. It will generate
the full path of the output screenshot.
    If the name contains spaces, they will be converted to underscores.
"""
output_dir = '{}/log/auto_screenshots'.format(settings.TEST_ROOT)
image_name = '{}/{}.png'.format(output_dir, image_name.replace(' ', '_'))
try:
world.browser.driver.save_screenshot(image_name)
except WebDriverException:
LOGGER.error("Could not capture a screenshot '{}'".format(image_name))
@after.each_scenario
def screenshot_on_error(scenario):
"""
Save a screenshot to help with debugging.
"""
if scenario.failed:
try:
output_dir = '{}/log'.format(settings.TEST_ROOT)
image_name = '{}/{}.png'.format(output_dir, scenario.name.replace(' ', '_'))
world.browser.driver.save_screenshot(image_name)
except WebDriverException:
LOGGER.error('Could not capture a screenshot')
@after.each_scenario
def capture_console_log(scenario):
"""
Save the console log to help with debugging.
"""
if scenario.failed:
log = world.browser.driver.get_log('browser')
try:
output_dir = '{}/log'.format(settings.TEST_ROOT)
file_name = '{}/{}.log'.format(output_dir, scenario.name.replace(' ', '_'))
with open(file_name, 'w') as output_file:
for line in log:
output_file.write("{}{}".format(dumps(line), '\n'))
except WebDriverException:
LOGGER.error('Could not capture the console log')
def capture_screenshot_for_step(step, when):
"""
Useful method for debugging acceptance tests that are run in Vagrant.
This method runs automatically before and after each step of an acceptance
test scenario. The variable:
world.auto_capture_screenshots
    either enables or disables the taking of screenshots. To change the
variable there is a convenient step defined:
I (enable|disable) auto screenshots
If you just want to capture a single screenshot at a desired point in code,
you should use the method:
world.capture_screenshot("image_name")
"""
if world.auto_capture_screenshots:
scenario_num = step.scenario.feature.scenarios.index(step.scenario) + 1
step_num = step.scenario.steps.index(step) + 1
step_func_name = step.defined_at.function.func_name
image_name = "{prefix:03d}__{num:03d}__{name}__{postfix}".format(
prefix=scenario_num,
num=step_num,
name=step_func_name,
postfix=when
)
world.capture_screenshot(image_name)
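# For example (hypothetical names), the third step of the first scenario,
# implemented by a step function called i_click_the_button, would be captured
# as "001__003__i_click_the_button__1_before.png" and
# "001__003__i_click_the_button__2_after.png" by the two hooks below.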
@before.each_step
def before_each_step(step):
capture_screenshot_for_step(step, '1_before')
@after.each_step
def after_each_step(step):
capture_screenshot_for_step(step, '2_after')
@after.harvest
def saucelabs_status(total):
"""
Collect data for saucelabs.
"""
if world.LETTUCE_SELENIUM_CLIENT == 'saucelabs':
set_saucelabs_job_status(world.jobid, total.scenarios_ran == total.scenarios_passed)
| pepeportela/edx-platform | common/djangoapps/terrain/browser.py | Python | agpl-3.0 | 10,330 | 0.001742 |
import unittest
from katas.kyu_6.help_the_bookseller import stock_list
class StockListTestCase(unittest.TestCase):
def setUp(self):
self.a = ['ABAR 200', 'CDXE 500', 'BKWR 250', 'BTSQ 890', 'DRTY 600']
self.b = ['A', 'B']
def test_equals(self):
self.assertEqual(stock_list(self.a, self.b), '(A : 200) - (B : 1140)')
def test_equals_2(self):
self.assertEqual(stock_list(self.a, []), '')
def test_equals_3(self):
self.assertEqual(stock_list([], self.b), '')
| the-zebulan/CodeWars | tests/kyu_6_tests/test_help_the_bookseller.py | Python | mit | 518 | 0 |
# -*- coding: utf-8 -*-
"""
Package with support for target classification on image forming sensors.
---
type:
python_module
validation_level:
v00_minimum
protection:
k00_public
copyright:
"Copyright 2016 High Integrity Artificial Intelligence Systems"
license:
"Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an AS IS BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."
...
"""
__all__ = ()
| wtpayne/hiai | a3_src/h20_capability/sensor/imaging/classify/__init__.py | Python | apache-2.0 | 863 | 0 |
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
import os.path
import argparse
from . import censuscsv
from . import dbfwriter
def main():
'''Command line util for converting census CSV to DBF'''
parser = argparse.ArgumentParser(description='Convert a US Census csv to dbf.')
parser.add_argument('input', type=str, help='input csv')
parser.add_argument('-o', '--output', metavar='dbf', type=str, help='output dbf file. If omitted, dbf will have same name as csv. Use "-" for stdout.')
    parser.add_argument('-i', '--include-cols', metavar='cols', type=str, help='A comma-delimited list of the columns to include', default='')
parser.add_argument('--dd', default=False, action='store_true', help='output a data dictionary made from the header')
args = parser.parse_args()
    output_file = args.output
    # Compute the base name up front; --dd below derives its file name from it.
    base, _ = os.path.splitext(args.input)
    if output_file is None:
        output_file = base + '.dbf'
    if output_file == '-':
        output_file = sys.stdout
    else:
        print(output_file)
if args.include_cols:
include_cols = args.include_cols.split(',')
else:
include_cols = None
with open(args.input, 'r') as handle:
# Parse the csv.
fields, numrows, reader = censuscsv.parse(handle, cols=include_cols)
        kwargs = {
            'numrows': numrows,
            'nulls': censuscsv.NULLS,
            'strip': r'\(r\d+\)'
        }
        # Write to dbf ('-' on the command line streams to stdout).
        if output_file is sys.stdout:
            dbfwriter.dbfwriter(sys.stdout, fields, reader, **kwargs)
        else:
            with open(output_file, 'w') as sink:
                dbfwriter.dbfwriter(sink, fields, reader, **kwargs)
if args.dd:
dd_file = base + '-data-dictionary.txt'
print(dd_file, file=sys.stderr)
censuscsv.write_dd(args.input, dd_file, include_cols=include_cols)
if __name__ == '__main__':
main()
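# Example invocations (illustrative file and column names; assumes the
# package exposes this entry point as a console script, otherwise run it
# via python -m):
#   census2dbf input.csv                    -> writes input.dbf
#   census2dbf input.csv -o - > out.dbf     -> streams the dbf to stdout
#   census2dbf input.csv --dd               -> also writes input-data-dictionary.txt
#   census2dbf input.csv -i GEOID,P0010001  -> keeps only the listed columns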
| fitnr/census2dbf | census2dbf/cli.py | Python | gpl-3.0 | 1,842 | 0.002714 |
import os
import pygame
import sys
import threading, time
from pygame.locals import *
import logging
log = logging.getLogger('pytality.term.pygame')
log.debug("pygame version: %r", pygame.version.ver)
"""
A mapping of special keycodes into representative strings.
Based off the keymap in WConio, but with 'alt', 'ctrl', and 'shift'
stripped in order to be portable with the other pytality backends.
"""
key_map = {
K_RETURN: 'enter',
K_F1 : 'f1',
K_F2 : 'f2',
K_F3 : 'f3',
K_F4 : 'f4',
K_F5 : 'f5',
K_F6 : 'f6',
K_F7 : 'f7',
K_F8 : 'f8',
K_F9 : 'f9',
K_F10 : 'f10',
K_INSERT : 'ins',
K_DELETE : 'del',
K_HOME : 'home',
K_END : 'end',
K_PAGEDOWN : 'pgdn',
K_PAGEUP : 'pgup',
K_DOWN : 'down',
K_LEFT : 'left',
K_RIGHT : 'right',
K_UP : 'up',
}
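# For example, a KEYDOWN event carrying K_F1 is reported to callers as the
# string 'f1'; keys absent from this map fall through to event.unicode in
# translate() inside prepare_raw_getkey() below.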
#image path
#todo: figure out how I want to make this configurable
if hasattr(sys, 'frozen'):
base_path = os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'data')
else:
base_path = os.path.join(os.path.dirname(__file__), 'silverlight_html', 'images')
#pixel dimensions of each cell
W = 8
H = 12
#loaded sprite data
sprites = {}
#have we quit?
quit = False
#blinky cursor stuff
cursor_thread = None
replaced_character = None
cursor_x = 0
cursor_y = 0
cursor_type = None
class CursorThread(threading.Thread):
def __init__(self, *args, **kwargs):
super(CursorThread, self).__init__(*args, **kwargs)
self.quitEvent = threading.Event()
def run(self):
blink = True
while True:
blink = not blink
try:
pygame.event.post(pygame.event.Event(USEREVENT, blink=blink))
except pygame.error:
return
if self.quitEvent.wait(timeout=0.5):
break
def init(use_cp437=True, blink=False):
pygame.init()
#There are several kinds of event we are patently not interested in
pygame.event.set_blocked([
MOUSEBUTTONUP,
JOYAXISMOTION, JOYBALLMOTION, JOYHATMOTION, JOYBUTTONDOWN, JOYBUTTONUP,
#we only need KEYDOWN
KEYUP
])
pygame.mouse.set_visible(False)
#prepare the raw_getkey generator
prepare_raw_getkey()
global quit
quit = False
#spawn a blinky-cursor manager
global cursor_thread, replaced_character, cursor_x, cursor_y, cursor_type
cursor_x = 0
cursor_y = 0
replaced_character = None
cursor_type = None
if blink:
cursor_thread = CursorThread()
cursor_thread.daemon = True
cursor_thread.start()
def load_sprites():
if 'bg' in sprites:
#we only need to load once
return
def load_image(key_name, *filepath):
full_path = os.path.join(base_path, *filepath)
surface = pygame.image.load(full_path).convert_alpha()
sprites[key_name] = surface
load_image('bg', 'colors.png')
for color_id in range(16):
load_image(color_id, 'char', '%s.png' % color_id)
def blink_cursor(event):
global replaced_character
if event.blink:
replace_character()
else:
restore_character()
def replace_character():
global replaced_character
if not cursor_type:
return
fg, bg, ch = get_at(cursor_x, cursor_y)
replaced_character = (cursor_x, cursor_y, fg, bg, ch)
new_fg = 15
if bg == 15:
new_fg = 7
blit_at(cursor_x, cursor_y, new_fg, bg, cursor_type)
pygame.display.flip()
def restore_character():
global replaced_character
if not replaced_character:
return
x, y, fg, bg, ch = replaced_character
blit_at(x, y, fg, bg, ch)
pygame.display.flip()
replaced_character = None
#----------------------------------------------------------------------------
#Actual functions
def flip():
#keep the event queue happy
for event in pygame.event.get([
#this should be all the event types we aren't blocking
#and aren't about keyboard input
QUIT,
ACTIVEEVENT,
VIDEORESIZE,
VIDEOEXPOSE,
USEREVENT
]):
if event.type == QUIT:
raise KeyboardInterrupt()
elif event.type == USEREVENT:
blink_cursor(event)
else:
#we don't actually care
pass
#flip the screen
pygame.display.flip()
def clear():
if quit:
return
screen.fill((0, 0, 0))
global cell_data
cell_data = [
[
[0, 0, ' ']
for cell in range(max_x)
]
for row in range(max_y)
]
def resize(width, height):
global screen
screen = pygame.display.set_mode((width*W, height*H))
#we don't use alpha, and turning it off makes it a tad faster
screen.set_alpha(None)
#load the console images to blit later
load_sprites()
#set our max dimensions
global max_x, max_y
max_x, max_y = width, height
clear()
flip()
def reset():
pygame.display.quit()
global quit
quit = True
if cursor_thread:
cursor_thread.quitEvent.set()
cursor_thread.join()
def move_cursor(x, y):
global cursor_x, cursor_y
restore_character()
cursor_x = x
cursor_y = y
replace_character()
def set_title(title):
pygame.display.set_caption(title)
def set_cursor_type(i):
global cursor_type
cursor_map = {
0: None,
1: '_',
2: chr(0xDB)
}
restore_character()
cursor_type = cursor_map[i]
def cache_sprite(fg, bg, ch):
bg_sprite = sprites['bg']
fg_sprite = sprites[fg]
index = ord(ch)
#coordinates on the bg sprite map
bg_x = bg * W
#coordinates on the fg sprite map
fg_x = (index % 16) * W
fg_y = int(index / 16) * H
cell_sprite = pygame.Surface((W, H))
#voodoo: this helps a little bit.
cell_sprite.set_alpha(None)
#blit the background and foreground to the cell
cell_sprite.blit(bg_sprite, dest=(0, 0), area=pygame.Rect(bg_x, 0, W, H))
cell_sprite.blit(fg_sprite, dest=(0, 0), area=pygame.Rect(fg_x, fg_y, W, H))
sprites[(fg, bg, ch)] = cell_sprite
return cell_sprite
def blit_at(x, y, fg, bg, ch):
#blit one character to the screen.
#because function calls are pricey, this is also inlined (ew) in draw_buffer, so the contents are kept short.
#coordinates on the screen
screen_x = x * W
screen_y = y * H
    #cache each (fg, bg, ch) cell we draw into a surface so it's easier to redraw.
#it's a little bit of a memory waste, and takes longer on the first draw, but we're dealing with ascii here
#so there's probably a lot of reuse.
try:
cell_sprite = sprites[(fg, bg, ch)]
except KeyError:
#make a new one
cell_sprite = cache_sprite(fg, bg, ch)
#blit the cell to the screen
screen.blit(cell_sprite, dest=(screen_x, screen_y))
def draw_buffer(source, start_x, start_y):
"""
render the buffer to our backing.
This is a hotpath, and there's more microoptimization here than i'd like, but FPS is kindof important.
"""
y = start_y
#lookups we can cache into locals
#i know, it's such a microoptimization, but this path qualifies as hot
local_cell_data, local_sprites, local_screen_blit = cell_data, sprites, screen.blit
local_W, local_H = W, H
screen_width, screen_height = max_x, max_y
source_width = source.width
is_overlay = source.is_overlay
for row in source._data:
if y < 0:
y += 1
continue
if y >= screen_height:
break
x = start_x
#do something analogous to row[:source.width]
#but without the pointless copy that requires
w = 0
for fg, bg, ch in row:
if x >= screen_width or w >= source_width:
break
if x >= 0:
#no need to blit if it's already identical
old_data = local_cell_data[y][x]
new_data = [fg, bg, ch]
if new_data != old_data and not (is_overlay and ch == ' '):
#draw it and remember the info for our cache
#this used to call blit_at but now it's inline.
try:
cell_sprite = sprites[(fg, bg, ch)]
except KeyError:
#make a new one
cell_sprite = cache_sprite(fg, bg, ch)
#blit the cell to the screen
local_screen_blit(cell_sprite, dest=(x*local_W, y*local_H))
#remember the info for the cache
local_cell_data[y][x] = new_data
x += 1
w += 1
y += 1
source.dirty = False
return
def get_at(x, y):
if x < 0 or x >= max_x or y < 0 or y >= max_y:
raise ValueError("get_at: Invalid coordinate (%r, %r)" % (x,y))
global cell_data
return cell_data[y][x]
def prepare_raw_getkey():
"""
It looks like pygame fully intends for you to process _all_ keyboard input at the moment you
look at the event queue.
    That won't do here, so we turn raw_getkey into a generator.
Worse, pygame.event.wait() can't filter by type and removes the event from the queue,
so we have to keep re-adding events we didn't want in the first place. Ugh.
"""
#this is weird - pygame turns off keyboard repeat by default, which you can re-enable
#by setting a delay in ms, but "what the system normally does" is not an option.
#it seems like 150ms delay and 15 keys-per-second is normalish.
pygame.key.set_repeat(150, 1000 / 15)
global raw_getkey
def translate(event):
if event.type == MOUSEMOTION:
x, y = event.pos
return ("mouse_motion", x / W, y / H)
if event.type == KEYDOWN:
log.debug("key event: %r", event.dict)
if event.key in key_map:
return key_map[event.key]
return event.unicode
if event.type == MOUSEBUTTONDOWN:
x, y = event.pos
return ("mouse_down", x / W, y / H)
def keypump():
items = []
event_types = [MOUSEMOTION, KEYDOWN, MOUSEBUTTONDOWN]
while True:
if not items:
if pygame.event.peek(event_types):
#there's keyboard input pending! great!
items.extend(pygame.event.get(event_types))
else:
#there's no keyboard input pending, so we need to take a nap until there is.
#if we get an event we dont care about, we have to put it back
#but if we put it back, .wait() will give it right back to us
#so we have to keep it around until we find what we want, then re-add it.
#ugh.
ignored_items = []
while True:
item = pygame.event.wait()
if item.type == USEREVENT:
blink_cursor(item)
elif item.type not in event_types:
ignored_items.append(item)
else:
items.append(item)
break
for ignored_item in ignored_items:
pygame.event.post(ignored_item)
yield translate(items.pop(0))
#assign the generator's next() method as raw_getkey
raw_getkey = keypump().next
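# Minimal driver sketch for this backend (illustrative only -- in practice
# these module-level functions are driven through pytality's frontend, and
# draw_buffer() is fed pytality buffer objects):
#
#   init(blink=True)
#   resize(80, 25)         # 80x25 cells -> a 640x300 px window at 8x12 each
#   set_title("demo")
#   draw_buffer(some_buffer, 0, 0)
#   flip()
#   key = raw_getkey()     # blocks until a key or mouse event arrives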
| jtruscott/ld27 | pytality/term_pygame.py | Python | bsd-3-clause | 11,584 | 0.009237 |
'''Defines the Special class for theia.'''
# Provides:
# class Special
# __init__
# lines
import numpy as np
from ..helpers import geometry, settings
from ..helpers.units import deg, cm, pi
from .optic import Optic
class Special(Optic):
'''
Special class.
    This class represents general optics whose actions on R and T are left
    to the user to specify. They are useful for special optics which are neither
reflective nor transmissive.
Actions:
* T on HR: user input
* R on HR: user input
* T on AR: user input
* R on AR: user input
**Note**: by default the actions of these objects are those of
beamsplitters (0, 0, 0, 0)
*=== Additional attributes with respect to the Optic class ===*
None
*=== Name ===*
Special
**Note**: the curvature of any surface is positive for a concave surface
(coating inside the sphere).
Thus kurv*HRNorm/|kurv| always points to the center
of the sphere of the surface, as is the convention for the lineSurfInter of
geometry module. Same for AR.
******* HRK > 0 and ARK > 0 ******* HRK > 0 and ARK < 0
***** ******** and |ARK| > |HRK|
H***A H*********A
***** ********
******* *******
'''
Name = "Special"
def __init__(self, Wedge = 0., Alpha = 0., X = 0., Y = 0., Z = 0.,
Theta = pi/2., Phi = 0., Diameter = 10.e-2,
HRr = .99, HRt = .01, ARr = .1, ARt = .9,
HRK = 0.01, ARK = 0, Thickness = 2.e-2,
N = 1.4585, KeepI = False,
RonHR = 0, TonHR = 0, RonAR = 0, TonAR = 0,
Ref = None):
'''Special optic initializer.
Parameters are the attributes.
Returns a special optic.
'''
# actions
TonHR = int(TonHR)
RonHR = int(RonHR)
TonAR = int(TonAR)
RonAR = int(RonAR)
# Initialize input data
N = float(N)
Wedge = float(Wedge)
Alpha = float(Alpha)
Theta = float(Theta)
Phi = float(Phi)
Diameter = float(Diameter)
Thickness = float(Thickness)
HRK = float(HRK)
ARK = float(ARK)
HRt = float(HRt)
HRr = float(HRr)
ARt = float(ARt)
ARr = float(ARr)
#prepare for mother initializer
HRNorm = np.array([np.sin(Theta)*np.cos(Phi),
np.sin(Theta) * np.sin(Phi),
np.cos(Theta)], dtype = np.float64)
HRCenter = np.array([X, Y, Z], dtype = np.float64)
#Calculate ARCenter and ARNorm with wedge and alpha and thickness:
ARCenter = HRCenter\
- (Thickness + .5 * np.tan(Wedge) * Diameter) * HRNorm
a,b = geometry.basis(HRNorm)
ARNorm = -np.cos(Wedge) * HRNorm\
+ np.sin(Wedge) * (np.cos(Alpha) * a\
+ np.sin(Alpha) * b)
super(Special, self).__init__(ARCenter = ARCenter, ARNorm = ARNorm,
N = N, HRK = HRK, ARK = ARK, ARr = ARr, ARt = ARt, HRr = HRr, HRt = HRt,
KeepI = KeepI, HRCenter = HRCenter, HRNorm = HRNorm,
Thickness = Thickness, Diameter = Diameter,
Wedge = Wedge, Alpha = Alpha,
TonHR = TonHR, RonHR = RonHR, TonAR = TonAR, RonAR = RonAR,
Ref = Ref)
#Warnings for console output
if settings.warning:
self.geoCheck("mirror")
def lines(self):
'''Returns the list of lines necessary to print the object.'''
sph = geometry.rectToSph(self.HRNorm)
return ["Special: %s {" % str(self.Ref),
"TonHR, RonHR: %s, %s" % (str(self.TonHR), str(self.RonHR)),
"TonAR, RonAR: %s, %s" % (str(self.TonAR), str(self.RonAR)),
"Thick: %scm" % str(self.Thick/cm),
"Diameter: %scm" % str(self.Dia/cm),
"Wedge: %sdeg" % str(self.Wedge/deg),
"Alpha: %sdeg" % str(self.Alpha/deg),
"HRCenter: %s" % str(self.HRCenter),
"HRNorm: (%s, %s)deg" % (str(sph[0]/deg), str(sph[1]/deg)),
"Index: %s" %str(self.N),
"HRKurv, ARKurv: %s, %s" % (str(self.HRK), str(self.ARK)),
"HRr, HRt, ARr, ARt: %s, %s, %s, %s" \
% (str(self.HRr), str(self.HRt), str(self.ARr), str(self.ARt)),
"}"]
| bandang0/theia | theia/optics/special.py | Python | gpl-3.0 | 4,456 | 0.02895 |
####################################
# Driftwood 2D Game Dev. Suite #
# entitymanager.py #
# Copyright 2014-2017 #
# Michael D. Reiley & Paul Merrill #
####################################
# **********
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# **********
import jsonschema
import sys
import traceback
from typing import ItemsView, List, Optional
import entity
import spritesheet
from __schema__ import _SCHEMA
from check import CHECK, CheckFailure  # used by the argument checks below; assumed to live alongside this module like entity and spritesheet
# Keep a reference to the entity module, which is overridden by the EntityManager.entity function later in the file.
# It is only overridden while inside type annotations.
_entity = entity
class EntityManager:
"""The Entity Manager
This class manages entities in the current area, as well as the persistent player entity.
Attributes:
driftwood: Base class instance.
player: The player entity.
collider: The collision callback. The callback must take as arguments the two entities that collided.
entities: The dictionary of Entity class instances for each entity. Stored by eid.
spritesheets: The dictionary of Spritesheet class instances for each sprite sheet. Sorted by filename.
"""
def __init__(self, driftwood):
"""EntityManager class initializer.
Args:
driftwood: Base class instance.
"""
self.driftwood = driftwood
self.player = None
self.collider = None
self.entities = {}
self.spritesheets = {}
self.__last_eid = -1
def __contains__(self, eid: int) -> bool:
if self.entity(eid):
return True
return False
def __getitem__(self, eid: int) -> Optional[entity.Entity]:
return self.entity(eid)
def __delitem__(self, eid: int) -> bool:
return self.kill(eid)
def __iter__(self) -> ItemsView:
return self.entities.items()
    def insert(self, filename: str, layer: int, x: int, y: int, template_vars: dict = {}) -> Optional[int]:
"""Insert an entity at a position in the area. Supports Jinja2.
Args:
filename: Filename of the JSON entity descriptor.
layer: Layer of insertion.
x: x-coordinate of insertion, by tile.
            y: y-coordinate of insertion, by tile.
            template_vars: Variables to substitute into the Jinja2 template.
Returns: Entity ID of new entity if succeeded, None if failed.
"""
# Input Check
try:
CHECK(filename, str)
CHECK(layer, int, _min=0)
CHECK(x, int, _min=0)
CHECK(y, int, _min=0)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "insert", "bad argument", e)
return None
if not self.driftwood.area.tilemap:
self.driftwood.log.msg("ERROR", "Entity", "insert", "no area loaded")
return None
data = self.driftwood.resource.request_template(filename, template_vars)
if not data:
self.driftwood.log.msg("ERROR", "Entity", "insert", "could not get resource", filename)
return None
schema = _SCHEMA["entity"]
self.__last_eid += 1
eid = self.__last_eid
# Attempt to validate against the schema.
try:
jsonschema.validate(data, schema)
except jsonschema.ValidationError:
self.driftwood.log.msg("ERROR", "Entity", "insert", "failed validation", filename)
traceback.print_exc(1, sys.stdout)
sys.stdout.flush()
return None
# Set movement mode.
if data["init"]["mode"] == "tile":
self.entities[eid] = entity.TileModeEntity(self)
elif data["init"]["mode"] == "pixel":
self.entities[eid] = entity.PixelModeEntity(self)
# Read the entity.
self.entities[eid]._read(filename, data, eid)
# Set the initial position.
self.entities[eid].x = x * self.driftwood.area.tilemap.tilewidth
self.entities[eid].y = y * self.driftwood.area.tilemap.tileheight
self.entities[eid].layer = layer
self.entities[eid].tile = self.driftwood.area.tilemap.layers[layer].tile(x, y)
# In pixel mode, record which tile(s) the entity occupies at first.
if self.entities[eid].mode == "pixel":
self.entities[eid]._occupies = [self.entities[eid].tile, None, None, None]
self.driftwood.area.changed = True
self.driftwood.log.info("Entity", "inserted", "{0} on layer {1} at position {2}t, {3}t".format(filename,
layer,
x, y))
# Function to call when inserting the entity.
if data["init"]["on_insert"]:
args = data["init"]["on_insert"].split(',')
self.driftwood.script.call(args[0], args[1], self.entities[eid])
# Function to call before killing the entity.
if data["init"]["on_kill"]:
self.entities[eid]._on_kill = data["init"]["on_kill"].split(',')
return eid
def entity(self, eid: int) -> Optional[entity.Entity]:
"""Retrieve an entity by eid.
Args:
eid: The Entity ID of the entity to retrieve.
Returns: Entity class instance if succeeded, None if failed.
"""
# Input Check
try:
CHECK(eid, int, _min=0)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "entity", "bad argument", e)
return None
if eid in self.entities:
return self.entities[eid]
return None
def entity_at(self, x: int, y: int) -> Optional[_entity.Entity]:
"""Retrieve an entity by pixel coordinate.
Args:
x: The x coordinate of the tile.
y: The y coordinate of the tile.
Returns: Entity class instance if succeeded, None if failed.
"""
# Input Check
try:
CHECK(x, int, _min=0)
CHECK(y, int, _min=0)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "entity_at", "bad argument", e)
return None
for eid in self.entities:
if self.entities[eid].x == x and self.entities[eid].y == y:
return self.entities[eid]
return None
def layer(self, layer: int) -> List[_entity.Entity]:
"""Retrieve a list of entities on a certain layer.
Args:
layer: Layer to find entities on.
Returns: List of Entity class instances.
"""
# Input Check
try:
CHECK(layer, int, _min=0)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "layer", "bad argument", e)
return []
ents = []
for eid in self.entities:
if self.entities[eid].layer == layer:
ents.append(self.entities[eid])
# Put them in order of eid so they don't switch around if we iterate them.
return sorted(ents, key=lambda by_eid: by_eid.eid)
def kill(self, eid: int) -> bool:
"""Kill an entity by eid.
Args:
eid: The Entity ID of the entity to kill.
Returns:
True if succeeded, False if failed.
"""
# Input Check
try:
CHECK(eid, int, _min=0)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "kill", "bad argument", e)
return False
if eid in self.entities:
if self.entities[eid]._on_kill: # Call a function before killing the entity.
self.driftwood.script.call(self.entities[eid]._on_kill[0], self.entities[eid]._on_kill[1],
self.entities[eid])
self.entities[eid]._terminate()
del self.entities[eid]
self.driftwood.area.changed = True
return True
self.driftwood.log.msg("WARNING", "Entity", "kill", "attempt to kill nonexistent entity", eid)
return False
def killall(self, filename: str) -> bool:
"""Kill all entities by filename.
Args:
filename: Filename of the JSON entity descriptor whose insertions should be killed.
Returns:
True if succeeded, False if failed.
"""
# Input Check
try:
CHECK(filename, str)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "killall", "bad argument", e)
return False
to_kill = []
for eid in self.entities:
if self.entities[eid].filename == filename:
to_kill += eid
for eid in to_kill:
if self.entities[eid]._on_kill: # Call a function before killing the entity.
self.driftwood.script.call(self.entities[eid]._on_kill[0], self.entities[eid]._on_kill[1],
self.entities[eid])
self.entities[eid]._terminate()
del self.entities[eid]
self.driftwood.area.changed = True
if to_kill:
return True
self.driftwood.log.msg("WARNING", "Entity", "killall", "attempt to kill nonexistent entities", filename)
return False
    def spritesheet(self, filename: str) -> Optional[spritesheet.Spritesheet]:
"""Retrieve a sprite sheet by its filename.
Args:
filename: Filename of the sprite sheet image.
Returns: Spritesheet class instance if succeeded, False if failed.
"""
# Input Check
try:
CHECK(filename, str)
except CheckFailure as e:
self.driftwood.log.msg("ERROR", "Entity", "spritesheet", "bad argument", e)
return None
for ss in self.spritesheets:
if self.spritesheets[ss].filename == filename:
return self.spritesheets[ss]
return False
def collision(self, a: _entity.Entity, b: _entity.Entity) -> bool:
"""Notify the collision callback, if set, that entity "a" has collided with entity or tile "b".
Args:
a: First colliding entity.
b: Second colliding entity or tile.
Returns:
True
"""
if self.collider:
self.collider(a, b)
return True
def _terminate(self) -> None:
"""Cleanup before deletion.
"""
for eid in self.entities:
self.entities[eid]._terminate()
self.entities = None
self.spritesheets = None
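# Illustrative usage from a game script (the driftwood handle name is an
# assumption -- scripts reach managers through the base class instance):
#
#   eid = driftwood.entity.insert("player.json", layer=0, x=5, y=3)
#   player = driftwood.entity.entity(eid)
#   driftwood.entity.kill(eid)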
| Driftwood2D/Driftwood | src/entitymanager.py | Python | mit | 11,732 | 0.002642 |
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.core.checks import Error, Warning as DjangoWarning
from django.db import connection, models
from django.test import SimpleTestCase, TestCase
from django.test.utils import isolate_apps, override_settings
from django.utils.timezone import now
@isolate_apps('invalid_models_tests')
class AutoFieldTests(SimpleTestCase):
def test_valid_case(self):
class Model(models.Model):
id = models.AutoField(primary_key=True)
field = Model._meta.get_field('id')
errors = field.check()
expected = []
self.assertEqual(errors, expected)
def test_primary_key(self):
# primary_key must be True. Refs #12467.
class Model(models.Model):
field = models.AutoField(primary_key=False)
# Prevent Django from autocreating `id` AutoField, which would
# result in an error, because a model must have exactly one
# AutoField.
another = models.IntegerField(primary_key=True)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
'AutoFields must set primary_key=True.',
obj=field,
id='fields.E100',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class BooleanFieldTests(SimpleTestCase):
def test_nullable_boolean_field(self):
class Model(models.Model):
field = models.BooleanField(null=True)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
'BooleanFields do not accept null values.',
hint='Use a NullBooleanField instead.',
obj=field,
id='fields.E110',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class CharFieldTests(TestCase):
def test_valid_field(self):
class Model(models.Model):
field = models.CharField(
max_length=255,
choices=[
('1', 'item1'),
('2', 'item2'),
],
db_index=True)
field = Model._meta.get_field('field')
errors = field.check()
expected = []
self.assertEqual(errors, expected)
def test_missing_max_length(self):
class Model(models.Model):
field = models.CharField()
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"CharFields must define a 'max_length' attribute.",
obj=field,
id='fields.E120',
),
]
self.assertEqual(errors, expected)
def test_negative_max_length(self):
class Model(models.Model):
field = models.CharField(max_length=-1)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
]
self.assertEqual(errors, expected)
def test_bad_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="bad")
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
]
self.assertEqual(errors, expected)
def test_str_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length='20')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
]
self.assertEqual(errors, expected)
def test_non_iterable_choices(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices='bad')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=field,
id='fields.E004',
),
]
self.assertEqual(errors, expected)
def test_choices_containing_non_pairs(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'choices' must be an iterable containing (actual value, human readable name) tuples.",
obj=field,
id='fields.E005',
),
]
self.assertEqual(errors, expected)
def test_bad_db_index_value(self):
class Model(models.Model):
field = models.CharField(max_length=10, db_index='bad')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'db_index' must be None, True or False.",
obj=field,
id='fields.E006',
),
]
self.assertEqual(errors, expected)
@unittest.skipUnless(connection.vendor == 'mysql',
"Test valid only for MySQL")
def test_too_long_char_field_under_mysql(self):
from django.db.backends.mysql.validation import DatabaseValidation
class Model(models.Model):
field = models.CharField(unique=True, max_length=256)
field = Model._meta.get_field('field')
validator = DatabaseValidation(connection=None)
errors = validator.check_field(field)
expected = [
Error(
'MySQL does not allow unique CharFields to have a max_length > 255.',
obj=field,
id='mysql.E001',
)
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class DateFieldTests(TestCase):
def test_auto_now_and_auto_now_add_raise_error(self):
class Model(models.Model):
field0 = models.DateTimeField(auto_now=True, auto_now_add=True, default=now)
field1 = models.DateTimeField(auto_now=True, auto_now_add=False, default=now)
field2 = models.DateTimeField(auto_now=False, auto_now_add=True, default=now)
field3 = models.DateTimeField(auto_now=True, auto_now_add=True, default=None)
expected = []
checks = []
for i in range(4):
field = Model._meta.get_field('field%d' % i)
expected.append(Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=field,
id='fields.E160',
))
checks.extend(field.check())
self.assertEqual(checks, expected)
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateField(default=now())
field_d = models.DateField(default=now().date())
field_now = models.DateField(default=now)
field_dt = Model._meta.get_field('field_dt')
field_d = Model._meta.get_field('field_d')
field_now = Model._meta.get_field('field_now')
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
expected = [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_dt,
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_d,
id='fields.W161',
)
]
maxDiff = self.maxDiff
self.maxDiff = None
self.assertEqual(errors, expected)
self.maxDiff = maxDiff
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps('invalid_models_tests')
class DateTimeFieldTests(TestCase):
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateTimeField(default=now())
field_d = models.DateTimeField(default=now().date())
field_now = models.DateTimeField(default=now)
field_dt = Model._meta.get_field('field_dt')
field_d = Model._meta.get_field('field_d')
field_now = Model._meta.get_field('field_now')
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
expected = [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_dt,
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_d,
id='fields.W161',
)
]
maxDiff = self.maxDiff
self.maxDiff = None
self.assertEqual(errors, expected)
self.maxDiff = maxDiff
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps('invalid_models_tests')
class DecimalFieldTests(SimpleTestCase):
def test_required_attributes(self):
class Model(models.Model):
field = models.DecimalField()
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=field,
id='fields.E130',
),
Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=field,
id='fields.E132',
),
]
self.assertEqual(errors, expected)
def test_negative_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits=-1, decimal_places=-1)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id='fields.E131',
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id='fields.E133',
),
]
self.assertEqual(errors, expected)
def test_bad_values_of_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits="bad", decimal_places="bad")
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id='fields.E131',
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id='fields.E133',
),
]
self.assertEqual(errors, expected)
def test_decimal_places_greater_than_max_digits(self):
class Model(models.Model):
field = models.DecimalField(max_digits=9, decimal_places=10)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=field,
id='fields.E134',
),
]
self.assertEqual(errors, expected)
def test_valid_field(self):
class Model(models.Model):
field = models.DecimalField(max_digits=10, decimal_places=10)
field = Model._meta.get_field('field')
errors = field.check()
expected = []
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class FileFieldTests(SimpleTestCase):
def test_valid_case(self):
class Model(models.Model):
field = models.FileField(upload_to='somewhere')
field = Model._meta.get_field('field')
errors = field.check()
expected = []
self.assertEqual(errors, expected)
def test_unique(self):
class Model(models.Model):
field = models.FileField(unique=False, upload_to='somewhere')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'unique' is not a valid argument for a FileField.",
obj=field,
id='fields.E200',
)
]
self.assertEqual(errors, expected)
def test_primary_key(self):
class Model(models.Model):
field = models.FileField(primary_key=False, upload_to='somewhere')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'primary_key' is not a valid argument for a FileField.",
obj=field,
id='fields.E201',
)
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class FilePathFieldTests(SimpleTestCase):
def test_forbidden_files_and_folders(self):
class Model(models.Model):
field = models.FilePathField(allow_files=False, allow_folders=False)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
obj=field,
id='fields.E140',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class GenericIPAddressFieldTests(SimpleTestCase):
def test_non_nullable_blank(self):
class Model(models.Model):
field = models.GenericIPAddressField(null=False, blank=True)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
('GenericIPAddressFields cannot have blank=True if null=False, '
'as blank values are stored as nulls.'),
obj=field,
id='fields.E150',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class ImageFieldTests(SimpleTestCase):
def test_pillow_installed(self):
try:
from PIL import Image # NOQA
except ImportError:
pillow_installed = False
else:
pillow_installed = True
class Model(models.Model):
field = models.ImageField(upload_to='somewhere')
field = Model._meta.get_field('field')
errors = field.check()
expected = [] if pillow_installed else [
Error(
'Cannot use ImageField because Pillow is not installed.',
hint=('Get Pillow at https://pypi.python.org/pypi/Pillow '
'or run command "pip install Pillow".'),
obj=field,
id='fields.E210',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class IntegerFieldTests(SimpleTestCase):
def test_max_length_warning(self):
class Model(models.Model):
value = models.IntegerField(max_length=2)
value = Model._meta.get_field('value')
errors = Model.check()
expected = [
DjangoWarning(
"'max_length' is ignored when used with IntegerField",
hint="Remove 'max_length' from field",
obj=value,
id='fields.W122',
)
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class TimeFieldTests(TestCase):
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.TimeField(default=now())
field_t = models.TimeField(default=now().time())
field_now = models.DateField(default=now)
field_dt = Model._meta.get_field('field_dt')
field_t = Model._meta.get_field('field_t')
field_now = Model._meta.get_field('field_now')
errors = field_dt.check()
errors.extend(field_t.check())
errors.extend(field_now.check()) # doesn't raise a warning
expected = [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_dt,
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_t,
id='fields.W161',
)
]
maxDiff = self.maxDiff
self.maxDiff = None
self.assertEqual(errors, expected)
self.maxDiff = maxDiff
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
| filias/django | tests/invalid_models_tests/test_ordinary_fields.py | Python | bsd-3-clause | 19,411 | 0.000515 |
from sha3 import sha3_256
from ethereum.utils import big_endian_to_int
def sha3(seed):
return sha3_256(bytes(seed)).digest()
# colors
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def DEBUG(*args, **kargs):
print(FAIL + repr(args) + repr(kargs) + ENDC)
colors = ['\033[9%dm' % i for i in range(0, 7)]
colors += ['\033[4%dm' % i for i in range(1, 8)]
num_colors = len(colors)
def cstr(num, txt):
if isinstance(num, bytes):
num = big_endian_to_int(num)
return '%s%s%s' % (colors[num % len(colors)], txt, ENDC)
def cprint(num, txt):
    print(cstr(num, txt))
def phx(x):
return x.encode('hex')[:8]
if __name__ == '__main__':
for i in range(len(colors)):
cprint(i, 'test')
| HydraChain/hydrachain | hydrachain/consensus/utils.py | Python | mit | 750 | 0.004 |
# subrepo.py - sub-repository handling for Mercurial
#
# Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import errno, os, re, shutil, posixpath, sys
import xml.dom.minidom
import stat, subprocess, tarfile
from i18n import _
import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
hg = None
propertycache = util.propertycache
nullstate = ('', '', 'empty')
def _expandedabspath(path):
'''
get a path or url and if it is a path expand it and return an absolute path
'''
expandedpath = util.urllocalpath(util.expandpath(path))
u = util.url(expandedpath)
if not u.scheme:
path = util.normpath(os.path.abspath(u.path))
return path
def _getstorehashcachename(remotepath):
'''get a unique filename for the store hash cache of a remote repository'''
return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
def _calcfilehash(filename):
data = ''
if os.path.exists(filename):
fd = open(filename, 'rb')
data = fd.read()
fd.close()
return util.sha1(data).hexdigest()
class SubrepoAbort(error.Abort):
"""Exception class used to avoid handling a subrepo error more than once"""
def __init__(self, *args, **kw):
error.Abort.__init__(self, *args, **kw)
self.subrepo = kw.get('subrepo')
self.cause = kw.get('cause')
def annotatesubrepoerror(func):
def decoratedmethod(self, *args, **kargs):
try:
res = func(self, *args, **kargs)
except SubrepoAbort, ex:
# This exception has already been handled
raise ex
except error.Abort, ex:
subrepo = subrelpath(self)
errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
# avoid handling this exception by raising a SubrepoAbort exception
raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
cause=sys.exc_info())
return res
return decoratedmethod
def state(ctx, ui):
"""return a state dict, mapping subrepo paths configured in .hgsub
to tuple: (source from .hgsub, revision from .hgsubstate, kind
(key in types dict))
"""
p = config.config()
def read(f, sections=None, remap=None):
if f in ctx:
try:
data = ctx[f].data()
except IOError, err:
if err.errno != errno.ENOENT:
raise
# handle missing subrepo spec files as removed
ui.warn(_("warning: subrepo spec file %s not found\n") % f)
return
p.parse(f, data, sections, remap, read)
else:
raise util.Abort(_("subrepo spec file %s not found") % f)
if '.hgsub' in ctx:
read('.hgsub')
for path, src in ui.configitems('subpaths'):
p.set('subpaths', path, src, ui.configsource('subpaths', path))
rev = {}
if '.hgsubstate' in ctx:
try:
for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
l = l.lstrip()
if not l:
continue
try:
revision, path = l.split(" ", 1)
except ValueError:
raise util.Abort(_("invalid subrepository revision "
"specifier in .hgsubstate line %d")
% (i + 1))
rev[path] = revision
except IOError, err:
if err.errno != errno.ENOENT:
raise
def remap(src):
for pattern, repl in p.items('subpaths'):
# Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
# does a string decode.
repl = repl.encode('string-escape')
# However, we still want to allow back references to go
# through unharmed, so we turn r'\\1' into r'\1'. Again,
# extra escapes are needed because re.sub string decodes.
repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
try:
src = re.sub(pattern, repl, src, 1)
except re.error, e:
raise util.Abort(_("bad subrepository pattern in %s: %s")
% (p.source('subpaths', pattern), e))
return src
state = {}
for path, src in p[''].items():
kind = 'hg'
if src.startswith('['):
if ']' not in src:
raise util.Abort(_('missing ] in subrepo source'))
kind, src = src.split(']', 1)
kind = kind[1:]
src = src.lstrip() # strip any extra whitespace after ']'
if not util.url(src).isabs():
parent = _abssource(ctx._repo, abort=False)
if parent:
parent = util.url(parent)
parent.path = posixpath.join(parent.path or '', src)
parent.path = posixpath.normpath(parent.path)
joined = str(parent)
# Remap the full joined path and use it if it changes,
# else remap the original source.
remapped = remap(joined)
if remapped == joined:
src = remap(src)
else:
src = remapped
src = remap(src)
state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
return state
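# Illustrative input for the parser above (hypothetical paths and hashes).
# A .hgsub entry maps a subrepo path to its source, optionally prefixed with
# a [kind] tag, and each .hgsubstate line pins a path to a revision:
#
#   .hgsub:       nested = https://example.com/nested
#                 vendor/lib = [git]https://example.com/lib.git
#   .hgsubstate:  dd2b9a7a2a4a nested
#                 f4d1c9a8b7e6 vendor/lib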
def writestate(repo, state):
"""rewrite .hgsubstate in (outer) repo with these subrepo states"""
lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
repo.wwrite('.hgsubstate', ''.join(lines), '')
def submerge(repo, wctx, mctx, actx, overwrite):
"""delegated from merge.applyupdates: merging of .hgsubstate file
in working context, merging context and ancestor context"""
if mctx == actx: # backwards?
actx = wctx.p1()
s1 = wctx.substate
s2 = mctx.substate
sa = actx.substate
sm = {}
repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
def debug(s, msg, r=""):
if r:
r = "%s:%s:%s" % r
repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
for s, l in sorted(s1.iteritems()):
a = sa.get(s, nullstate)
ld = l # local state with possible dirty flag for compares
if wctx.sub(s).dirty():
ld = (l[0], l[1] + "+")
if wctx == actx: # overwrite
a = ld
if s in s2:
r = s2[s]
if ld == r or r == a: # no change or local is newer
sm[s] = l
continue
elif ld == a: # other side changed
debug(s, "other changed, get", r)
wctx.sub(s).get(r, overwrite)
sm[s] = r
elif ld[0] != r[0]: # sources differ
if repo.ui.promptchoice(
_(' subrepository sources for %s differ\n'
'use (l)ocal source (%s) or (r)emote source (%s)?'
'$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
debug(s, "prompt changed, get", r)
wctx.sub(s).get(r, overwrite)
sm[s] = r
elif ld[1] == a[1]: # local side is unchanged
debug(s, "other side changed, get", r)
wctx.sub(s).get(r, overwrite)
sm[s] = r
else:
debug(s, "both sides changed")
option = repo.ui.promptchoice(
_(' subrepository %s diverged (local revision: %s, '
'remote revision: %s)\n'
'(M)erge, keep (l)ocal or keep (r)emote?'
'$$ &Merge $$ &Local $$ &Remote')
% (s, l[1][:12], r[1][:12]), 0)
if option == 0:
wctx.sub(s).merge(r)
sm[s] = l
debug(s, "merge with", r)
elif option == 1:
sm[s] = l
debug(s, "keep local subrepo revision", l)
else:
wctx.sub(s).get(r, overwrite)
sm[s] = r
debug(s, "get remote subrepo revision", r)
elif ld == a: # remote removed, local unchanged
debug(s, "remote removed, remove")
wctx.sub(s).remove()
elif a == nullstate: # not present in remote or ancestor
debug(s, "local added, keep")
sm[s] = l
continue
else:
if repo.ui.promptchoice(
_(' local changed subrepository %s which remote removed\n'
'use (c)hanged version or (d)elete?'
'$$ &Changed $$ &Delete') % s, 0):
debug(s, "prompt remove")
wctx.sub(s).remove()
for s, r in sorted(s2.items()):
if s in s1:
continue
elif s not in sa:
debug(s, "remote added, get", r)
mctx.sub(s).get(r)
sm[s] = r
elif r != sa[s]:
if repo.ui.promptchoice(
_(' remote changed subrepository %s which local removed\n'
'use (c)hanged version or (d)elete?'
'$$ &Changed $$ &Delete') % s, 0) == 0:
debug(s, "prompt recreate", r)
wctx.sub(s).get(r)
sm[s] = r
# record merged .hgsubstate
writestate(repo, sm)
return sm
def _updateprompt(ui, sub, dirty, local, remote):
if dirty:
msg = (_(' subrepository sources for %s differ\n'
'use (l)ocal source (%s) or (r)emote source (%s)?\n'
'$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
else:
msg = (_(' subrepository sources for %s differ (in checked out '
'version)\n'
'use (l)ocal source (%s) or (r)emote source (%s)?\n'
'$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
return ui.promptchoice(msg, 0)
def reporelpath(repo):
"""return path to this (sub)repo as seen from outermost repo"""
parent = repo
while util.safehasattr(parent, '_subparent'):
parent = parent._subparent
p = parent.root.rstrip(os.sep)
return repo.root[len(p) + 1:]
def subrelpath(sub):
"""return path to this subrepo as seen from outermost repo"""
if util.safehasattr(sub, '_relpath'):
return sub._relpath
if not util.safehasattr(sub, '_repo'):
return sub._path
return reporelpath(sub._repo)
def _abssource(repo, push=False, abort=True):
"""return pull/push path of repo - either based on parent repo .hgsub info
or on the top repo config. Abort or return None if no source found."""
if util.safehasattr(repo, '_subparent'):
source = util.url(repo._subsource)
if source.isabs():
return str(source)
source.path = posixpath.normpath(source.path)
parent = _abssource(repo._subparent, push, abort=False)
if parent:
parent = util.url(util.pconvert(parent))
parent.path = posixpath.join(parent.path or '', source.path)
parent.path = posixpath.normpath(parent.path)
return str(parent)
else: # recursion reached top repo
if util.safehasattr(repo, '_subtoppath'):
return repo._subtoppath
if push and repo.ui.config('paths', 'default-push'):
return repo.ui.config('paths', 'default-push')
if repo.ui.config('paths', 'default'):
return repo.ui.config('paths', 'default')
if repo.sharedpath != repo.path:
# chop off the .hg component to get the default path form
return os.path.dirname(repo.sharedpath)
if abort:
raise util.Abort(_("default path for subrepository not found"))
def itersubrepos(ctx1, ctx2):
"""find subrepos in ctx1 or ctx2"""
# Create a (subpath, ctx) mapping where we prefer subpaths from
# ctx1. The subpaths from ctx2 are important when the .hgsub file
# has been modified (in ctx2) but not yet committed (in ctx1).
subpaths = dict.fromkeys(ctx2.substate, ctx2)
subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
for subpath, ctx in sorted(subpaths.iteritems()):
yield subpath, ctx.sub(subpath)
def subrepo(ctx, path):
"""return instance of the right subrepo class for subrepo in path"""
# subrepo inherently violates our import layering rules
# because it wants to make repo objects from deep inside the stack
# so we manually delay the circular imports to not break
# scripts that don't use our demand-loading
global hg
import hg as h
hg = h
scmutil.pathauditor(ctx._repo.root)(path)
state = ctx.substate[path]
if state[2] not in types:
raise util.Abort(_('unknown subrepo type %s') % state[2])
return types[state[2]](ctx, path, state[:2])
# subrepo classes need to implement the following abstract class:
class abstractsubrepo(object):
def storeclean(self, path):
"""
returns true if the repository has not changed since it was last
cloned from or pushed to a given repository.
"""
return False
def dirty(self, ignoreupdate=False):
"""returns true if the dirstate of the subrepo is dirty or does not
match current stored state. If ignoreupdate is true, only check
whether the subrepo has uncommitted changes in its dirstate.
"""
raise NotImplementedError
def basestate(self):
"""current working directory base state, disregarding .hgsubstate
state and working directory modifications"""
raise NotImplementedError
def checknested(self, path):
"""check if path is a subrepository within this repository"""
return False
def commit(self, text, user, date):
"""commit the current changes to the subrepo with the given
log message. Use given user and date if possible. Return the
new state of the subrepo.
"""
raise NotImplementedError
def remove(self):
"""remove the subrepo
(should verify the dirstate is not dirty first)
"""
raise NotImplementedError
def get(self, state, overwrite=False):
"""run whatever commands are needed to put the subrepo into
this state
"""
raise NotImplementedError
def merge(self, state):
"""merge currently-saved state with the new state."""
raise NotImplementedError
def push(self, opts):
"""perform whatever action is analogous to 'hg push'
This may be a no-op on some systems.
"""
raise NotImplementedError
def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
return []
def status(self, rev2, **opts):
return [], [], [], [], [], [], []
def diff(self, ui, diffopts, node2, match, prefix, **opts):
pass
def outgoing(self, ui, dest, opts):
return 1
def incoming(self, ui, source, opts):
return 1
def files(self):
"""return filename iterator"""
raise NotImplementedError
def filedata(self, name):
"""return file data"""
raise NotImplementedError
def fileflags(self, name):
"""return file flags"""
return ''
def archive(self, ui, archiver, prefix, match=None):
if match is not None:
files = [f for f in self.files() if match(f)]
else:
files = self.files()
total = len(files)
relpath = subrelpath(self)
ui.progress(_('archiving (%s)') % relpath, 0,
unit=_('files'), total=total)
for i, name in enumerate(files):
flags = self.fileflags(name)
mode = 'x' in flags and 0755 or 0644
symlink = 'l' in flags
archiver.addfile(os.path.join(prefix, self._path, name),
mode, symlink, self.filedata(name))
ui.progress(_('archiving (%s)') % relpath, i + 1,
unit=_('files'), total=total)
ui.progress(_('archiving (%s)') % relpath, None)
return total
def walk(self, match):
'''
walk recursively through the directory tree, finding all files
matched by the match function
'''
pass
def forget(self, ui, match, prefix):
return ([], [])
def revert(self, ui, substate, *pats, **opts):
ui.warn('%s: reverting %s subrepos is unsupported\n' \
% (substate[0], substate[2]))
return []
class hgsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
self._path = path
self._state = state
r = ctx._repo
root = r.wjoin(path)
create = False
if not os.path.exists(os.path.join(root, '.hg')):
create = True
util.makedirs(root)
self._repo = hg.repository(r.baseui, root, create=create)
for s, k in [('ui', 'commitsubrepos')]:
v = r.ui.config(s, k)
if v:
self._repo.ui.setconfig(s, k, v)
self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
self._initrepo(r, state[0], create)
def storeclean(self, path):
clean = True
lock = self._repo.lock()
itercache = self._calcstorehash(path)
try:
for filehash in self._readstorehashcache(path):
if filehash != itercache.next():
clean = False
break
except StopIteration:
# the cached and current pull states have a different size
clean = False
if clean:
try:
itercache.next()
# the cached and current pull states have a different size
clean = False
except StopIteration:
pass
lock.release()
return clean
def _calcstorehash(self, remotepath):
'''calculate a unique "store hash"
        This method is used to detect when there are changes that may
require a push to a given remote path.'''
# sort the files that will be hashed in increasing (likely) file size
filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
yield '# %s\n' % _expandedabspath(remotepath)
for relname in filelist:
absname = os.path.normpath(self._repo.join(relname))
yield '%s = %s\n' % (relname, _calcfilehash(absname))
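        # Illustrative shape of the yielded lines (the path and hashes
        # below are made-up examples, not real output):
        #   # /home/user/remote-repo
        #   bookmarks = d41d8cd98f00b204e9800998ecf8427e
        #   store/phaseroots = 900150983cd24fb0d6963f7d28e17f72
        #   store/00changelog.i = 0cc175b9c0f1b6a831c399e269772661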
def _getstorehashcachepath(self, remotepath):
'''get a unique path for the store hash cache'''
return self._repo.join(os.path.join(
'cache', 'storehash', _getstorehashcachename(remotepath)))
def _readstorehashcache(self, remotepath):
'''read the store hash cache for a given remote repository'''
cachefile = self._getstorehashcachepath(remotepath)
if not os.path.exists(cachefile):
return ''
fd = open(cachefile, 'r')
pullstate = fd.readlines()
fd.close()
return pullstate
def _cachestorehash(self, remotepath):
'''cache the current store hash
Each remote repo requires its own store hash cache, because a subrepo
store may be "clean" versus a given remote repo, but not versus another
'''
cachefile = self._getstorehashcachepath(remotepath)
lock = self._repo.lock()
storehash = list(self._calcstorehash(remotepath))
cachedir = os.path.dirname(cachefile)
if not os.path.exists(cachedir):
util.makedirs(cachedir, notindexed=True)
fd = open(cachefile, 'w')
fd.writelines(storehash)
fd.close()
lock.release()
@annotatesubrepoerror
def _initrepo(self, parentrepo, source, create):
self._repo._subparent = parentrepo
self._repo._subsource = source
if create:
fp = self._repo.opener("hgrc", "w", text=True)
fp.write('[paths]\n')
def addpathconfig(key, value):
if value:
fp.write('%s = %s\n' % (key, value))
self._repo.ui.setconfig('paths', key, value)
defpath = _abssource(self._repo, abort=False)
defpushpath = _abssource(self._repo, True, abort=False)
addpathconfig('default', defpath)
if defpath != defpushpath:
addpathconfig('default-push', defpushpath)
fp.close()
@annotatesubrepoerror
def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
os.path.join(prefix, self._path), explicitonly)
@annotatesubrepoerror
def status(self, rev2, **opts):
try:
rev1 = self._state[1]
ctx1 = self._repo[rev1]
ctx2 = self._repo[rev2]
return self._repo.status(ctx1, ctx2, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
return [], [], [], [], [], [], []
@annotatesubrepoerror
def diff(self, ui, diffopts, node2, match, prefix, **opts):
try:
node1 = node.bin(self._state[1])
# We currently expect node2 to come from substate and be
# in hex format
if node2 is not None:
node2 = node.bin(node2)
cmdutil.diffordiffstat(ui, self._repo, diffopts,
node1, node2, match,
prefix=posixpath.join(prefix, self._path),
listsubrepos=True, **opts)
except error.RepoLookupError, inst:
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
@annotatesubrepoerror
def archive(self, ui, archiver, prefix, match=None):
self._get(self._state + ('hg',))
total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
rev = self._state[1]
ctx = self._repo[rev]
for subpath in ctx.substate:
s = subrepo(ctx, subpath)
submatch = matchmod.narrowmatcher(subpath, match)
total += s.archive(
ui, archiver, os.path.join(prefix, self._path), submatch)
return total
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
r = self._state[1]
if r == '' and not ignoreupdate: # no state recorded
return True
w = self._repo[None]
if r != w.p1().hex() and not ignoreupdate:
# different version checked out
return True
return w.dirty() # working directory changed
def basestate(self):
return self._repo['.'].hex()
def checknested(self, path):
return self._repo._checknested(self._repo.wjoin(path))
@annotatesubrepoerror
def commit(self, text, user, date):
# don't bother committing in the subrepo if it's only been
# updated
if not self.dirty(True):
return self._repo['.'].hex()
self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
n = self._repo.commit(text, user, date)
if not n:
return self._repo['.'].hex() # different version checked out
return node.hex(n)
@annotatesubrepoerror
def remove(self):
# we can't fully delete the repository as it may contain
# local-only history
self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
hg.clean(self._repo, node.nullid, False)
def _get(self, state):
source, revision, kind = state
if revision not in self._repo:
self._repo._subsource = source
srcurl = _abssource(self._repo)
other = hg.peer(self._repo, {}, srcurl)
if len(self._repo) == 0:
self._repo.ui.status(_('cloning subrepo %s from %s\n')
% (subrelpath(self), srcurl))
parentrepo = self._repo._subparent
shutil.rmtree(self._repo.path)
other, cloned = hg.clone(self._repo._subparent.baseui, {},
other, self._repo.root,
update=False)
self._repo = cloned.local()
self._initrepo(parentrepo, source, create=True)
self._cachestorehash(srcurl)
else:
self._repo.ui.status(_('pulling subrepo %s from %s\n')
% (subrelpath(self), srcurl))
cleansub = self.storeclean(srcurl)
remotebookmarks = other.listkeys('bookmarks')
self._repo.pull(other)
bookmarks.updatefromremote(self._repo.ui, self._repo,
remotebookmarks, srcurl)
if cleansub:
# keep the repo clean after pull
self._cachestorehash(srcurl)
@annotatesubrepoerror
def get(self, state, overwrite=False):
self._get(state)
source, revision, kind = state
self._repo.ui.debug("getting subrepo %s\n" % self._path)
hg.updaterepo(self._repo, revision, overwrite)
@annotatesubrepoerror
def merge(self, state):
self._get(state)
cur = self._repo['.']
dst = self._repo[state[1]]
anc = dst.ancestor(cur)
def mergefunc():
if anc == cur and dst.branch() == cur.branch():
self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
hg.update(self._repo, state[1])
elif anc == dst:
self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
else:
self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
hg.merge(self._repo, state[1], remind=False)
wctx = self._repo[None]
if self.dirty():
if anc != dst:
if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
mergefunc()
else:
mergefunc()
else:
mergefunc()
@annotatesubrepoerror
def push(self, opts):
force = opts.get('force')
newbranch = opts.get('new_branch')
ssh = opts.get('ssh')
# push subrepos depth-first for coherent ordering
c = self._repo['']
subs = c.substate # only repos that are committed
for s in sorted(subs):
if c.sub(s).push(opts) == 0:
return False
dsturl = _abssource(self._repo, True)
if not force:
if self.storeclean(dsturl):
self._repo.ui.status(
_('no changes made to subrepo %s since last push to %s\n')
% (subrelpath(self), dsturl))
return None
self._repo.ui.status(_('pushing subrepo %s to %s\n') %
(subrelpath(self), dsturl))
other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
res = self._repo.push(other, force, newbranch=newbranch)
# the repo is now clean
self._cachestorehash(dsturl)
return res
@annotatesubrepoerror
def outgoing(self, ui, dest, opts):
return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
@annotatesubrepoerror
def incoming(self, ui, source, opts):
return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
@annotatesubrepoerror
def files(self):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.manifest()
def filedata(self, name):
rev = self._state[1]
return self._repo[rev][name].data()
def fileflags(self, name):
rev = self._state[1]
ctx = self._repo[rev]
return ctx.flags(name)
def walk(self, match):
ctx = self._repo[None]
return ctx.walk(match)
@annotatesubrepoerror
def forget(self, ui, match, prefix):
return cmdutil.forget(ui, self._repo, match,
os.path.join(prefix, self._path), True)
@annotatesubrepoerror
def revert(self, ui, substate, *pats, **opts):
# reverting a subrepo is a 2 step process:
        # 1. if no_backup is not set, revert all modified
# files inside the subrepo
# 2. update the subrepo to the revision specified in
# the corresponding substate dictionary
ui.status(_('reverting subrepo %s\n') % substate[0])
if not opts.get('no_backup'):
# Revert all files on the subrepo, creating backups
# Note that this will not recursively revert subrepos
# We could do it if there was a set:subrepos() predicate
opts = opts.copy()
opts['date'] = None
opts['rev'] = substate[1]
pats = []
if not opts.get('all'):
pats = ['set:modified()']
self.filerevert(ui, *pats, **opts)
# Update the repo to the revision specified in the given substate
self.get(substate, overwrite=True)
def filerevert(self, ui, *pats, **opts):
ctx = self._repo[opts['rev']]
parents = self._repo.dirstate.parents()
if opts.get('all'):
pats = ['set:modified()']
else:
pats = []
cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
class svnsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
self._path = path
self._state = state
self._ctx = ctx
self._ui = ctx._repo.ui
self._exe = util.findexe('svn')
if not self._exe:
raise util.Abort(_("'svn' executable not found for subrepo '%s'")
% self._path)
def _svncommand(self, commands, filename='', failok=False):
cmd = [self._exe]
extrakw = {}
if not self._ui.interactive():
# Making stdin be a pipe should prevent svn from behaving
# interactively even if we can't pass --non-interactive.
extrakw['stdin'] = subprocess.PIPE
# Starting in svn 1.5 --non-interactive is a global flag
# instead of being per-command, but we need to support 1.4 so
# we have to be intelligent about what commands take
# --non-interactive.
if commands[0] in ('update', 'checkout', 'commit'):
cmd.append('--non-interactive')
cmd.extend(commands)
if filename is not None:
path = os.path.join(self._ctx._repo.origroot, self._path, filename)
cmd.append(path)
env = dict(os.environ)
# Avoid localized output, preserve current locale for everything else.
lc_all = env.get('LC_ALL')
if lc_all:
env['LANG'] = lc_all
del env['LC_ALL']
env['LC_MESSAGES'] = 'C'
p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True, env=env, **extrakw)
stdout, stderr = p.communicate()
stderr = stderr.strip()
if not failok:
if p.returncode:
raise util.Abort(stderr or 'exited with code %d' % p.returncode)
if stderr:
self._ui.warn(stderr + '\n')
return stdout, stderr
@propertycache
def _svnversion(self):
output, err = self._svncommand(['--version', '--quiet'], filename=None)
m = re.search(r'^(\d+)\.(\d+)', output)
if not m:
raise util.Abort(_('cannot retrieve svn tool version'))
return (int(m.group(1)), int(m.group(2)))
def _wcrevs(self):
# Get the working directory revision as well as the last
# commit revision so we can compare the subrepo state with
# both. We used to store the working directory one.
output, err = self._svncommand(['info', '--xml'])
doc = xml.dom.minidom.parseString(output)
entries = doc.getElementsByTagName('entry')
lastrev, rev = '0', '0'
if entries:
rev = str(entries[0].getAttribute('revision')) or '0'
commits = entries[0].getElementsByTagName('commit')
if commits:
lastrev = str(commits[0].getAttribute('revision')) or '0'
return (lastrev, rev)
def _wcrev(self):
return self._wcrevs()[0]
def _wcchanged(self):
"""Return (changes, extchanges, missing) where changes is True
if the working directory was changed, extchanges is
True if any of these changes concern an external entry and missing
is True if any change is a missing entry.
"""
output, err = self._svncommand(['status', '--xml'])
externals, changes, missing = [], [], []
doc = xml.dom.minidom.parseString(output)
for e in doc.getElementsByTagName('entry'):
s = e.getElementsByTagName('wc-status')
if not s:
continue
item = s[0].getAttribute('item')
props = s[0].getAttribute('props')
path = e.getAttribute('path')
if item == 'external':
externals.append(path)
elif item == 'missing':
missing.append(path)
if (item not in ('', 'normal', 'unversioned', 'external')
or props not in ('', 'none', 'normal')):
changes.append(path)
for path in changes:
for ext in externals:
if path == ext or path.startswith(ext + os.sep):
return True, True, bool(missing)
return bool(changes), False, bool(missing)
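        # Illustrative return triples (hypothetical working copies):
        #   (False, False, False) -> pristine checkout
        #   (True, False, True)   -> local edits plus a missing entry
        #   (True, True, False)   -> a change touching an svn:external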
def dirty(self, ignoreupdate=False):
if not self._wcchanged()[0]:
if self._state[1] in self._wcrevs() or ignoreupdate:
return False
return True
def basestate(self):
lastrev, rev = self._wcrevs()
if lastrev != rev:
            # Last committed rev is not the same as rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fall back to rev if
            # it is not there.
try:
self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
return lastrev
except error.Abort:
pass
return rev
@annotatesubrepoerror
def commit(self, text, user, date):
# user and date are out of our hands since svn is centralized
changed, extchanged, missing = self._wcchanged()
if not changed:
return self.basestate()
if extchanged:
# Do not try to commit externals
raise util.Abort(_('cannot commit svn externals'))
if missing:
# svn can commit with missing entries but aborting like hg
# seems a better approach.
raise util.Abort(_('cannot commit missing svn entries'))
commitinfo, err = self._svncommand(['commit', '-m', text])
self._ui.status(commitinfo)
newrev = re.search('Committed revision ([0-9]+).', commitinfo)
if not newrev:
if not commitinfo.strip():
# Sometimes, our definition of "changed" differs from
                # svn's. For instance, svn ignores missing files
# when committing. If there are only missing files, no
# commit is made, no output and no error code.
raise util.Abort(_('failed to commit svn changes'))
raise util.Abort(commitinfo.splitlines()[-1])
newrev = newrev.groups()[0]
self._ui.status(self._svncommand(['update', '-r', newrev])[0])
return newrev
@annotatesubrepoerror
def remove(self):
if self.dirty():
            self._ui.warn(_('not removing repo %s because '
                            'it has changes.\n') % self._path)
return
self._ui.note(_('removing subrepo %s\n') % self._path)
def onerror(function, path, excinfo):
if function is not os.remove:
raise
# read-only files cannot be unlinked under Windows
s = os.stat(path)
if (s.st_mode & stat.S_IWRITE) != 0:
raise
os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
os.remove(path)
path = self._ctx._repo.wjoin(self._path)
shutil.rmtree(path, onerror=onerror)
try:
os.removedirs(os.path.dirname(path))
except OSError:
pass
@annotatesubrepoerror
def get(self, state, overwrite=False):
if overwrite:
self._svncommand(['revert', '--recursive'])
args = ['checkout']
if self._svnversion >= (1, 5):
args.append('--force')
# The revision must be specified at the end of the URL to properly
# update to a directory which has since been deleted and recreated.
args.append('%s@%s' % (state[0], state[1]))
status, err = self._svncommand(args, failok=True)
if not re.search('Checked out revision [0-9]+.', status):
if ('is already a working copy for a different URL' in err
and (self._wcchanged()[:2] == (False, False))):
# obstructed but clean working copy, so just blow it away.
self.remove()
self.get(state, overwrite=False)
return
raise util.Abort((status or err).splitlines()[-1])
self._ui.status(status)
@annotatesubrepoerror
def merge(self, state):
old = self._state[1]
new = state[1]
wcrev = self._wcrev()
if new != wcrev:
dirty = old == wcrev or self._wcchanged()[0]
if _updateprompt(self._ui, self, dirty, wcrev, new):
self.get(state, False)
def push(self, opts):
# push is a no-op for SVN
return True
@annotatesubrepoerror
def files(self):
output = self._svncommand(['list', '--recursive', '--xml'])[0]
doc = xml.dom.minidom.parseString(output)
paths = []
for e in doc.getElementsByTagName('entry'):
kind = str(e.getAttribute('kind'))
if kind != 'file':
continue
name = ''.join(c.data for c
in e.getElementsByTagName('name')[0].childNodes
if c.nodeType == c.TEXT_NODE)
paths.append(name.encode('utf-8'))
return paths
def filedata(self, name):
return self._svncommand(['cat'], name)[0]
class gitsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
self._state = state
self._ctx = ctx
self._path = path
self._relpath = os.path.join(reporelpath(ctx._repo), path)
self._abspath = ctx._repo.wjoin(path)
self._subparent = ctx._repo
self._ui = ctx._repo.ui
self._ensuregit()
def _ensuregit(self):
try:
self._gitexecutable = 'git'
out, err = self._gitnodir(['--version'])
except OSError, e:
if e.errno != 2 or os.name != 'nt':
raise
self._gitexecutable = 'git.cmd'
out, err = self._gitnodir(['--version'])
m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
if not m:
self._ui.warn(_('cannot retrieve git version'))
return
        version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
        # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
        # despite the docstring comment.  For now, error on versions below
        # 1.5.0, warn below 1.6.0 but attempt to continue.
if version < (1, 5, 0):
            raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
elif version < (1, 6, 0):
            self._ui.warn(_('git subrepo requires git 1.6.0 or later'))
def _gitcommand(self, commands, env=None, stream=False):
return self._gitdir(commands, env=env, stream=stream)[0]
def _gitdir(self, commands, env=None, stream=False):
return self._gitnodir(commands, env=env, stream=stream,
cwd=self._abspath)
def _gitnodir(self, commands, env=None, stream=False, cwd=None):
"""Calls the git command
        The method tries to call the git command. Versions prior to 1.6.0
        are not supported and will very probably fail.
"""
self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
# unless ui.quiet is set, print git's stderr,
# which is mostly progress and useful info
errpipe = None
if self._ui.quiet:
errpipe = open(os.devnull, 'w')
p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
cwd=cwd, env=env, close_fds=util.closefds,
stdout=subprocess.PIPE, stderr=errpipe)
if stream:
return p.stdout, None
retdata = p.stdout.read().strip()
# wait for the child to exit to avoid race condition.
p.wait()
if p.returncode != 0 and p.returncode != 1:
# there are certain error codes that are ok
command = commands[0]
if command in ('cat-file', 'symbolic-ref'):
return retdata, p.returncode
# for all others, abort
raise util.Abort('git %s error %d in %s' %
(command, p.returncode, self._relpath))
return retdata, p.returncode
def _gitmissing(self):
return not os.path.exists(os.path.join(self._abspath, '.git'))
def _gitstate(self):
return self._gitcommand(['rev-parse', 'HEAD'])
def _gitcurrentbranch(self):
current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
if err:
current = None
return current
def _gitremote(self, remote):
out = self._gitcommand(['remote', 'show', '-n', remote])
line = out.split('\n')[1]
i = line.index('URL: ') + len('URL: ')
return line[i:]
def _githavelocally(self, revision):
out, code = self._gitdir(['cat-file', '-e', revision])
return code == 0
def _gitisancestor(self, r1, r2):
base = self._gitcommand(['merge-base', r1, r2])
return base == r1
def _gitisbare(self):
return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
def _gitupdatestat(self):
"""This must be run before git diff-index.
diff-index only looks at changes to file stat;
this command looks at file contents and updates the stat."""
self._gitcommand(['update-index', '-q', '--refresh'])
def _gitbranchmap(self):
'''returns 2 things:
a map from git branch to revision
a map from revision to branches'''
branch2rev = {}
rev2branch = {}
out = self._gitcommand(['for-each-ref', '--format',
'%(objectname) %(refname)'])
for line in out.split('\n'):
revision, ref = line.split(' ')
if (not ref.startswith('refs/heads/') and
not ref.startswith('refs/remotes/')):
continue
if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
continue # ignore remote/HEAD redirects
branch2rev[ref] = revision
rev2branch.setdefault(revision, []).append(ref)
return branch2rev, rev2branch
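        # Example shape of the two maps (refs and revision ids are
        # hypothetical):
        #   branch2rev = {'refs/heads/master': 'f00dfeed...',
        #                 'refs/remotes/origin/master': 'f00dfeed...'}
        #   rev2branch = {'f00dfeed...': ['refs/heads/master',
        #                                 'refs/remotes/origin/master']}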
def _gittracking(self, branches):
'return map of remote branch to local tracking branch'
# assumes no more than one local tracking branch for each remote
tracking = {}
for b in branches:
if b.startswith('refs/remotes/'):
continue
bname = b.split('/', 2)[2]
remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
if remote:
ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
tracking['refs/remotes/%s/%s' %
(remote, ref.split('/', 2)[2])] = b
return tracking
def _abssource(self, source):
if '://' not in source:
# recognize the scp syntax as an absolute source
colon = source.find(':')
if colon != -1 and '/' not in source[:colon]:
return source
self._subsource = source
return _abssource(self)
def _fetch(self, source, revision):
if self._gitmissing():
source = self._abssource(source)
self._ui.status(_('cloning subrepo %s from %s\n') %
(self._relpath, source))
self._gitnodir(['clone', source, self._abspath])
if self._githavelocally(revision):
return
self._ui.status(_('pulling subrepo %s from %s\n') %
(self._relpath, self._gitremote('origin')))
# try only origin: the originally cloned repo
self._gitcommand(['fetch'])
if not self._githavelocally(revision):
raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
(revision, self._relpath))
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
if self._gitmissing():
return self._state[1] != ''
if self._gitisbare():
return True
if not ignoreupdate and self._state[1] != self._gitstate():
# different version checked out
return True
# check for staged changes or modified files; ignore untracked files
self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
return code == 1
def basestate(self):
return self._gitstate()
@annotatesubrepoerror
def get(self, state, overwrite=False):
source, revision, kind = state
if not revision:
self.remove()
return
self._fetch(source, revision)
# if the repo was set to be bare, unbare it
if self._gitisbare():
self._gitcommand(['config', 'core.bare', 'false'])
if self._gitstate() == revision:
self._gitcommand(['reset', '--hard', 'HEAD'])
return
elif self._gitstate() == revision:
if overwrite:
# first reset the index to unmark new files for commit, because
# reset --hard will otherwise throw away files added for commit,
# not just unmark them.
self._gitcommand(['reset', 'HEAD'])
self._gitcommand(['reset', '--hard', 'HEAD'])
return
branch2rev, rev2branch = self._gitbranchmap()
def checkout(args):
cmd = ['checkout']
if overwrite:
# first reset the index to unmark new files for commit, because
# the -f option will otherwise throw away files added for
# commit, not just unmark them.
self._gitcommand(['reset', 'HEAD'])
cmd.append('-f')
self._gitcommand(cmd + args)
def rawcheckout():
# no branch to checkout, check it out with no branch
self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
self._relpath)
self._ui.warn(_('check out a git branch if you intend '
'to make changes\n'))
checkout(['-q', revision])
if revision not in rev2branch:
rawcheckout()
return
branches = rev2branch[revision]
firstlocalbranch = None
for b in branches:
if b == 'refs/heads/master':
# master trumps all other branches
checkout(['refs/heads/master'])
return
if not firstlocalbranch and not b.startswith('refs/remotes/'):
firstlocalbranch = b
if firstlocalbranch:
checkout([firstlocalbranch])
return
tracking = self._gittracking(branch2rev.keys())
# choose a remote branch already tracked if possible
remote = branches[0]
if remote not in tracking:
for b in branches:
if b in tracking:
remote = b
break
if remote not in tracking:
# create a new local tracking branch
local = remote.split('/', 3)[3]
checkout(['-b', local, remote])
elif self._gitisancestor(branch2rev[tracking[remote]], remote):
# When updating to a tracked remote branch,
# if the local tracking branch is downstream of it,
# a normal `git pull` would have performed a "fast-forward merge"
# which is equivalent to updating the local branch to the remote.
# Since we are only looking at branching at update, we need to
# detect this situation and perform this action lazily.
if tracking[remote] != self._gitcurrentbranch():
checkout([tracking[remote]])
self._gitcommand(['merge', '--ff', remote])
else:
# a real merge would be required, just checkout the revision
rawcheckout()
@annotatesubrepoerror
def commit(self, text, user, date):
if self._gitmissing():
raise util.Abort(_("subrepo %s is missing") % self._relpath)
cmd = ['commit', '-a', '-m', text]
env = os.environ.copy()
if user:
cmd += ['--author', user]
if date:
# git's date parser silently ignores when seconds < 1e9
# convert to ISO8601
env['GIT_AUTHOR_DATE'] = util.datestr(date,
'%Y-%m-%dT%H:%M:%S %1%2')
self._gitcommand(cmd, env=env)
# make sure commit works otherwise HEAD might not exist under certain
# circumstances
return self._gitstate()
@annotatesubrepoerror
def merge(self, state):
source, revision, kind = state
self._fetch(source, revision)
base = self._gitcommand(['merge-base', revision, self._state[1]])
self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
def mergefunc():
if base == revision:
self.get(state) # fast forward merge
elif base != self._state[1]:
self._gitcommand(['merge', '--no-commit', revision])
if self.dirty():
if self._gitstate() != revision:
dirty = self._gitstate() == self._state[1] or code != 0
if _updateprompt(self._ui, self, dirty,
self._state[1][:7], revision[:7]):
mergefunc()
else:
mergefunc()
@annotatesubrepoerror
def push(self, opts):
force = opts.get('force')
if not self._state[1]:
return True
if self._gitmissing():
raise util.Abort(_("subrepo %s is missing") % self._relpath)
# if a branch in origin contains the revision, nothing to do
branch2rev, rev2branch = self._gitbranchmap()
if self._state[1] in rev2branch:
for b in rev2branch[self._state[1]]:
if b.startswith('refs/remotes/origin/'):
return True
for b, revision in branch2rev.iteritems():
if b.startswith('refs/remotes/origin/'):
if self._gitisancestor(self._state[1], revision):
return True
# otherwise, try to push the currently checked out branch
cmd = ['push']
if force:
cmd.append('--force')
current = self._gitcurrentbranch()
if current:
# determine if the current branch is even useful
if not self._gitisancestor(self._state[1], current):
self._ui.warn(_('unrelated git branch checked out '
'in subrepo %s\n') % self._relpath)
return False
self._ui.status(_('pushing branch %s of subrepo %s\n') %
(current.split('/', 2)[2], self._relpath))
self._gitcommand(cmd + ['origin', current])
return True
else:
self._ui.warn(_('no branch checked out in subrepo %s\n'
'cannot push revision %s\n') %
(self._relpath, self._state[1]))
return False
@annotatesubrepoerror
def remove(self):
if self._gitmissing():
return
if self.dirty():
self._ui.warn(_('not removing repo %s because '
'it has changes.\n') % self._relpath)
return
# we can't fully delete the repository as it may contain
# local-only history
self._ui.note(_('removing subrepo %s\n') % self._relpath)
self._gitcommand(['config', 'core.bare', 'true'])
for f in os.listdir(self._abspath):
if f == '.git':
continue
path = os.path.join(self._abspath, f)
if os.path.isdir(path) and not os.path.islink(path):
shutil.rmtree(path)
else:
os.remove(path)
def archive(self, ui, archiver, prefix, match=None):
total = 0
source, revision = self._state
if not revision:
return total
self._fetch(source, revision)
# Parse git's native archive command.
# This should be much faster than manually traversing the trees
# and objects with many subprocess calls.
tarstream = self._gitcommand(['archive', revision], stream=True)
tar = tarfile.open(fileobj=tarstream, mode='r|')
relpath = subrelpath(self)
ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
for i, info in enumerate(tar):
if info.isdir():
continue
if match and not match(info.name):
continue
if info.issym():
data = info.linkname
else:
data = tar.extractfile(info).read()
archiver.addfile(os.path.join(prefix, self._path, info.name),
info.mode, info.issym(), data)
total += 1
ui.progress(_('archiving (%s)') % relpath, i + 1,
unit=_('files'))
ui.progress(_('archiving (%s)') % relpath, None)
return total
@annotatesubrepoerror
def status(self, rev2, **opts):
rev1 = self._state[1]
if self._gitmissing() or not rev1:
# if the repo is missing, return no results
return [], [], [], [], [], [], []
modified, added, removed = [], [], []
self._gitupdatestat()
if rev2:
command = ['diff-tree', rev1, rev2]
else:
command = ['diff-index', rev1]
out = self._gitcommand(command)
for line in out.split('\n'):
tab = line.find('\t')
if tab == -1:
continue
status, f = line[tab - 1], line[tab + 1:]
if status == 'M':
modified.append(f)
elif status == 'A':
added.append(f)
elif status == 'D':
removed.append(f)
deleted = unknown = ignored = clean = []
return modified, added, removed, deleted, unknown, ignored, clean
types = {
'hg': hgsubrepo,
'svn': svnsubrepo,
'git': gitsubrepo,
}
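# The mapping above is what subrepo() dispatches on: the third field of a
# .hgsubstate entry selects the backend class. A minimal sketch (ctx and
# path are assumed to come from a real repository context):
#   sub = subrepo(ctx, 'vendor/lib')  # -> hgsubrepo, svnsubrepo or gitsubrepo
#   if sub.dirty():
#       newstate = sub.commit('checkpoint', 'me', None)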
| jordigh/mercurial-crew | mercurial/subrepo.py | Python | gpl-2.0 | 56,360 | 0.000923 |
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'Edgecast (Verizon Digital Media)'
def is_waf(self):
schemes = [
self.matchHeader(('Server', r'^ECD(.+)?')),
self.matchHeader(('Server', r'^ECS(.*)?'))
]
if any(i for i in schemes):
return True
return False | EnableSecurity/wafw00f | wafw00f/plugins/edgecast.py | Python | bsd-3-clause | 371 | 0.002695 |
import json
from getJsonData import getJSONData
import os
from datetime import date, datetime
import numpy as np
import stock
import matplotlib.pyplot as plt
dataPath = 'data/SZ#002637.txt'
fileName, fileExtension = os.path.splitext(os.path.basename(dataPath))
jsonPath = os.path.join('data', '{0}.json'.format(fileName))
jsonData = getJSONData(dataPath)
num = 100
close = np.array([i['close'] for i in jsonData][-num:], dtype=np.float64)
ema12 = stock.ema(close, 12)
ema26 = stock.ema(close, 26)
macd = stock.macd(close)
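# stock.macd() is assumed to implement the usual definition,
# MACD = EMA(close, 12) - EMA(close, 26), so the bars plotted below should
# cross zero roughly where the two EMA curves intersect.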
plt.bar(range(num), macd)
plt.plot(range(num), ema12, 'r-')
plt.plot(range(num), ema26, 'g-')
plt.show()
| m860/data-analysis-with-python | practises/macd.py | Python | mit | 635 | 0 |
import asyncio
import functools
import random
import time
from testing import Client
from testing import default_test_setup
from testing import gen_data
from testing import gen_points
from testing import gen_series
from testing import InsertError
from testing import PoolError
from testing import QueryError
from testing import run_test
from testing import Series
from testing import Server
from testing import ServerError
from testing import SiriDB
from testing import TestBase
from testing import UserAuthError
from testing import parse_args
TIME_PRECISION = 'ms'
class TestCompression(TestBase):
title = 'Test compression'
GEN_POINTS = functools.partial(
gen_points, n=100, time_precision=TIME_PRECISION)
async def _test_series(self, client):
result = await client.query('select * from "series float"')
self.assertEqual(result['series float'], self.series_float)
result = await client.query('select * from "series int"')
self.assertEqual(result['series int'], self.series_int)
result = await client.query(
'list series name, length, type, start, end')
result['series'].sort()
self.assertEqual(
result,
{'columns': ['name', 'length', 'type', 'start', 'end'],
'series': [[
'series float',
10000, 'float',
self.series_float[0][0],
self.series_float[-1][0]], [
'series int',
10000, 'integer',
self.series_int[0][0],
self.series_int[-1][0]],
]})
@default_test_setup(
1,
time_precision=TIME_PRECISION,
optimize_interval=500,
compression=True)
async def run(self):
await self.client0.connect()
self.series_float = gen_points(
tp=float, n=10000, time_precision=TIME_PRECISION, ts_gap='5m')
random.shuffle(self.series_float)
self.series_int = gen_points(
tp=int, n=10000, time_precision=TIME_PRECISION, ts_gap='5m')
random.shuffle(self.series_int)
self.assertEqual(
await self.client0.insert({
'series float': self.series_float,
'series int': self.series_int
}), {'success_msg': 'Successfully inserted 20000 point(s).'})
self.series_float.sort()
self.series_int.sort()
await self._test_series(self.client0)
await self.client0.query('drop series /.*/ set ignore_threshold true')
        # Create some random series and insert random subsets of them
        # in 40 rounds
series = gen_series(n=40)
for i in range(40):
await self.client0.insert_some_series(
series,
n=0.8,
timeout=0,
points=self.GEN_POINTS)
# Check the result
await self.assertSeries(self.client0, series)
for i in range(40):
await self.client0.insert_some_series(
series,
n=0.8,
timeout=0,
points=self.GEN_POINTS)
# Check the result
await self.assertSeries(self.client0, series)
self.client0.close()
result = await self.server0.stop()
self.assertTrue(result)
await self.server0.start(sleep=20)
await self.client0.connect()
# Check the result after rebooting the server
await self.assertSeries(self.client0, series)
if __name__ == '__main__':
random.seed(1)
parse_args()
run_test(TestCompression())
| transceptor-technology/siridb-server | itest/test_compression.py | Python | mit | 3,689 | 0 |
import re
import lxml.html
from pupa.scrape import Scraper, Organization
class WYCommitteeScraper(Scraper):
members = {}
urls = {
"list": "http://legisweb.state.wy.us/LegbyYear/CommitteeList.aspx?Year=%s",
"detail": "http://legisweb.state.wy.us/LegbyYear/%s",
}
def scrape(self, session=None):
if session is None:
session = self.latest_session()
self.info('no session specified, using %s', session)
list_url = self.urls["list"] % (session, )
committees = {}
page = self.get(list_url).text
page = lxml.html.fromstring(page)
for el in page.xpath(".//a[contains(@href, 'CommitteeMembers')]"):
committees[el.text.strip()] = el.get("href")
for c in committees:
self.info(c)
detail_url = self.urls["detail"] % (committees[c],)
page = self.get(detail_url).text
page = lxml.html.fromstring(page)
            if re.match(r'\d{1,2}-', c):
c = c.split('-', 1)[1]
jcomm = Organization(name=c.strip(), chamber='joint', classification='committee')
for table in page.xpath(".//table[contains(@id, 'CommitteeMembers')]"):
rows = table.xpath(".//tr")
chamber = rows[0].xpath('.//td')[0].text_content().strip()
chamber = 'upper' if chamber == 'Senator' else 'lower'
comm = Organization(name=c.strip(), chamber=chamber, classification='committee')
for row in rows[1:]:
tds = row.xpath('.//td')
name = tds[0].text_content().strip()
role = 'chairman' if tds[3].text_content().strip() == 'Chairman' else 'member'
comm.add_member(name, role)
jcomm.add_member(name, role)
comm.add_source(detail_url)
yield comm
jcomm.add_source(detail_url)
yield jcomm
| cliftonmcintosh/openstates | openstates/wy/committees.py | Python | gpl-3.0 | 1,983 | 0.003026 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-01-16 10:12
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('patients', '0026_clinicianother_user'),
]
operations = [
migrations.AddField(
model_name='clinicianother',
name='use_other',
field=models.BooleanField(
default=False),
),
migrations.AlterField(
model_name='clinicianother',
name='user',
field=models.ForeignKey(
blank=True,
null=True,
on_delete=models.CASCADE,
to=settings.AUTH_USER_MODEL),
),
]
| muccg/rdrf | rdrf/registry/patients/migrations/0027_auto_20180116_1012.py | Python | agpl-3.0 | 836 | 0 |
"""Package setup for tumblr-block"""
import setuptools
if __name__ == '__main__':
setuptools.setup(
name='tumblr_block',
version='2.0.0',
description=(
'Auto blocking for tumblr'
),
url='https://github.com/Deafjams/tumblr-block',
author='Emma Foster',
license='License :: OSI Approved :: MIT License',
install_requires=[
'plan==0.5'
],
package_dir={'tumblr_block': 'tumblr_block'},
packages=setuptools.find_packages(),
include_package_data=True,
classifiers=(
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2.7"
)
)
| Deafjams/tumblr-block | setup.py | Python | mit | 804 | 0 |
"""
Tests for CourseDetails
"""
import datetime
import pytest
import ddt
from pytz import UTC
from unittest.mock import patch # lint-amnesty, pylint: disable=wrong-import-order
from django.conf import settings
from xmodule.modulestore import ModuleStoreEnum
from xmodule.data import CertificatesDisplayBehaviors
from xmodule.modulestore.tests.django_utils import TEST_DATA_MONGO_AMNESTY_MODULESTORE, ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from openedx.core.djangoapps.models.course_details import ABOUT_ATTRIBUTES, CourseDetails
EXAMPLE_CERTIFICATE_AVAILABLE_DATE = datetime.date(2020, 1, 1)
@ddt.ddt
class CourseDetailsTestCase(ModuleStoreTestCase):
"""
Tests the first course settings page (course dates, overview, etc.).
"""
MODULESTORE = TEST_DATA_MONGO_AMNESTY_MODULESTORE
def setUp(self):
super().setUp()
self.course = CourseFactory.create()
def test_virgin_fetch(self):
details = CourseDetails.fetch(self.course.id)
assert details.org == self.course.location.org, 'Org not copied into'
assert details.course_id == self.course.location.course, 'Course_id not copied into'
assert details.run == self.course.location.block_id, 'Course name not copied into'
assert details.course_image_name == self.course.course_image
assert details.start_date.tzinfo is not None
assert details.end_date is None, ('end date somehow initialized ' + str(details.end_date))
assert details.enrollment_start is None,\
('enrollment_start date somehow initialized ' + str(details.enrollment_start))
assert details.enrollment_end is None,\
('enrollment_end date somehow initialized ' + str(details.enrollment_end))
assert details.certificate_available_date is None,\
('certificate_available_date date somehow initialized ' + str(details.certificate_available_date))
assert details.syllabus is None, ('syllabus somehow initialized' + str(details.syllabus))
assert details.intro_video is None, ('intro_video somehow initialized' + str(details.intro_video))
assert details.effort is None, ('effort somehow initialized' + str(details.effort))
assert details.language is None, ('language somehow initialized' + str(details.language))
assert not details.self_paced
def test_update_and_fetch(self):
jsondetails = CourseDetails.fetch(self.course.id)
jsondetails.syllabus = "<a href='foo'>bar</a>"
# encode - decode to convert date fields and other data which changes form
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).syllabus ==\
jsondetails.syllabus, 'After set syllabus'
jsondetails.short_description = "Short Description"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).short_description ==\
jsondetails.short_description, 'After set short_description'
jsondetails.overview = "Overview"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).overview ==\
jsondetails.overview, 'After set overview'
jsondetails.intro_video = "intro_video"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).intro_video ==\
jsondetails.intro_video, 'After set intro_video'
jsondetails.about_sidebar_html = "About Sidebar HTML"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user)\
.about_sidebar_html == jsondetails.about_sidebar_html, 'After set about_sidebar_html'
jsondetails.effort = "effort"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).effort ==\
jsondetails.effort, 'After set effort'
jsondetails.self_paced = True
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).self_paced ==\
jsondetails.self_paced
jsondetails.start_date = datetime.datetime(2010, 10, 1, 0, tzinfo=UTC)
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).start_date ==\
jsondetails.start_date
jsondetails.end_date = datetime.datetime(2011, 10, 1, 0, tzinfo=UTC)
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).end_date ==\
jsondetails.end_date
jsondetails.certificate_available_date = datetime.datetime(2010, 10, 1, 0, tzinfo=UTC)
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user)\
.certificate_available_date == jsondetails.certificate_available_date
jsondetails.course_image_name = "an_image.jpg"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).course_image_name ==\
jsondetails.course_image_name
jsondetails.banner_image_name = "an_image.jpg"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).banner_image_name ==\
jsondetails.banner_image_name
jsondetails.video_thumbnail_image_name = "an_image.jpg"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user)\
.video_thumbnail_image_name == jsondetails.video_thumbnail_image_name
jsondetails.language = "hr"
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).language ==\
jsondetails.language
jsondetails.learning_info = ["test", "test"]
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).learning_info ==\
jsondetails.learning_info
jsondetails.instructor_info = {
"instructors": [
{
"name": "test",
"title": "test",
"organization": "test",
"image": "test",
"bio": "test"
}
]
}
assert CourseDetails.update_from_json(self.course.id, jsondetails.__dict__, self.user).instructor_info ==\
jsondetails.instructor_info
def test_toggle_pacing_during_course_run(self):
self.course.start = datetime.datetime.now()
self.store.update_item(self.course, self.user.id)
details = CourseDetails.fetch(self.course.id)
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
updated_details = CourseDetails.update_from_json(
self.course.id,
dict(details.__dict__, self_paced=True),
self.user
)
assert not updated_details.self_paced
@ddt.data(*ABOUT_ATTRIBUTES)
def test_fetch_about_attribute(self, attribute_name):
attribute_value = 'test_value'
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
CourseDetails.update_about_item(self.course, attribute_name, attribute_value, self.user.id)
assert CourseDetails.fetch_about_attribute(self.course.id, attribute_name) == attribute_value
def test_fetch_about_attribute_error(self):
attribute_name = 'not_an_about_attribute'
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
CourseDetails.update_about_item(self.course, attribute_name, 'test_value', self.user.id)
with pytest.raises(ValueError):
CourseDetails.fetch_about_attribute(self.course.id, attribute_name)
def test_fetch_video(self):
video_value = 'test_video_id'
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
CourseDetails.update_about_video(self.course, video_value, self.user.id)
assert CourseDetails.fetch_youtube_video_id(self.course.id) == video_value
video_url = CourseDetails.fetch_video_url(self.course.id)
self.assertRegex(video_url, fr'http://.*{video_value}')
@ddt.data(
(
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END,
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE
),
(
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE,
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE
),
(
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.EARLY_NO_INFO,
None,
CertificatesDisplayBehaviors.EARLY_NO_INFO
),
(
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
"invalid_option",
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE
),
(
None,
CertificatesDisplayBehaviors.END,
None,
CertificatesDisplayBehaviors.END
),
(
None,
CertificatesDisplayBehaviors.END_WITH_DATE,
None,
CertificatesDisplayBehaviors.END
),
(
None,
CertificatesDisplayBehaviors.EARLY_NO_INFO,
None,
CertificatesDisplayBehaviors.EARLY_NO_INFO
),
(
None,
"invalid_option",
None,
CertificatesDisplayBehaviors.END
),
)
@ddt.unpack
@patch.dict(settings.FEATURES, ENABLE_V2_CERT_DISPLAY_SETTINGS=True)
def test_validate_certificate_settings_v2(self, stored_date, stored_behavior, expected_date, expected_behavior):
assert CourseDetails.validate_certificate_settings(
stored_date, stored_behavior
) == (expected_date, expected_behavior)
@ddt.data(
(
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE,
EXAMPLE_CERTIFICATE_AVAILABLE_DATE,
CertificatesDisplayBehaviors.END_WITH_DATE
),
(
None,
"invalid_option",
None,
"invalid_option"
),
)
@ddt.unpack
@patch.dict(settings.FEATURES, ENABLE_V2_CERT_DISPLAY_SETTINGS=False)
def test_validate_certificate_settings_v1(self, stored_date, stored_behavior, expected_date, expected_behavior):
"""Test that method just returns passed in arguments if v2 is off"""
assert CourseDetails.validate_certificate_settings(
stored_date, stored_behavior
) == (expected_date, expected_behavior)
| eduNEXT/edx-platform | openedx/core/djangoapps/models/tests/test_course_details.py | Python | agpl-3.0 | 11,188 | 0.00429 |
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
#======================================================================
#
# playsnd.py - play sound with ctypes + mci
#
# Created by skywind on 2013/12/01
# Last change: 2014/01/26 23:40:20
#
#======================================================================
from __future__ import print_function
import sys
import time
import os
import ctypes
import threading
#----------------------------------------------------------------------
# 2/3 compatible
#----------------------------------------------------------------------
if sys.version_info[0] >= 3:
long = int
unicode = str
xrange = range
#----------------------------------------------------------------------
# WinMM - Windows player
#----------------------------------------------------------------------
class WinMM (object):
def __init__ (self, prefix = ''):
import ctypes.wintypes
self.__winmm = ctypes.windll.winmm
self.__mciSendString = self.__winmm.mciSendStringW
self.__prefix = prefix
LPCWSTR = ctypes.wintypes.LPCWSTR
UINT = ctypes.wintypes.UINT
HANDLE = ctypes.wintypes.HANDLE
DWORD = ctypes.wintypes.DWORD
self.__mciSendString.argtypes = [LPCWSTR, LPCWSTR, UINT, HANDLE]
self.__mciSendString.restype = ctypes.wintypes.DWORD
self.__mciGetErrorStringW = self.__winmm.mciGetErrorStringW
self.__mciGetErrorStringW.argtypes = [DWORD, LPCWSTR, UINT]
self.__mciGetErrorStringW.restype = ctypes.wintypes.BOOL
self.__buffer = ctypes.create_unicode_buffer(2048)
self.__alias_index = 0
self.__lock = threading.Lock()
def mciSendString (self, command, encoding = None):
if encoding is None:
encoding = sys.getfilesystemencoding()
if isinstance(command, bytes):
command = command.decode(encoding)
with self.__lock:
hr = self.__mciSendString(command, self.__buffer, 2048, 0)
hr = (hr != 0) and long(hr) or self.__buffer.value
return hr
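        # mciSendString() takes textual MCI commands; the strings built by
        # the helpers below look like these (the alias name is illustrative):
        #   open "d:/music/sample.mp3" alias media:0
        #   play media:0 from 0 repeat
        #   status media:0 length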
def mciGetErrorString (self, error):
buffer = self.__buffer
with self.__lock:
hr = self.__mciGetErrorStringW(error, buffer, 2048)
if hr == 0:
hr = None
else:
hr = buffer.value
return hr
def open (self, filename, media_type = ''):
if not os.path.exists(filename):
return None
filename = os.path.abspath(filename)
with self.__lock:
name = 'media:%s%d'%(self.__prefix, self.__alias_index)
self.__alias_index += 1
if self.__alias_index > 0x7fffffff:
self.__alias_index = 0
cmd = u'open "%s" alias %s'%(filename, name)
if media_type:
cmd = u'open "%s" type %s alias %s'%(filename, media_type, name)
hr = self.mciSendString(cmd)
if isinstance(hr, str) or isinstance(hr, unicode):
return name
return None
def close (self, name):
hr = self.mciSendString(u'close %s'%name)
if isinstance(hr, unicode) or isinstance(hr, str):
return True
return False
def __get_status (self, name, what):
hr = self.mciSendString(u'status %s %s'%(name, what))
if isinstance(hr, unicode) or isinstance(hr, str):
return hr
return None
def __get_status_int (self, name, what):
hr = self.__get_status(name, what)
if hr is None:
return -1
hr = long(hr)
return (hr > 0x7fffffff) and hr or int(hr)
def __mci_no_return (self, cmd):
hr = self.mciSendString(cmd)
if isinstance(hr, unicode) or isinstance(hr, str):
return True
return False
def get_length (self, name):
return self.__get_status_int(name, 'length')
def get_position (self, name):
return self.__get_status_int(name, 'position')
def get_mode (self, name):
hr = self.__get_status(name, 'mode')
return hr
def play (self, name, start = 0, end = -1, wait = False, repeat = False):
if wait:
repeat = False
if start < 0:
start = 0
cmd = u'play %s from %d'%(name, start)
if end >= 0:
cmd += u' to %d'%end
if wait:
cmd += u' wait'
if repeat:
cmd += u' repeat'
return self.__mci_no_return(cmd)
def stop (self, name):
return self.__mci_no_return(u'stop %s'%name)
def seek (self, name, position):
if isinstance(position, str) or isinstance(position, unicode):
if position == u'end':
position = 'end'
else:
position = '0'
elif position < 0:
position = 'end'
else:
position = str(position)
        return self.__mci_no_return(u'seek %s to %s'%(name, position))
def pause (self, name):
return self.__mci_no_return(u'pause %s'%name)
def resume (self, name):
return self.__mci_no_return(u'resume %s'%name)
def get_volume (self, name):
return self.__get_status_int(name, 'volume')
def set_volume (self, name, volume):
return self.__mci_no_return(u'setaudio %s volume to %s'%(name, volume))
def is_playing (self, name):
mode = self.get_mode(name)
if mode is None:
return False
if mode != 'playing':
return False
return True
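#----------------------------------------------------------------------
# quick usage sketch (see main() below for the full player loop; the
# file name here is hypothetical):
#   winmm = WinMM()
#   alias = winmm.open('sample.mp3')   # returns an MCI alias, or None
#   if alias:
#       winmm.play(alias, wait = True)
#       winmm.close(alias)
#----------------------------------------------------------------------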
#----------------------------------------------------------------------
# main entry
#----------------------------------------------------------------------
def main (args = None):
if args is None:
args = sys.argv
args = [n for n in args]
if len(args) < 2:
print('usage: playmp3.py [mp3]')
return 0
mp3 = args[1]
if not os.path.exists(mp3):
print('not find: %s'%mp3)
return 1
def ms2time(ms):
if ms <= 0: return '00:00:000'
time_sec, ms = ms / 1000, ms % 1000
time_min, time_sec = time_sec / 60, time_sec % 60
time_hor, time_min = time_min / 60, time_min % 60
if time_hor == 0: return '%02d:%02d:%03d'%(time_min, time_sec, ms)
return '%02d:%02d:%02d:%03d'%(time_hor, time_min, time_sec, ms)
winmm = WinMM()
name = winmm.open(mp3)
if name is None:
print('can not play: %s'%mp3)
return 2
import ctypes.wintypes
user32 = ctypes.windll.user32
user32.GetAsyncKeyState.restype = ctypes.wintypes.WORD
user32.GetAsyncKeyState.argtypes = [ ctypes.c_char ]
size = winmm.get_length(name)
print('Playing "%s", press \'q\' to exit ....'%mp3)
winmm.play(name, repeat = True)
while 1:
if user32.GetAsyncKeyState(b'Q'): break
time.sleep(0.1)
pos = winmm.get_position(name)
sys.stdout.write('[%s / %s]\r'%(ms2time(pos), ms2time(size)))
sys.stdout.flush()
print('')
print('stopped')
winmm.close(name)
return 0
#----------------------------------------------------------------------
# testing case
#----------------------------------------------------------------------
if __name__ == '__main__':
def test1():
winmm = WinMM()
name = winmm.open('d:/music/sample.mp3')
print(name)
print(winmm.get_length(name))
print(winmm.get_volume(name))
print(winmm.set_volume(name, 1000))
ts = time.time()
print(winmm.play(name))
ts = time.time() - ts
print("ts", ts)
input()
print('is_playing', winmm.is_playing(name))
print('position:', winmm.get_position(name))
print('mode:', winmm.get_mode(name))
print(winmm.stop(name))
print('mode:', winmm.get_mode(name))
return 0
def test2():
main([__file__, 'd:/music/sample.mp3'])
return 0
# test2()
main()
| skywind3000/collection | script/playmp3.py | Python | mit | 7,109 | 0.036995 |
# -*- coding: utf-8 -*-
import os
import sys
from radio.database import db
def prefsRootPath():
if sys.platform == "darwin":
return os.path.expanduser("~/Library/Application Support/radio")
elif sys.platform.startswith("win"):
return os.path.join(os.environ['APPDATA'], "radio")
else:
return os.path.expanduser("~/.radio")
class Prefs():
def __init__(self):
        # Make sure the radio preferences directory exists
if not os.path.isdir(prefsRootPath()):
os.mkdir(prefsRootPath())
self.db = db.Db(os.path.join(prefsRootPath(), "prefs.db"))
#self.configDb = db.Db(os.path.join(prefsRootPath(), "config.db"))
#query = self.configDb.query("SELECT name FROM sqlite_master")
#query = query.fetchall()
#print query
self.db.beginTransaction()
self.db.checkTable("radio_server_settings", [
{"name": "name", "type": "text"},
{"name": "value", "type": "text"}])
self.db.checkTable("radio_misc_settings", [
{"name": "key", "type": "int"},
{"name": "value", "type": "text"},
{"name": "description", "type": "text"},
{"name": "type", "type": "text"},
{"name": "options", "type": "text"}])
self.db.checkTable("pandora", [
{"name": "name", "type": "text"},
{"name": "value", "type": "text"}])
self.db.checkTable("radioStreams", [
{"name": "name", "type": "text"},
{"name": "value", "type": "text"}])
self.db.checkTable("audioQuality", [
{"name": "name", "type": "text"},
{"name": "value", "type": "text"}])
self.db.checkTable("client_keys", [
{"name": "client", "type": "text"},
{"name": "deviceModel", "type": "text"},
{"name": "username", "type": "text"},
{"name": "password", "type": "text"},
{"name": "rpcUrl", "type": "text"},
{"name": "encryptKey", "type": "text"},
{"name": "decryptKey", "type": "text"},
{"name": "version", "type": "text"}])
default_audio_quality = 'mediumQuality'
self.db.commitTransaction()
# Check radio server defaults
self.checkDefaults("radio_server_settings", {"name": "timesRun", "value": "0"})
self.checkDefaults("radio_server_settings", {"name": "daemon", "value": "False"})
self.checkDefaults("radio_server_settings", {"name": "pidfile", "value": "False"})
self.checkDefaults("radio_server_settings", {"name": "pidFileName", "value": ""})
self.checkDefaults("radio_server_settings", {"name": "port", "value": 7000})
self.checkDefaults("radio_server_settings", {"name": "verbose", "value": "True"})
self.checkDefaults("radio_server_settings", {"name": "development", "value": "True"})
self.checkDefaults("radio_server_settings", {"name": "kiosk", "value": "False"})
self.checkDefaults("radio_server_settings", {"name": "noupdate", "value": "True"})
self.checkDefaults("radio_server_settings", {"name": "webroot", "value": ""})
# Check radio misc defaults
self.checkDefaults("radio_misc_settings", data={'key': 'show_currently_playing',
'value': '1',
'description': 'Show currently playing bar',
'type': 'select',
'options': "{'1': 'Yes', '2': 'Minimized', '0': 'No'}"})
#Check Pandora defaults
self.checkDefaults("pandora", {"name": "user", "value": ""})
self.checkDefaults("pandora", {"name": "password", "value": ""})
self.checkDefaults("pandora", {"name": "notify", "value": "True"})
self.checkDefaults("pandora", {"name": "last_station_id", "value": ""})
self.checkDefaults("pandora", {"name": "proxy", "value": ""})
self.checkDefaults("pandora", {"name": "control_proxy", "value": ""})
self.checkDefaults("pandora", {"name": "show_icon", "value": "False"})
self.checkDefaults("pandora", {"name": "lastfm_key", "value": "False"})
self.checkDefaults("pandora", {"name": "mediakeys", "value": "True"})
self.checkDefaults("pandora", {"name": "screensaverpause", "value": "False"})
self.checkDefaults("pandora", {"name": "volume", "value": 1.0})
        # If set, allow insecure permissions (see CVE-2011-1500).
self.checkDefaults("pandora", {"name": "unsafe_permissions", "value": "False"})
self.checkDefaults("pandora", {"name": "audio_quality", "value": default_audio_quality})
self.checkDefaults("pandora", {"name": "pandora_one", "value": "False"})
self.checkDefaults("pandora", {"name": "force_client", "value": ""})
self.checkDefaults("radioStreams", {"name": "NPR", "value": "http://nprdmp.ic.llnwd.net/stream/nprdmp_live01_mp3"})
self.checkDefaults("radioStreams", {"name": "BBC News", "value": "http://bbcwssc.ic.llnwd.net/stream/bbcwssc_mp1_ws-eieuk"})
self.checkDefaults("audioQuality", {"name": "highQuality", "value": "High"})
self.checkDefaults("audioQuality", {"name": "mediumQuality", "value": "Medium"})
self.checkDefaults("audioQuality", {"name": "lowQuality", "value": "Low"})
self.checkDefaults("client_keys", {"client": "android-generic",
"deviceModel": "android-generic",
"username": "android",
"password": "AC7IBG09A3DTSYM4R41UJWL07VLN8JI7",
"rpcUrl": "//tuner.pandora.com/services/json/?",
"encryptKey": "6#26FRL$ZWD",
"decryptKey": "R=U!LH$O2B#",
"version": "5"})
self.checkDefaults("client_keys", {"client": "pandora-one",
"deviceModel": "D01",
"username": "pandora one",
"password": "TVCKIBGS9AO9TSYLNNFUML0743LH82D",
"rpcUrl": "//internal-tuner.pandora.com/services/json/?",
"encryptKey": "2%3WCL*JU$MP]4",
"decryptKey": "U#IO$RZPAB%VX2",
"version": "5"})
def getDb(self):
return self.db
def checkDefaults(self, table, data):
cursor = self.db.select(table, where=data)
if not cursor.fetchone():
self.db.beginTransaction()
self.db.insert(table, data)
self.db.commitTransaction()
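    # Example (illustrative): guarantee a settings row exists before the
    # first read, e.g.
    #   self.checkDefaults("pandora", {"name": "volume", "value": 1.0})
    # inserts the row only when no matching row is found.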
def getPreference(self, table, name):
cursor = self.db.select(table, where={"name": name})
row = cursor.fetchone()
if not row:
raise Exception("No preference " + name)
return row["value"]
    def getRadioSettingValue(self, key, default=None):
        try:
            # settings rows are keyed by "name"; fetch just the stored value
            data = self.db.select("radio_server_settings", where={"name": key}, what="value")
            row = data.fetchone()
            if row is None or row["value"] == '':
                return default
            return row["value"]
        except Exception:
            return default
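    # Example (illustrative): self.getRadioSettingValue("webroot", default="")
    # yields the stored value, or "" when the setting is missing or empty.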
def getPandora(self, name):
cursor = self.db.select("pandora", where={"name": name})
row = cursor.fetchone()
if not row:
raise Exception("No Pandora property named: " + name)
return row["value"]
def getPandoraUsername(self):
username = self.getPandora("user")
return username
def getPandoraPassword(self):
password = self.getPandora("password")
return password
def getPandoraOne(self):
pandoraOne = self.getPandora("pandora_one")
return pandoraOne
def getPandoraProxy(self):
proxy = self.getPandora("proxy")
return proxy
def getPandoraControlProxy(self):
controlProxy = self.getPandora("control_proxy")
return controlProxy
def getPandoraAudioQuality(self):
audioQuality = self.getPandora("audio_quality")
return audioQuality
def getPandoraAudioQualitySettings(self):
cursor = self.db.select("audioQuality")
row = cursor.fetchall()
return row
def getNotify(self):
notify = self.getPandora("notify")
return notify
def getScreensaverPause(self):
screensaverPause = self.getPandora("screensaverpause")
return screensaverPause
def getIcon(self):
showIcon = self.getPandora("show_icon")
return showIcon
def getPandoraForceClient(self):
forceClient = self.getPandora("force_client")
return forceClient
def getPandoraClient(self, client):
cursor = self.db.select("client_keys", where={"client": client})
row = cursor.fetchall()
if not row:
raise Exception("No Pandora client named: " + client)
return row
def getRadioStreams(self):
cursor = self.db.select("radioStreams")
row = cursor.fetchall()
return row
def getLastStationId(self):
stationId = self.getPandora("last_station_id")
return stationId
def getTimesRun(self):
return int(self.getPreference("radio_server_settings", "timesRun"))
def getDaemon(self):
return self.getPreference("radio_server_settings", "daemon")
def getPidFile(self):
return self.getPreference("radio_server_settings", "pidfile")
def getPidFileName(self):
return self.getPreference("radio_server_settings", "pidFileName")
def getPort(self):
return int(self.getPreference("radio_server_settings", "port"))
def getVerbose(self):
return self.getPreference("radio_server_settings", "verbose")
def getDevelopment(self):
return self.getPreference("radio_server_settings", "development")
def getKiosk(self):
return self.getPreference("radio_server_settings", "kiosk")
def getNoUpdate(self):
return self.getPreference("radio_server_settings", "noupdate")
    def incTimesRun(self):
        r = int(self.getPreference("radio_server_settings", "timesRun"))
        self.db.beginTransaction()
        self.db.update("radio_server_settings", {"value": r + 1}, {"name": "timesRun"})
        self.db.commitTransaction()
def setDaemon(self, value):
self.db.beginTransaction()
self.db.update("radio_server_settings", {"value": value}, {"name": "daemon"})
self.db.commitTransaction()
def setPidFile(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "pidfile"})
self.db.commitTransaction()
def setPidFileName(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "pidFileName"})
self.db.commitTransaction()
def setPort(self, port):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": port}, {"name": "port"})
self.db.commitTransaction()
def setVerbose(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "verbose"})
self.db.commitTransaction()
def setDevelopment(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "development"})
self.db.commitTransaction()
def setKiosk(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "kiosk"})
self.db.commitTransaction()
def setNoUpdate(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("radio_server_settings", {"value": value}, {"name": "noupdate"})
self.db.commitTransaction()
def setPandoraUsername(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "user"})
self.db.commitTransaction()
def setPandoraPassword(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "password"})
self.db.commitTransaction()
def setPandoraOne(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "pandora_one"})
self.db.commitTransaction()
def setPandoraProxy(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "proxy"})
self.db.commitTransaction()
def setPandoraControlProxy(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "control_proxy"})
self.db.commitTransaction()
def setPandoraAudioQuality(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "audio_quality"})
self.db.commitTransaction()
def setNotify(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "notify"})
self.db.commitTransaction()
def setScreensaverPause(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "screensaverpause"})
self.db.commitTransaction()
def setIcon(self, value):
self.db.beginTransaction()
self.db.insertOrUpdate("pandora", {"value": value}, {"name": "show_icon"})
self.db.commitTransaction()
def getRadioSettings(self):
daemon = self.getDaemon()
pidFile = self.getPidFile()
pidFilename = self.getPidFileName()
port = self.getPort()
verbose = self.getVerbose()
dev = self.getDevelopment()
kiosk = self.getKiosk()
update = self.getNoUpdate()
data = ({'daemon': daemon,
'pidFile': pidFile,
'pidFilename': pidFilename,
'port': port,
'verbose': verbose,
'dev': dev,
'kiosk': kiosk,
'update': update})
return {'success': True, 'data': data} | hephaestus9/Radio | radio/config/preferences.py | Python | mit | 14,591 | 0.004386 |
from django.test import TestCase
import django_comments as comments
from django_comments_xtd.models import TmpXtdComment
from django_comments_xtd.forms import XtdCommentForm
from django_comments_xtd.tests.models import Article
class GetFormTestCase(TestCase):
def test_get_form(self):
        # check that django_comments_xtd.get_form returns the expected form class
self.assert_(comments.get_form() == XtdCommentForm)
class XtdCommentFormTestCase(TestCase):
def setUp(self):
self.article = Article.objects.create(title="September",
slug="september",
body="What I did on September...")
self.form = comments.get_form()(self.article)
def test_get_comment_model(self):
        # check that get_comment_model returns the expected model class
self.assert_(self.form.get_comment_model() == TmpXtdComment)
def test_get_comment_create_data(self):
# as it's used in django_comments.views.comments
data = {"name":"Daniel",
"email":"danirus@eml.cc",
"followup": True,
"reply_to": 0, "level": 1, "order": 1,
"comment":"Es war einmal iene kleine..." }
data.update(self.form.initial)
form = comments.get_form()(self.article, data)
        self.assert_(form.security_errors() == {})
        self.assert_(form.errors == {})
comment = form.get_comment_object()
# it does have the new field 'followup'
self.assert_("followup" in comment)
# and as long as settings.COMMENTS_XTD_CONFIRM_EMAIL is True
# is_public is set to False until receive the user confirmation
self.assert_(comment.is_public == False)
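# These tests are typically run through Django's test machinery, e.g.
# (illustrative): python manage.py test django_comments_xtd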
| jayfk/django-comments-xtd | django_comments_xtd/tests/forms.py | Python | bsd-2-clause | 1,785 | 0.006162 |
api_token = 'd469c24f-c428-a155-eae6-f8216cff4ace'
ytkanan_token = '6bc600bd-d0aa-369e-be0c-65c6af034183'
ythonest_token = 'f41ef6ea-b8ba-d952-4993-e24b9feeda46'
ytabhinav_token = '712c5c97-15c5-fc76-68c7-2acba12287d0'
yo_rss_token = '17aa580a-2863-db0f-34f1-23657b08dfe6'
dev_key = 'AIzaSyBU9eMQ1xW0NNEGprJIR5wgaQdrTFn_Fdc' | kartikluke/yotube | credentials.py | Python | mit | 324 | 0.003086 |
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import isValidIdent, trimStringToByteLength
from zope.interface import implementer
from typing import Any, Dict, List, Optional, Tuple
@implementer(IPlugin, IModuleData, ICommand)
class UserCommand(Command, ModuleData):
name = "UserCommand"
core = True
forRegistered = False
def userCommands(self) -> List[Tuple[str, int, Command]]:
return [ ("USER", 1, self) ]
def parseParams(self, user: "IRCUser", params: List[str], prefix: str, tags: Dict[str, Optional[str]]) -> Optional[Dict[Any, Any]]:
if len(params) < 4:
user.sendSingleError("UserCmd", irc.ERR_NEEDMOREPARAMS, "USER", "Not enough parameters")
return None
if not params[3]: # Make sure the gecos isn't an empty string
user.sendSingleError("UserCmd", irc.ERR_NEEDMOREPARAMS, "USER", "Not enough parameters")
return None
# Trim down to guarantee ident and gecos won't be rejected by the user class for being too long
params[0] = trimStringToByteLength(params[0], self.ircd.config.get("ident_length", 12))
params[3] = trimStringToByteLength(params[3], self.ircd.config.get("gecos_length", 128))
if not isValidIdent(params[0]):
user.sendSingleError("UserCmd", irc.ERR_NEEDMOREPARAMS, "USER", "Your username is not valid") # The RFC is dumb.
return None
return {
"ident": params[0],
"gecos": params[3]
}
def execute(self, user: "IRCUser", data: Dict[Any, Any]) -> bool:
user.changeIdent(data["ident"])
user.changeGecos(data["gecos"])
user.register("USER")
return True
cmd_user = UserCommand() | Heufneutje/txircd | txircd/modules/rfc/cmd_user.py | Python | bsd-3-clause | 1,685 | 0.02908 |
import sys
import math
from pimath import *
from PyQt4 import QtCore, QtGui, QtOpenGL
from camera import Camera
import grind
#-----------------------------------------------------------------------------
from rodin import logging
log = logging.get_logger('grind.mangle.gl_widget')
try:
from OpenGL.GL import *
from OpenGL.GLU import *
except ImportError:
app = QtGui.QApplication(sys.argv)
QtGui.QMessageBox.critical(None, "mangle", "PyOpenGL must be installed to run this example.")
sys.exit(1)
class GLWidget(QtOpenGL.QGLWidget):
xRotationChanged = QtCore.pyqtSignal(int)
yRotationChanged = QtCore.pyqtSignal(int)
zRotationChanged = QtCore.pyqtSignal(int)
def __init__(self, parent=None):
super(GLWidget, self).__init__(parent)
self.renderable = None
self.object = 0
self.xRot = 0
self.yRot = 0
self.zRot = 0
self.width = 0
self.height = 0
self.display_selection_marker = False
self.selection_marker_bbox = grind.BBox()
self.selection_marker_bbox.set_colour(0xFF0000) # R G B
self.lastPos = QtCore.QPoint()
self.backgroundColour = QtGui.QColor.fromCmykF(0.28, 0.28, 0.28, 0.0)
self.foregroundColour = QtGui.QColor.fromCmykF(0.7, 0.7, 0.7, 0.0)
self.dist = 1.0
self.up = 1.0
self.drawGrid = True
self.drawDefaultObject = True
self.followBBox = False
self.moveGrid = False
self.camera = Camera()
self.frameView()
def setFollowBBox(self,follow):
self.followBBox = follow
self.updateGL()
def setCenterBBox(self,centered):
self.moveGrid = not centered
self.updateGL()
def setRenderable(self,renderable,callframeview=True):
self.renderable = renderable
if callframeview == True:
self.frameView()
self.resizeGL(self.width,self.height)
self.updateGL()
def minimumSizeHint(self):
return QtCore.QSize(50, 50)
def sizeHint(self):
return QtCore.QSize(640, 480)
def frameView(self,update=False):
if self.renderable is None:
self.camera.frame(V3f(0,0,0),1)
if update:
self.updateGL()
return
bb = self.renderable.getBounds()
height = bb.size().y
c = bb.center()
center = V3f(c.x,c.y,c.z)
self.camera.frame(center,height)
self.up = height*1.2
self.dist = self.camera.distanceNeeded(height)
if update:
self.updateGL()
def setXRotation(self, angle):
angle = self.normalizeAngle(angle)
if angle != self.xRot:
self.xRot = angle
self.xRotationChanged.emit(angle)
self.updateGL()
def setYRotation(self, angle):
angle = self.normalizeAngle(angle)
if angle != self.yRot:
self.yRot = angle
self.yRotationChanged.emit(angle)
self.updateGL()
def setZRotation(self, angle):
angle = self.normalizeAngle(angle)
if angle != self.zRot:
self.zRot = angle
self.zRotationChanged.emit(angle)
self.updateGL()
def initializeGL(self):
self.qglClearColor(self.foregroundColour.dark())
self.object = self.makeObject()
self.grid = self.makeGrid()
glShadeModel(GL_FLAT)
glEnable(GL_DEPTH_TEST)
def paintGL(self):
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glLoadIdentity()
glTranslatef( 0, -self.up, -self.dist )
if self.followBBox:
self.frameView(False)
self.camera.update(1/60.0)
if self.drawGrid:
move = self.moveGrid and self.renderable is not None
if move:
glPushMatrix()
center = self.renderable.getBounds().center()
glTranslatef(round(center.x/5)*5,round(center.y/5)*5,round(center.z/5)*5)
glCallList(self.grid)
if move:
glPopMatrix()
if self.renderable is None:
if self.drawDefaultObject:
glCallList(self.object)
else:
self.renderable.update()
self.renderable.render()
if self.display_selection_marker == True:
x = self.lastPos.x()
y = self.height - self.lastPos.y()
z = (GLfloat * 1)(0)
glReadPixels(x, y, 1, 1, GL_DEPTH_COMPONENT, GL_FLOAT, z)
if z[0] < 1: # ignore void click
proj = (ctypes.c_double*16)()
proj = glGetDoublev(GL_PROJECTION_MATRIX)
model = (ctypes.c_double*16)()
model = glGetDoublev(GL_MODELVIEW_MATRIX)
(wx,wy,wz) = gluUnProject( x,y,z[0], model, proj, (0, 0, self.width, self.height) ) # model proj view
scale = (self.camera.pos - V3f(wx,wy,wz)).length() * 0.0025
self.selection_marker_bbox.min = V3f(wx - scale, wy - scale, wz - scale)
self.selection_marker_bbox.max = V3f(wx + scale, wy + scale, wz + scale)
glDisable(GL_DEPTH_TEST)
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
self.selection_marker_bbox.render(1)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glEnable(GL_DEPTH_TEST)
def resizeGL(self, width, height):
self.width = width
self.height = height
side = min(width, height)
if side < 0:
return
self.camera.aspect = float(self.width)/float(self.height)
glViewport(0, 0, width, height)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(35, float(self.width)/float(self.height), 0.01, 100)
glMatrixMode(GL_MODELVIEW)
def target_selection(self):
if self.display_selection_marker == True:
self.camera.lookat = V3f(self.selection_marker_bbox.center().x, self.selection_marker_bbox.center().y, self.selection_marker_bbox.center().z)
newdir = (self.camera.pos - self.camera.lookat).normalized()
self.camera.pos = self.camera.lookat + newdir * self.camera.dist
self.display_selection_marker = False
def mousePressEvent(self, event):
self.lastPos = event.pos()
self.camera.mouseButton(event.button(), True, self.lastPos.x(), self.lastPos.y())
self.updateGL()
def mouseReleaseEvent(self, event):
self.camera.mouseButton(event.button(), False, self.lastPos.x(), self.lastPos.y())
self.updateGL()
def mouseMoveEvent(self, event):
self.camera.mouseMotion(event.x(), event.y())
self.updateGL()
self.lastPos = event.pos()
def wheelEvent(self,event):
self.updateGL()
def makeObject(self):
genList = glGenLists(1)
glNewList(genList, GL_COMPILE)
NumSectors = 13
Length = 10.0
LengthSec = 25
Outer = 0.5
Inner = 0.4
ZInner = -Length/2.0
ZOuter = ZInner+0.04
ZInner = 0.01
ZOuter = -0.01
for j in range(LengthSec+1):
glBegin(GL_QUADS)
for i in range(NumSectors):
angle1 = (i * 2 * math.pi) / NumSectors
x5 = Outer * math.sin(angle1)
y5 = Outer * math.cos(angle1)
x6 = Inner * math.sin(angle1)
y6 = Inner * math.cos(angle1)
angle2 = ((i + 1) * 2 * math.pi) / NumSectors
x7 = Inner * math.sin(angle2)
y7 = Inner * math.cos(angle2)
x8 = Outer * math.sin(angle2)
y8 = Outer * math.cos(angle2)
#self.quad(x5, y5, x6, y6, x7, y7, x8, y8, ZOuter, ZInner)
self.extrude(x6, y6, x7, y7, ZOuter, ZInner)
#self.extrude(x8, y8, x5, y5, ZOuter, ZInner)
glEnd()
#glTranslate(0,0,Length/LengthSec)
glRotate(6.8,0,1.91231233,0)
glEndList()
return genList
def quad(self, x1, y1, x2, y2, x3, y3, x4, y4, z1, z2):
self.qglColor(self.backgroundColour)
glVertex3d(x1, y1, z2)
glVertex3d(x2, y2, z2)
glVertex3d(x3, y3, z2)
glVertex3d(x4, y4, z2)
glVertex3d(x4, y4, z1)
glVertex3d(x3, y3, z1)
glVertex3d(x2, y2, z1)
glVertex3d(x1, y1, z1)
def extrude(self, x1, y1, x2, y2, z1, z2):
self.qglColor(self.backgroundColour.dark(250 + int(100 * x1)))
glVertex3d(x1, y1, z1)
glVertex3d(x2, y2, z1)
glVertex3d(x2, y2, z2)
glVertex3d(x1, y1, z2)
def normalizeAngle(self, angle):
while angle < 0:
angle += 360 * 16
while angle > 360 * 16:
angle -= 360 * 16
return angle
def makeGrid(self):
genList = glGenLists(1)
glNewList(genList, GL_COMPILE)
glBegin(GL_LINES)
self.qglColor(self.backgroundColour.dark(150))
self.qglColor(QtGui.QColor(70,70,80))
size = 10.0
count = 10.0
xs = []
ys = []
for x in range(int(count)):
xpos = (x/count-0.5)*size
xs.append(xpos)
for y in range(int(count)):
ypos = (y/count-0.5)*size
ys.append(ypos)
a = ( xpos,0, ypos)
b = ( xpos,0,-ypos)
c = (-xpos,0,-ypos)
d = (-xpos,0, ypos)
glVertex3d(*a)
glVertex3d(*b)
glVertex3d(*d)
glVertex3d(*c)
glVertex3d(*a)
glVertex3d(*d)
glVertex3d(*b)
glVertex3d(*c)
self.qglColor(QtGui.QColor(54,54,54))
size = 10.0
count = 100.0
for x in range(int(count)):
xpos = (x/count-0.5)*size
if xpos in xs: continue
for y in range(int(count)):
ypos = (y/count-0.5)*size
if ypos in ys: continue
a = ( xpos,0, ypos)
b = ( xpos,0,-ypos)
c = (-xpos,0,-ypos)
d = (-xpos,0, ypos)
glVertex3d(*a)
glVertex3d(*b)
glVertex3d(*d)
glVertex3d(*c)
glVertex3d(*a)
glVertex3d(*d)
glVertex3d(*b)
glVertex3d(*c)
glEnd()
glEndList()
return genList
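# Minimal embedding sketch (illustrative; needs a Qt event loop, and
# ``some_renderable`` is a made-up placeholder for a grind renderable):
#
#   app = QtGui.QApplication(sys.argv)
#   widget = GLWidget()
#   widget.setRenderable(some_renderable)
#   widget.show()
#   sys.exit(app.exec_())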
# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios)
#
# This file is part of anim-studio-tools.
#
# anim-studio-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# anim-studio-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with anim-studio-tools. If not, see <http://www.gnu.org/licenses/>.
| mstreatfield/anim-studio-tools | grind/python/util/glWidget.py | Python | gpl-3.0 | 11,356 | 0.006692 |
# Generated by Django 2.0.3 on 2018-05-27 06:40
import django.contrib.postgres.indexes
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('pola', '0005_auto_20171225_1632'),
]
operations = [
migrations.AddIndex(
model_name='query',
index=django.contrib.postgres.indexes.BrinIndex(
fields=['timestamp'], name='pola_query_timesta_ea44b7_brin', pages_per_range=64
),
),
]
| KlubJagiellonski/pola-backend | pola/migrations/0006_auto_20180527_0840.py | Python | bsd-3-clause | 505 | 0.00198 |
import _plotly_utils.basevalidators
class ShowlegendValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="showlegend", parent_name="scattermapbox", **kwargs):
super(ShowlegendValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/scattermapbox/_showlegend.py | Python | mit | 413 | 0.002421 |
from behave import *
import icecream
def before_all(context):
context.app = icecream.app.test_client()
context.icecream = icecream
icecream.inititalize_redis()
| Cantal0p3/nyu-devops-homework-1 | features/environment.py | Python | apache-2.0 | 173 | 0.00578 |
# Copyright (c) 2014-2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
import os
# Find the best implementation available
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import caffe_pb2
import flask
import lmdb
import PIL.Image
from .forms import ImageClassificationDatasetForm
from .job import ImageClassificationDatasetJob
from digits import utils
from digits.dataset import tasks
from digits.utils.forms import fill_form_if_cloned, save_form_to_job
from digits.utils.routing import request_wants_json, job_from_request
from digits.webapp import scheduler
blueprint = flask.Blueprint(__name__, __name__)
def from_folders(job, form):
"""
Add tasks for creating a dataset by parsing folders of images
"""
job.labels_file = utils.constants.LABELS_FILE
### Add ParseFolderTask
percent_val = form.folder_pct_val.data
val_parents = []
if form.has_val_folder.data:
percent_val = 0
percent_test = form.folder_pct_test.data
test_parents = []
if form.has_test_folder.data:
percent_test = 0
min_per_class = form.folder_train_min_per_class.data
max_per_class = form.folder_train_max_per_class.data
parse_train_task = tasks.ParseFolderTask(
job_dir = job.dir(),
folder = form.folder_train.data,
percent_val = percent_val,
percent_test = percent_test,
min_per_category = min_per_class if min_per_class>0 else 1,
max_per_category = max_per_class if max_per_class>0 else None
)
job.tasks.append(parse_train_task)
# set parents
if not form.has_val_folder.data:
val_parents = [parse_train_task]
if not form.has_test_folder.data:
test_parents = [parse_train_task]
if form.has_val_folder.data:
min_per_class = form.folder_val_min_per_class.data
max_per_class = form.folder_val_max_per_class.data
parse_val_task = tasks.ParseFolderTask(
job_dir = job.dir(),
parents = parse_train_task,
folder = form.folder_val.data,
percent_val = 100,
percent_test = 0,
min_per_category = min_per_class if min_per_class>0 else 1,
max_per_category = max_per_class if max_per_class>0 else None
)
job.tasks.append(parse_val_task)
val_parents = [parse_val_task]
if form.has_test_folder.data:
min_per_class = form.folder_test_min_per_class.data
max_per_class = form.folder_test_max_per_class.data
parse_test_task = tasks.ParseFolderTask(
job_dir = job.dir(),
parents = parse_train_task,
folder = form.folder_test.data,
percent_val = 0,
percent_test = 100,
min_per_category = min_per_class if min_per_class>0 else 1,
max_per_category = max_per_class if max_per_class>0 else None
)
job.tasks.append(parse_test_task)
test_parents = [parse_test_task]
### Add CreateDbTasks
backend = form.backend.data
encoding = form.encoding.data
compression = form.compression.data
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
parents = parse_train_task,
input_file = utils.constants.TRAIN_FILE,
db_name = utils.constants.TRAIN_DB,
backend = backend,
image_dims = job.image_dims,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
mean_file = utils.constants.MEAN_FILE_CAFFE,
labels_file = job.labels_file,
)
)
if percent_val > 0 or form.has_val_folder.data:
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
parents = val_parents,
input_file = utils.constants.VAL_FILE,
db_name = utils.constants.VAL_DB,
backend = backend,
image_dims = job.image_dims,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
labels_file = job.labels_file,
)
)
if percent_test > 0 or form.has_test_folder.data:
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
parents = test_parents,
input_file = utils.constants.TEST_FILE,
db_name = utils.constants.TEST_DB,
backend = backend,
image_dims = job.image_dims,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
labels_file = job.labels_file,
)
)
def from_files(job, form):
"""
Add tasks for creating a dataset by reading textfiles
"""
### labels
if form.textfile_use_local_files.data:
job.labels_file = form.textfile_local_labels_file.data.strip()
else:
flask.request.files[form.textfile_labels_file.name].save(
os.path.join(job.dir(), utils.constants.LABELS_FILE)
)
job.labels_file = utils.constants.LABELS_FILE
shuffle = bool(form.textfile_shuffle.data)
backend = form.backend.data
encoding = form.encoding.data
compression = form.compression.data
### train
if form.textfile_use_local_files.data:
train_file = form.textfile_local_train_images.data.strip()
else:
flask.request.files[form.textfile_train_images.name].save(
os.path.join(job.dir(), utils.constants.TRAIN_FILE)
)
train_file = utils.constants.TRAIN_FILE
image_folder = form.textfile_train_folder.data.strip()
if not image_folder:
image_folder = None
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
input_file = train_file,
db_name = utils.constants.TRAIN_DB,
backend = backend,
image_dims = job.image_dims,
image_folder= image_folder,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
mean_file = utils.constants.MEAN_FILE_CAFFE,
labels_file = job.labels_file,
shuffle = shuffle,
)
)
### val
if form.textfile_use_val.data:
if form.textfile_use_local_files.data:
val_file = form.textfile_local_val_images.data.strip()
else:
flask.request.files[form.textfile_val_images.name].save(
os.path.join(job.dir(), utils.constants.VAL_FILE)
)
val_file = utils.constants.VAL_FILE
image_folder = form.textfile_val_folder.data.strip()
if not image_folder:
image_folder = None
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
input_file = val_file,
db_name = utils.constants.VAL_DB,
backend = backend,
image_dims = job.image_dims,
image_folder= image_folder,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
labels_file = job.labels_file,
shuffle = shuffle,
)
)
### test
if form.textfile_use_test.data:
if form.textfile_use_local_files.data:
test_file = form.textfile_local_test_images.data.strip()
else:
flask.request.files[form.textfile_test_images.name].save(
os.path.join(job.dir(), utils.constants.TEST_FILE)
)
test_file = utils.constants.TEST_FILE
image_folder = form.textfile_test_folder.data.strip()
if not image_folder:
image_folder = None
job.tasks.append(
tasks.CreateDbTask(
job_dir = job.dir(),
input_file = test_file,
db_name = utils.constants.TEST_DB,
backend = backend,
image_dims = job.image_dims,
image_folder= image_folder,
resize_mode = job.resize_mode,
encoding = encoding,
compression = compression,
labels_file = job.labels_file,
shuffle = shuffle,
)
)
@blueprint.route('/new', methods=['GET'])
@utils.auth.requires_login
def new():
"""
Returns a form for a new ImageClassificationDatasetJob
"""
form = ImageClassificationDatasetForm()
## Is there a request to clone a job with ?clone=<job_id>
fill_form_if_cloned(form)
return flask.render_template('datasets/images/classification/new.html', form=form)
@blueprint.route('.json', methods=['POST'])
@blueprint.route('', methods=['POST'], strict_slashes=False)
@utils.auth.requires_login(redirect=False)
def create():
"""
Creates a new ImageClassificationDatasetJob
Returns JSON when requested: {job_id,name,status} or {errors:[]}
"""
form = ImageClassificationDatasetForm()
## Is there a request to clone a job with ?clone=<job_id>
fill_form_if_cloned(form)
if not form.validate_on_submit():
if request_wants_json():
return flask.jsonify({'errors': form.errors}), 400
else:
return flask.render_template('datasets/images/classification/new.html', form=form), 400
job = None
try:
job = ImageClassificationDatasetJob(
username = utils.auth.get_username(),
name = form.dataset_name.data,
image_dims = (
int(form.resize_height.data),
int(form.resize_width.data),
int(form.resize_channels.data),
),
resize_mode = form.resize_mode.data
)
if form.method.data == 'folder':
from_folders(job, form)
elif form.method.data == 'textfile':
from_files(job, form)
else:
raise ValueError('method not supported')
## Save form data with the job so we can easily clone it later.
save_form_to_job(job, form)
scheduler.add_job(job)
if request_wants_json():
return flask.jsonify(job.json_dict())
else:
return flask.redirect(flask.url_for('digits.dataset.views.show', job_id=job.id()))
except:
if job:
scheduler.delete_job(job)
raise
def show(job, related_jobs=None):
"""
Called from digits.dataset.views.datasets_show()
"""
return flask.render_template('datasets/images/classification/show.html', job=job, related_jobs=related_jobs)
@blueprint.route('/summary', methods=['GET'])
def summary():
"""
Return a short HTML summary of a DatasetJob
"""
job = job_from_request()
return flask.render_template('datasets/images/classification/summary.html', dataset=job)
class DbReader(object):
"""
Reads a database
"""
def __init__(self, location):
"""
Arguments:
location -- where is the database
"""
self._db = lmdb.open(location,
                             map_size=1024**3, # 1GB
readonly=True, lock=False)
with self._db.begin() as txn:
self.total_entries = txn.stat()['entries']
def entries(self):
"""
Generator returning all entries in the DB
"""
with self._db.begin() as txn:
cursor = txn.cursor()
for item in cursor:
yield item
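# Usage sketch for DbReader (illustrative; the db path is a placeholder):
#
#   reader = DbReader('/path/to/train_db')
#   for key, value in reader.entries():
#       datum = caffe_pb2.Datum()
#       datum.ParseFromString(value)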
@blueprint.route('/explore', methods=['GET'])
def explore():
"""
Returns a gallery consisting of the images of one of the dbs
"""
job = job_from_request()
# Get LMDB
    db = flask.request.args.get('db', 'train')
    task = None
if 'train' in db.lower():
task = job.train_db_task()
elif 'val' in db.lower():
task = job.val_db_task()
elif 'test' in db.lower():
task = job.test_db_task()
if task is None:
raise ValueError('No create_db task for {0}'.format(db))
if task.status != 'D':
raise ValueError("This create_db task's status should be 'D' but is '{0}'".format(task.status))
if task.backend != 'lmdb':
raise ValueError("Backend is {0} while expected backend is lmdb".format(task.backend))
db_path = job.path(task.db_name)
labels = task.get_labels()
page = int(flask.request.args.get('page', 0))
size = int(flask.request.args.get('size', 25))
label = flask.request.args.get('label', None)
if label is not None:
try:
label = int(label)
label_str = labels[label]
except ValueError:
label = None
reader = DbReader(db_path)
count = 0
imgs = []
min_page = max(0, page - 5)
if label is None:
total_entries = reader.total_entries
else:
total_entries = task.distribution[str(label)]
max_page = min((total_entries-1) / size, page + 5)
pages = range(min_page, max_page + 1)
for key, value in reader.entries():
if count >= page*size:
datum = caffe_pb2.Datum()
datum.ParseFromString(value)
if label is None or datum.label == label:
if datum.encoded:
s = StringIO()
s.write(datum.data)
s.seek(0)
img = PIL.Image.open(s)
else:
import caffe.io
arr = caffe.io.datum_to_array(datum)
# CHW -> HWC
arr = arr.transpose((1,2,0))
if arr.shape[2] == 1:
# HWC -> HW
arr = arr[:,:,0]
elif arr.shape[2] == 3:
# BGR -> RGB
# XXX see issue #59
arr = arr[:,:,[2,1,0]]
img = PIL.Image.fromarray(arr)
imgs.append({"label":labels[datum.label], "b64": utils.image.embed_image_html(img)})
if label is None:
count += 1
else:
datum = caffe_pb2.Datum()
datum.ParseFromString(value)
if datum.label == int(label):
count += 1
if len(imgs) >= size:
break
return flask.render_template('datasets/images/classification/explore.html', page=page, size=size, job=job, imgs=imgs, labels=labels, pages=pages, label=label, total_entries=total_entries, db=db)
| brainstorm-ai/DIGITS | digits/dataset/images/classification/views.py | Python | bsd-3-clause | 15,470 | 0.0181 |
#-*- coding: utf-8 -*-
from django.forms.models import modelform_factory
from django.contrib import admin
from django.http import HttpResponse
from django.utils import simplejson
from django.views.decorators.csrf import csrf_exempt
from filer import settings as filer_settings
from filer.models import Clipboard, ClipboardItem
from filer.utils.files import handle_upload, UploadException
from filer.utils.loader import load_object
# ModelAdmins
class ClipboardItemInline(admin.TabularInline):
model = ClipboardItem
class ClipboardAdmin(admin.ModelAdmin):
model = Clipboard
inlines = [ClipboardItemInline]
filter_horizontal = ('files',)
raw_id_fields = ('user',)
verbose_name = "DEBUG Clipboard"
verbose_name_plural = "DEBUG Clipboards"
def get_urls(self):
try:
# django >=1.4
from django.conf.urls import patterns, url
except ImportError:
# django <1.4
from django.conf.urls.defaults import patterns, url
urls = super(ClipboardAdmin, self).get_urls()
from filer import views
url_patterns = patterns('',
url(r'^operations/paste_clipboard_to_folder/$',
self.admin_site.admin_view(views.paste_clipboard_to_folder),
name='filer-paste_clipboard_to_folder'),
url(r'^operations/discard_clipboard/$',
self.admin_site.admin_view(views.discard_clipboard),
name='filer-discard_clipboard'),
url(r'^operations/delete_clipboard/$',
self.admin_site.admin_view(views.delete_clipboard),
name='filer-delete_clipboard'),
# upload does it's own permission stuff (because of the stupid
# flash missing cookie stuff)
url(r'^operations/upload/$',
self.ajax_upload,
name='filer-ajax_upload'),
)
url_patterns.extend(urls)
return url_patterns
@csrf_exempt
def ajax_upload(self, request, folder_id=None):
"""
receives an upload from the uploader. Receives only one file at the time.
"""
mimetype = "application/json" if request.is_ajax() else "text/html"
try:
upload, filename, is_raw = handle_upload(request)
# Get clipboad
clipboard = Clipboard.objects.get_or_create(user=request.user)[0]
# find the file type
for filer_class in filer_settings.FILER_FILE_MODELS:
FileSubClass = load_object(filer_class)
#TODO: What if there are more than one that qualify?
if FileSubClass.matches_file_type(filename, upload, request):
FileForm = modelform_factory(
model = FileSubClass,
fields = ('original_filename', 'owner', 'file')
)
                    break
            else:
                raise UploadException("File type not supported: %s" % (filename,))
uploadform = FileForm({'original_filename': filename,
'owner': request.user.pk},
{'file': upload})
if uploadform.is_valid():
file_obj = uploadform.save(commit=False)
# Enforce the FILER_IS_PUBLIC_DEFAULT
file_obj.is_public = filer_settings.FILER_IS_PUBLIC_DEFAULT
file_obj.save()
clipboard_item = ClipboardItem(
clipboard=clipboard, file=file_obj)
clipboard_item.save()
json_response = {
'thumbnail': file_obj.icons['32'],
'alt_text': '',
'id': file_obj.id,
'label': str(file_obj),
}
return HttpResponse(simplejson.dumps(json_response),
mimetype=mimetype)
else:
form_errors = '; '.join(['%s: %s' % (
field,
', '.join(errors)) for field, errors in uploadform.errors.items()
])
raise UploadException("AJAX request not valid: form invalid '%s'" % (form_errors,))
except UploadException as e:
return HttpResponse(simplejson.dumps({'error': str(e)}),
mimetype=mimetype)
def get_model_perms(self, request):
"""
It seems this is only used for the list view. NICE :-)
"""
return {
'add': False,
'change': False,
'delete': False,
}
| maykinmedia/django-filer | filer/admin/clipboardadmin.py | Python | bsd-3-clause | 4,562 | 0.002192 |
"""
This module contains tasks that are executed at intervals, and is imported at
the time the server is started. The intervals at which the tasks run
are configurable via :py:mod:`media_nommer.conf.settings`.
All functions prefixed with ``task_`` are task functions that are registered
with the Twisted_ reactor. All functions prefixed with ``threaded_`` are
the interesting bits that actually do things.
"""
from twisted.internet import task, reactor
from media_nommer.conf import settings
from media_nommer.utils import logger
from media_nommer.core.job_state_backend import JobStateBackend
from media_nommer.ec2nommerd.node_state import NodeStateManager
def threaded_encode_job(job):
"""
Given a job, run it through its encoding workflow in a non-blocking manner.
"""
# Update the timestamp for when the node last did something so it
# won't terminate itself.
NodeStateManager.i_did_something()
job.nommer.onomnom()
def task_check_for_new_jobs():
"""
Looks at the number of currently active threads and compares it against the
:py:data:`MAX_ENCODING_JOBS_PER_EC2_INSTANCE <media_nommer.conf.settings.MAX_ENCODING_JOBS_PER_EC2_INSTANCE>`
setting. If we are under the max, fire up another thread for encoding
additional job(s).
The interval at which :doc:`../ec2nommerd` checks for new jobs is
determined by the
:py:data:`NOMMERD_NEW_JOB_CHECK_INTERVAL <media_nommer.conf.settings.NOMMERD_NEW_JOB_CHECK_INTERVAL>`
setting.
Calls :py:func:`threaded_encode_job` for any jobs to encode.
"""
num_active_threads = NodeStateManager.get_num_active_threads()
max_threads = settings.MAX_ENCODING_JOBS_PER_EC2_INSTANCE
num_jobs_to_pop = max(0, max_threads - num_active_threads)
if num_jobs_to_pop > 0:
# We have more room for encoding threads, determine how many.
logger.debug("task_check_for_new_jobs: " \
"Popping up to %d new jobs." % num_jobs_to_pop)
# This is an iterable of BaseEncodingJob sub-classed instances for
# each job returned from the queue.
jobs = JobStateBackend.pop_new_jobs_from_queue(num_jobs_to_pop)
if jobs:
logger.debug("* Popped %d jobs from the queue." % len(jobs))
for job in jobs:
# For each job returned, render in another thread.
logger.debug("* Starting encoder thread for job: %s" % job.unique_id)
reactor.callInThread(threaded_encode_job, job)
def threaded_heartbeat():
"""
Fires off a threaded task to check in with feederd via SimpleDB_. There
is a domain that contains all of the running EC2_ instances and their
unique IDs, along with some state data.
The interval at which heartbeats occur is determined by the
    :py:data:`NOMMERD_HEARTBEAT_INTERVAL <media_nommer.conf.settings.NOMMERD_HEARTBEAT_INTERVAL>`
setting.
"""
if settings.NOMMERD_TERMINATE_WHEN_IDLE:
# thread_count_mod factors out this thread when counting active threads.
is_terminated = NodeStateManager.contemplate_termination(thread_count_mod= -1)
else:
is_terminated = False
if not is_terminated:
NodeStateManager.send_instance_state_update()
def task_heartbeat():
"""
Checks in with feederd in a non-blocking manner via
:py:meth:`threaded_heartbeat`.
Calls :py:func:`threaded_heartbeat`.
"""
reactor.callInThread(threaded_heartbeat)
def register_tasks():
"""
Registers all tasks. Called by the :doc:`../ec2nommerd` Twisted_ plugin.
"""
task.LoopingCall(task_check_for_new_jobs).start(
settings.NOMMERD_NEW_JOB_CHECK_INTERVAL,
now=True)
task.LoopingCall(task_heartbeat).start(settings.NOMMERD_HEARTBEAT_INTERVAL,
now=False)
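# Sketch of wiring in one more periodic task (illustrative; ``my_task`` and
# the 60-second interval are made-up placeholders):
#
#   def my_task():
#       reactor.callInThread(my_threaded_func)
#
#   task.LoopingCall(my_task).start(60, now=False)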
| duointeractive/media-nommer | media_nommer/ec2nommerd/interval_tasks.py | Python | bsd-3-clause | 3,910 | 0.00665 |
#!/usr/bin/env python3
# coding=utf-8
from Geometry.Vector2 import Vector2
import math
import pygame
def intersecting_rows(rect1, rect2):
"""
@param rect2: pygame.Rect
@param rect1: pygame.Rect
@return: tuple
"""
tile_left = math.floor(rect1.left / rect2.width)
tile_right = math.ceil(rect1.right / rect2.width)
intersecting_range = (tile_left, tile_right)
return intersecting_range
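# Example (illustrative): an entity rect spanning x=40..100 over 32px-wide
# tiles gives floor(40/32)=1 and ceil(100/32)=4, i.e. the range (1, 4).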
def intersecting_columns(rect1, rect2):
"""
@param rect2: pygame.Rect
@param rect1: pygame.Rect
@return: tuple
"""
tile_top = math.floor(rect1.top / rect2.height)
tile_bottom = math.ceil(rect1.bottom / rect2.height)
intersecting_range = (tile_top, tile_bottom)
return intersecting_range
def scan_for_tiles_x(tileset, entity):
"""
@param entity: Prototype.Entity
@type tileset: Prototype.TileSet
"""
all_tiles = []
intersecting_range = intersecting_columns(entity.rect,
tileset.tile_array[0, 0].rect)
# print("range x", intersecting_range)
if entity.info['normal'].x > 0:
for i in range(intersecting_range[0], intersecting_range[1]):
tmp_tile_list = tileset.scan_x_right(i,
(entity.forward_edge.x //
tileset.size_info['tile'].x)
+ 1, entity)
all_tiles.extend(tmp_tile_list)
else:
for i in range(intersecting_range[0], intersecting_range[1]):
tmp_tile_list = tileset.scan_x_left(i,
(entity.forward_edge.x //
tileset.size_info['tile'].x)
- 1, entity)
all_tiles.extend(tmp_tile_list)
if on_slope_tile(entity, tileset):
for i in range(0, len(all_tiles)):
for bad_tile in tileset.not_considered_tiles:
try:
if all_tiles[i] == bad_tile:
del all_tiles[i]
except IndexError:
# print("index error: all_tiles[i]")
pass
return all_tiles
def scan_for_tiles_y(tileset, entity):
"""
@param entity: Prototype.Entity
@type tileset: Prototype.TileSet
"""
all_tiles = []
intersecting_range = intersecting_rows(entity.rect,
tileset.tile_array[0, 0].rect)
# print("range y", intersecting_range)
if entity.info['normal'].y > 0:
for i in range(intersecting_range[0], intersecting_range[1]):
tmp_tile_list = tileset.scan_y_bottom(i,
(entity.forward_edge.y //
tileset.size_info['tile'].y)
+ 1, entity)
all_tiles.extend(tmp_tile_list)
else:
for i in range(intersecting_range[0], intersecting_range[1]):
tmp_tile_list = tileset.scan_y_top(i,
(entity.forward_edge.y //
tileset.size_info['tile'].y)
- 1, entity)
all_tiles.extend(tmp_tile_list)
if on_slope_tile(entity, tileset):
for i in range(0, len(all_tiles)):
for bad_tile in tileset.not_considered_tiles:
try:
if all_tiles[i] == bad_tile:
del all_tiles[i]
except IndexError:
# print("index error: all_tiles[i]")
pass
return all_tiles
def closest_tile_x(tiles_to_check, entity):
"""
@param entity: Prototype.Entity
@type tiles_to_check: list
@return: Prototype.Tile
"""
distance_from_self = 1000000
closest = None
for tmp_tile in tiles_to_check:
if (abs(tmp_tile.rect.centerx - entity.rect.centerx) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.centerx -
entity.rect.centerx)
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((0, 100, 255))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def closest_tile_y(tiles_to_check, entity):
"""
@param entity: Prototype.Entity
@type tiles_to_check: list
@return: Prototype.Tile
"""
distance_from_self = 1000000
closest = None
for tmp_tile in tiles_to_check:
if (abs(tmp_tile.rect.centery - entity.rect.centery) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.centery -
entity.rect.centery)
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((0, 100, 255))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def closest_ladder_x(close_tile_list, entity):
distance_from_self = 1000000
closest = None
for tmp_tile in close_tile_list:
if tmp_tile is not None:
if tmp_tile.tile_info['type'] == 'ladder':
if entity.info['normal'].x > 0:
if (abs(tmp_tile.rect.left - entity.rect.right) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.left -
entity.rect.right)
else:
if (abs(tmp_tile.rect.right - entity.rect.left) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.right -
entity.rect.left)
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((123, 50, 50))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def closest_ladder_y(close_tile_list, entity):
distance_from_self = 1000000
closest = None
for tmp_tile in close_tile_list:
if tmp_tile is not None:
if tmp_tile.tile_info['type'] == 'ladder':
if entity.info['normal'].y > 0:
if (abs(tmp_tile.rect.top - entity.rect.bottom) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.top -
entity.rect.bottom)
else:
if (abs(tmp_tile.rect.bottom - entity.rect.top) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.bottom -
entity.rect.top)
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((255, 100, 100))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def closest_from_list_x(close_tile_list, entity):
"""
@param close_tile_list: list
@param entity: Prototype.Entity
@return: Prototype.Tile
"""
distance_from_self = 1000000
closest = None
for tmp_tile in close_tile_list:
if tmp_tile is not None:
if tmp_tile.tile_info['type'] != 'ladder':
if entity.info['normal'].x > 0:
if (abs(tmp_tile.rect.left - entity.rect.right) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.left -
entity.rect.right)
else:
if (abs(tmp_tile.rect.right - entity.rect.left) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.right -
entity.rect.left)
if tmp_tile.tile_info['type'] == 'slope':
slope_tile = touching_slope_tile(entity, tmp_tile)
if slope_tile is not None:
closest = slope_tile
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((50, 50, 50))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def closest_from_list_y(close_tile_list, entity):
"""
@param close_tile_list: list
@param entity: Prototype.Entity
@return: Prototype.Tile
"""
distance_from_self = 1000000
closest = None
for tmp_tile in close_tile_list:
if tmp_tile is not None:
if tmp_tile.tile_info['type'] != 'ladder':
if entity.info['normal'].y > 0:
if (abs(tmp_tile.rect.top - entity.rect.bottom) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.top -
entity.rect.bottom)
else:
if (abs(tmp_tile.rect.bottom - entity.rect.top) <
distance_from_self):
closest = tmp_tile
distance_from_self = abs(tmp_tile.rect.bottom -
entity.rect.top)
if tmp_tile.tile_info['type'] == 'slope':
slope_tile = touching_slope_tile(entity, tmp_tile)
if slope_tile is not None:
closest = slope_tile
if closest is not None:
# noinspection PyArgumentList
closest_img = pygame.Surface((closest.rect.width, closest.rect.height))
closest_img.fill((100, 100, 100))
if entity.debug:
entity.debug_draw(closest_img, closest.rect.x, closest.rect.y)
return closest
def touching_slope_tile(entity, slope_tile):
"""
@param entity: Prototype.Entity
@param slope_tile: Prototype.Tile
@return: Prototype.Tile
"""
intersecting_range = intersecting_rows(entity.rect, slope_tile.rect)
for i in range(intersecting_range[0], intersecting_range[1]):
if slope_tile.tile_coords.x == i:
return slope_tile
return None
def on_slope_tile(entity, tileset):
"""
@param entity: Prototype.Entity
@param tileset: Prototype.TileSet
@return: bool
"""
intersecting_range = intersecting_rows(entity.rect,
tileset.tile_array[0, 0].rect)
bottom_y = math.floor(entity.rect.bottom / tileset.size_info['tile'].x)
for i in range(intersecting_range[0], intersecting_range[1]):
try:
tmp_tile = tileset.tile_array[i, bottom_y]
            if (tmp_tile.tile_info['type'] == 'slope' and
                    tmp_tile.tile_info['floor_y'].x != 0 and
                    tmp_tile.tile_info['floor_y'].y != 0):
return True
except KeyError:
print("on_slope_tile: KeyError out of tileset bounds")
return False
def collide_platform_x(platform, entities):
"""
@param platform: Prototype.Platform
@param entities: list
"""
for e in entities:
if pygame.sprite.collide_rect(platform, e):
if (platform.info['normal'].x > 0 and platform.rect.x <
e.rect.x and platform.rect.right >= e.rect.left):
e.rect.left = platform.rect.right
if (platform.info['normal'].x < 0 and platform.rect.x >
e.rect.x and platform.rect.left <= e.rect.right):
e.rect.right = platform.rect.left
def collide_platform_y(platform, entities):
"""
@param platform: Prototype.Platform
@param entities: list
"""
for e in entities:
if pygame.sprite.collide_rect(platform, e):
            if (platform.info['normal'].y > 0 and platform.rect.bottom >
                e.rect.top and platform.rect.y <= e.rect.y and e.rect.left >
                platform.rect.left and e.rect.right < platform.rect.right):
                e.rect.top = platform.rect.bottom
            if (platform.info['normal'].y < 0 and platform.rect.top <
                e.rect.bottom and platform.rect.y >= e.rect.y and e.rect.left >
                platform.rect.left and e.rect.right < platform.rect.right):
e.rect.bottom = platform.rect.top
def scan_for_platforms_x(entity, platform_group):
"""
@param entity: Prototype.Entity
@param platform_group: Pygame.Sprite.Group
@return: list
"""
platform_list = []
for platform in platform_group:
if platform.rect.width >= entity.rect.width:
if (platform.rect.left <= entity.rect.left <= platform.rect.right or
platform.rect.left <= entity.rect.right <=
platform.rect.right):
if (entity.info['normal'].y > 0 and platform.rect.top >=
entity.rect.bottom or entity.info['normal'].y < 0 and
platform.rect.bottom <= entity.rect.top):
platform_list.append(platform)
else:
            if (entity.rect.left <= platform.rect.left <= entity.rect.right or
                    entity.rect.left <= platform.rect.right <=
                    entity.rect.right):
                if (entity.info['normal'].y > 0 and platform.rect.top >=
                        entity.rect.bottom or entity.info['normal'].y < 0 and
                        platform.rect.bottom <= entity.rect.top):
                    platform_list.append(platform)
return platform_list
def scan_for_platforms_y(entity, platform_group):
"""
@param entity: Prototype.Entity
@param platform_group: Pygame.Sprite.Group
@return: list
"""
platform_list = []
for platform in platform_group:
if platform.rect.height >= entity.rect.height:
if (platform.rect.top <= entity.rect.top <= platform.rect.bottom or
platform.rect.top <= entity.rect.bottom <=
platform.rect.bottom):
if (entity.info['normal'].x > 0 and platform.rect.left >=
entity.rect.right or entity.info['normal'].x < 0 and
platform.rect.right <= entity.rect.left):
platform_list.append(platform)
else:
if (entity.rect.top <= platform.rect.top <= entity.rect.bottom or
entity.rect.top <= platform.rect.bottom <=
entity.rect.bottom):
if (entity.info['normal'].x > 0 and platform.rect.left >=
entity.rect.right or entity.info['normal'].x < 0 and
platform.rect.right <= entity.rect.left):
platform_list.append(platform)
return platform_list
class Camera(object):
"""
@param camera_func: str type of camera to use
@param width: int
@param height: int
"""
def __init__(self, camera_func=None, width=None, height=None):
if camera_func == "complex":
self.camera_func = self.complex_camera
elif camera_func == "simple":
self.camera_func = self.simple_camera
elif camera_func == "debug":
self.camera_func = self.debug_camera
else:
print("no camera func defined")
if width is not None and height is not None:
self.state = pygame.Rect(0, 0, width, height)
def apply(self, target):
"""
@param target: Entity, Sprite, anything with a pygame.Rect
@return: pygame.Rect
"""
return target.rect.move(self.state.topleft)
def update(self, target, screen_size):
"""
@param target: Entity, Sprite, anything with a pygame Rect
@param screen_size: Geometry.Vector2
"""
self.state = self.camera_func(self.state, target.rect, screen_size)
@staticmethod
def debug_camera(camera, target_rect, screen_size):
"""
@param camera: Helpers.Camera
@param target_rect: pygame.Rect
@param screen_size: Geometry.Vector2
@return: pygame.Rect
"""
print('camera', camera, 'is not used', target_rect, 'is not used')
return pygame.Rect(0, 0, screen_size.x, screen_size.y)
@staticmethod
def simple_camera(camera, target_rect, screen_size):
"""
@param camera: Helpers.Camera
@param target_rect: pygame.Rect
@param screen_size: Geometry.Vector2
@return: pygame.Rect
"""
l, t, _, _ = target_rect
_, _, w, h = camera
return pygame.Rect(-l + screen_size.x / 2, -t + screen_size.y / 2, w, h)
@staticmethod
def complex_camera(camera, target_rect, screen_size):
"""
@param camera: Helpers.Camera
@param target_rect: pygame.Rect
@param screen_size: Geometry.Vector2
@return: pygame.Rect
"""
l, t, _, _ = target_rect
_, _, w, h = camera
l, t, _, _ = -l + screen_size.x / 2, -t + screen_size.y / 2, w, h
# stop scrolling at the left edge
l = min(0, l)
# stop scrolling at the right edge
l = max(-(camera.width - screen_size.x), l)
# stop scrolling at the bottom
t = max(-(camera.height - screen_size.y), t)
t = min(0, t)
# stop scrolling at the top
return pygame.Rect(l, t, w, h)
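# Usage sketch (illustrative; the level and screen sizes are placeholders):
#
#   camera = Camera("complex", level_pixel_width, level_pixel_height)
#   camera.update(player, Vector2(800, 600))
#   screen.blit(player.image, camera.apply(player))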
class BackgroundManager(object):
"""
@param background_path: str
@param parallax: int
"""
def __init__(self, background_path, parallax):
self.background = pygame.image.load(background_path)
self.background_rect = self.background.get_rect()
self.parallax = parallax
self.position = Vector2(0, 0)
def update(self, camera, tileset):
"""
@param camera: Helpers.Camera
@param tileset: Prototype.TileSet
"""
self.position.x = ((-(camera.state.left / self.parallax) //
self.background_rect.width) *
self.background_rect.width)
self.position.y = (tileset.size_info['map'].y *
tileset.size_info['tile'].y -
self.background_rect.height)
# give parallax effect
self.position.x += camera.state.left / self.parallax
self.position.y += camera.state.top
def draw(self, screen, screen_size):
"""
@param screen: pygame.Display
@param screen_size: Geometry.Vector2
"""
screen.blit(self.background, (self.position.x, self.position.y + 32))
copies = (screen_size.x // self.background_rect.width) + 1
while copies > 0:
self.position.x += self.background_rect.width
screen.blit(self.background, (self.position.x,
self.position.y + 32))
copies -= 1
def one_way_platform_checker(entity, tile):
"""
@param entity: Prototype.Entity
@param tile: Prototype.Tile
@return: bool
"""
    return (entity.rect.bottom - 1) <= tile.rect.top
| bubbles231/Prototype | Helpers.py | Python | gpl-3.0 | 20,270 | 0.000247 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/creature/npc/droid/crafted/shared_it_o_interrogator_advanced.iff"
result.attribute_template_id = 3
result.stfName("droid_name","it_o_interrogator_crafted_advanced")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/creature/npc/droid/crafted/shared_it_o_interrogator_advanced.py | Python | mit | 492 | 0.044715 |
# -*- coding: utf-8 -*-
from orator.migrations import Migrator, DatabaseMigrationRepository
from .base_command import BaseCommand
class MigrateCommand(BaseCommand):
"""
Run the database migrations.
migrate
{--d|database= : The database connection to use.}
    {--p|path= : The path of migration files to be executed.}
{--s|seed : Indicates if the seed task should be re-run.}
    {--seed-path= : The path of seed files to be executed.
Defaults to <comment>./seeders</comment>.}
{--P|pretend : Dump the SQL queries that would be run.}
{--f|force : Force the operation to run.}
"""
def handle(self):
        prompt_msg = ('<question>Are you sure you want '
'to proceed with the migration?</question> ')
if not self.confirm_to_proceed(prompt_msg):
return
database = self.option('database')
repository = DatabaseMigrationRepository(self.resolver, 'migrations')
migrator = Migrator(repository, self.resolver)
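        # Set the connection and create the migration repository table if needed.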
self._prepare_database(migrator, database)
pretend = self.option('pretend')
path = self.option('path')
if path is None:
path = self._get_migration_path()
migrator.run(path, pretend)
for note in migrator.get_notes():
self.line(note)
# If the "seed" option has been given,
# we will rerun the database seed task to repopulate the database.
if self.option('seed'):
options = [
('--force', self.option('force'))
]
if database:
options.append(('--database', database))
if self.get_definition().has_option('config'):
options.append(('--config', self.option('config')))
if self.option('seed-path'):
options.append(('--path', self.option('seed-path')))
self.call('db:seed', options)
def _prepare_database(self, migrator, database):
migrator.set_connection(database)
if not migrator.repository_exists():
options = []
if database:
options.append(('--database', database))
if self.get_definition().has_option('config'):
options.append(('--config', self.option('config')))
self.call('migrate:install', options)
| Hanaasagi/sorator | orator/commands/migrations/migrate_command.py | Python | mit | 2,409 | 0 |
"""
Copyright 2016, 2017 UFPE - Universidade Federal de Pernambuco
This file is part of the Amadeus Learning Management System program, or simply Amadeus LMS
Amadeus LMS is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation (FSF), version 2 of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License, under the title "LICENSE", along with this program; if not, write to the Free Software Foundation (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
"""
##
# Handles all communication with the database
##
from django.db import connection
from h5p.models import h5p_libraries
class H5PEditorStorage:
##
# Decides which content types the editor should have
##
def getLibraries(self, libraries=None):
        if libraries is not None:
librariesWithDetails = list()
for library in libraries:
details = h5p_libraries.objects.filter(machine_name=library['name'], major_version=library[
'majorVersion'], minor_version=library['minorVersion']).values('title', 'runnable', 'restricted', 'tutorial_url')
if len(details) > 0:
details = details[0]
library['tutorialUrl'] = details['tutorial_url']
library['title'] = details['title']
library['runnable'] = details['runnable']
library['restricted'] = True if details[
'restricted'] == 1 else False
librariesWithDetails.append(library)
return librariesWithDetails
libraries = list()
librariesResult = h5p_libraries.objects.filter(runnable=1, semantics__isnull=False).extra(select={'name': 'machine_name', 'majorVersion': 'major_version', 'minorVersion': 'minor_version', 'tutorialUrl': 'tutorial_url'}).values(
'name', 'title', 'majorVersion', 'minorVersion', 'tutorialUrl', 'restricted').order_by('title')
for library in librariesResult:
libraries.append(library)
return libraries
##
# Load language file(JSON) from database.
# This is used to translate the editor fields(title, description, etc...)
##
def getLanguage(self, machineName, majorVersion, minorVersion, language):
# Load translation field from DB
cursor = connection.cursor()
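        # Note: the lookup values are interpolated (and quoted by hand) directly
        # into the SQL string below rather than passed as bound parameters.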
cursor.execute("""
SELECT hlt.language_json
FROM h5p_libraries_languages hlt
JOIN h5p_libraries hl ON hl.library_id = hlt.library_id
WHERE hl.machine_name = %s AND hl.major_version = %s AND hl.minor_version = %s AND hlt.language_code = %s
""" % ("'" + machineName + "'", majorVersion, minorVersion, "'" + language + "'"))
result = self.dictfetchall(cursor)
return result[0]['language_json'] if len(result) > 0 else False
##
# Returns all rows from a cursor as a dict
##
def dictfetchall(self, cursor):
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
| amadeusproject/amadeuslms | h5p/base_plugin/editor/library/editorstorage.py | Python | gpl-2.0 | 3,487 | 0.00694 |
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the User Model."""
from google.appengine.api import users
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.linkable
class User(soc.models.linkable.Linkable):
"""A user and associated login credentials, the fundamental identity entity.
User is a separate Model class from Person because the same login
ID may be used to, for example, serve as Contributor in one Program
and a Reviewer in another.
Also, this allows a Person to, in the future, re-associate that
Person entity with a different Google Account if necessary.
A User entity participates in the following relationships implemented
as a db.ReferenceProperty elsewhere in another db.Model:
persons) a 1:many relationship of Person entities identified by the
User. This relation is implemented as the 'persons' back-reference
Query of the Person model 'user' reference.
documents) a 1:many relationship of Document entities identified by the
User. This relation is implemented as the 'user' back-reference
Query of the Document model 'user' reference.
responses) a 1:many relationship of Response entities submitted by the
User. This relation is implemented as the 'responses' back-reference
Query of the Response model 'respondent' reference.
"""
@property
def url_id(self):
"""URL ID property.
It provides a unique string identifier of the user that is to be used
as a part of various URLs. The returned string is URL safe and can be
validated by linkable.LINK_ID_REGEX regular expression.
Returns:
a string containing URL ID property
"""
return self.key().name()
#: A Google Account, which also provides a "private" email address.
#: This email address is only used in an automated fashion by
#: Melange web applications and is not made visible to other users
#: of any Melange application.
account = db.UserProperty(required=True,
verbose_name=ugettext('User account'))
account.help_text = ugettext(
'A valid Google Account.')
#: Google Account unique user id
user_id = db.StringProperty(required=False)
#: A list (possibly empty) of former Google Accounts associated with
#: this User.
former_accounts = db.ListProperty(users.User)
#: Required field storing publicly-displayed name. Can be a real name
#: (though this is not recommended), or a nick name or some other public
#: alias. Public names can be any valid UTF-8 text.
name = db.StringProperty(
default='', required=False, verbose_name=ugettext('Public name'))
name.help_text = ugettext(
'Human-readable name (UTF-8) that will be displayed publicly on the'
' site.')
#: field storing whether User is a Developer with site-wide access.
is_developer = db.BooleanProperty(default=False,
verbose_name=ugettext('Is Developer'))
is_developer.help_text = ugettext(
'Field used to indicate user with site-wide Developer access.')
#: List of Sponsors that the user is a host for
host_for = db.ListProperty(item_type=db.Key, default=[])
host_for.help_text = ugettext('List of program owners which '
'the user is a program administrator for.')
#: field storing the user preference as whether to disable TinyMCE
disable_tinymce = db.BooleanProperty(default=False,
verbose_name=ugettext('Disable TinyMCE'))
disable_tinymce.help_text = ugettext(
'Disable the TinyMCE editor.')
#: field storing the user preference as to how many rows to show
nr_list_rows = db.IntegerProperty(
required=False, verbose_name=ugettext('Number of list rows'))
nr_list_rows.help_text = ugettext(
'Controls how many rows will be shown per list by default. '
'Defaults to 5 if not set.')
  #: field storing whether the User has agreed to the site-wide Terms of Service.
#: (Not a required field because the Terms of Service might not be present
#: when the first User profile is created when bootstrapping the site.)
agreed_to_tos = db.BooleanProperty(required=False, default=False,
verbose_name=ugettext('I Agree to the Terms of Service'))
agreed_to_tos.help_text = ugettext(
'Indicates whether the user agreed to the site-wide Terms of Service.')
#: field storing when the User has agreed to the site-wide Terms of Service.
#: (Not a required field because the Terms of Service might not be present
#: when the first User profile is created when bootstrapping the site.)
agreed_to_tos_on = db.DateTimeProperty(required=False, default=None,
verbose_name=ugettext('Has agreed to the Terms of Service on'))
agreed_to_tos_on.help_text = ugettext(
'Indicates when the user agreed to the site-wide Terms of Service.')
#: field storing the status of this User.
#: valid: Is just that, it's a valid User.
#: invalid: This means that this User has been excluded
#: from using the website.
status = db.StringProperty(required=True, default='valid',
choices=['valid', 'invalid'],)
status.help_text = ugettext(
'Indicates the status of the User. Invalid means that this account '
'has been excluded from using the website.')
| rhyolight/nupic.son | app/soc/models/user.py | Python | apache-2.0 | 5,797 | 0.009488 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-09 20:19
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('core', '0011_auto_20151209_1947'),
]
operations = [
migrations.CreateModel(
name='Collection',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='artist',
name='collection',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='core.Collection'),
preserve_default=False,
),
migrations.AddField(
model_name='release',
name='collection',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='core.Collection'),
preserve_default=False,
),
migrations.AddField(
model_name='track',
name='collection',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='core.Collection'),
preserve_default=False,
),
]
| svenvandescheur/recordstore | recordstore/core/migrations/0012_auto_20151209_2019.py | Python | mit | 1,594 | 0.003137 |
from __future__ import unicode_literals
from django.apps import AppConfig
class DevelopersConfig(AppConfig):
name = 'developers'
| neldom/qessera | developers/apps.py | Python | mit | 136 | 0 |
"""
HttpError Spider Middleware
See documentation in docs/topics/spider-middleware.rst
"""
import logging
from scrapy.exceptions import IgnoreRequest
logger = logging.getLogger(__name__)
class HttpError(IgnoreRequest):
"""A non-200 response was filtered"""
def __init__(self, response, *args, **kwargs):
self.response = response
super(HttpError, self).__init__(*args, **kwargs)
class HttpErrorMiddleware(object):
@classmethod
def from_crawler(cls, crawler):
return cls(crawler.settings)
def __init__(self, settings):
self.handle_httpstatus_all = settings.getbool('HTTPERROR_ALLOW_ALL')
self.handle_httpstatus_list = settings.getlist('HTTPERROR_ALLOWED_CODES')
def process_spider_input(self, response, spider):
if 200 <= response.status < 300: # common case
return
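        # Precedence: per-request meta flags, then spider attributes,
        # then the project-wide HTTPERROR_* settings.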
meta = response.meta
if 'handle_httpstatus_all' in meta:
return
if 'handle_httpstatus_list' in meta:
allowed_statuses = meta['handle_httpstatus_list']
elif self.handle_httpstatus_all:
return
else:
allowed_statuses = getattr(spider, 'handle_httpstatus_list', self.handle_httpstatus_list)
if response.status in allowed_statuses:
return
raise HttpError(response, 'Ignoring non-200 response')
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
spider.crawler.stats.inc_value('httperror/response_ignored_count')
spider.crawler.stats.inc_value(
'httperror/response_ignored_status_count/%s' % response.status
)
logger.info(
"Ignoring response %(response)r: HTTP status code is not handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
| rolando-contrib/scrapy | scrapy/spidermiddlewares/httperror.py | Python | bsd-3-clause | 1,921 | 0.001562 |
########################################################################
# amara/xpath/locationpaths/predicates.py
"""
A parsed token that represents a predicate list.
"""
from __future__ import absolute_import
from itertools import count, izip
from amara.xpath import datatypes
from amara.xpath.expressions.basics import literal, variable_reference
from amara.xpath.expressions.booleans import equality_expr, relational_expr
from amara.xpath.functions import position_function
from ._nodetests import positionfilter
from ._paths import pathiter
__all__ = ['predicates', 'predicate']
class predicates(tuple):
def __init__(self, *args):
self.select = pathiter(pred.select for pred in self).select
return
def filter(self, nodes, context, reverse):
if self:
state = context.node, context.position, context.size
for predicate in self:
nodes = datatypes.nodeset(predicate.select(context, nodes))
context.node, context.position, context.size = state
else:
nodes = datatypes.nodeset(nodes)
if reverse:
nodes.reverse()
return nodes
def pprint(self, indent='', stream=None):
print >> stream, indent + repr(self)
for pred in self:
pred.pprint(indent + ' ', stream)
def __str__(self):
return self.__unicode__().encode('utf-8')
def __repr__(self):
ptr = id(self)
if ptr < 0: ptr += 0x100000000L
return '<%s at 0x%x: %s>' % (self.__class__.__name__, ptr, self)
def __unicode__(self):
return u''.join(map(unicode, self))
#FIXME: should this derive from boolean_expression?
class predicate:
def __init__(self, expression):
self._expr = expression
self._provide_context_size = False #See http://trac.xml3k.org/ticket/62
#FIXME: There are probably many code paths which need self._provide_context_size set
# Check for just "Number"
if isinstance(expression, literal):
const = datatypes.number(expression._literal)
index = int(const)
if index == const and index >= 1:
self.select = positionfilter(index)
else:
# FIXME: add warning that expression will not select anything
self.select = izip()
return
# Check for "position() = Expr"
elif isinstance(expression, equality_expr) and expression._op == '=':
if isinstance(expression._left, position_function):
expression = expression._right
if isinstance(expression, literal):
const = datatypes.number(expression._literal)
index = int(const)
if index == const and index >= 1:
self.select = positionfilter(index)
else:
self.select = izip()
else:
#FIXME: This will kick in the non-lazy behavior too broadly, e.g. in the case of [position = 1+1]
#See: http://trac.xml3k.org/ticket/62
self._provide_context_size = True
self._expr = expression
self.select = self._number
return
elif isinstance(expression._right, position_function):
expression = expression._left
if isinstance(expression, literal):
const = datatypes.number(expression._literal)
index = int(const)
if index == const and index >= 1:
self.select = positionfilter(index)
else:
self.select = izip()
else:
self._expr = expression
self.select = self._number
return
# Check for "position() [>,>=] Expr" or "Expr [<,<=] position()"
# FIXME - do full slice-type notation
elif isinstance(expression, relational_expr):
op = expression._op
if (isinstance(expression._left, position_function) and
isinstance(expression._right, (literal, variable_reference))
and op in ('>', '>=')):
self._start = expression._right
self._position = (op == '>')
self.select = self._slice
return
elif (isinstance(expression._left, (literal, variable_reference))
              and isinstance(expression._right, position_function)
and op in ('<', '<=')):
self._start = expression._left
self._position = (op == '<')
self.select = self._slice
return
if issubclass(expression.return_type, datatypes.number):
self.select = self._number
elif expression.return_type is not datatypes.xpathobject:
assert issubclass(expression.return_type, datatypes.xpathobject)
self.select = self._boolean
return
def _slice(self, context, nodes):
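        # Handles "position() > N" / "position() >= N" predicates: skip
        # leading nodes until the position passes `start`, then hand back
        # the rest of the iterator untouched.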
start = self._start.evaluate_as_number(context)
position = self._position
if position > start:
return nodes
position += 1
nodes = iter(nodes)
for node in nodes:
if position > start:
break
position += 1
return nodes
def _number(self, context, nodes):
expr = self._expr
position = 1
if self._provide_context_size:
nodes = list(nodes)
context.size = len(nodes)
context.current_node = context.node
for node in nodes:
context.node, context.position = node, position
if expr.evaluate_as_number(context) == position:
yield node
position += 1
return
def _boolean(self, context, nodes):
expr = self._expr
position = 1
context.current_node = context.node
for node in nodes:
context.node, context.position = node, position
if expr.evaluate_as_boolean(context):
yield node
position += 1
return
def select(self, context, nodes):
expr = self._expr
position = 1
context.current_node = context.node
for node in nodes:
context.node, context.position = node, position
result = expr.evaluate(context)
if isinstance(result, datatypes.number):
# This must be separate to prevent falling into
# the boolean check.
if result == position:
yield node
elif result:
yield node
position += 1
return
def pprint(self, indent='', stream=None):
print >> stream, indent + repr(self)
self._expr.pprint(indent + ' ', stream)
def __str__(self):
return self.__unicode__().encode('utf-8')
def __repr__(self):
ptr = id(self)
if ptr < 0: ptr += 0x100000000L
return '<%s at 0x%x: %s>' % (self.__class__.__name__, ptr, self)
def __unicode__(self):
return u'[%s]' % self._expr
@property
def children(self):
'Child of the parse tree of a predicate is its expression'
return (self._expr,)
| zepheira/amara | lib/xpath/locationpaths/predicates.py | Python | apache-2.0 | 7,392 | 0.001894 |
#raspberry pi states remote query service
#winxos 2016-6-10
import socket
import time
version="1.0"
port=9000
s=socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
s.setsockopt(socket.SOL_SOCKET,socket.SO_BROADCAST,1)
s.bind(('',port))
if __name__=='__main__':
print("query service starting...")
while True:
data,addr=s.recvfrom(1024)
print("%s:%s"%(addr,data))
datas=data.split()
        if not datas: continue
if datas[0]=="RPI":
if len(datas)==1:
s.sendto("IPR",addr)
else:
if datas[1]=="NAME":
s.sendto(socket.gethostname(),addr)
elif datas[1]=="VERSION":
s.sendto(version,addr)
| winxos/python | smartrpi/ipreport.py | Python | mit | 789 | 0.032953 |
# Copyright 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fabric.api import run
from fabric.api import settings
from cloudferry.lib.base import clients
from cloudferry.lib.utils import log
LOG = log.getLogger(__name__)
def update_user_ids_for_instance(db, instance_id, user_id):
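    # The ids are spliced directly into the SQL string below; they are
    # expected to be trusted instance/user identifiers.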
sql = ("UPDATE nova.instances "
"SET nova.instances.user_id = '{user_id}' "
"WHERE nova.instances.uuid = '{instance_id}';").format(
user_id=user_id, instance_id=instance_id)
db.execute(sql)
def get_flav_details(db, instance_id):
sql = ("SELECT vcpus,memory_mb,root_gb,ephemeral_gb "
"FROM nova.instances "
"WHERE nova.instances.uuid = '{instance_id}' "
"AND NOT nova.instances.vm_state = 'deleted';").format(
instance_id=instance_id)
res = db.execute(sql)
for row in res:
return {'vcpus': row['vcpus'],
'memory_mb': row['memory_mb'],
'root_gb': row['root_gb'],
'ephemeral_gb': row['ephemeral_gb']}
class FlavorAccess(object):
def __init__(self, flavor_id=None, tenant_id=None):
self.flavor_id = flavor_id
self.tenant_id = tenant_id
@classmethod
def from_db(cls, db_record):
return cls(flavor_id=db_record['flavorid'],
tenant_id=db_record['project_id'])
@classmethod
def from_novaclient_object(cls, nc_flavor_access):
return cls(flavor_id=nc_flavor_access.flavor_id,
tenant_id=nc_flavor_access.tenant_id)
def get_flavor_access_list_from_db(db, flavor_id):
sql = ("SELECT it.flavorid, itp.project_id "
"FROM instance_types it "
"RIGHT JOIN instance_type_projects itp "
"ON it.id = itp.instance_type_id "
"WHERE it.flavorid = :flavor_id AND it.deleted = 0;")
return db.execute(sql, flavor_id=flavor_id)
def nova_live_migrate_vm(nova_client, config, vm_id, dest_host):
LOG.info("migrating %s to %s using nova live migrate", vm_id, dest_host)
nova_client.servers.live_migrate(
server=vm_id,
host=dest_host,
block_migration=config.compute.block_migration,
disk_over_commit=config.compute.disk_overcommit
)
def cobalt_live_migrate_vm(config, vm_id, dest_host):
"""Cobalt live migration is implemented as nova extension, so it's not
reachable through standard `novaclient.v1_1.Client()` instance
(or at least I was unable to find a way in a reasonable timeframe). Thus
running it as a CLI command."""
LOG.info("migrating %s to %s using Cobalt", vm_id, dest_host)
host_string = "{user}@{host}".format(
user=config.cloud.ssh_user, host=config.cloud.ssh_host)
with settings(warn_only=True,
host_string=host_string,
key_filename=config.migrate.key_filename,
connection_attempts=config.migrate.ssh_connection_attempts):
migrate_cmd = clients.os_cli_cmd(config.cloud, "nova",
"cobalt-migrate", vm_id,
"--dest", dest_host)
LOG.debug(migrate_cmd)
run(migrate_cmd)
def incloud_live_migrate(nova_client, config, vm_id, destination_host):
migration_tool = config.migrate.incloud_live_migration
if migration_tool == 'nova':
nova_live_migrate_vm(nova_client, config, vm_id, destination_host)
elif migration_tool == 'cobalt':
cobalt_live_migrate_vm(config, vm_id, destination_host)
else:
        raise NotImplementedError(
            "You're trying to use a live migration tool "
            "which is not available: '%s'" % migration_tool)
| SVilgelm/CloudFerry | cloudferry/lib/os/compute/instances.py | Python | apache-2.0 | 4,200 | 0 |
"""Auto-generated file, do not edit by hand. KI metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_KI = PhoneMetadata(id='KI', country_code=686, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2458]\\d{4}|3\\d{4,7}|7\\d{7}', possible_number_pattern='\\d{5,8}'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:[24]\\d|3[1-9]|50|8[0-5])\\d{3}', possible_number_pattern='\\d{5}', example_number='31234'),
mobile=PhoneNumberDesc(national_number_pattern='7(?:[24]\\d|3[1-9]|8[0-5])\\d{5}', possible_number_pattern='\\d{8}', example_number='72012345'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='3001\\d{4}', possible_number_pattern='\\d{5,8}', example_number='30010000'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix_for_parsing='0')
| dongguangming/python-phonenumbers | python/phonenumbers/data/region_KI.py | Python | apache-2.0 | 1,560 | 0.008974 |
# Generated by Django 2.0.5 on 2018-06-05 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0014_auto_20170725_1302'),
]
operations = [
migrations.AlterField(
model_name='formfield',
name='field_type',
field=models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type'),
),
]
| UTNkar/moore | src/home/migrations/0015_auto_20180605_1111.py | Python | agpl-3.0 | 775 | 0.00129 |
# -*- coding: utf-8 -*-
# diceware_list -- generate wordlists for diceware
# Copyright (C) 2016-2019. Uli Fouquet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Tests for libwordlist module
from __future__ import unicode_literals
try:
from urllib.request import urlopen, URLError # python 3.x
except ImportError: # pragma: no cover
from urllib2 import urlopen, URLError # python 2.x
from io import StringIO
import codecs
import decimal
import gzip
import random
import pytest
import sys
from diceware_list import DEFAULT_CHARS
from diceware_list.libwordlist import (
alpha_dist, base10_to_n, filter_chars, base_terms_iterator,
idx_to_dicenums, min_width_iter, normalize, shuffle_max_width_items,
term_iterator, paths_iterator, is_prefix_code, get_matching_prefixes,
get_prefixes, strip_matching_prefixes, flatten_prefix_tree,
AndroidWordList, entropy_per_char_bruteforce, min_word_length,
min_length_iter
)
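# A minimal gzip archive with an empty payload (stored name "sample_emtpy"),
# used to exercise empty-wordfile edge cases below.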
EMPTY_GZ_FILE = (
b'\x1f\x8b\x08\x08\xea\xc1\xecY\x02\xffsample_emtpy'
b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def ggsource_unreachable():
"""Check, whether we can ping android.googlesource.com'.
Respective tests may be skipped if no network is available.
"""
try:
urlopen('https://android.googlesource.com/').read()
except URLError:
return True
return False
def test_base10_to_n():
# we can turn integers into n-based numbers
assert base10_to_n(0, 2) == [0]
assert base10_to_n(1, 2) == [1]
assert base10_to_n(2, 2) == [1, 0]
assert base10_to_n(3, 2) == [1, 1]
assert base10_to_n(7775, 6) == [5, 5, 5, 5, 5]
assert base10_to_n(0, 6) == [0, ]
assert base10_to_n(1, 6) == [1, ]
assert base10_to_n(6, 6) == [1, 0]
assert base10_to_n(34, 6) == [5, 4]
assert base10_to_n(35, 6) == [5, 5]
assert base10_to_n(37, 6) == [1, 0, 1]
assert base10_to_n(38, 6) == [1, 0, 2]
assert base10_to_n(255, 16) == [15, 15]
assert base10_to_n(256, 16) == [1, 0, 0]
def test_filter_chars():
# we can detect words with unwanted chars
assert list(filter_chars([], DEFAULT_CHARS)) == []
assert list(filter_chars(["a", "b"], DEFAULT_CHARS)) == ["a", "b"]
assert list(filter_chars(["ä"], DEFAULT_CHARS)) == []
assert list(filter_chars(["a", "ä"], DEFAULT_CHARS)) == ["a"]
assert list(filter_chars(["ä", "a"], DEFAULT_CHARS)) == ["a"]
assert list(filter_chars(["a", "ä", "b"], DEFAULT_CHARS)) == ["a", "b"]
assert list(filter_chars(["a", "aä", "bö"], DEFAULT_CHARS)) == ["a"]
assert list(filter_chars([u"a", u"ä"], DEFAULT_CHARS)) == [u"a"]
def test_filter_chars_all_allowed():
# if `allowed` is None, no filtering will be done
assert list(filter_chars(['ä'], None)) == ['ä']
def test_idx_to_dicenums():
# we can get dice numbers from list indexes
assert idx_to_dicenums(0, 5) == "1-1-1-1-1"
assert idx_to_dicenums(1, 5) == "1-1-1-1-2"
assert idx_to_dicenums(7774, 5) == "6-6-6-6-5"
assert idx_to_dicenums(7775, 5) == "6-6-6-6-6"
# different dice sides, different results
assert idx_to_dicenums(0, 4, 4) == "1-1-1-1"
assert idx_to_dicenums(255, 4, 4) == "4-4-4-4"
assert idx_to_dicenums(255, 4) == "2-2-1-4"
# we can change the separator string (or leave it out)
assert idx_to_dicenums(0, 3) == "1-1-1" # default
assert idx_to_dicenums(0, 3, separator="sep") == "1sep1sep1"
assert idx_to_dicenums(0, 3, separator="") == "111"
def test_idx_to_dicenums_gives_text():
# we get text from this function, i.e. unicode under py2.
result = idx_to_dicenums(0, 5)
assert isinstance(result, type('text'))
def test_min_width_iter(monkeypatch):
# we can get iterators with minimal list width.
monkeypatch.setattr(random, "shuffle", lambda x: x)
assert list(min_width_iter(["bb", "a", "ccc", "dd"], 3)) == [
"a", "bb", "dd"]
assert list(min_width_iter(["c", "a", "b"], 2)) == ["a", "b"]
assert list(min_width_iter(["c", "a", "b"], 3)) == ["a", "b", "c"]
assert list(min_width_iter(["a", "c", "bb"], 2)) == ["a", "c"]
assert list(min_width_iter(["a", "cc", "b"], 2)) == ["a", "b"]
assert list(min_width_iter(["aa", "c", "bb"], 2)) == ["c", "aa"]
def test_min_length_iter():
assert list(min_length_iter(iter([]))) == []
assert list(min_length_iter(iter([]), 1)) == []
assert list(
min_length_iter(iter(["a", "bb", "ccc"]), 2)) == ["bb", "ccc"]
def test_min_width_iter_shuffle_max_widths_values(monkeypatch):
# words with maximum width are shuffled
monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
assert list(min_width_iter(
["a", "aa", "bb"], 2, shuffle_max_width=True)) == ["a", "bb"]
assert list(min_width_iter(
["bbb", "aa", "a"], 2, shuffle_max_width=True)) == ["a", "aa"]
assert list(min_width_iter(
["aa", "a"], 2, shuffle_max_width=True)) == ["a", "aa"]
def test_min_width_iter_discards_min_len_values(monkeypatch):
# too short terms are discarded
monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
assert sorted(list(min_width_iter(
['a', 'aa', 'b', 'ddd', 'ccc'], 2,
shuffle_max_width=False, min_len=1))) == ['a', 'b']
assert sorted(list(min_width_iter(
['a', 'aa', 'b', 'ddd', 'ccc'], 2,
shuffle_max_width=False, min_len=2))) == ['aa', 'ccc']
assert sorted(list(min_width_iter(
['a', 'aa', 'b', 'ddd', 'ccc'], 2,
shuffle_max_width=True, min_len=1))) == ['a', 'b']
assert sorted(list(min_width_iter(
['a', 'aa', 'b', 'ddd', 'ccc'], 2,
shuffle_max_width=True, min_len=2))) in (['aa', 'ccc'], ['aa', 'ddd'])
def test_normalize():
# we can normalize texts.
assert normalize("ªºÀÁÂÃÄÅÆ") == "aoAAAAAEAAE"
assert normalize("ÇÈÉÊËÌÍÎÏ") == "CEEEEIIII"
assert normalize("ÒÓÔÕÖØÙÚÛÜ") == "OOOOOEOEUUUUE"
assert normalize("ĐđÐÑÝßàáâãäåæçèéêë") == "DdDNYssaaaaaeaaeceeee"
assert normalize("ìíîïñòóôõöøùúûüý") == "iiiinoooooeoeuuuuey"
assert normalize("ÿĀāĂ㥹ĆćĈĉĊċČčĎď") == "yAaAaAaCcCcCcCcDd"
assert normalize("ĒēĔĕĖėĘęĚěĜĝĞğĠġĢģ") == "EeEeEeEeEeGgGgGgGg"
assert normalize("ĤĥĨĩĪīĬĭĮįİĒēĔĕĖė") == "HhIiIiIiIiIEeEeEe"
assert normalize("ĘęĚěĜĝĞğĠġĢģĤĥ") == "EeEeGgGgGgGgHh"
assert normalize("ĨĩĪīĬĭĮįİIJijĴĵĶķ") == "IiIiIiIiIIJijJjKk"
assert normalize("ĹĺĻļĽľĿŀŃńŅņŇňŌō") == "LlLlLlL·l·NnNnNnOo"
assert normalize("ŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠš") == "OoOoRrRrRrSsSsSsSs"
assert normalize("ŢţŤťŨũŪūŬŭŮůŰűŲų") == "TtTtUuUuUuUuUuUu"
assert normalize("ŴŵŶŷŸŹźŻżŽžſ") == "WwYyYZzZzZzs"
# "þĦħĦħıĸŁłŊŋʼnŒœŦŧƀƁƂƃƄƅƆƇƈƉƊƋƌƍ""
assert normalize("mäßig") == "maessig"
def test_normalize_gives_text():
# we get unicode/text strings back
assert isinstance(normalize("far"), type("text"))
assert isinstance(normalize("fär"), type("text"))
assert isinstance(normalize(str("far")), type("text"))
def test_shuffle_max_width_items(monkeypatch):
# we can shuffle the max width items of a list
# install a pseudo-shuffler that generates predictable orders
# so that last elements are returned in reverse order.
monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
# an ordered list
result = list(shuffle_max_width_items(["a", "aa", "bb", "cc"]))
assert result == ["a", "cc", "bb", "aa"]
# an unordered list
result = list(shuffle_max_width_items(["aa", "d", "bb", "a", "cc"]))
assert result == ["d", "a", "cc", "bb", "aa"]
# a list of which the longes item should not be part of
result = list(shuffle_max_width_items(
["eeee", "bb", "ccc", "aa", "ddd"], max_width=3))
assert "eeee" not in result
# a list with one length only
result = list(shuffle_max_width_items(["aa", "bb", "cc"]))
assert result == ["cc", "bb", "aa"]
def test_shuffle_max_width_items_copes_with_files(monkeypatch, tmpdir):
# when shuffling max width entries we accept file input
monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
wlist = tmpdir.join("wlist.txt")
wlist.write(b"\n".join([b"a", b"bb", b"cc"]))
with open(str(wlist), "rb") as fd:
result = list(shuffle_max_width_items(fd))
assert result == [b"a", b"cc", b"bb"]
def test_base_terms_iterator():
# we can get an iterator over base terms
base_iter = base_terms_iterator()
base_list = list(base_iter)
assert "a2" in base_list
assert "9z" in base_list
assert "0" in base_list
assert "zzzz" in base_list
def test_base_terms_iterator_option_use_kit():
# we can tell whether to use dicewarekit, diceware416 lists.
assert "yyyy" not in list(base_terms_iterator(use_kit=False))
assert "a2" in list(base_terms_iterator(use_kit=False))
assert "yyyy" in list(base_terms_iterator(use_kit=True))
assert "a2" in list(base_terms_iterator(use_kit=True))
class TestTermIterator(object):
def test_term_iterator(self, tmpdir):
# the term_iterator really returns iterators
wlist = tmpdir.join("wlist.txt")
wlist.write(b"\n".join([b"a", b"b", b"c"]))
with open(str(wlist), "rb") as fd:
result = list(term_iterator([fd, ]))
assert result == [b"a", b"b", b"c"]
def test_term_iterator_multiple_files(self, tmpdir):
# we can feed multiple input files to term_iterator
wlist1 = tmpdir.join("wlist1.txt")
wlist2 = tmpdir.join("wlist2.txt")
wlist1.write(b"\n".join([b"a1", b"b1", b"c1"]))
wlist2.write(b"\n".join([b"a2", b"b2", b"c2"]))
with open(str(wlist1), "rb") as fd1:
with open(str(wlist2), "rb") as fd2:
result = list(term_iterator([fd1, fd2]))
assert result == [b"a1", b"b1", b"c1", b"a2", b"b2", b"c2"]
def test_term_iterator_handles_umlauts(self, tmpdir):
# we can feed term iterators with umlauts
wlist = tmpdir.join("wlist.txt")
wlist.write_text(u"ä\nö\n", "utf-8")
with codecs.open(str(wlist), "r", "utf-8") as fd:
result = list(term_iterator([fd, ]))
assert result == ["ä", "ö"]
def test_term_iterator_ignores_empty_lines(self, tmpdir):
# empty lines will be ignored
wlist = tmpdir.join("wlist.txt")
wlist.write("foo\n\nbar\n\n")
with open(str(wlist), "r") as fd:
result = list(term_iterator([fd, ]))
assert result == ["foo", "bar"]
class TestPathsIterator(object):
def test_paths_iterator(self, tmpdir):
# the paths iterator provides terms from paths
wlist = tmpdir.join("wlist.txt")
wlist.write(b"\n".join([b"a", b"b", b"c"]))
result = list(paths_iterator([str(wlist), ]))
assert result == ["a", "b", "c"]
def test_multiple_paths(self, tmpdir):
# the paths iterator can cope with several files
wlist1 = tmpdir.join("wlist1.txt")
wlist2 = tmpdir.join("wlits2.txt")
wlist1.write(b"a\nb")
wlist2.write(b"c\nd")
result = list(paths_iterator([str(wlist1), str(wlist2)]))
assert result == ["a", "b", "c", "d"]
def test_read_stdin(self, tmpdir, argv_handler):
# we can tell to read from stdin (dash as filename)
sys.stdin = StringIO('term1\nterm2\näöü\n')
result = list(paths_iterator('-'))
assert result == ['term1', 'term2', 'äöü']
class TestIsPrefixCode(object):
def test_is_prefix_code(self):
# we can really tell whether some list is a prefix code.
assert is_prefix_code(["aa", "ab", "ac"]) is True
assert is_prefix_code([]) is True
assert is_prefix_code(["a", "ab", "c"]) is False
assert is_prefix_code(["a", "c", "ab"]) is False
assert is_prefix_code(["aa", "b", "a"]) is False # order
assert is_prefix_code(["a", "a"]) is False # identity
def test_is_prefix_code_sorted_input(self):
# we do not sort already sorted input
assert is_prefix_code(["a", "aa", "b"], is_sorted=True) is False
assert is_prefix_code(["b", "c", "d"], is_sorted=True) is True
assert is_prefix_code(["b", "a"], is_sorted=False) is True
# we do not define behavior for unsorted lists, if `is_sorted` is True
def test_is_prefix_code_accepts_iter(self):
# is_prefix_code really copes with iterators (not only iterables)
assert is_prefix_code(iter(["a", "b", "c"])) is True
assert is_prefix_code(iter(["aa", "a"])) is False
def test_is_prefix_code_non_destructive(self):
# is_prefix_code is a non-destructive function.
iterable = ["d", "b", "c"]
is_prefix_code(iterable, is_sorted=False)
assert iterable == ["d", "b", "c"]
iterable = ["a", "b", "c"]
is_prefix_code(iterable, is_sorted=True)
assert iterable == ["a", "b", "c"]
def test_is_prefix_code_non_ascii(self):
# is_prefix_code copes with umlauts etc.
assert is_prefix_code(["z", "ä", "y", "äh"]) is False
assert is_prefix_code(["a", "äh"]) is True
class TestGetMatchingPrefixes(object):
def test_get_matching_prefixes(self):
assert list(get_matching_prefixes([])) == []
assert list(get_matching_prefixes(["a", "aa", "ab", "b", "x"])) == [
("a", "aa"), ("a", "ab")]
assert list(get_matching_prefixes(["a", "aa"])) == [("a", "aa")]
assert list(get_matching_prefixes(["b", "aa", "a"])) == [("a", "aa")]
def test_get_matching_prefixes_sorted_input(self):
# we can presort input lists
assert list(
get_matching_prefixes(["a", "aa", "ab"], is_sorted=True)) == [
("a", "aa"), ("a", "ab")]
assert list(get_matching_prefixes(["aa", "a"], is_sorted=False)) == [
("a", "aa")]
assert list(
get_matching_prefixes(["a", "aa", "aaa"], is_sorted=True)) == [
("a", "aa"), ("a", "aaa"), ("aa", "aaa")]
assert list(
get_matching_prefixes(["a", "aa", "aaa", "aaaa"], is_sorted=True)
) == [
("a", "aa"), ("a", "aaa"), ("a", "aaaa"), ("aa", "aaa"),
("aa", "aaaa"), ("aaa", "aaaa")]
def test_get_matching_prefixes_non_destructive(self):
# the given input will not be changed.
iterable = ["a", "aa", "c"]
list(get_matching_prefixes(iterable, is_sorted=False))
assert iterable == ["a", "aa", "c"]
list(get_matching_prefixes(iterable, is_sorted=True))
assert iterable == ["a", "aa", "c"]
def test_get_matching_prefixes_non_ascii(self):
# get_matching_prefixes copes with umlauts etc.
get_matching_prefixes(["a", "ä", "ö"], is_sorted=False) == []
get_matching_prefixes(["a", "ä", "äh"], is_sorted=False) == [
("ä", "äh")]
class TestStripMatchingPrefixes(object):
def test_strip_matching_prefixes(self):
# we can get prefix code from any input
assert list(strip_matching_prefixes(
["a", "aa", "b"], is_sorted=False, prefer_short=True)
) == ["a", "b"]
assert list(strip_matching_prefixes(
["aa", "a", "b"], is_sorted=False, prefer_short=True)
) == ["a", "b"]
assert list(strip_matching_prefixes(
["a", "aa"], is_sorted=False, prefer_short=True)) == ["a"]
assert list(strip_matching_prefixes(
["aa", "a"], is_sorted=False, prefer_short=True)) == ["a"]
def test_strip_matching_prefixes_empty(self):
# we cope with empty iterables
assert list(strip_matching_prefixes([], is_sorted=True)) == []
def test_strip_matching_prefixes_non_destructive(self):
# given input will not be modified
in_list = ["b", "a", "aa"]
result = list(strip_matching_prefixes(in_list, is_sorted=False))
assert in_list == ["b", "a", "aa"] # unchanged
assert result == ["a", "b"]
def test_strip_matching_prefixes_prefer_short(self):
# we can tell to prefer shorter prefixes
in_list = ["a", "aa", "b"]
result1 = list(strip_matching_prefixes(
in_list, is_sorted=False, prefer_short=True))
assert result1 == ["a", "b"]
result2 = list(strip_matching_prefixes(
in_list, is_sorted=False, prefer_short=False))
assert result2 == ["aa", "b"]
result3 = list(strip_matching_prefixes(
["a", "aa", "ab", "c"], is_sorted=True, prefer_short=True))
assert result3 == ["a", "c"]
def test_strip_matching_prefixes_third_nesting_level(self):
# we cope with highly nested prefixes
result = list(strip_matching_prefixes(
["a", "aa", "aaa"], prefer_short=False))
assert result == ["aaa"]
result = list(strip_matching_prefixes(
["a", "aa", "aaa"], prefer_short=True))
assert result == ["a"]
def test_get_prefixes():
# we can create tree-like nested lists of prefixed lists of strings
assert get_prefixes([]) == []
assert get_prefixes(["a"]) == [["a"]]
assert get_prefixes(["a", "b"]) == [["a"], ["b"]]
assert get_prefixes(["a", "ab"]) == [["a", ["ab"]]]
assert get_prefixes(["a", "aa", "b"]) == [["a", ["aa"]], ["b"]]
assert get_prefixes(["a", "b", "ba"]) == [["a"], ["b", ["ba"]]]
assert get_prefixes(["a", "aa", "aaa", "ab"]) == [
['a', ['aa', ['aaa']], ['ab']]]
assert get_prefixes(["a", "aa", "aaa", "ab", "ac"]) == [
['a', ['aa', ['aaa']], ['ab'], ['ac']]]
def test_flatten_prefix_tree():
# we can flatten prefix trees
assert flatten_prefix_tree([["a"], ["b"]]) == ["a", "b"]
assert flatten_prefix_tree([["a", ["ab"]]]) == ["a"]
assert flatten_prefix_tree(
[["a", ["ab"]]], prefer_short=False) == ["ab"]
assert flatten_prefix_tree(
[['a', ['aa', ['aaa']], ['ab'], ['ac']]], prefer_short=False) == [
'aaa', 'ab', 'ac']
def test_alpha_dist():
# we get proper distributions of alphabets
assert alpha_dist([]) == dict()
assert alpha_dist(['a', 'b']) == dict(a=1, b=1)
assert alpha_dist(['ab', 'b']) == dict(a=1, b=2)
def test_entropy_per_char_bruteforce():
# we can get the entropy per char for plain bruteforce
decimal.getcontext().prec = 3
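    # entropy here is the Shannon entropy of the combined character
    # distribution: ['aaa', 'b'] has p(a)=3/4, p(b)=1/4 -> ~0.811 bits.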
assert entropy_per_char_bruteforce(['ab', ]) == decimal.Decimal(1.0)
assert entropy_per_char_bruteforce(['a', 'b']) == decimal.Decimal(1.0)
assert entropy_per_char_bruteforce(
['aaa', 'b']) == decimal.Decimal('0.811')
assert entropy_per_char_bruteforce(
['ab', 'bc', 'cd', 'da']) == decimal.Decimal('2.0')
assert entropy_per_char_bruteforce(
['art', 'air']) == decimal.Decimal('1.92')
def test_min_word_length():
# we can compute the minimum length of a word required for a wordlist
assert min_word_length([]) == 1
assert min_word_length(['a', 'aa', 'aaa']) == 1
assert min_word_length(['a', 'b']) == 1
assert min_word_length(['abcd'] * 8192) == 7
assert min_word_length(['abab'] * 16) == 4
# we also accept iterators as input
assert min_word_length(iter(['a', 'b'])) == 1
def test_min_word_length_desired_len():
# the desired list length can differ from the current list length
# char entropy = 2.0, 16 = 2^4
assert min_word_length(['abcd'] * 1024, 16) == 2
# char entropy = 2.0, 32 = 2^5
assert min_word_length(['abcd'] * 8192, 32) == 3
class TestAndroidWordlist(object):
def test_attributes(self):
# android wordlists objects provide some attributes we expect
wl = AndroidWordList()
assert hasattr(wl, "base_url")
assert hasattr(wl, "path")
assert hasattr(wl, "gz_data")
assert hasattr(wl, "lang")
def test_init_path(self, local_android_dir):
# we can pass in a path to an unencoded file (no base64).
path = local_android_dir / "de_wordlist.combined.gz"
wl = AndroidWordList('file:////%s' % path)
assert wl.path == 'file:////%s' % path
def test_download(self, local_android_download_b64):
# we can download wordfiles that are base64 encoded.
wl = AndroidWordList(lang="de")
dl_data = wl.download()
assert wl.decompress(dl_data) == (
b'dictionary=main:de,locale=de,description=Deutsch,'
b'date=1414726263,version=54,REQUIRES_GERMAN_UMLAUT_PROCESSING=1'
b'\n word=der,f=216,flags=,originalFreq=216\n word=und,f=213,'
b'flags=,originalFreq=213\n')
def test_download_de(self, local_android_download_b64):
# we can download a german wordlist.
wl = AndroidWordList(lang="de")
wl.download()
assert list(wl.get_words()) == ['der', 'und']
def test_download_en(self, local_android_download_b64):
# we can download an english wordlist.
wl = AndroidWordList(lang="en")
wl.download()
assert list(wl.get_words()) == [
'the', 'to', 'of', 'and', 'hardcore', 'import']
def test_decompress(self, local_android_dir):
# we can decompress downloaded stuff.
wl = AndroidWordList()
path = local_android_dir / "de_wordlist.combined.gz"
data = path.read_binary()
assert wl.decompress(data).startswith(b"dictionary=main:de,locale=de")
def test_save(self, local_android_download_b64, tmpdir):
# we can save downloaded wordlists.
wl = AndroidWordList(lang="en")
wl.download()
path = tmpdir / 'mywordlist.gz'
wl.save(str(path))
assert path.isfile()
assert path.size() == 235
def test_save_no_data(self, local_android_download_b64, tmpdir):
# we do not complain when no data was downloaded already
wl = AndroidWordList()
path = tmpdir / 'mywordlist.gz'
wl.save(str(path))
assert not path.isfile()
def test_get_basename(self):
# we can get the basename of the file to download
wl = AndroidWordList()
assert wl.get_basename() == "en_wordlist.combined.gz"
def test_get_basename_lang(self, local_android_download_b64):
# when getting basename, we can select the language
wl = AndroidWordList()
assert wl.get_basename(lang="de") == "de_wordlist.combined.gz"
def test_get_basename_path(self, local_android_dir):
# we get a correct basename also if path is set manually
wl = AndroidWordList()
path1 = local_android_dir / "de_wordlist.combined.gz"
path2 = local_android_dir / "my_wordlist.gzip"
path1.copy(path2)
wl = AndroidWordList('file:////%s' % path2)
assert wl.get_basename(lang="foo") == "my_wordlist.gzip"
def test_metadata(self, local_android_dir):
# we can extract metadata from android wordfiles
path = local_android_dir / "de_wordlist.combined.gz"
wl = AndroidWordList()
wl.gz_data = path.read_binary()
meta = wl.get_meta_data()
assert meta == {
'dictionary': 'main:de',
'locale': 'de',
'description': 'Deutsch',
'date': '1414726263',
'version': '54',
'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'
}
def test_metadata_none(self):
# we cope with situation, when no wordfile was set before.
wl = AndroidWordList()
assert wl.get_meta_data() == {}
def test_metadata_empty(self):
# we cope with situation, where the wordfile is empty
wl = AndroidWordList()
wl.gz_data = EMPTY_GZ_FILE
assert wl.get_meta_data() == {}
def test_parse_lines(self, local_android_dir):
# we can raw parse simple lists
path = local_android_dir / "de_wordlist.combined.gz"
wl = AndroidWordList('file:////%s' % path)
lines = wl.parse_lines()
assert [x for x in lines] == [
{
'dictionary': 'main:de',
'locale': 'de',
'description': 'Deutsch',
'date': '1414726263',
'version': '54',
'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'},
{
'word': 'der', 'f': '216', 'flags': '',
'originalFreq': '216'},
{
'word': 'und', 'f': '213', 'flags': '',
'originalFreq': '213'},
]
def test_parse_lines_ignores_empty_lines(self, tmpdir):
# empty lines in wordlist files are ignored by the parser
path = tmpdir / 'sample_empty_lines.gz'
with gzip.open(str(path), 'wb') as f:
f.write(b'\n\n\n')
wl = AndroidWordList('file:////%s' % path)
lines = wl.parse_lines()
assert list(lines) == []
def test_get_words(self, dictfile_android_short_de):
# we can get plain wordlists from Android lists
wl = AndroidWordList("file:////%s" % str(dictfile_android_short_de))
assert [x for x in wl.get_words()] == ["der", "und"]
def test_get_words_offensive(self, dictfile_android_short_en):
# we can filter out offensive words
wl = AndroidWordList("file:////%s" % str(dictfile_android_short_en))
list1 = list(wl.get_words(offensive=False))
assert "hardcore" not in list1
assert "the" in list1
list2 = list(wl.get_words(offensive=True))
assert "hardcore" in list2
assert "the" not in list2
list3 = list(wl.get_words(offensive=None))
assert "hardcore" in list3
assert "the" in list3
@pytest.mark.skipif(ggsource_unreachable(), reason="no network available")
def test_get_valid_lang_codes(self):
# we can get a list of available language codes.
wl = AndroidWordList()
result = wl.get_valid_lang_codes()
assert result[0:3] == ['cs', 'da', 'de']
def test_get_valid_lang_codes_local(self, local_index):
# get valid lang codes from local copy of index list.
wl = AndroidWordList()
result = wl.get_valid_lang_codes()
assert result == [
'cs', 'da', 'de', 'el', 'en', 'en_GB', 'en_US', 'es',
'fi', 'fr', 'hr', 'it', 'iw', 'lt', 'lv', 'nb', 'nl', 'pl',
'pt_BR', 'pt_PT', 'ro', 'ru', 'sl', 'sr', 'sv', 'tr']
| ulif/wordlist-gen | tests/test_libwordlist.py | Python | gpl-3.0 | 27,107 | 0 |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from dace.util import getSite
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.view import BasicView
from lac.content.processes.services_processes.behaviors import (
SeeImportService)
from lac.content.service import ImportService
from lac.utilities.utils import (
ObjectRemovedException, generate_navbars)
@view_config(
name='',
context=ImportService,
renderer='pontus:templates/views_templates/grid.pt',
)
class SeeImportServiceView(BasicView):
title = ''
name = 'seeimportservice'
behaviors = [SeeImportService]
template = 'lac:views/services_processes/import_service/templates/see_import_service.pt'
viewid = 'seeimportservice'
def update(self):
self.execute(None)
result = {}
try:
navbars = generate_navbars(self, self.context, self.request)
except ObjectRemovedException:
return HTTPFound(self.request.resource_url(getSite(), ''))
values = {'object': self.context,
'navbar_body': navbars['navbar_body']}
body = self.content(args=values, template=self.template)['body']
item = self.adapt_item(body, self.viewid)
item['messages'] = navbars['messages']
item['isactive'] = navbars['isactive']
result.update(navbars['resources'])
result['coordinates'] = {self.coordinates: [item]}
return result
DEFAULTMAPPING_ACTIONS_VIEWS.update({SeeImportService: SeeImportServiceView})
| ecreall/lagendacommun | lac/views/services_processes/import_service/see_service.py | Python | agpl-3.0 | 1,732 | 0.001732 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import subprocess
import sys
import tarfile
import tempfile
import test_server
import unittest
import zipfile
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
TOOLS_DIR = os.path.join(os.path.dirname(BUILD_TOOLS_DIR), 'tools')
sys.path.extend([BUILD_TOOLS_DIR, TOOLS_DIR])
import build_utils
import getos
import manifest_util
import oshelpers
MANIFEST_BASENAME = 'naclsdk_manifest2.json'
# Attribute '' defined outside __init__
# pylint: disable=W0201
class SdkToolsTestCase(unittest.TestCase):
def tearDown(self):
if self.server:
self.server.Shutdown()
oshelpers.Remove(['-rf', self.basedir])
def SetupDefault(self):
self.SetupWithBaseDirPrefix('sdktools')
def SetupWithBaseDirPrefix(self, basedir_prefix, tmpdir=None):
self.basedir = tempfile.mkdtemp(prefix=basedir_prefix, dir=tmpdir)
# We have to make sure that we build our updaters with a version that is at
# least as large as the version in the sdk_tools bundle. If not, update
# tests may fail because the "current" version (according to the sdk_cache)
# is greater than the version we are attempting to update to.
self.current_revision = self._GetSdkToolsBundleRevision()
self._BuildUpdater(self.basedir, self.current_revision)
self._LoadCacheManifest()
self.server = test_server.LocalHTTPServer(self.basedir)
def _GetSdkToolsBundleRevision(self):
"""Get the sdk_tools bundle revision.
We get this from the checked-in path; this is the same file that
build_updater uses to specify the current revision of sdk_tools."""
manifest_filename = os.path.join(BUILD_TOOLS_DIR, 'json',
'naclsdk_manifest0.json')
manifest = manifest_util.SDKManifest()
manifest.LoadDataFromString(open(manifest_filename, 'r').read())
return manifest.GetBundle('sdk_tools').revision
def _LoadCacheManifest(self):
"""Read the manifest from nacl_sdk/sdk_cache.
This manifest should only contain the sdk_tools bundle.
"""
manifest_filename = os.path.join(self.basedir, 'nacl_sdk', 'sdk_cache',
MANIFEST_BASENAME)
self.manifest = manifest_util.SDKManifest()
self.manifest.LoadDataFromString(open(manifest_filename).read())
self.sdk_tools_bundle = self.manifest.GetBundle('sdk_tools')
def _WriteCacheManifest(self, manifest):
"""Write the manifest at nacl_sdk/sdk_cache.
This is useful for faking having installed a bundle.
"""
manifest_filename = os.path.join(self.basedir, 'nacl_sdk', 'sdk_cache',
MANIFEST_BASENAME)
with open(manifest_filename, 'w') as stream:
stream.write(manifest.GetDataAsString())
def _WriteManifest(self):
with open(os.path.join(self.basedir, MANIFEST_BASENAME), 'w') as stream:
stream.write(self.manifest.GetDataAsString())
def _BuildUpdater(self, out_dir, revision=None):
build_updater_py = os.path.join(BUILD_TOOLS_DIR, 'build_updater.py')
cmd = [sys.executable, build_updater_py, '-o', out_dir]
if revision:
cmd.extend(['-r', str(revision)])
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
_, _ = process.communicate()
self.assertEqual(process.returncode, 0)
def _BuildUpdaterArchive(self, rel_path, revision):
"""Build a new sdk_tools bundle.
Args:
rel_path: The relative path to build the updater.
revision: The revision number to give to this bundle.
Returns:
A manifest_util.Archive() that points to this new bundle on the local
server.
"""
self._BuildUpdater(os.path.join(self.basedir, rel_path), revision)
new_sdk_tools_tgz = os.path.join(self.basedir, rel_path, 'sdk_tools.tgz')
with open(new_sdk_tools_tgz, 'rb') as sdk_tools_stream:
archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(
sdk_tools_stream)
archive = manifest_util.Archive('all')
archive.url = self.server.GetURL('%s/sdk_tools.tgz' % (rel_path,))
archive.checksum = archive_sha1
archive.size = archive_size
return archive
def _Run(self, args):
naclsdk_shell_script = os.path.join(self.basedir, 'nacl_sdk', 'naclsdk')
if getos.GetPlatform() == 'win':
naclsdk_shell_script += '.bat'
cmd = [naclsdk_shell_script]
cmd.extend(args)
cmd.extend(['-U', self.server.GetURL(MANIFEST_BASENAME)])
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, _ = process.communicate()
try:
self.assertEqual(process.returncode, 0)
except Exception:
print stdout
raise
return stdout
def _RunAndExtractRevision(self):
stdout = self._Run(['version'])
    match = re.search(r'version r(\d+)', stdout)
self.assertTrue(match is not None)
return int(match.group(1))
class TestSdkTools(SdkToolsTestCase):
def testPathHasSpaces(self):
"""Test that running naclsdk from a path with spaces works."""
self.SetupWithBaseDirPrefix('sdk tools')
self._WriteManifest()
self._RunAndExtractRevision()
class TestBuildUpdater(SdkToolsTestCase):
def setUp(self):
self.SetupDefault()
def testUpdaterPathsAreSane(self):
"""Test that the paths to files in nacl_sdk.zip and sdktools.tgz are
relative to the output directory."""
nacl_sdk_zip_path = os.path.join(self.basedir, 'nacl_sdk.zip')
zip_stream = zipfile.ZipFile(nacl_sdk_zip_path, 'r')
try:
self.assertTrue(all(name.startswith('nacl_sdk')
for name in zip_stream.namelist()))
finally:
zip_stream.close()
    # sdk_tools.tgz has no built-in directories to look for. Instead, just look
    # for some files that must be there.
sdktools_tgz_path = os.path.join(self.basedir, 'sdk_tools.tgz')
tar_stream = tarfile.open(sdktools_tgz_path, 'r:gz')
try:
names = [m.name for m in tar_stream.getmembers()]
self.assertTrue('LICENSE' in names)
self.assertTrue('sdk_update.py' in names)
finally:
tar_stream.close()
class TestAutoUpdateSdkTools(SdkToolsTestCase):
def setUp(self):
self.SetupDefault()
def testNoUpdate(self):
"""Test that running naclsdk with current revision does nothing."""
self._WriteManifest()
revision = self._RunAndExtractRevision()
self.assertEqual(revision, self.current_revision)
def testUpdate(self):
"""Test that running naclsdk with a new revision will auto-update."""
new_revision = self.current_revision + 1
archive = self._BuildUpdaterArchive('new', new_revision)
self.sdk_tools_bundle.RemoveAllArchivesForHostOS(archive.host_os)
self.sdk_tools_bundle.AddArchive(archive)
self.sdk_tools_bundle.revision = new_revision
self._WriteManifest()
revision = self._RunAndExtractRevision()
self.assertEqual(revision, new_revision)
def testManualUpdateIsIgnored(self):
"""Test that attempting to manually update sdk_tools is ignored.
If the sdk_tools bundle was updated normally (i.e. the old way), it would
leave a sdk_tools_update folder that would then be copied over on a
subsequent run. This test ensures that there is no folder made.
"""
new_revision = self.current_revision + 1
archive = self._BuildUpdaterArchive('new', new_revision)
self.sdk_tools_bundle.RemoveAllArchivesForHostOS(archive.host_os)
self.sdk_tools_bundle.AddArchive(archive)
self.sdk_tools_bundle.revision = new_revision
self._WriteManifest()
sdk_tools_update_dir = os.path.join(self.basedir, 'nacl_sdk',
'sdk_tools_update')
self.assertFalse(os.path.exists(sdk_tools_update_dir))
stdout = self._Run(['update', 'sdk_tools'])
self.assertTrue(stdout.find('Ignoring manual update request.') != -1)
self.assertFalse(os.path.exists(sdk_tools_update_dir))
def testHelpCommand(self):
"""Running naclsdk with -h should work.
This is a regression test for a bug where the auto-updater would remove the
sdk_tools directory when running "naclsdk -h".
"""
self._WriteManifest()
self._Run(['-h'])
class TestAutoUpdateSdkToolsDifferentFilesystem(TestAutoUpdateSdkTools):
def setUp(self):
# On Linux (on my machine at least), /tmp is a different filesystem than
# the current directory. os.rename fails when the source and destination
# are on different filesystems. Test that case here.
self.SetupWithBaseDirPrefix('sdktools', tmpdir='.')
if __name__ == '__main__':
sys.exit(unittest.main())
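
# Hedged run sketch: this suite is normally driven by the SDK build bots, but
# it can be invoked directly given the NaCl SDK checkout layout it expects
# (the class name below is one of the suites defined above):
#
#   python sdktools_test.py TestSdkTools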
| zcbenz/cefode-chromium | native_client_sdk/src/build_tools/tests/sdktools_test.py | Python | bsd-3-clause | 8,659 | 0.005197 |
import requests
"""The available regions"""
REGIONS = {
'US': 'https://us.api.battle.net/wow',
'EU': 'https://eu.api.battle.net/wow',
'KR': 'https://kr.api.battle.net/wow',
'TW': 'https://tw.api.battle.net/wow'
}
"""The available fields for use to get more detailed information for a specific character"""
CHARACTER_FIELDS = [
"achievements",
"appearance",
"feed",
"guild",
"hunterPets",
"items",
"mounts",
"pets",
"petSlots",
"progression",
"pvp",
"quests",
"reputation",
"statistics",
"stats",
"talents",
"titles",
"audit"
]
"""The available fields for use to get more detailed information for a specific guild"""
GUILD_FIELDS = [
"achievements",
"members",
"news",
"challenge"
]
"""The available PvP brackets"""
PVP_BRACKETS = [
'2v2',
'3v3',
'5v5',
'rbg'
]
class APIError(Exception):
"""Represents an Error accessing the community api for WoW"""
def __init__(self, status_code, message):
self.status_code = status_code
self.message = message
def __str__(self):
return "{0}: {1}".format(self.status_code, self.message)
class API:
def __init__(self, apiKey, region='US', locale='en_US'):
self.apiKey = apiKey
self.locale = locale
if region not in REGIONS:
raise ValueError("Unknown region: {0}".format(region))
self.region = region
self.baseUrl = REGIONS[self.region]
def get_resource(self, resourceURL, parameters=None):
url = self.baseUrl + resourceURL
payload = {'locale': self.locale, 'apikey': self.apiKey}
if parameters is not None:
# Merge parameters, overriding those that come from the call
for key in parameters:
payload[key] = parameters[key]
r = requests.get(url, params=payload)
# Raise an api error for all non-200 status codes
if r.status_code != 200:
raise APIError(r.status_code, r.text)
return r.json()
def achievement(self, id):
"""Returns a specific achievement for the given id"""
resourceUrl = "/achievement/{0}".format(id)
return self.get_resource(resourceUrl)
def auction_status(self, realm):
"""Returns a link to the latest auction house data dump for the given realm"""
resourceUrl = "/auction/data/{0}".format(realm)
return self.get_resource(resourceUrl)
def battlepet_ability(self, abilityId):
"""Returns data about a specific battle pet ability for the given id"""
resourceUrl = "/battlePet/ability/{0}".format(abilityId)
return self.get_resource(resourceUrl)
    def battlepet_species(self, speciesId):
        """Returns data about an individual pet species for the given species id"""
resourceUrl = "/battlePet/species/{0}".format(speciesId)
return self.get_resource(resourceUrl)
def battlepet_stats(self, speciesId, level=1, breedId=3, qualityId=1):
"""Returns detailed information about a given species of pet"""
if level > 25 or level < 1:
raise ValueError("BattlePet levels must be in the range from 0 to 25")
if qualityId > 6 or qualityId < 0:
raise ValueError("BattlePet quality level must be in the range from 0 to 6")
params = {
"level": level,
"breedId": breedId,
"qualityId": qualityId
}
resourceUrl = "/battlePet/stats/{0}".format(speciesId)
return self.get_resource(resourceUrl, parameters = params)
def challenge_realm_leaderboard(self, realm):
"""Returns data about the challenge realm leaderboard for a given realm"""
resourceUrl = "/challenge/{0}".format(realm)
return self.get_resource(resourceUrl)
def challenge_region_leaderboard(self):
"""Returns data about the challenge realm leaderboard for the region you choose to use"""
resourceUrl = "/challenge/region"
return self.get_resource(resourceUrl)
def character(self, realm, characterName, fields=None):
"""Returns character information based on the given realm/characterName """
params = {}
if fields is not None:
for field in fields:
if field not in CHARACTER_FIELDS:
raise ValueError("{0} is not a valid field for a character.".format(field))
params = {
'fields': ','.join(fields)
}
resourceUrl = "/character/{0}/{1}".format(realm, characterName)
return self.get_resource(resourceUrl, params)
def guild(self, realm, guildName, fields=None):
"""Returns guild information based on the given realm/guildName"""
params = {}
if fields is not None:
for field in fields:
if field not in GUILD_FIELDS:
raise ValueError("{0} is not a valid field for a guild.".format(field))
params = {
'fields': ','.join(fields)
}
resourceUrl = "/guild/{0}/{1}".format(realm, guildName)
return self.get_resource(resourceUrl, params)
def item(self, itemId):
"""Returns detailed item information for the given itemId"""
resourceUrl = "/item/{0}".format(itemId)
return self.get_resource(resourceUrl)
def item_set(self, setId):
"""Returns detailed item information for the given item setId"""
resourceUrl = "/item/set/{0}".format(setId)
return self.get_resource(resourceUrl)
def pvp_leaderboard(self, bracket):
"""Returns PvP leaderboard information for the given bracket"""
if bracket not in PVP_BRACKETS:
raise ValueError("Unknown bracket type. Valid values are 2v2, 3v3, 5v5 and rbg.")
resourceUrl = "/leaderboard/{0}".format(bracket)
return self.get_resource(resourceUrl)
def quest(self, questId):
"""Returns metadata for the given questId"""
resourceUrl = "/quest/{0}".format(questId)
return self.get_resource(resourceUrl)
def realm_status(self):
"""Returns realm status information for all realms"""
resourceUrl = "/realm/status"
return self.get_resource(resourceUrl)
def recipe(self, recipeId):
"""Returns basic recipe information for the given recipeId"""
resourceUrl = "/recipe/{0}".format(recipeId)
return self.get_resource(resourceUrl)
def spell(self, spellId):
"""Returns some information for the given spellId"""
resourceUrl = "/spell/{0}".format(spellId)
return self.get_resource(resourceUrl)
def battlegroups(self):
"""Returns a list of battlegroups for the region"""
resourceUrl = "/data/battlegroups/"
return self.get_resource(resourceUrl)
def character_races(self):
"""Returns a list of each race and their associated faction, name, uniqueId, and skin"""
resourceUrl = "/data/character/races"
return self.get_resource(resourceUrl)
def character_classes(self):
"""Returns a list of character classes"""
resourceUrl = "/data/character/classes"
return self.get_resource(resourceUrl)
def character_achievements(self):
"""Returns a list of all achievements that characters can earn"""
resourceUrl = "/data/character/achievements"
return self.get_resource(resourceUrl)
def guild_rewards(self):
"""Returns a list of all guild rewards"""
resourceUrl = "/data/guild/rewards"
return self.get_resource(resourceUrl)
def guild_perks(self):
"""Returns a list of all guild perks"""
resourceUrl = "/data/guild/perks"
return self.get_resource(resourceUrl)
def guild_achievements(self):
"""Returns a list of all achievements that a guild can earn"""
resourceUrl = "/data/guild/achievements"
return self.get_resource(resourceUrl)
def item_classes(self):
"""Returns a list of item classes"""
resourceUrl = "/data/item/classes"
return self.get_resource(resourceUrl)
def talents(self):
"""Returns a list of talents, specs, and glyphs for each class"""
resourceUrl = "/data/talents"
return self.get_resource(resourceUrl)
def pet_types(self):
"""Returns different battle pet types, including what they are strong and weak against"""
resourceUrl = "/data/pet/types"
return self.get_resource(resourceUrl)
def mount(self):
"""Returns a list of all supported mounts"""
resourceUrl = "/mount/"
return self.get_resource(resourceUrl)
def zone(self, id=None):
"""Returns a specific zone (zone being a dungeon or raid in this context) or a list of all supported zones"""
if id is not None:
resourceUrl = "/zone/{0}".format(id)
return self.get_resource(resourceUrl)
resourceUrl = "/zone/"
return self.get_resource(resourceUrl)
def boss(self, id=None):
"""Returns a specific boss (boss being a boss encounter, which may include more than one NPC) or a list of all supported bosses"""
if id is not None:
resourceUrl = "/boss/{0}".format(id)
return self.get_resource(resourceUrl)
resourceUrl = "/boss/"
return self.get_resource(resourceUrl)
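
# Hedged usage sketch: 'REPLACE_WITH_API_KEY' is a placeholder Battle.net API
# key and the realm/character names are illustrative, so these calls raise
# APIError until real credentials are supplied.
if __name__ == '__main__':
    api = API('REPLACE_WITH_API_KEY', region='US', locale='en_US')
    try:
        # realm_status() takes no extra arguments, making it a simple smoke test.
        print(api.realm_status())
        # Fetch a character with two of the optional CHARACTER_FIELDS.
        print(api.character('proudmoore', 'thrall', fields=['items', 'stats']))
    except APIError as error:
        print(error)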
| GoblinLedger/wowapi | wowapi/__init__.py | Python | mit | 9,460 | 0.00222 |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
DOMAIN = "neutron"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
def get_available_languages():
return oslo_i18n.get_available_languages(DOMAIN)
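
# Hedged usage sketch: other neutron modules consume these markers roughly as
# follows (the message strings and count are illustrative, not from this file):
#
#   from neutron._i18n import _, _P
#
#   msg = _("Network not found")
#   ports_msg = _P("%d port", "%d ports", num_ports) % num_ports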
| noironetworks/neutron | neutron/_i18n.py | Python | apache-2.0 | 1,049 | 0 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibDocFile Regression Test Suite."""
__revision__ = "$Id$"
import unittest
from invenio.testutils import make_test_suite, run_test_suite
from invenio.bibdocfile import BibRecDocs, check_bibdoc_authorization
from invenio.access_control_config import CFG_WEBACCESS_WARNING_MSGS
from invenio.config import \
CFG_SITE_URL, \
CFG_PREFIX, \
CFG_WEBSUBMIT_FILEDIR
class BibRecDocsTest(unittest.TestCase):
"""regression tests about BibRecDocs"""
def test_BibRecDocs(self):
"""bibdocfile - BibRecDocs functions"""
my_bibrecdoc = BibRecDocs(2)
#add bibdoc
my_bibrecdoc.add_new_file(CFG_PREFIX + '/lib/webtest/invenio/test.jpg', 'Main', 'img_test', False, 'test add new file', 'test', '.jpg')
my_bibrecdoc.add_bibdoc(doctype='Main', docname='file', never_fail=False)
self.assertEqual(len(my_bibrecdoc.list_bibdocs()), 3)
my_added_bibdoc = my_bibrecdoc.get_bibdoc('file')
#add bibdocfile in empty bibdoc
my_added_bibdoc.add_file_new_version(CFG_PREFIX + '/lib/webtest/invenio/test.gif', \
description= 'added in empty bibdoc', comment=None, format=None, flags=['PERFORM_HIDE_PREVIOUS'])
#propose unique docname
self.assertEqual(my_bibrecdoc.propose_unique_docname('file'), 'file_2')
#has docname
self.assertEqual(my_bibrecdoc.has_docname_p('file'), True)
#merge 2 bibdocs
my_bibrecdoc.merge_bibdocs('img_test', 'file')
self.assertEqual(len(my_bibrecdoc.get_bibdoc("img_test").list_all_files()), 2)
#check file exists
self.assertEqual(my_bibrecdoc.check_file_exists(CFG_PREFIX + '/lib/webtest/invenio/test.jpg'), True)
#get bibdoc names
self.assertEqual(my_bibrecdoc.get_bibdoc_names('Main')[0], '0104007_02')
self.assertEqual(my_bibrecdoc.get_bibdoc_names('Main')[1],'img_test')
#get total size
self.assertEqual(my_bibrecdoc.get_total_size(), 1647591)
#get total size latest version
self.assertEqual(my_bibrecdoc.get_total_size_latest_version(), 1647591)
#display
value = my_bibrecdoc.display(docname='img_test', version='', doctype='', ln='en', verbose=0, display_hidden=True)
self.assert_("<small><b>Main</b>" in value)
#get xml 8564
value = my_bibrecdoc.get_xml_8564()
self.assert_('/record/2/files/img_test.jpg</subfield>' in value)
#check duplicate docnames
self.assertEqual(my_bibrecdoc.check_duplicate_docnames(), True)
def tearDown(self):
my_bibrecdoc = BibRecDocs(2)
#delete
my_bibrecdoc.delete_bibdoc('img_test')
my_bibrecdoc.delete_bibdoc('file')
class BibDocsTest(unittest.TestCase):
"""regression tests about BibDocs"""
def test_BibDocs(self):
"""bibdocfile - BibDocs functions"""
#add file
my_bibrecdoc = BibRecDocs(2)
my_bibrecdoc.add_new_file(CFG_PREFIX + '/lib/webtest/invenio/test.jpg', 'Main', 'img_test', False, 'test add new file', 'test', '.jpg')
my_new_bibdoc = my_bibrecdoc.get_bibdoc("img_test")
value = my_bibrecdoc.list_bibdocs()
self.assertEqual(len(value), 2)
#get total file (bibdoc)
self.assertEqual(my_new_bibdoc.get_total_size(), 91750)
#get recid
self.assertEqual(my_new_bibdoc.get_recid(), 2)
#change name
my_new_bibdoc.change_name('new_name')
#get docname
self.assertEqual(my_new_bibdoc.get_docname(), 'new_name')
#get type
self.assertEqual(my_new_bibdoc.get_type(), 'Main')
#get id
self.assert_(my_new_bibdoc.get_id() > 80)
#set status
my_new_bibdoc.set_status('new status')
#get status
self.assertEqual(my_new_bibdoc.get_status(), 'new status')
#get base directory
self.assert_(my_new_bibdoc.get_base_dir().startswith(CFG_WEBSUBMIT_FILEDIR))
#get file number
self.assertEqual(my_new_bibdoc.get_file_number(), 1)
#add file new version
my_new_bibdoc.add_file_new_version(CFG_PREFIX + '/lib/webtest/invenio/test.jpg', description= 'the new version', comment=None, format=None, flags=["PERFORM_HIDE_PREVIOUS"])
self.assertEqual(my_new_bibdoc.list_versions(), [1, 2])
#revert
my_new_bibdoc.revert(1)
self.assertEqual(my_new_bibdoc.list_versions(), [1, 2, 3])
self.assertEqual(my_new_bibdoc.get_description('.jpg', version=3), 'test add new file')
#get total size latest version
self.assertEqual(my_new_bibdoc.get_total_size_latest_version(), 91750)
#get latest version
self.assertEqual(my_new_bibdoc.get_latest_version(), 3)
#list latest files
self.assertEqual(len(my_new_bibdoc.list_latest_files()), 1)
self.assertEqual(my_new_bibdoc.list_latest_files()[0].get_version(), 3)
#list version files
self.assertEqual(len(my_new_bibdoc.list_version_files(1, list_hidden=True)), 1)
#display
value = my_new_bibdoc.display(version='', ln='en', display_hidden=True)
self.assert_('>test add new file<' in value)
#format already exist
self.assertEqual(my_new_bibdoc.format_already_exists_p('.jpg'), True)
#get file
self.assertEqual(my_new_bibdoc.get_file('.jpg', version='1').get_version(), 1)
#set description
my_new_bibdoc.set_description('new description', '.jpg', version=1)
#get description
self.assertEqual(my_new_bibdoc.get_description('.jpg', version=1), 'new description')
#set comment
my_new_bibdoc.set_description('new comment', '.jpg', version=1)
#get comment
self.assertEqual(my_new_bibdoc.get_description('.jpg', version=1), 'new comment')
#get history
assert len(my_new_bibdoc.get_history()) > 0
#delete file
my_new_bibdoc.delete_file('.jpg', 2)
#list all files
self.assertEqual(len(my_new_bibdoc.list_all_files()), 2)
#delete file
my_new_bibdoc.delete_file('.jpg', 3)
#add new format
my_new_bibdoc.add_file_new_format(CFG_PREFIX + '/lib/webtest/invenio/test.gif', version=None, description=None, comment=None, format=None)
self.assertEqual(len(my_new_bibdoc.list_all_files()), 2)
#delete file
my_new_bibdoc.delete_file('.jpg', 1)
#delete file
my_new_bibdoc.delete_file('.gif', 1)
#empty bibdoc
self.assertEqual(my_new_bibdoc.empty_p(), True)
#hidden?
self.assertEqual(my_new_bibdoc.hidden_p('.jpg', version=1), False)
#hide
my_new_bibdoc.set_flag('HIDDEN', '.jpg', version=1)
#hidden?
self.assertEqual(my_new_bibdoc.hidden_p('.jpg', version=1), True)
#add and get icon
        my_new_bibdoc.add_icon(CFG_PREFIX + '/lib/webtest/invenio/icon-test.gif')
value = my_bibrecdoc.list_bibdocs()[1]
self.assertEqual(value.get_icon(), my_new_bibdoc.get_icon())
#delete icon
my_new_bibdoc.delete_icon()
#get icon
self.assertEqual(my_new_bibdoc.get_icon(), None)
#delete
my_new_bibdoc.delete()
self.assertEqual(my_new_bibdoc.deleted_p(), True)
#undelete
my_new_bibdoc.undelete(previous_status='')
def tearDown(self):
my_bibrecdoc = BibRecDocs(2)
#delete
my_bibrecdoc.delete_bibdoc('img_test')
my_bibrecdoc.delete_bibdoc('new_name')
class BibDocFilesTest(unittest.TestCase):
"""regression tests about BibDocFiles"""
def test_BibDocFiles(self):
"""bibdocfile - BibDocFile functions """
#add bibdoc
my_bibrecdoc = BibRecDocs(2)
my_bibrecdoc.add_new_file(CFG_PREFIX + '/lib/webtest/invenio/test.jpg', 'Main', 'img_test', False, 'test add new file', 'test', '.jpg')
my_new_bibdoc = my_bibrecdoc.get_bibdoc("img_test")
my_new_bibdocfile = my_new_bibdoc.list_all_files()[0]
#get url
self.assertEqual(my_new_bibdocfile.get_url(), CFG_SITE_URL + '/record/2/files/img_test.jpg')
#get type
self.assertEqual(my_new_bibdocfile.get_type(), 'Main')
#get path
self.assert_(my_new_bibdocfile.get_path().startswith(CFG_WEBSUBMIT_FILEDIR))
self.assert_(my_new_bibdocfile.get_path().endswith('/img_test.jpg;1'))
#get bibdocid
self.assertEqual(my_new_bibdocfile.get_bibdocid(), my_new_bibdoc.get_id())
#get name
self.assertEqual(my_new_bibdocfile.get_name() , 'img_test')
#get full name
self.assertEqual(my_new_bibdocfile.get_full_name() , 'img_test.jpg')
#get full path
self.assert_(my_new_bibdocfile.get_full_path().startswith(CFG_WEBSUBMIT_FILEDIR))
self.assert_(my_new_bibdocfile.get_full_path().endswith('/img_test.jpg;1'))
#get format
self.assertEqual(my_new_bibdocfile.get_format(), '.jpg')
#get version
self.assertEqual(my_new_bibdocfile.get_version(), 1)
#get description
self.assertEqual(my_new_bibdocfile.get_description(), my_new_bibdoc.get_description('.jpg', version=1))
#get comment
self.assertEqual(my_new_bibdocfile.get_comment(), my_new_bibdoc.get_comment('.jpg', version=1))
#get recid
self.assertEqual(my_new_bibdocfile.get_recid(), 2)
#get status
self.assertEqual(my_new_bibdocfile.get_status(), '')
#get size
self.assertEqual(my_new_bibdocfile.get_size(), 91750)
#get checksum
self.assertEqual(my_new_bibdocfile.get_checksum(), '28ec893f9da735ad65de544f71d4ad76')
#check
self.assertEqual(my_new_bibdocfile.check(), True)
#display
value = my_new_bibdocfile.display(ln='en')
assert 'files/img_test.jpg?version=1">' in value
#hidden?
self.assertEqual(my_new_bibdocfile.hidden_p(), False)
#delete
my_new_bibdoc.delete()
self.assertEqual(my_new_bibdoc.deleted_p(), True)
class CheckBibDocAuthorization(unittest.TestCase):
"""Regression tests for check_bibdoc_authorization function."""
def test_check_bibdoc_authorization(self):
"""bibdocfile - check_bibdoc_authorization function"""
from invenio.webuser import collect_user_info, get_uid_from_email
jekyll = collect_user_info(get_uid_from_email('jekyll@cds.cern.ch'))
self.assertEqual(check_bibdoc_authorization(jekyll, 'role:thesesviewer'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(jekyll, 'role: thesesviewer'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(jekyll, 'role: thesesviewer'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(jekyll, 'Role: thesesviewer'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(jekyll, 'email: jekyll@cds.cern.ch'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(jekyll, 'email: jekyll@cds.cern.ch'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
juliet = collect_user_info(get_uid_from_email('juliet.capulet@cds.cern.ch'))
self.assertEqual(check_bibdoc_authorization(juliet, 'restricted_picture'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertEqual(check_bibdoc_authorization(juliet, 'status: restricted_picture'), (0, CFG_WEBACCESS_WARNING_MSGS[0]))
self.assertNotEqual(check_bibdoc_authorization(juliet, 'restricted_video')[0], 0)
self.assertNotEqual(check_bibdoc_authorization(juliet, 'status: restricted_video')[0], 0)
TEST_SUITE = make_test_suite(BibRecDocsTest, \
BibDocsTest, \
BibDocFilesTest, \
CheckBibDocAuthorization)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
| kaplun/Invenio-OpenAIRE | modules/websubmit/lib/bibdocfile_regression_tests.py | Python | gpl-2.0 | 12,722 | 0.011083 |
from django.core.exceptions import PermissionDenied
def require_permission(user, *args):
for arg in args:
if not user.has_perm(arg):
            raise PermissionDenied("Action %s not allowed" % arg)
| Lapeth/timeline | Timeline/util/Permissions.py | Python | apache-2.0 | 211 | 0.009479 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutClassMethods in the Ruby Koans
#
from runner.koan import *
class AboutClassAttributes(Koan):
class Dog:
pass
def test_objects_are_objects(self):
fido = self.Dog()
self.assertEqual(True, isinstance(fido, object))
def test_classes_are_types(self):
self.assertEqual(True, self.Dog.__class__ == type)
def test_classes_are_objects_too(self):
self.assertEqual(True, issubclass(self.Dog, object))
def test_objects_have_methods(self):
fido = self.Dog()
self.assertEqual(26, len(dir(fido)))
def test_classes_have_methods(self):
self.assertEqual(26, len(dir(self.Dog)))
def test_creating_objects_without_defining_a_class(self):
singularity = object()
self.assertEqual(23, len(dir(singularity)))
def test_defining_attributes_on_individual_objects(self):
fido = self.Dog()
fido.legs = 4
self.assertEqual(4, fido.legs)
def test_defining_functions_on_individual_objects(self):
fido = self.Dog()
        fido.wag = lambda: 'fidos wag'
self.assertEqual('fidos wag', fido.wag())
def test_other_objects_are_not_affected_by_these_singleton_functions(self):
fido = self.Dog()
rover = self.Dog()
def wag():
return 'fidos wag'
fido.wag = wag
with self.assertRaises(AttributeError): rover.wag()
# ------------------------------------------------------------------
class Dog2:
def wag(self):
return 'instance wag'
def bark(self):
return "instance bark"
def growl(self):
return "instance growl"
@staticmethod
def bark():
return "staticmethod bark, arg: None"
@classmethod
def growl(cls):
return "classmethod growl, arg: cls=" + cls.__name__
def test_since_classes_are_objects_you_can_define_singleton_methods_on_them_too(self):
self.assertRegex(self.Dog2.growl(), 'classmethod growl, arg: cls=Dog2')
def test_classmethods_are_not_independent_of_instance_methods(self):
fido = self.Dog2()
self.assertRegex(fido.growl(), 'classmethod growl, arg: cls=Dog2')
self.assertRegex(self.Dog2.growl(), 'classmethod growl, arg: cls=Dog2')
def test_staticmethods_are_unbound_functions_housed_in_a_class(self):
self.assertRegex(self.Dog2.bark(), 'staticmethod bark, arg: None')
def test_staticmethods_also_overshadow_instance_methods(self):
fido = self.Dog2()
self.assertRegex(fido.bark(), 'staticmethod bark, arg: None')
# ------------------------------------------------------------------
class Dog3:
def __init__(self):
self._name = None
def get_name_from_instance(self):
return self._name
def set_name_from_instance(self, name):
self._name = name
@classmethod
def get_name(cls):
return cls._name
@classmethod
def set_name(cls, name):
cls._name = name
name = property(get_name, set_name)
name_from_instance = property(get_name_from_instance, set_name_from_instance)
def test_classmethods_can_not_be_used_as_properties(self):
fido = self.Dog3()
with self.assertRaises(TypeError): fido.name = "Fido"
def test_classes_and_instances_do_not_share_instance_attributes(self):
fido = self.Dog3()
fido.set_name_from_instance("Fido")
fido.set_name("Rover")
self.assertEqual('Fido', fido.get_name_from_instance())
self.assertEqual('Rover', self.Dog3.get_name())
def test_classes_and_instances_do_share_class_attributes(self):
fido = self.Dog3()
fido.set_name("Fido")
self.assertEqual('Fido', fido.get_name())
self.assertEqual('Fido', self.Dog3.get_name())
# ------------------------------------------------------------------
class Dog4:
def a_class_method(cls):
return 'dogs class method'
def a_static_method():
return 'dogs static method'
a_class_method = classmethod(a_class_method)
a_static_method = staticmethod(a_static_method)
def test_you_can_define_class_methods_without_using_a_decorator(self):
self.assertEqual('dogs class method', self.Dog4.a_class_method())
def test_you_can_define_static_methods_without_using_a_decorator(self):
self.assertEqual('dogs static method', self.Dog4.a_static_method())
# ------------------------------------------------------------------
def test_heres_an_easy_way_to_explicitly_call_class_methods_from_instance_methods(self):
fido = self.Dog4()
self.assertEqual(fido.a_class_method(), fido.__class__.a_class_method())
| gregkorte/Python-Koans | python3/koans/about_class_attributes.py | Python | mit | 4,882 | 0.001639 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
from django.utils.translation import ugettext as _
from jobbrowser.apis.base_api import Api, MockDjangoRequest, _extract_query_params
from liboozie.oozie_api import get_oozie
LOG = logging.getLogger(__name__)
try:
from oozie.conf import OOZIE_JOBS_COUNT, ENABLE_OOZIE_BACKEND_FILTERING
from oozie.views.dashboard import get_oozie_job_log, list_oozie_workflow, manage_oozie_jobs, bulk_manage_oozie_jobs, has_dashboard_jobs_access, massaged_oozie_jobs_for_json
except Exception, e:
LOG.exception('Some applications are not enabled for Job Browser v2: %s' % e)
class WorkflowApi(Api):
def apps(self, filters):
kwargs = {'cnt': OOZIE_JOBS_COUNT.get(), 'filters': []}
text_filters = _extract_query_params(filters)
if not has_dashboard_jobs_access(self.user):
kwargs['filters'].append(('user', self.user.username))
    elif 'username' in text_filters:
kwargs['filters'].append(('user', text_filters['username']))
if 'time' in filters:
kwargs['filters'].extend([('startcreatedtime', '-%s%s' % (filters['time']['time_value'], filters['time']['time_unit'][:1]))])
if ENABLE_OOZIE_BACKEND_FILTERING.get() and text_filters.get('text'):
kwargs['filters'].extend([('text', text_filters.get('text'))])
if filters['pagination']:
kwargs['offset'] = filters['pagination']['offset']
kwargs['cnt'] = min(filters['pagination']['limit'], OOZIE_JOBS_COUNT.get())
if filters.get('states'):
states_filters = {'running': ['RUNNING', 'PREP', 'SUSPENDED'], 'completed': ['SUCCEEDED'], 'failed': ['FAILED', 'KILLED'],}
for _state in filters.get('states'):
for _status in states_filters[_state]:
kwargs['filters'].extend([('status', _status)])
oozie_api = get_oozie(self.user)
wf_list = oozie_api.get_workflows(**kwargs)
return {
'apps':[{
'id': app['id'],
'name': app['appName'],
'status': app['status'],
'apiStatus': self._api_status(app['status']),
'type': 'workflow',
'user': app['user'],
'progress': app['progress'],
'duration': app['durationInMillis'],
'submitted': app['startTimeInMillis']
} for app in massaged_oozie_jobs_for_json(wf_list.jobs, self.user)['jobs']],
'total': wf_list.total
}
def app(self, appid):
if '@' in appid:
return WorkflowActionApi(self.user).app(appid)
oozie_api = get_oozie(self.user)
workflow = oozie_api.get_job(jobid=appid)
common = {
'id': workflow.id,
'name': workflow.appName,
'status': workflow.status,
'apiStatus': self._api_status(workflow.status),
'progress': workflow.get_progress(),
'type': 'workflow',
}
request = MockDjangoRequest(self.user)
response = list_oozie_workflow(request, job_id=appid)
common['properties'] = json.loads(response.content)
common['properties']['xml'] = ''
common['properties']['properties'] = ''
common['properties']['coordinator_id'] = workflow.get_parent_job_id()
common['properties']['bundle_id'] = workflow.conf_dict.get('oozie.bundle.id')
return common
def action(self, app_ids, action):
return _manage_oozie_job(self.user, action, app_ids)
def logs(self, appid, app_type, log_name=None):
if '@' in appid:
return WorkflowActionApi(self.user).logs(appid, app_type)
request = MockDjangoRequest(self.user)
data = get_oozie_job_log(request, job_id=appid)
return {'logs': json.loads(data.content)['log']}
def profile(self, appid, app_type, app_property, app_filters):
if '@' in appid:
      return WorkflowActionApi(self.user).profile(appid, app_type, app_property)
if app_property == 'xml':
oozie_api = get_oozie(self.user)
workflow = oozie_api.get_job(jobid=appid)
return {
'xml': workflow.definition,
}
elif app_property == 'properties':
oozie_api = get_oozie(self.user)
workflow = oozie_api.get_job(jobid=appid)
return {
'properties': workflow.conf_dict,
}
return {}
def _api_status(self, status):
if status in ['PREP', 'RUNNING']:
return 'RUNNING'
elif status == 'SUSPENDED':
return 'PAUSED'
elif status == 'SUCCEEDED':
return 'SUCCEEDED'
else:
return 'FAILED' # KILLED and FAILED
class WorkflowActionApi(Api):
def app(self, appid):
oozie_api = get_oozie(self.user)
action = oozie_api.get_action(action_id=appid)
common = action.to_json()
common['action_type'] = common['type']
common['type'] = 'workflow-action'
common['properties'] = {
'workflow_id': appid.split('@', 1)[0]
}
return common
def logs(self, appid, app_type, log_name=None):
return {'progress': 0, 'logs': ''}
def _manage_oozie_job(user, action, app_ids):
if action == 'change' or action == 'ignore' or len(app_ids) == 1:
request = MockDjangoRequest(user)
response = manage_oozie_jobs(request, app_ids[0], action['action'])
else:
request = MockDjangoRequest(user, post={'job_ids': ' '.join(app_ids), 'action': action['action']})
response = bulk_manage_oozie_jobs(request)
result = json.loads(response.content)
result['status'] = result.get('totalErrors', 0)
result['message'] = _('%s action sent to %s jobs') % (action['action'], result.get('totalRequests', 1))
return result
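
# Hedged usage sketch (the Django request user and the Oozie workflow id are
# illustrative, and every call needs a live Oozie backend behind get_oozie()):
#
#   api = WorkflowApi(request.user)
#   page = api.apps({'pagination': {'offset': 1, 'limit': 25},
#                    'states': ['running']})
#   result = api.action(['0000001-160000000000000-oozie-oozi-W'],
#                       {'action': 'kill'})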
| jayceyxc/hue | apps/jobbrowser/src/jobbrowser/apis/workflow_api.py | Python | apache-2.0 | 6,200 | 0.009194 |
from cn import ast,tokenSpec
tokens = tokenSpec.tokens
precedence = (
('left', 'OR', 'AND'),
('left', 'EQ', 'NE', 'LE', 'LT', 'GT', 'GE'),
('left', 'PLUS', 'MINUS'),
('left', 'TIMES', 'DIVIDE'),
('left', 'MOD'),
('right', 'PIPE')
)
def p_program(t):
'program : imports declaration_list'
t[0] = ast.Program(t[1], t[2])
def p_empty(t):
'empty :'
pass
def p_imports(t):
'''imports : imports_list
| empty'''
t[0] = t[1]
def p_imports_list(t):
'''imports_list : imports_list import_declaration
| import_declaration'''
if(len(t) == 2):
t[0] = [t[1]]
else:
t[0] = t[1]
t[0].append(t[2])
def p_import_declaration(t):
'import_declaration : IMPORT ID SEMI'
t[0] = ast.ImportDeclaration(t[2])
def p_declaration_list(t):
'''declaration_list : declaration_list declaration
| declaration'''
if(len(t) == 2):
t[0] = [t[1]]
else:
t[0] = t[1]
t[0].append(t[2])
def p_declaration(t):
'''declaration : var_declaration
| function_declaration
| class_declaration'''
t[0] = t[1]
def p_var_declaration(t):
'''var_declaration : type ID SEMI
| type ID ASSIGN expression SEMI'''
if len(t) == 4:
t[0] = ast.GVariableDeclaration(t[1], t[2])
else:
t[0] = ast.GVariableDeclaration(t[1], t[2], t[4])
def p_function_declaration(t):
'function_declaration : type ID LPAREN params RPAREN compound_stmt'
t[0] = ast.Function(t[1], t[2], t[4], t[6])
def p_type(t):
'''type : VOID
| INT
| FLOAT
| STRING
| BOOLEAN
| CHAR
| ID
| type LBRACKET RBRACKET'''
t[0] = t[1]
def p_params(t):
'''params : param_list
| empty'''
t[0] = t[1]
def p_param_list(t):
'''param_list : param_list COMMA param
| param'''
if(len(t) == 2):
t[0] = [t[1]]
else:
t[0] = t[1]
t[0].append(t[3])
def p_param(t):
'param : type ID'
t[0] = ast.Param(t[1], t[2])
def p_class_declaration(t):
'class_declaration : CLASS ID LBRACE class_block RBRACE'
t[0] = ast.ClassDeclaration(t[2], t[4])
def p_class_block(t):
'class_block : attribute_list constructor method_list'
t[0] = (t[1], t[2], t[3])
def p_attribute_list(t):
'''attribute_list : attribute_list attribute_declaration
| empty'''
if len(t) == 2:
t[0] = []
else:
t[0] = t[1]
t[0].append(t[2])
def p_attribute(t):
'''attribute_declaration : type ID SEMI
| type ID ASSIGN expression SEMI'''
if len(t) == 4:
t[0] = ast.ClassAttribute(t[1], t[2])
else:
t[0] = ast.ClassAttribute(t[1], t[2], t[4])
def p_method_list(t):
'''method_list : method_list method_declaration
| empty'''
if len(t) == 2:
t[0] = []
else:
t[0] = t[1]
t[0].append(t[2])
def p_method_declaration(t):
'method_declaration : type ID LPAREN params RPAREN compound_stmt'
t[0] = ast.ClassMethod(t[1], t[2], t[4], t[6])
def p_constructor(t):
'constructor : CONSTR LPAREN params RPAREN compound_stmt'
t[0] = ast.ClassConstructor(t[3], t[5])
def p_compound_stmt(t):
'compound_stmt : LBRACE statement_list RBRACE'
t[0] = t[2]
def p_statement_list(t):
'''statement_list : statement_list statement
| statement'''
if len(t) == 2:
t[0] = [t[1]]
else:
t[0] = t[1]
t[0].append(t[2])
def p_statement(t):
'''statement : expression_stmt
| compound_stmt
| selection_stmt
| iteration_stmt
| return_stmt
| var_dec_stmt
| array_dec_stmt'''
t[0] = t[1]
def p_var_dec_stmt(t):
'''var_dec_stmt : type ID SEMI
| type ID ASSIGN expression SEMI'''
if len(t) == 4:
t[0] = ast.VariableDeclaration(t[1], t[2])
else:
t[0] = ast.VariableDeclaration(t[1], t[2], t[4])
def p_array_dec_stmt(t):
'''array_dec_stmt : type ID LBRACKET INTLIT RBRACKET SEMI
| type ID LBRACKET INTLIT RBRACKET array SEMI'''
if len(t) == 7:
t[0] = ast.ArrayDeclaration(t[1], t[2], t[4])
else:
t[0] = ast.ArrayDeclaration(t[1], t[2], t[4], t[6])
def p_expression_stmt(t):
'expression_stmt : expression SEMI'
t[0] = t[1]
def p_selection_stmt(t):
'''selection_stmt : if_stmt
| if_else_stmt'''
t[0] = t[1]
def p_if_stmt(t):
'if_stmt : IF LPAREN expression RPAREN statement'
t[0] = ast.IfStmt(t[3], t[5])
def p_if_else_stmt(t):
'if_else_stmt : IF LPAREN expression RPAREN statement ELSE statement'
t[0] = ast.IfElseStmt(t[3], t[5], t[7])
def p_iteration_stmt(t):
'''iteration_stmt : while_stmt
| for_stmt'''
t[0] = t[1]
def p_while_stmt(t):
'while_stmt : WHILE LPAREN expression RPAREN statement'
t[0] = ast.WhileStmt(t[3], t[5])
def p_for_stmt(t):
    'for_stmt : FOR LPAREN var_declaration expression SEMI assignment RPAREN statement'
    # For statements are recognized but not yet lowered to an AST node; note
    # that var_declaration already consumes its own trailing SEMI.
    pass
def p_return_stmt(t):
'''return_stmt : RETURN SEMI
| RETURN expression SEMI'''
length = len(t)
if(length == 3):
t[0] = ast.ReturnStmt()
else:
t[0] = ast.ReturnStmt(t[2])
def p_expression(t):
'''expression : assignment
| binary
| unary
| call
| variable
| literal
| paren_expr
| array
| attribute_assign
| class_attribute
| method_call'''
t[0] = t[1]
def p_attribute_assign(t):
'attribute_assign : expression DOT ID ASSIGN expression'
t[0] = ast.AttributeAssignment(t[1], t[3], t[5])
def p_class_attribute(t):
'class_attribute : expression DOT ID'
t[0] = ast.Attribute(t[1], t[3])
def p_method_call(t):
'method_call : expression DOT ID LPAREN arguments RPAREN'
t[0] = ast.MethodCall(t[1], t[3], t[5])
def p_assignment(t):
'''assignment : ID ASSIGN expression
| ID LBRACKET expression RBRACKET ASSIGN expression'''
if len(t) == 4:
t[0] = ast.Assignment(t[1], t[3])
else:
t[0] = ast.Assignment(t[1], t[6], t[3])
def p_variable(t):
'''variable : ID
| ID LBRACKET expression RBRACKET'''
if len(t) == 2:
t[0] = ast.Variable(t[1])
else:
t[0] = ast.Variable(t[1], t[3])
def p_literal(t):
'''literal : integer
| float
| boolean
| string
| character'''
t[0] = t[1]
def p_integer(t):
'integer : INTLIT'
t[0] = ast.Integer(t[1])
def p_float(t):
'float : FLOATLIT'
t[0] = ast.Float(t[1])
def p_boolean(t):
'''boolean : TRUE
| FALSE'''
t[0] = ast.Boolean(t[1])
def p_string(t):
'string : STRLIT'
t[0] = ast.String(t[1])
def p_character(t):
'character : CHARLIT'
t[0] = ast.Character(t[1])
def p_paren_expr(t):
'paren_expr : LPAREN expression RPAREN'
t[0] = t[2]
def p_binary(t):
'''binary : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression
| expression MOD expression
| expression LT expression
| expression LE expression
| expression GT expression
| expression GE expression
| expression EQ expression
| expression NE expression
| expression AND expression
| expression OR expression'''
t[0] = ast.BinaryOp(t[1],t[2], t[3])
def p_unary(t):
'unary : unary_op expression'
t[0] = ast.UnaryOp(t[1], t[2])
def p_unary_op(t):
'''unary_op : MINUS
| NOT'''
t[0] = t[1]
def p_call(t):
'call : expression LPAREN arguments RPAREN'
t[0] = ast.Call(t[1], t[3])
def p_arguments(t):
'''arguments : argument_list
| empty'''
t[0] = t[1]
def p_argument_list(t):
'''argument_list : argument_list COMMA expression
| expression'''
if(len(t) == 2):
t[0] = [t[1]]
else:
t[0] = t[1]
t[0].append(t[3])
def p_array_literal(t):
'array : LBRACE list RBRACE'
t[0] = ast.Array(t[2])
def p_list(t):
'''list : list element
| empty'''
if len(t) == 2:
t[0] = []
else:
t[0] = t[1]
        t[0].append(t[2])
def p_element(t):
'element : literal'
t[0] = t[1]
def p_error(t):
print("Syntax error " + t.value)
| OrangeShark/senior-project | cn/grammar.py | Python | gpl-3.0 | 8,351 | 0.022393 |