# Dataset extract. Columns: text, repo_name, path, language (Python, 1 class),
# license (15 classes), size (int64, 6-947k), score (float64, 0-0.34).
# Each file below ends with a "# Source: repo | path | license" line.
from handler.base import BaseHandler
import glob
import json
import rospy
class pymlabBase(BaseHandler):
    def get(self, path=None):
print('OK')
file = rospy.get_param('/arom/node/pymlab_node/feature/pymlab_structure/cfg')
self.render('modules/features/pymlab.hbs', current_file = file)
    def post(self, path=None):
operation = self.get_argument('operation', 'get_json')
print('operation>>', self.get_argument('operation', 'XXX'))
        if operation == 'get_json':
            filename = self.get_argument('file', False)
            print(filename)
            if filename and filename != 'false':
                file = filename
            else:
                # Fall back to the structure file configured on the ROS
                # parameter server, or to the first bundled preset.
                try:
                    file = rospy.get_param('/arom/node/pymlab_node/feature/pymlab_structure/cfg')
                except KeyError:
                    file = glob.glob("/home/odroid/robozor/station/pymlab_presets/*.json")[0]
            with open(file) as f:
                json_data = json.load(f)
            self.write(json_data)
elif operation == 'push_json':
filename = self.get_argument('file')
data = json.loads(self.get_argument('data'))
print(filename, data)
            if True:  # TODO: verify that the path is valid
with open(filename, 'w') as f:
json.dump(data, f)
                self.write('OK')
# Source: Robozor-network/arom-web_ui | src/aromweb/node_handlers/pymlab_handler.py | gpl-3.0
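# A hypothetical client-side sketch of the two POST operations handled above.
# The host, port and mount point of this handler are assumptions, as are the
# preset filename and the use of the requests library:
#
#   import json, requests
#   url = 'http://localhost:8888/modules/features/pymlab'
#   # Read the currently configured structure file:
#   cfg = requests.post(url, data={'operation': 'get_json'}).json()
#   # Write an edited structure back to a preset file:
#   requests.post(url, data={
#       'operation': 'push_json',
#       'file': '/home/odroid/robozor/station/pymlab_presets/default.json',
#       'data': json.dumps(cfg),
#   })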
import json
import subprocess
import sublime
try:
from urllib.parse import urlencode
from urllib.request import urlopen
except ImportError:
from urllib import urlencode, urlopen
BASE_URL = 'https://slack.com/api/'
def api_call(method, call_args=None, loading=None, filename=None, icon=None):
    call_args = dict(call_args or {})
    if icon:
        call_args['icon_url'] = icon
print('icon', icon)
URL = BASE_URL + method + "?" + urlencode(call_args)
print('calling:', URL)
    try:
        if filename:
            with open(filename, 'rb') as f:
                filebody = f.read()
            data = urlencode({'content': filebody})
response = urlopen(
url=URL,
data=data.encode('utf8')
).read().decode('utf8')
else:
response = urlopen(url=URL).read().decode('utf8')
    except Exception:
# fallback for sublime bug with urlopen (on linux only)
if filename: # upload filename
proc = subprocess.Popen(
['curl', '-X', 'POST', '-F', 'file=@'+filename, URL],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
else:
proc = subprocess.Popen(
['curl', '-s', URL],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
response = out.decode('utf8')
response = json.loads(response)
if not response['ok']:
sublime.error_message("SLACK Api error: " + response['error'])
if loading:
loading.done = True
return False
return response
# Source: simion/sublime-slack-integration | api.py | gpl-2.0
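# A minimal usage sketch of api_call above; chat.postMessage is a standard
# Slack Web API method, while the token and channel values are placeholders:
#
#   response = api_call('chat.postMessage', {
#       'token': 'xoxp-your-token',
#       'channel': '#general',
#       'text': 'Build finished',
#   })
#   if response:
#       print('posted at', response['ts'])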
#!/usr/bin/env python
# Aravis - Digital camera library
#
# Copyright (c) 2011-2012 Emmanuel Pacaud
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: Emmanuel Pacaud <emmanuel@gnome.org>
# If you have installed aravis in a non standard location, you may need
# to make GI_TYPELIB_PATH point to the correct location. For example:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/opt/bin/lib/girepository-1.0/
#
# You may also have to give the path to libaravis.so, using LD_PRELOAD or
# LD_LIBRARY_PATH.
import gi
gi.require_version('Aravis', '0.2')
from gi.repository import Aravis
print Aravis.Auto
print Aravis.Auto.OFF
print Aravis.BufferStatus
print Aravis.DebugLevel
print Aravis.DomNodeType
print Aravis.GvStreamPacketResend
print Aravis.GvspPacketType
print Aravis.PixelFormat
print Aravis.PixelFormat.MONO_8
# Source: lu-zero/aravis | tests/python/arv-enum-test.py | lgpl-2.1
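# A short follow-on sketch using the same bindings; Aravis.Camera.new(None)
# opens the first camera found (this assumes a device is actually connected):
#
#   camera = Aravis.Camera.new(None)
#   print camera.get_model_name()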
"""
Tests for Django's bundled context processors.
"""
from django.test import SimpleTestCase, TestCase, override_settings
@override_settings(
ROOT_URLCONF='context_processors.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
],
},
}],
)
class RequestContextProcessorTests(SimpleTestCase):
"""
Tests for the ``django.template.context_processors.request`` processor.
"""
def test_request_attributes(self):
"""
Test that the request object is available in the template and that its
attributes can't be overridden by GET and POST parameters (#3828).
"""
url = '/request_attrs/'
# We should have the request object in the template.
response = self.client.get(url)
self.assertContains(response, 'Have request')
# Test is_secure.
response = self.client.get(url)
self.assertContains(response, 'Not secure')
response = self.client.get(url, {'is_secure': 'blah'})
self.assertContains(response, 'Not secure')
response = self.client.post(url, {'is_secure': 'blah'})
self.assertContains(response, 'Not secure')
# Test path.
response = self.client.get(url)
self.assertContains(response, url)
response = self.client.get(url, {'path': '/blah/'})
self.assertContains(response, url)
response = self.client.post(url, {'path': '/blah/'})
self.assertContains(response, url)
@override_settings(
DEBUG=True,
INTERNAL_IPS=['127.0.0.1'],
ROOT_URLCONF='context_processors.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
],
},
}],
)
class DebugContextProcessorTests(TestCase):
"""
Tests for the ``django.template.context_processors.debug`` processor.
"""
def test_debug(self):
url = '/debug/'
# We should have the debug flag in the template.
response = self.client.get(url)
self.assertContains(response, 'Have debug')
# And now we should not
with override_settings(DEBUG=False):
response = self.client.get(url)
self.assertNotContains(response, 'Have debug')
def test_sql_queries(self):
"""
Test whether sql_queries represents the actual amount
of queries executed. (#23364)
"""
url = '/debug/'
response = self.client.get(url)
self.assertContains(response, 'First query list: 0')
self.assertContains(response, 'Second query list: 1')
# Check we have not actually memoized connection.queries
self.assertContains(response, 'Third query list: 2')
# Source: DONIKAN/django | tests/context_processors/tests.py | bsd-3-clause
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import entities
from mixbox import fields
import cybox.bindings.dns_cache_object as dns_cache_binding
from cybox.common import ObjectProperties, PositiveInteger
from cybox.objects.dns_record_object import DNSRecord
class DNSCacheEntry(entities.Entity):
_namespace = "http://cybox.mitre.org/objects#DNSCacheObject-2"
_binding = dns_cache_binding
_binding_class = dns_cache_binding.DNSCacheEntryType
dns_entry = fields.TypedField("DNS_Entry", DNSRecord)
ttl = fields.TypedField("TTL", PositiveInteger)
class DNSCache(ObjectProperties):
_binding = dns_cache_binding
_binding_class = dns_cache_binding.DNSCacheObjectType
_namespace = "http://cybox.mitre.org/objects#DNSCacheObject-2"
_XSI_NS = "DNSCacheObj"
_XSI_TYPE = "DNSCacheObjectType"
dns_cache_entry = fields.TypedField("DNS_Cache_Entry", DNSCacheEntry, multiple=True)
| CybOXProject/python-cybox | cybox/objects/dns_cache_object.py | Python | bsd-3-clause | 990 | 0.00101 |
# vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2008-2015 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test cases for module Winapp
"""
import os
import shutil
import sys
import unittest
sys.path.append('.')
from bleachbit.Winapp import Winapp, detectos, detect_file, section2option
from bleachbit.Windows import detect_registry_key
import common
if 'nt' == os.name:
import _winreg
else:
def fake_detect_registry_key(f):
return True
import bleachbit.Windows
bleachbit.Windows.detect_registry_key = fake_detect_registry_key
def get_winapp2():
"""Download and cache winapp2.ini. Return local filename."""
url = "http://www.winapp2.com/Winapp2.ini"
tmpdir = None
if 'posix' == os.name:
tmpdir = '/tmp'
if 'nt' == os.name:
tmpdir = os.getenv('TMP')
fn = os.path.join(tmpdir, 'bleachbit_test_winapp2.ini')
    if os.path.exists(fn):
        import time
        import stat
        age_seconds = time.time() - os.stat(fn)[stat.ST_MTIME]
        if age_seconds > (24 * 60 * 60):  # older than one day
            print 'note: deleting stale file %s' % fn
            os.remove(fn)
    if not os.path.exists(fn):
        import urllib2
        txt = urllib2.urlopen(url).read()
        f = file(fn, 'w')
        f.write(txt)
        f.close()
    return fn
class WinappTestCase(unittest.TestCase):
"""Test cases for Winapp"""
def run_all(self, cleaner, really_delete):
"""Test all the cleaner options"""
for (option_id, __name) in cleaner.get_options():
for cmd in cleaner.get_commands(option_id):
for result in cmd.execute(really_delete):
common.validate_result(self, result, really_delete)
def test_remote(self):
"""Test with downloaded file"""
winapps = Winapp(get_winapp2())
for cleaner in winapps.get_cleaners():
self.run_all(cleaner, False)
def test_detectos(self):
"""Test detectos function"""
# Tests are in the format (required_ver, mock, expected_return)
tests = (('5.1', '5.1', True),
('5.1', '6.0', False),
('6.0', '5.1', False),
('|5.1', '5.1', True),
('|5.1', '6.0', False),
('6.1|', '5.1', False),
('6.1|', '6.0', False),
('6.1|', '6.1', True),
('6.1|', '6.2', True),
('6.2|', '5.1', False),
('6.2|', '6.0', False),
('6.2|', '6.1', False),
('6.2|', '6.2', True))
for (s, mock, expected_return) in tests:
actual_return = detectos(s, mock)
self.assertEqual(expected_return, actual_return,
'detectos(%s, %s)==%s instead of %s' % (s, mock,
actual_return, expected_return))
def test_detect_file(self):
"""Test detect_file function"""
tests = [('%windir%\\system32\\kernel32.dll', True),
('%windir%\\system32', True),
('%ProgramFiles%\\Internet Explorer', True),
('%ProgramFiles%\\Internet Explorer\\', True),
('%windir%\\doesnotexist', False),
('%windir%\\system*', True),
('%windir%\\*ystem32', True),
('%windir%\\*ystem3*', True)]
# On 64-bit Windows, Winapp2.ini expands the %ProgramFiles% environment
# variable to also %ProgramW6432%, so test unique entries in
# %ProgramW6432%.
import struct
if not 32 == 8 * struct.calcsize('P'):
raise NotImplementedError('expecting 32-bit Python')
if os.getenv('ProgramW6432'):
dir_64 = os.listdir(os.getenv('ProgramFiles'))
dir_32 = os.listdir(os.getenv('ProgramW6432'))
dir_32_unique = set(dir_32) - set(dir_64)
if dir_32 and not dir_32_unique:
raise RuntimeError(
'Test expects objects in %ProgramW6432% not in %ProgramFiles%')
for pathname in dir_32_unique:
tests.append(('%%ProgramFiles%%\\%s' % pathname, True))
else:
print 'NOTE: skipping %ProgramW6432% tests because WoW64 not detected'
for (pathname, expected_return) in tests:
actual_return = detect_file(pathname)
msg = 'detect_file(%s) returned %s' % (pathname, actual_return)
self.assertEqual(expected_return, actual_return, msg)
def test_fake(self):
"""Test with fake file"""
ini_fn = None
keyfull = 'HKCU\\Software\\BleachBit\\DeleteThisKey'
subkey = 'Software\\BleachBit\\DeleteThisKey\\AndThisKey'
def setup_fake(f1_filename=None):
"""Setup the test environment"""
dirname = tempfile.mkdtemp(prefix='bleachbit-test-winapp')
f1 = os.path.join(dirname, f1_filename or 'deleteme.log')
file(f1, 'w').write('')
dirname2 = os.path.join(dirname, 'sub')
os.mkdir(dirname2)
f2 = os.path.join(dirname2, 'deleteme.log')
file(f2, 'w').write('')
fbak = os.path.join(dirname, 'deleteme.bak')
file(fbak, 'w').write('')
self.assertTrue(os.path.exists(f1))
self.assertTrue(os.path.exists(f2))
self.assertTrue(os.path.exists(fbak))
hkey = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, subkey)
hkey.Close()
self.assertTrue(detect_registry_key(keyfull))
self.assertTrue(detect_registry_key('HKCU\\%s' % subkey))
return (dirname, f1, f2, fbak)
def ini2cleaner(filekey, do_next=True):
ini = file(ini_fn, 'w')
ini.write('[someapp]\n')
ini.write('LangSecRef=3021\n')
ini.write(filekey)
ini.write('\n')
ini.close()
self.assertTrue(os.path.exists(ini_fn))
if do_next:
return Winapp(ini_fn).get_cleaners().next()
else:
return Winapp(ini_fn).get_cleaners()
# reuse this path to store a winapp2.ini file in
import tempfile
(ini_h, ini_fn) = tempfile.mkstemp(suffix='.ini', prefix='winapp2')
os.close(ini_h)
# a set of tests
tests = [
# single file
('FileKey1=%s|deleteme.log', None,
False, True, False, True, True, True),
# special characters for XML
('FileKey1=%s|special_chars_&-\'.txt', 'special_chars_&-\'.txt',
False, True, False, True, True, True),
# *.log
('FileKey1=%s|*.LOG', None, False, True, False, True, True, True),
# semicolon separates different file types
('FileKey1=%s|*.log;*.bak', None,
False, True, False, True, False, True),
# *.*
('FileKey1=%s|*.*', None, False, True, False, True, False, True),
# recurse *.*
('FileKey1=%s|*.*|RECURSE', None, False,
True, False, False, False, True),
# remove self *.*, this removes the directory
('FileKey1=%s|*.*|REMOVESELF', None,
False, False, False, False, False, True),
]
# Add positive detection, where the detection believes the application is present,
# to all the tests, which are also positive.
new_tests = []
for test in tests:
for detect in (
"\nDetectFile=%%APPDATA%%\\Microsoft",
"\nDetectFile1=%%APPDATA%%\\Microsoft\nDetectFile2=%%APPDATA%%\\does_not_exist",
"\nDetectFile1=%%APPDATA%%\\does_not_exist\nDetectFile2=%%APPDATA%%\\Microsoft",
"\nDetect=HKCU\\Software\\Microsoft",
"\nDetect1=HKCU\\Software\\Microsoft\nDetect2=HKCU\\Software\\does_not_exist",
"\nDetect1=HKCU\\Software\\does_not_exist\nDetect2=HKCU\\Software\\Microsoft"):
new_ini = test[0] + detect
new_test = [new_ini, ] + [x for x in test[1:]]
new_tests.append(new_test)
positive_tests = tests + new_tests
# execute positive tests
for test in positive_tests:
print 'positive test: ', test
(dirname, f1, f2, fbak) = setup_fake(test[1])
cleaner = ini2cleaner(test[0] % dirname)
self.assertEqual(test[2], cleaner.auto_hide())
self.run_all(cleaner, False)
self.run_all(cleaner, True)
self.assertEqual(test[3], os.path.exists(dirname))
self.assertEqual(test[4], os.path.exists(f1))
self.assertEqual(test[5], os.path.exists(f2))
self.assertEqual(test[6], os.path.exists(fbak))
self.assertEqual(test[7], cleaner.auto_hide())
shutil.rmtree(dirname, True)
# negative tests where the application detect believes the application
# is absent
for test in tests:
for detect in (
"\nDetectFile=c:\\does_not_exist",
# special characters for XML
"\nDetectFile=c:\\does_not_exist_special_chars_&'",
"\nDetectFile1=c:\\does_not_exist1\nDetectFile2=c:\\does_not_exist2",
"\nDetect=HKCU\\Software\\does_not_exist",
"\nDetect=HKCU\\Software\\does_not_exist_&'",
"\nDetect1=HKCU\\Software\\does_not_exist1\nDetect2=HKCU\\Software\\does_not_exist1"):
new_ini = test[0] + detect
t = [new_ini, ] + [x for x in test[1:]]
print 'negative test', t
# execute the test
(dirname, f1, f2, fbak) = setup_fake()
cleaner = ini2cleaner(t[0] % dirname, False)
self.assertRaises(StopIteration, cleaner.next)
# registry key, basic
(dirname, f1, f2, fbak) = setup_fake()
cleaner = ini2cleaner('RegKey1=%s' % keyfull)
self.run_all(cleaner, False)
self.assertTrue(detect_registry_key(keyfull))
self.run_all(cleaner, True)
self.assertFalse(detect_registry_key(keyfull))
# check for parse error with ampersand
(dirname, f1, f2, fbak) = setup_fake()
cleaner = ini2cleaner('RegKey1=HKCU\\Software\\PeanutButter&Jelly')
self.run_all(cleaner, False)
self.run_all(cleaner, True)
def test_section2option(self):
"""Test for section2option()"""
tests = ((' FOO2 ', 'foo2'),
('A - B (C)', 'a_b_c'))
for test in tests:
self.assertEqual(section2option(test[0]), test[1])
def suite():
return unittest.makeSuite(WinappTestCase)
if __name__ == '__main__':
unittest.main()
# Source: uudiin/bleachbit | tests/TestWinapp.py | gpl-3.0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import oauth2client.django_orm
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('web_property_id', models.CharField(help_text='The property tracking ID is available when viewing the "Tracking Code" details in the Google Analytics admin.', max_length=25, verbose_name='property tracking ID')),
('profile_id', models.CharField(default='', max_length=25, verbose_name='view (profile) ID', blank=True)),
('display_features', models.BooleanField(default=False, help_text='Used for remarketing, demographics and interest reporting.', verbose_name='Use Display advertising features?')),
('is_enabled', models.BooleanField(default=False, help_text='Is Google Analytics tracking enabled on the website?', verbose_name='enabled')),
],
options={
'ordering': ['site'],
'verbose_name': 'view (profile)',
'verbose_name_plural': 'views (profiles)',
},
),
migrations.CreateModel(
name='ProfileOAuth2Credentials',
fields=[
('id', models.OneToOneField(related_name='_oauth2_credentials', primary_key=True, serialize=False, to='googleanalytics.Profile')),
('credentials', oauth2client.django_orm.CredentialsField(null=True)),
],
),
migrations.AddField(
model_name='profile',
name='site',
field=models.OneToOneField(related_name='+', to='sites.Site'),
),
]
# Source: thecut/thecut-googleanalytics | thecut/googleanalytics/migrations/0001_initial.py | apache-2.0
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.model.xs.StringType import StringType
class Rights(StringType):
pass
# Source: cjaymes/pyscap | src/scap/model/dc_elements_1_1/Rights.py | gpl-3.0
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class FirewallRule(SubResource):
"""Data Lake Analytics firewall rule information.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar start_ip_address: The start IP address for the firewall rule. This
can be either ipv4 or ipv6. Start and End should be in the same protocol.
:vartype start_ip_address: str
:ivar end_ip_address: The end IP address for the firewall rule. This can
be either ipv4 or ipv6. Start and End should be in the same protocol.
:vartype end_ip_address: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'start_ip_address': {'readonly': True},
'end_ip_address': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(self):
super(FirewallRule, self).__init__()
self.start_ip_address = None
self.end_ip_address = None
# Source: lmazuel/azure-sdk-for-python | azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/firewall_rule.py | mit
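# A usage sketch (assuming msrest's Model.deserialize, which reads the
# _attribute_map above; the payload values are illustrative):
#
#   rule = FirewallRule.deserialize({
#       'id': '/subscriptions/.../firewallRules/rule1',
#       'name': 'rule1',
#       'properties': {'startIpAddress': '10.0.0.1',
#                      'endIpAddress': '10.0.0.255'},
#   })
#   print(rule.start_ip_address)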
"""Basic Thrift server that uses the Milo Protocol"""
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from helloworld import HelloWorld
from milo.protocols.thrift import TMiloProtocolFactory
from milo.servers.thrift.TThreadedServer import TDebugThreadedServer
class HelloWorldHandler:
def ping(self, name):
return name
handler = HelloWorldHandler()
processor = HelloWorld.Processor(handler)
transport = TSocket.TServerSocket(9090)
tfactory = TTransport.TFramedTransportFactory()
pfactory = TMiloProtocolFactory()
server = TDebugThreadedServer(processor, transport, tfactory, pfactory)
print 'Starting the server...'
server.serve()
print 'done.'
# Source: lenn0x/Milo-Tracing-Framework | src/py/examples/milo_server.py | apache-2.0
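# A matching client sketch (mirrors the server above; assumes the same
# HelloWorld IDL and that TMiloProtocolFactory is usable client-side):
#
#   socket = TSocket.TSocket('localhost', 9090)
#   transport = TTransport.TFramedTransport(socket)
#   protocol = TMiloProtocolFactory().getProtocol(transport)
#   client = HelloWorld.Client(protocol)
#   transport.open()
#   print client.ping('world')
#   transport.close()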
#!/usr/bin/env python
import pandas as pd
import numpy as np
import argparse
def getOptions():
# Parse command line arguments
parser = argparse.ArgumentParser(description="Merge tappAS DEA output files with detection flags")
# Input data
parser.add_argument("-t", "--input-directory", dest="inDir", required=True, help="Input directory of maize tappAS output")
parser.add_argument("-f", "--flag", dest="inFlag", required=True, help="Input TSV of on/off flags")
parser.add_argument("-a", "--annot", dest="inAnnot", required=True, help="Input CSV of transcript_id to gene_id pairs in transcriptome")
parser.add_argument("-e", "--exclude", dest="exclude", required=False, action='append', help="Samples to exclude from expression matrices, multiple values can be listed with each '-e'")
# Output data
parser.add_argument("-o", "--output", dest="outFile", required=True, help="Output file")
args = parser.parse_args()
return args
def main():
# Get on/off flags and transcript_id to gene_id pairs
flagDF = pd.read_csv(args.inFlag, sep="\t",low_memory=False)
annotDF = pd.read_csv(args.inAnnot)
# Merge gene_id into flags
geneDF = pd.merge(annotDF,flagDF,how='outer',on='transcript_id',indicator='merge_check')
# geneDF['merge_check'].value_counts()
# Fix the 89 single transcript genes missing in EA files
# (have the same value for transcript_id and gene_id)
geneDF = geneDF[geneDF['merge_check']!='left_only']
geneDF['gene_id'] = np.where(geneDF['merge_check']=='right_only',geneDF['transcript_id'],geneDF['gene_id'])
del(geneDF['merge_check'])
# geneDF['gene_id'].isna().any()
geneDF = geneDF.groupby('gene_id')[[c for c in geneDF.columns if 'flag' in c]].max().reset_index()
# Remove excluded columns if provided
if args.exclude is not None:
for s in args.exclude:
geneDF = geneDF.drop(columns=[c for c in geneDF.columns if c==s])
print("Removed {} columns from matrix...".format(s))
# Count and drop transcripts that are not detected in any samples
# and transcripts only detected in one samples
if 'sum_flag' not in geneDF.columns:
geneDF['sum_flag'] = geneDF[[c for c in geneDF.columns if "flag_" in c]].sum(axis=1).astype(int)
print("Detection of genes by number of genotypes:\nnumSamples\tfreq\n{}".format(
geneDF['sum_flag'].value_counts().sort_index()))
print("{} transcripts detected in 0 samples or 1 sample only\n...Dropped from tappas files".format(
len(geneDF[geneDF['sum_flag']<=1])))
detectDF = geneDF[geneDF['sum_flag']>1].copy().reset_index(drop=True)
# Merge genotype tappAS output files
mergeDF = detectDF.copy()
for genotype in ["B73","C123","Hp301","Mo17","NC338"]:
tempDF = pd.read_csv("{}/{}_tappAS_DEA_Genes.tsv".format(args.inDir,genotype),sep="\t")
tempDF['flag_DE_'+genotype] = np.where(tempDF['DEA Result']=="DE",1,0)
tempDF = tempDF.rename(columns={'#Gene':'gene_id',
'(1 - Probability)':'DE_pval_'+genotype,
'Log2FC':'Log2FC_'+genotype,
'Ambient MeanExpLevel':'mean_TPM_ambient_'+genotype,
'Ozone MeanExpLevel':'mean_TPM_ozone_'+genotype})
tempDF = tempDF[['gene_id','DE_pval_'+genotype,'flag_DE_'+genotype,
'Log2FC_'+genotype,'mean_TPM_ambient_'+genotype,'mean_TPM_ozone_'+genotype]]
tempMerge = pd.merge(mergeDF,tempDF,how='outer',on="gene_id",indicator='merge_check')
# tempMerge['merge_check'].value_counts()
print("\t{} genes not detected in {}".format(len(tempMerge[tempMerge['merge_check']=='left_only']),genotype))
del(tempMerge['merge_check'])
tempMerge['flag_detect_DE_'+genotype] = np.where((tempMerge['flag_DE_'+genotype]==1)&
(tempMerge['flag_'+genotype+'_Amb']+tempMerge['flag_'+genotype+'_Ele']>0),1,0)
mergeDF = tempMerge.copy()
# Output merged file of flags and tappas results
mergeDF.to_csv(args.outFile,index=False)
if __name__ == '__main__':
# Parse command line arguments
global args
args = getOptions()
main()
# Source: McIntyre-Lab/papers | nanni_maize_2022/scripts/merge_tappas_DEA_results.py | lgpl-3.0
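# Example invocation of the script above (file and sample names are
# hypothetical; -e may be repeated, one excluded sample per flag):
#
#   python merge_tappas_DEA_results.py \
#       -t maize_tappas_output/ \
#       -f detection_flags.tsv \
#       -a transcript_gene_pairs.csv \
#       -e B73_amb_rep1 -e B73_ele_rep1 \
#       -o merged_DEA_results.csv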
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import pkgutil
from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.tasks.python_task import PythonTask
from pants.base.exceptions import TaskError
from pants.base.generator import Generator, TemplateData
from pants.base.workunit import WorkUnit, WorkUnitLabel
class PythonEval(PythonTask):
class Error(TaskError):
"""A richer failure exception type useful for tests."""
def __init__(self, *args, **kwargs):
compiled = kwargs.pop('compiled')
failed = kwargs.pop('failed')
super(PythonEval.Error, self).__init__(*args, **kwargs)
self.compiled = compiled
self.failed = failed
_EVAL_TEMPLATE_PATH = os.path.join('templates', 'python_eval', 'eval.py.mustache')
@staticmethod
def _is_evalable(target):
return isinstance(target, (PythonLibrary, PythonBinary))
@classmethod
def register_options(cls, register):
super(PythonEval, cls).register_options(register)
register('--fail-slow', action='store_true', default=False,
help='Compile all targets and present the full list of errors.')
register('--closure', action='store_true', default=False,
help='Eval all targets in the closure individually instead of just the targets '
'specified on the command line.')
def execute(self):
targets = self.context.targets() if self.get_options().closure else self.context.target_roots
with self.invalidated(filter(self._is_evalable, targets),
topological_order=True) as invalidation_check:
compiled = self._compile_targets(invalidation_check.invalid_vts)
return compiled # Collected and returned for tests
def _compile_targets(self, invalid_vts):
with self.context.new_workunit(name='eval-targets', labels=[WorkUnitLabel.MULTITOOL]):
compiled = []
failed = []
for vt in invalid_vts:
target = vt.target
return_code = self._compile_target(target)
if return_code == 0:
vt.update() # Ensure partial progress is marked valid
compiled.append(target)
else:
if self.get_options().fail_slow:
failed.append(target)
else:
raise self.Error('Failed to eval {}'.format(target.address.spec),
compiled=compiled,
failed=[target])
if failed:
msg = 'Failed to evaluate {} targets:\n {}'.format(
len(failed),
'\n '.join(t.address.spec for t in failed))
raise self.Error(msg, compiled=compiled, failed=failed)
return compiled
def _compile_target(self, target):
# "Compiles" a target by forming an isolated chroot of its sources and transitive deps and then
# attempting to import each of the target's sources in the case of a python library or else the
# entry point in the case of a python binary.
#
# For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:
#
# if __name__ == '__main__':
# import lib.core
# import lib.util
#
# For a binary with entry point lib.bin:main the "compiler" main file would look like:
#
# if __name__ == '__main__':
# from lib.bin import main
#
# In either case the main file is executed within the target chroot to reveal missing BUILD
# dependencies.
with self.context.new_workunit(name=target.address.spec):
modules = []
if isinstance(target, PythonBinary):
source = 'entry_point {}'.format(target.entry_point)
components = target.entry_point.rsplit(':', 1)
module = components[0]
if len(components) == 2:
function = components[1]
data = TemplateData(source=source,
import_statement='from {} import {}'.format(module, function))
else:
data = TemplateData(source=source, import_statement='import {}'.format(module))
modules.append(data)
else:
for path in target.sources_relative_to_source_root():
if path.endswith('.py'):
if os.path.basename(path) == '__init__.py':
module_path = os.path.dirname(path)
else:
module_path, _ = os.path.splitext(path)
source = 'file {}'.format(os.path.join(target.target_base, path))
module = module_path.replace(os.path.sep, '.')
data = TemplateData(source=source, import_statement='import {}'.format(module))
modules.append(data)
if not modules:
# Nothing to eval, so a trivial compile success.
return 0
interpreter = self.select_interpreter_for_targets([target])
if isinstance(target, PythonBinary):
pexinfo, platforms = target.pexinfo, target.platforms
else:
pexinfo, platforms = None, None
generator = Generator(pkgutil.get_data(__name__, self._EVAL_TEMPLATE_PATH),
chroot_parent=self.chroot_cache_dir, modules=modules)
executable_file_content = generator.render()
chroot = self.cached_chroot(interpreter=interpreter,
pex_info=pexinfo,
targets=[target],
platforms=platforms,
executable_file_content=executable_file_content)
pex = chroot.pex()
with self.context.new_workunit(name='eval',
labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.RUN,
WorkUnitLabel.TOOL],
cmd=' '.join(pex.cmdline())) as workunit:
returncode = pex.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
if returncode != 0:
self.context.log.error('Failed to eval {}'.format(target.address.spec))
return returncode
# Source: megaserg/pants | src/python/pants/backend/python/tasks/python_eval.py | apache-2.0
import pushmanager.core.db as db
import pushmanager.core.util
from pushmanager.core.db import InsertIgnore
from pushmanager.core.mail import MailQueue
from pushmanager.core.requesthandler import RequestHandler
from pushmanager.core.xmppclient import XMPPQueue
class AddRequestServlet(RequestHandler):
def post(self):
if not self.current_user:
return self.send_error(403)
self.pushid = pushmanager.core.util.get_int_arg(self.request, 'push')
self.request_ids = self.request.arguments.get('request', [])
insert_queries = [
InsertIgnore(db.push_pushcontents, ({'request': int(i), 'push': self.pushid}))
for i in self.request_ids
]
update_query = db.push_requests.update().where(
db.push_requests.c.id.in_(self.request_ids)).values({'state': 'added'})
request_query = db.push_requests.select().where(
db.push_requests.c.id.in_(self.request_ids))
db.execute_transaction_cb(insert_queries + [update_query, request_query], self.on_db_complete)
# allow both GET and POST
get = post
def on_db_complete(self, success, db_results):
self.check_db_results(success, db_results)
for req in db_results[-1]:
if req['watchers']:
user_string = '%s (%s)' % (req['user'], req['watchers'])
users = [req['user']] + req['watchers'].split(',')
else:
user_string = req['user']
users = [req['user']]
msg = (
"""
<p>
%(pushmaster)s has accepted request for %(user)s into a push:
</p>
<p>
<strong>%(user)s - %(title)s</strong><br />
<em>%(repo)s/%(branch)s</em>
</p>
<p>
Regards,<br />
PushManager
</p>"""
) % pushmanager.core.util.EscapedDict({
'pushmaster': self.current_user,
'user': user_string,
'title': req['title'],
'repo': req['repo'],
'branch': req['branch'],
})
subject = "[push] %s - %s" % (user_string, req['title'])
MailQueue.enqueue_user_email(users, msg, subject)
msg = '{0} has accepted request "{1}" for {2} into a push:\n{3}/push?id={4}'.format(
self.current_user,
req['title'],
user_string,
self.get_base_url(),
self.pushid,
)
XMPPQueue.enqueue_user_xmpp(users, msg)
# Source: Yelp/pushmanager | pushmanager/servlets/addrequest.py | apache-2.0
from pycp2k.inputsection import InputSection
from ._external_potential1 import _external_potential1
from ._rescale_forces1 import _rescale_forces1
from ._mixed1 import _mixed1
from ._dft1 import _dft1
from ._mm1 import _mm1
from ._qmmm1 import _qmmm1
from ._eip1 import _eip1
from ._bsse1 import _bsse1
from ._subsys1 import _subsys1
from ._properties1 import _properties1
from ._print64 import _print64
class _force_eval2(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Method = None
self.Stress_tensor = None
self.Embed = None
self.EXTERNAL_POTENTIAL_list = []
self.RESCALE_FORCES = _rescale_forces1()
self.MIXED = _mixed1()
self.DFT = _dft1()
self.MM = _mm1()
self.QMMM = _qmmm1()
self.EIP = _eip1()
self.BSSE = _bsse1()
self.SUBSYS = _subsys1()
self.PROPERTIES = _properties1()
self.PRINT = _print64()
self._name = "FORCE_EVAL"
self._keywords = {'Method': 'METHOD', 'Embed': 'EMBED', 'Stress_tensor': 'STRESS_TENSOR'}
self._subsections = {'PRINT': 'PRINT', 'MIXED': 'MIXED', 'EIP': 'EIP', 'SUBSYS': 'SUBSYS', 'RESCALE_FORCES': 'RESCALE_FORCES', 'PROPERTIES': 'PROPERTIES', 'DFT': 'DFT', 'QMMM': 'QMMM', 'BSSE': 'BSSE', 'MM': 'MM'}
self._repeated_subsections = {'EXTERNAL_POTENTIAL': '_external_potential1'}
self._attributes = ['EXTERNAL_POTENTIAL_list']
def EXTERNAL_POTENTIAL_add(self, section_parameters=None):
new_section = _external_potential1()
if section_parameters is not None:
if hasattr(new_section, 'Section_parameters'):
new_section.Section_parameters = section_parameters
self.EXTERNAL_POTENTIAL_list.append(new_section)
return new_section
# Source: SINGROUP/pycp2k | pycp2k/classes/_force_eval2.py | lgpl-3.0
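# A usage sketch for the generated section class above (standalone use is
# shown for illustration; in practice the section is reached through pycp2k's
# CP2K input tree rather than instantiated directly):
#
#   force_eval = _force_eval2()
#   force_eval.Method = "QS"
#   pot = force_eval.EXTERNAL_POTENTIAL_add()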
#!/usr/bin/env python
"""\
Test the 2e integral code.
"""
from PyQuante.pyints import contr_coulomb as pycc
from PyQuante.rys import contr_coulomb as ryscc
from PyQuante.hgp import contr_coulomb as hgpcc
from PyQuante.cints import contr_coulomb as ccc
from PyQuante.crys import contr_coulomb as cryscc
from PyQuante.chgp import contr_coulomb as chgpcc
from PyQuante.CGBF import CGBF
from PyQuante.cints import ijkl2intindex
from PyQuante.Ints import getbasis
from PyQuante.Molecule import Molecule
from time import time
def get2ints(bfs,coul_func):
"""Store integrals in a long array in the form (ij|kl) (chemists
notation. We only need i>=j, k>=l, and ij <= kl"""
from array import array
nbf = len(bfs)
totlen = nbf*(nbf+1)*(nbf*nbf+nbf+2)/8
Ints = array('d',[0]*totlen)
for i in range(nbf):
for j in range(i+1):
ij = i*(i+1)/2+j
for k in range(nbf):
for l in range(k+1):
kl = k*(k+1)/2+l
if ij >= kl:
ijkl = ijkl2intindex(i,j,k,l)
Ints[ijkl] = coulomb(bfs[i],bfs[j],bfs[k],bfs[l],
coul_func)
return Ints
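# Worked size check for the packed storage above (nbf = 2): the pairs ij with
# i >= j number nbf*(nbf+1)/2 = 3, the pairs of pairs with ij >= kl number
# 3*4/2 = 6, and totlen = 2*3*(2*2+2+2)/8 = 6 agrees.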
def coulomb(a,b,c,d,coul_func):
"Coulomb interaction between 4 contracted Gaussians"
Jij = coul_func(a.pexps,a.pcoefs,a.pnorms,a.origin,a.powers,
b.pexps,b.pcoefs,b.pnorms,b.origin,b.powers,
c.pexps,c.pcoefs,c.pnorms,c.origin,c.powers,
d.pexps,d.pcoefs,d.pnorms,d.origin,d.powers)
return a.norm*b.norm*c.norm*d.norm*Jij
def maxdiff(a,b):
    md = -1e10
    for i in range(len(a)):
        md = max(md, abs(a[i] - b[i]))
    return md
def test():
#from PyQuante.Basis.sto3g import basis_data
from PyQuante.Basis.p631ss import basis_data
r = 1/0.52918
atoms=Molecule('h2o',atomlist = [(8,(0,0,0)),(1,(r,0,0)),(1,(0,r,0))])
inttol = 1e-6 # Tolerance to which integrals must be equal
bfs = getbasis(atoms,basis_data)
print "Int times: "
t0 = time()
int0 = get2ints(bfs,chgpcc)
t1 = time()
print "CHGP Ints: ",t1-t0
int1 = get2ints(bfs,cryscc)
t2 = time()
print "CRys Ints: ",t2-t1
assert maxdiff(int0,int1)<inttol
int1 = get2ints(bfs,ccc)
t3 = time()
print "CINTS Ints: ",t3-t2
assert maxdiff(int0,int1)<inttol
    int1 = get2ints(bfs,hgpcc)
t4 = time()
print "HGP Ints: ",t4-t3
assert maxdiff(int0,int1)<inttol
int1 = get2ints(bfs,ryscc)
t5 = time()
print "Rys Ints: ",t5-t4
assert maxdiff(int0,int1)<inttol
int1 = get2ints(bfs,pycc)
t6 = time()
print "Py Ints: ",t6-t5
assert maxdiff(int0,int1)<inttol
if __name__ == '__main__': test()
# Sample times (9/28/09, Macbook Air:)
# Int times:
# CHGP Ints: 3.02386283875
# CRys Ints: 2.28243303299
# CINTS Ints: 6.17023396492
# HGP Ints: 250.576164007
# Rys Ints: 204.740512133
# Py Ints: 295.842331886
# Source: berquist/PyQuante | Tests/integrals.py | bsd-3-clause
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from librehatti.catalog.models import PurchaseOrder
from librehatti.catalog.models import Category
from librehatti.catalog.models import PurchasedItem
from librehatti.catalog.models import ModeOfPayment
from librehatti.catalog.models import Product
from librehatti.catalog.models import HeaderFooter
from librehatti.catalog.models import Surcharge
from librehatti.catalog.request_change import request_notify
from librehatti.bills.models import QuotedTaxesApplied
from librehatti.bills.models import QuotedOrder
from librehatti.bills.models import QuotedBill
from librehatti.bills.models import QuotedItem
from librehatti.bills.models import QuotedOrderofSession
from librehatti.bills.models import QuotedOrderNote
from librehatti.bills.models import NoteLine
from librehatti.bills.forms import SelectNoteForm
from librehatti.bills.forms import ItemSelectForm
from librehatti.suspense.models import QuotedSuspenseOrder
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
import useraccounts
from django.db.models import Sum
from django.db.models import Max
import simplejson
from django.core.urlresolvers import reverse
from librehatti.voucher.models import FinancialSession
"""
This view calculates taxes on a quoted order, computes the bill totals,
and saves those values in the database.
"""
@login_required
def quoted_bill_cal(request):
old_post = request.session.get('old_post')
quoted_order_id = request.session.get('quoted_order_id')
quoted_order = QuotedOrder.objects.get(id=quoted_order_id)
quoted_order_obj = QuotedOrder.objects.values('total_discount').\
get(id=quoted_order_id)
quoted_item = QuotedItem.objects.\
filter(quoted_order=quoted_order_id).aggregate(Sum('price'))
total = quoted_item['price__sum']
price_total = total - quoted_order_obj['total_discount']
totalplusdelivery = price_total
surcharge = Surcharge.objects.values('id', 'value', 'taxes_included')
delivery_rate = Surcharge.objects.values('value').\
filter(tax_name='Transportation')
distance = QuotedSuspenseOrder.objects.\
filter(quoted_order=quoted_order_id).aggregate(Sum('distance_estimated'))
if distance['distance_estimated__sum']:
delivery_charges = int(distance['distance_estimated__sum'])*\
delivery_rate[0]['value']
totalplusdelivery = totalplusdelivery + delivery_charges
else:
delivery_charges = 0
for value in surcharge:
surcharge_id = value['id']
surcharge_value = value['value']
surcharge_tax = value['taxes_included']
if surcharge_tax == 1:
taxes = round((totalplusdelivery * surcharge_value)/100)
surcharge_obj = Surcharge.objects.get(id=surcharge_id)
taxes_applied = QuotedTaxesApplied(quoted_order=quoted_order,
surcharge=surcharge_obj, tax=taxes)
taxes_applied.save()
taxes_applied_obj = QuotedTaxesApplied.objects.\
filter(quoted_order=quoted_order_id).aggregate(Sum('tax'))
tax_total = taxes_applied_obj['tax__sum']
grand_total = price_total + tax_total + delivery_charges
amount_received = grand_total
bill = QuotedBill(quoted_order=quoted_order, total_cost=price_total,
total_tax=tax_total, grand_total=grand_total,
delivery_charges=delivery_charges, amount_received=amount_received,
totalplusdelivery=totalplusdelivery)
bill.save()
request.session['old_post'] = old_post
request.session['quoted_order_id'] = quoted_order_id
return HttpResponseRedirect(reverse("librehatti.bills.views.select_note"))
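# Worked example of the arithmetic above (all numbers illustrative): with an
# item total of 1000, a discount of 100 and 10 km of estimated distance at a
# Transportation rate of 5 per km, price_total = 900, delivery_charges = 50
# and totalplusdelivery = 950; a single 10% surcharge with taxes_included
# then contributes round(950 * 10 / 100) = 95, so
# grand_total = 900 + 95 + 50 = 1045.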
@login_required
def quoted_order_added_success(request):
quoted_order_id = request.session.get('quoted_order_id')
details = QuotedOrder.objects.values('buyer__first_name',\
'buyer__last_name', 'buyer__customer__address__street_address',\
'buyer__customer__title', 'buyer__customer__address__city').\
filter(id=quoted_order_id)[0]
return render(request, 'bills/quoted_success.html', {'details':details,
'quoted_order_id':quoted_order_id})
@login_required
def select_note(request):
quoted_order_id = request.session.get('quoted_order_id')
form = SelectNoteForm(initial={'quoted_order':quoted_order_id})
request_status = request_notify()
return render(request, 'bills/select_note.html', \
{'form':form, 'request':request_status})
@login_required
def select_note_save(request):
if request.method == 'POST':
form = SelectNoteForm(request.POST)
if form.is_valid():
formdata = form.cleaned_data
quoted_order = formdata['quoted_order']
quoted_order_id = QuotedOrder.objects.get(id=quoted_order)
note_list = []
for note in formdata['note_line']:
note_list.append(note)
for value in note_list:
obj = QuotedOrderNote(quoted_order=quoted_order_id, note=value)
obj.save()
return HttpResponseRedirect(\
reverse("librehatti.bills.views.quoted_order_added_success"))
else:
return HttpResponseRedirect(\
reverse("librehatti.bills.views.quoted_order_added_success"))
else:
error_type = "404 Forbidden"
error = "Please again place the order"
temp = {'type': error_type, 'message':error}
return render(request, 'error_page.html', temp)
@login_required
def new_note_line(request):
note_line = request.GET['note_line']
obj = NoteLine(note=note_line)
obj.save()
return HttpResponse('')
@login_required
def delete_note(request):
delete_note = request.GET['delete_note']
delete_note_id = delete_note.split(',')
for id in delete_note_id:
NoteLine.objects.filter(id=id).delete()
return HttpResponse('')
@login_required
def quoted_order_of_session(request):
old_post = request.session.get('old_post')
quoted_order_id = request.session.get('quoted_order_id')
quoted_order = QuotedOrder.objects.get(id=quoted_order_id)
quoted_order_obj = QuotedOrder.objects.values('id', 'date_time').\
get(id=quoted_order_id)
quoted_order_date = quoted_order_obj['date_time']
financialsession = FinancialSession.objects.\
values('id', 'session_start_date', 'session_end_date')
for value in financialsession:
start_date = value['session_start_date']
end_date = value['session_end_date']
if start_date <= quoted_order_date <= end_date:
session_id = value['id']
session = FinancialSession.objects.get(id=session_id)
max_id = QuotedOrderofSession.objects.all().aggregate(Max('id'))
if max_id['id__max'] == None:
obj = QuotedOrderofSession(quoted_order=quoted_order,\
session=session, quoted_order_session=1)
obj.save()
else:
quoted_order_of_session = QuotedOrderofSession.objects.\
values('quoted_order_session', 'session').get(id=max_id['id__max'])
if quoted_order_of_session['session'] == session_id:
obj = QuotedOrderofSession(quoted_order=quoted_order,\
session=session, quoted_order_session=\
quoted_order_of_session['quoted_order_session']+1)
obj.save()
else:
obj = QuotedOrderofSession(quoted_order=quoted_order,\
session=session, quoted_order_session=1)
obj.save()
request.session['old_post'] = old_post
request.session['quoted_order_id'] = quoted_order_id
return HttpResponseRedirect(\
reverse("librehatti.suspense.views.quoted_add_distance")) | s-monisha/LibreHatti | src/librehatti/bills/views.py | Python | gpl-2.0 | 7,704 | 0.005452 |
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.models import Group
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.urls import reverse
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from libya_elections.abstract import AbstractTimestampTrashBinModel
from libya_elections.libya_bread import StartEndTimeFormatterMixin, TimestampFormatterMixin
from libya_elections.phone_numbers import FormattedPhoneNumberMixin, PhoneNumberField
BUTTON_GO_BACK = "goback"
BUTTON_START_OVER = "startover"
BUTTON_YES = "yes"
BUTTON_NO = "no"
BUTTON_NO_CITIZEN = "no_citizen"
BUTTON_MATCH = "match"
BUTTON_NO_MATCH = "nomatch"
BUTTON_CONTINUE = "continue"
BUTTON_HUNG_UP = "hungup"
BUTTON_SUBMIT = "submit"
BUTTON_UNABLE = "unable"
BUTTON_DONE = "done"
BUTTON_CHOICES = (
(BUTTON_GO_BACK, _('Previous screen')),
(BUTTON_START_OVER, _('Start from Beginning')),
(BUTTON_YES, _('Yes')),
(BUTTON_NO, _('No')),
(BUTTON_NO_CITIZEN, _('No, Caller is a Citizen')),
(BUTTON_MATCH, _('Name and ID match')),
(BUTTON_NO_MATCH, _('Name and ID do not match')),
(BUTTON_CONTINUE, _('Continue')),
(BUTTON_HUNG_UP, _('Caller hung up')),
(BUTTON_SUBMIT, _('Submit')),
(BUTTON_UNABLE, _('Unable to Provide')),
(BUTTON_DONE, _('Done')),
)
# To make it easier to look up the text for a button
BUTTON_TEXT = dict(BUTTON_CHOICES)
# CSS class to use for rendering each button, if not overridden by the screen.
DEFAULT_BUTTON_CLASSES = {
BUTTON_GO_BACK: 'info',
BUTTON_YES: 'success', # BLUE
BUTTON_MATCH: 'success', # BLUE
BUTTON_CONTINUE: 'success', # BLUE
BUTTON_SUBMIT: 'success',
BUTTON_DONE: 'success',
BUTTON_NO: 'warning', # RED
BUTTON_NO_CITIZEN: 'warning', # RED
BUTTON_UNABLE: 'warning', # RED
BUTTON_NO_MATCH: 'warning', # RED
BUTTON_HUNG_UP: 'inverse', # BLACK
}
# Group names - for permissions
HELP_DESK_OPERATORS_GROUP = "Help Desk Operators"
HELP_DESK_SUPERVISORS_GROUP = "Help Desk Supervisors"
HELP_DESK_SENIOR_STAFF_GROUP = "Help Desk Senior Staff"
HELP_DESK_VIEW_GROUP = "Help Desk Viewers"
HELP_DESK_MANAGERS_GROUP = "Help Desk Managers"
# Permissions to give each group
# Operators are not quite a superset of the view-only permissions because operators
# don't necessarily need to be able to read the reports, I don't think.
# After that, though, each group contains the previous group's permissions,
# plus some new ones.
HELP_DESK_GROUP_PERMISSIONS = {
HELP_DESK_VIEW_GROUP: [
'help_desk.read_report',
'help_desk.read_case',
],
HELP_DESK_OPERATORS_GROUP: [
'help_desk.add_case',
'help_desk.change_case',
'help_desk.read_case'
],
HELP_DESK_SUPERVISORS_GROUP: [
HELP_DESK_VIEW_GROUP,
HELP_DESK_OPERATORS_GROUP,
'help_desk.add_operator',
'help_desk.add_update',
'help_desk.cancel_registration_change',
'help_desk.mark_case',
],
HELP_DESK_SENIOR_STAFF_GROUP: [
HELP_DESK_SUPERVISORS_GROUP,
'help_desk.recommend_case',
],
HELP_DESK_MANAGERS_GROUP: [
HELP_DESK_SENIOR_STAFF_GROUP,
'help_desk.browse_fieldstaff',
'help_desk.read_fieldstaff',
'help_desk.add_fieldstaff',
'help_desk.add_senior_staff',
'help_desk.add_supervisor',
'help_desk.add_viewonly',
'help_desk.change_fieldstaff',
'help_desk.change_staff_password',
'help_desk.resolve_case',
'help_desk.suspend_fieldstaff',
],
}
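# A hypothetical helper (not part of this module) sketching how the nested
# group names inside HELP_DESK_GROUP_PERMISSIONS above could be flattened
# into permission sets; entries naming another group recurse, all other
# entries are permission strings:
#
#   def expand_group_permissions(group_name):
#       perms = set()
#       for entry in HELP_DESK_GROUP_PERMISSIONS[group_name]:
#           if entry in HELP_DESK_GROUP_PERMISSIONS:
#               perms |= expand_group_permissions(entry)
#           else:
#               perms.add(entry)
#       return perms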
def all_help_desk_groups():
return list(Group.objects.filter(name__in=HELP_DESK_GROUP_PERMISSIONS.keys(),))
# 1. Citizen has complained about the service.
# Action: Mark as seen
#
# 2. Accidentally unlocked.
# Action: Re-lock or Ignore
#
# 3. Citizen changes their mind about unlocking.
# Action: Re-lock or Ignore
#
# 4. Phone call was harassing.
# Action: Mark as seen
REASONS_TO_MARK = (
('complaint', _('Person has complained about the service.')),
('accident', _('Accidentally unlocked.')),
('changed_mind', _('Person on the end of the phone changes their mind about unlocking.')),
('harassing', _('Phone call was harassing. ')),
('other', _('Other reason, see comments.')),
)
ALLOWED_ACTIONS = {
'complaint': ['seen'],
'accident': ['relock', 'ignore'],
'changed_mind': ['relock', 'ignore'],
'harassing': ['seen'],
'other': ['seen'],
}
CASE_ACTIONS = (
('seen', _('Mark as seen.')),
('relock', _('Re-lock.')),
('ignore', _('Ignore.')),
)
class FieldStaff(FormattedPhoneNumberMixin, AbstractTimestampTrashBinModel):
# Note that field staff are not website "User"s. Or if they are,
# we don't know or care.
name = models.CharField(_('name'), max_length=160, default='')
staff_id = models.IntegerField(
_('staff id'),
unique=True,
validators=[
MinValueValidator(100),
MaxValueValidator(999),
]
)
phone_number = PhoneNumberField(_('phone number'))
suspended = models.BooleanField(_('suspended'), blank=True, default=False)
class Meta:
ordering = ['name', 'staff_id']
verbose_name = _("field staff member")
verbose_name_plural = _("field staff members")
permissions = [
('browse_fieldstaff', _('Can browse field staff')),
('read_fieldstaff', _('Can read field staff')),
('suspend_fieldstaff', _('Can suspend field staff')), # Custom
]
def __str__(self): # pragma: no cover
return '%s (%s)' % (self.name, self.formatted_phone_number())
def get_absolute_url(self):
return reverse('read_fieldstaff', args=[self.pk])
# Note: Choices are char fields instead of integers to make it easier
# for other applications to make use of the data in the database.
class Case(StartEndTimeFormatterMixin, AbstractTimestampTrashBinModel):
# start and end time of the call
start_time = models.DateTimeField(_('start time'), default=now)
end_time = models.DateTimeField(_('end time'), null=True, blank=True)
current_screen = models.ForeignKey(
'help_desk.ScreenRecord', related_name='current_case', verbose_name=_('current screen'),
null=True, blank=True,
on_delete=models.SET_NULL,
)
# Incoming phone number
phone_number = PhoneNumberField(
_('phone number'),
null=True,
default=None,
blank=True, # Older cases didn't collect this information
)
# operator handling the call
operator = models.ForeignKey(settings.AUTH_USER_MODEL,
verbose_name=_('operator'),
related_name='cases_as_operator',
on_delete=models.CASCADE)
# field staff making the call
field_staff = models.ForeignKey('FieldStaff', null=True, blank=True,
verbose_name=_('field staff'),
related_name='help_desk_cases',
on_delete=models.PROTECT,
)
# citizen whose registration is being worked with
citizen = models.ForeignKey('civil_registry.Citizen',
verbose_name=_('citizen'),
related_name='help_desk_cases',
on_delete=models.PROTECT,
# Nullable because when a call starts we don't know who
# it's for (and might never find out).
null=True, blank=True,
)
@property
def blocked(self):
"""Return True if there's a corresponding citizen and they're blocked from registering
and voting"""
return self.citizen and self.citizen.blocked
#: whether user's changes on their registered phone have been increased
changes_increased = models.BooleanField(_('changes increased'), default=False)
registration = models.ForeignKey(
'register.Registration', null=True, blank=True,
help_text=_("Registration of citizen at time of call"),
verbose_name=_('registration'),
related_name='cases',
on_delete=models.PROTECT,
)
# Classifications of a case, re: being reviewed
NOT_MARKED = 'unmarked'
FOR_REVIEW = 'marked'
RECOMMENDED = 'recommended'
RESOLVED = 'resolved'
REVIEW_CLASSIFICATION_CHOICES = (
(NOT_MARKED, _('Not marked for review')),
(FOR_REVIEW, _('Marked for review')),
(RECOMMENDED, _('Recommendation made')),
(RESOLVED, _('Resolved')),
)
review_classification = models.CharField(
_('review classification'),
choices=REVIEW_CLASSIFICATION_CHOICES, max_length=12, default=NOT_MARKED
)
reason_marked = models.CharField(
_('reason marked'),
choices=REASONS_TO_MARK, max_length=14, default='', blank=True, null=False,
)
recommended_action = models.CharField(
_('recommended action'),
choices=CASE_ACTIONS, max_length=6, default='', blank=True, null=False,
)
# Possible outcomes of a call
ABANDONED = 'abandoned' # operator started a new call without closing this one
HUNG_UP = 'hungup'
INVALID_STAFF_ID = 'invalid_staff_id'
INVALID_STAFF_NAME = 'invalid_staff_name'
INVALID_STAFF_PHONE = 'invalid_staff_phone'
INVALID_NID = 'invalid_nid'
INVALID_NAME_DOB = 'invalid_name_dob'
INVALID_FRN = 'invalid_frn'
UNREGISTERED = 'unregistered'
REGISTRATION_OKAY = 'registered'
SAME_PHONE = 'same_phone'
INCREASED_CHANGES = 'increased_changes'
UNLOCKED = 'unlocked'
RELOCKED = 'relocked'
CALL_OUTCOME_CHOICES = (
(HUNG_UP, _('Hung up')),
(INVALID_STAFF_ID, _('Invalid staff ID')),
(INVALID_STAFF_NAME, _('Wrong staff name')),
(INVALID_STAFF_PHONE, _('Wrong staff phone')),
(INVALID_NID, _('Invalid NID')),
(INVALID_NAME_DOB, _('Invalid name or birth year')),
(INVALID_FRN, _('Invalid FRN and mother\'s name')),
(UNREGISTERED, _('Not registered')),
(REGISTRATION_OKAY, _('No help needed')),
(SAME_PHONE, _('No change')),
(INCREASED_CHANGES, _('Increased changes')),
(UNLOCKED, _('Unlocked')),
(RELOCKED, _('Relocked')),
(ABANDONED, _('Abandoned')),
)
call_outcome = models.CharField(
_('call outcome'),
choices=CALL_OUTCOME_CHOICES, max_length=20, blank=True, null=True,
)
ALL_CALL_OUTCOMES = [x[0] for x in CALL_OUTCOME_CHOICES]
class Meta:
verbose_name = _("case")
verbose_name_plural = _("cases")
ordering = ['-start_time']
# Run 'python manage.py update_permissions' if the permissions change. Django
# only updates these during syncdb if it is creating a new table
# for this model.
permissions = (
('read_case', 'Can read case'),
('cancel_registration_change', 'Can cancel change period'),
('mark_case', 'Can mark case for review'),
('recommend_case', 'Can recomment action on case'),
('resolve_case', 'Can mark a case as resolved'),
('read_report', 'Can read reports'),
('add_operator', 'Can add operator'),
('add_supervisor', 'Can add supervisor'),
('add_senior_staff', 'Can add senior staff'),
('add_viewonly', 'Can add view-only users'),
('add_manager', 'Can add help desk managers'),
('change_staff_password', 'Can set password for help desk staff'),
)
def __str__(self): # pragma: no cover
x = _("Call started %(start_time)s. Operator %(operator)s. ") % \
{'start_time': self.formatted_start_time(),
'operator': self.operator.get_full_name() or self.operator.username}
if self.citizen:
x += _("Citizen is %s. ") % str(self.citizen)
if self.end_time:
x += _("Call ended %s. ") % self.end_time
return x
def reset(self):
"""
Operator wants to start the case over. Clear any data that was collected
during the call so far. Does not save.
"""
self.current_screen = None
self.field_staff = None
self.citizen = None
self.changes_increased = False
self.registration = None
self.review_classification = self.NOT_MARKED
self.reason_marked = ''
self.recommended_action = ''
self.call_outcome = None
self.screens.all().delete()
self.updates.all().delete()
def get_operator_url(self):
from help_desk.screens import FIRST_SCREEN
return reverse(self.current_screen.name if self.current_screen else FIRST_SCREEN,
args=[self.pk])
def get_absolute_url(self):
return reverse('case_detail', args=[self.pk])
def start_screen(self, name):
self.current_screen = ScreenRecord.objects.create(
case=self,
name=name,
)
self.save(update_fields=['current_screen'])
@property
def last_screen(self):
"""Return the last screen of this case"""
try:
return self.screens.order_by('-start_time')[0]
except IndexError:
return None
@property
def has_previous_screen(self):
"""We can only 'go back' if there was a screen before this one"""
return self.screens.count() > 1
def end(self):
self.end_time = now()
one_hour = timedelta(hours=1)
# if the call lasted more than 1 hour set time to 1 hour
if (self.end_time - self.start_time) > one_hour:
self.end_time = self.start_time + one_hour
def increase_changes_if_needed(self):
"""
If the citizen is out of changes, give them 3 more, and set outcome
to INCREASED_CHANGES
"""
if self.registration.change_count >= self.registration.max_changes:
self.registration.max_changes = self.registration.change_count + 3
self.registration.save()
self.call_outcome = Case.INCREASED_CHANGES
self.changes_increased = True
self.save(update_fields=['call_outcome', 'changes_increased'])
def registration_unlocked(self):
return self.registration and self.registration.unlocked
def unlock_registration(self):
if self.registration:
self.registration.unlocked_until = now() + settings.LENGTH_OF_REGISTRATION_UNLOCKING
self.registration.save()
def relock_registration(self):
        if self.registration_unlocked():
self.registration.unlocked_until = None
self.registration.save()
if self.call_outcome == Case.UNLOCKED:
self.call_outcome = Case.RELOCKED
self.save()
def is_under_review(self):
return self.review_classification in [Case.FOR_REVIEW, Case.RECOMMENDED]
def get_state(self):
if self.review_classification in (Case.FOR_REVIEW, Case.RECOMMENDED):
return self.get_review_classification_display()
elif self.end_time:
return _('Complete')
else:
return _('In progress')
def get_length_in_seconds(self):
"""
Return length of call in seconds as a float,
or 0 if call has not ended.
"""
if not self.end_time:
return 0.0
delta = self.end_time - self.start_time
return delta.total_seconds()
@property
def field_staff_validated(self):
"""
Returns True if the call is being made by a field staffer and their
identity has been verified.
"""
from .screens import CHECK_STAFF_NAME
# We must have a staff ID and both name and ID have been validated,
# and the phone number too. If we got past CHECK_STAFF_NAME with
# a match, we've successfully checked all that.
return all([
self.field_staff is not None,
ScreenRecord.objects.filter(
case=self,
name=CHECK_STAFF_NAME,
button=BUTTON_MATCH
).exists(),
])
@property
def national_id_validated(self):
"""
Returns True if we have a Citizen NID and the caller
has provided a matching name & DOB
"""
from .screens import CHECK_NAME_AND_DOB
return all([
self.citizen is not None,
ScreenRecord.objects.filter(
case=self,
name=CHECK_NAME_AND_DOB,
button=BUTTON_YES
).exists()
])
class ScreenRecord(AbstractTimestampTrashBinModel):
"""
A record of each screen that was visited during the call,
and the operator's input.
"""
case = models.ForeignKey(Case, related_name='screens', on_delete=models.CASCADE)
name = models.CharField("screen name", max_length=40)
start_time = models.DateTimeField(default=now)
end_time = models.DateTimeField(null=True, blank=True)
button = models.CharField(
help_text="Button the operator pressed to finish the screen",
choices=BUTTON_CHOICES, max_length=10,
blank=True,
)
input = models.CharField(
help_text="Input field from screen",
blank=True, default='',
max_length=80,
)
class Meta:
verbose_name = _("screen record")
verbose_name_plural = _("screen records")
def __str__(self): # pragma: no cover
return self.name
def end(self, case, button=None, input=None):
# 'case' is the Case object to update - because self.case might
# be a different Python object than the one the caller is working with.
self.button = button or ''
self.input = input or ''
self.end_time = now()
self.save()
case.current_screen = None
case.save()
class Update(TimestampFormatterMixin, AbstractTimestampTrashBinModel):
"""
An update records any time someone changes the case's state AFTER the
call (marks it for review, adds a comment, recommends an action,
etc.)
"""
MARK_FOR_REVIEW = 'mark'
COMMENT = 'comment'
RECOMMEND = 'recommend'
CANCEL = 'cancel'
RESOLVE = 'resolve'
UPDATE_KIND_CHOICES = (
(COMMENT, _("Comment")),
(MARK_FOR_REVIEW, _("Mark case for review")),
(RECOMMEND, _("Recommend action on case")),
(CANCEL, _("Cancel open re-registration period")),
(RESOLVE, _("Mark case as resolved")),
)
case = models.ForeignKey(Case, verbose_name=_('case'), related_name='updates',
on_delete=models.CASCADE)
kind = models.CharField(verbose_name=_('kind of update'),
choices=UPDATE_KIND_CHOICES, max_length=10, default=COMMENT)
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'),
on_delete=models.PROTECT)
timestamp = models.DateTimeField(_('timestamp'), default=now)
reason_marked = models.CharField(
_('reason marked'),
choices=REASONS_TO_MARK, max_length=14, default='', blank=True, null=False,
)
recommended_action = models.CharField(
_('recommended action'),
choices=CASE_ACTIONS, max_length=6, default='', blank=True, null=False,
)
comment = models.TextField(verbose_name=_('comment'), blank=True)
class Meta:
ordering = ['timestamp']
verbose_name = _("update")
verbose_name_plural = _("updates")
def __str__(self):
return "%s %s" % (self.get_kind_display(), self.case)
class ActiveRange(AbstractTimestampTrashBinModel):
"An extension to the User model which allows for dated expiration."
end_date = models.DateField(_('end date'), null=True)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_('user'),
related_name='active_range',
on_delete=models.CASCADE)
class Meta:
verbose_name = _("active range")
verbose_name_plural = _("active ranges")
| SmartElect/SmartElect | help_desk/models.py | Python | apache-2.0 | 20,453 | 0.001125 |
ThemeID = 'UI/ColorThemes/Custom'
#themeID, baseColor, hiliteColor
THEMES = (('UI/ColorThemes/Custom', (0.05, 0.05, 0.05), (0.4, 0.8, 1.0)),)
| EVEModX/Mods | mods/CustomThemeColor/ThemeConfig.py | Python | mit | 143 | 0.006993 |
# Created By: Virgil Dupras
# Created On: 2010-09-09
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from hscommon.testutil import eq_
from ...model.account import AccountType
from ..base import TestApp, with_app
# ---
@with_app(TestApp)
def test_new_item_in_empty_table(app):
# Since we have no txn, we have nothing to show in the gltable. Performing new item has no
# effect.
app.show_glview()
app.mw.new_item() # no crash
eq_(len(app.gltable), 0)
# ---
def app_two_sided_txn():
app = TestApp()
app.add_accounts('foo', 'bar')
app.add_txn(description='hello', from_='foo', to='bar', amount='42')
app.show_glview()
return app
@with_app(app_two_sided_txn)
def test_delete_entry(app):
app.gltable.select([4]) # the 'foo' entry of 'hello'
app.mw.delete_item()
# both entries were removed
eq_(len(app.gltable), 0)
@with_app(app_two_sided_txn)
def test_dont_show_empty_accounts(app):
# When accounts have nothing to show, don't put them in the table.
app.drsel.select_prev_date_range()
eq_(len(app.gltable), 0)
@with_app(app_two_sided_txn)
def test_new_entry_with_account_row_selected(app):
# Adding a new entry when the account row is selected doesn't cause a crash and creates a new
# entry.
app.gltable.select([0])
app.mw.new_item() # no crash
eq_(len(app.gltable), 7)
eq_(app.gltable.selected_indexes, [2])
@with_app(app_two_sided_txn)
def test_rows_data_with_two_sided_txn(app):
# In a general ledger, we end up with 6 lines: 2 account lines (titles), two entry lines as well
# as two total lines.
eq_(len(app.gltable), 6)
ACCOUNT_ROWS = {0, 3}
BOLD_ROWS = {2, 5}
EDITABLE_ROWS = {1, 4}
for i in range(6):
eq_(app.gltable.is_account_row(app.gltable[i]), i in ACCOUNT_ROWS)
eq_(app.gltable.is_bold_row(app.gltable[i]), i in BOLD_ROWS)
eq_(app.gltable.can_edit_cell('description', i), i in EDITABLE_ROWS)
eq_(app.gltable[0].account_name, 'bar')
eq_(app.gltable[3].account_name, 'foo')
eq_(app.gltable[1].description, 'hello')
eq_(app.gltable[1].debit, '42.00')
eq_(app.gltable[4].description, 'hello')
eq_(app.gltable[4].credit, '42.00')
@with_app(app_two_sided_txn)
def test_set_amount_without_shown_account(app):
# Previously, setting an amount while mainwindow.shown_account to None resulted in a crash
app.gltable[1].debit = '42' # no crash
eq_(app.gltable[1].debit, '42.00')
# ---
def app_txns_in_different_date_ranges():
app = TestApp()
app.drsel.select_month_range()
app.add_accounts('foo', 'bar')
app.add_txn(date='10/08/2010', description='first', from_='foo', to='bar', amount='42')
app.add_txn(date='10/09/2010', description='second', from_='foo', to='bar', amount='42')
app.drsel.select_prev_date_range()
app.show_glview()
return app
@with_app(app_txns_in_different_date_ranges)
def test_edit_item(app):
# the table correctly updates txn selection so that when edit item is called, the right txn
# shown up in the panel.
tpanel = app.mw.edit_item()
eq_(tpanel.description, 'first')
@with_app(app_txns_in_different_date_ranges)
def test_only_show_rows_in_date_range(app):
# Rows that are out of the date range aren't shown.
eq_(app.gltable[1].description, 'first')
@with_app(app_txns_in_different_date_ranges)
def test_previous_balance_rows(app):
# We show previous balance rows where appropriate
app.drsel.select_next_date_range()
eq_(app.gltable[1].description, 'Previous Balance')
eq_(app.gltable[1].balance, '42.00')
assert app.gltable.is_bold_row(app.gltable[1])
# ---
def app_txn_in_income():
app = TestApp()
app.add_account('foo', account_type=AccountType.Income)
app.add_account('bar')
app.add_txn(description='hello', from_='foo', to='bar', amount='42')
app.show_glview()
return app
@with_app(app_txn_in_income)
def test_balance_cell_is_empty_for_income_entries(app):
# Balance doesn't make any sense in income/expense, so don't show it
row = app.gltable[4] # income account shown second
eq_(row.balance, '')
| tuxlifan/moneyguru | core/tests/gui/general_ledger_table_test.py | Python | gpl-3.0 | 4,362 | 0.005961 |
"модуль із класом Core що включає в собі деякі методи комбінаторики, які використовуються в програмі"
import itertools
from math import factorial as fact
class Core:
    # Generator methods for combinations/permutations
    #perm = itertools.permutations # permutations
    comb = itertools.combinations # combinations without repetition (order does not matter)
    comb_w_repl = itertools.combinations_with_replacement # combinations with repetition (order does not matter)
@staticmethod
    def place(seq, N, prev=[]): # arrangements of seq taken N at a time (order MATTERS)
for char in seq:
res = prev + [char]
if len(res) == N:
yield res
else:
new_s = list(seq)
new_s.remove(char)
for res in Core.place(new_s, N, prev=res):
yield res
@staticmethod
    def place_w_repl(seq, depth, prev=()): # arrangements with repetition (order MATTERS)
for char in tuple(seq):
res = prev + (char,)
if len(res) == depth:
yield res
else:
for res in Core.place_w_repl(seq, depth, prev=res):
yield res
@staticmethod
    def perm(seq, uselessK): # permutations
for res in itertools.permutations(seq):
yield res
#
    # Methods for computing the number of combinations/permutations
@staticmethod
def comb_number(seq, k):
N = len(seq)
return int(fact(N)/(fact(k) * fact(N-k)))
@staticmethod
def comb_w_repl_number(seq, k):
N = len(seq)
return int(fact(N+k-1)/(fact(k) * fact(N-1)))
@staticmethod
def place_number(seq, k):
N = len(seq)
return int(fact(N)/fact(N-k))
@staticmethod
def place_w_repl_number(seq, k):
N = len(seq)
return N**k
@staticmethod
def perm_number(seq, uselessK=None):
N = len(seq)
return fact(N)
# testing
if __name__ == '__main__':
for i in Core.place_w_repl("abcd", 3):
print(i, end=" ")
print(Core.place_w_repl_number("abcd", 3))
for i in Core.place("abcdef", 3):
print(i, end=" ")
print(Core.place_number("abcdef", 3))
print(len(list(Core.perm(range(10), None))), Core.perm_number(range(10), None))
l=[]
for i in Core.comb(range(15545), 15):
l.append(i)
if len(l)>50: break
print(l)
print( len ( list(Core.place_w_repl("01", 8)) ) )
print( len ( list(Core.place("abcca", 3)) ) ) | bodik10/Combinatorics | core.py | Python | gpl-3.0 | 3,143 | 0.013309 |
from datetime import datetime
from NetCatKS.Logger import Logger
from NetCatKS.Components import BaseRootAPI
class Time(BaseRootAPI):
def __init__(self, factory):
super(Time, self).__init__(factory)
self.factory = factory
self.logger = Logger()
def process_factory(self):
if self.factory.time == 'get':
self.factory.time = str(datetime.now())
else:
self.factory.time = 'service unavailable'
self.logger.debug('IN TIME API: {}'.format(self.factory.to_dict()))
return self.factory
| dimddev/NetCatKS | examples/projects/rootapis/web_time_server/components/adapters/time/__init__.py | Python | bsd-2-clause | 587 | 0.003407 |
"""
Given two strings s and t which consist of only lowercase letters.
String t is generated by random shuffling string s and then add one more letter at a random position.
Find the letter that was added in t.
Example:
Input:
s = "abcd"
t = "abcde"
Output:
e
Explanation:
'e' is the letter that was added.
"""
class Solution(object):
def findTheDifference(self, s, t):
"""
:type s: str
:type t: str
:rtype: str
"""
mapping = [0] * 26
for c in s:
mapping[ord(c) - ord('a')] += 1
for c in t:
mapping[ord(c) - ord('a')] -= 1
for idx, num in enumerate(mapping):
if num < 0:
return str(chr(ord('a') + idx))
return None
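# A quick sanity check (illustrative only, reusing the worked example from the
# problem statement above):
if __name__ == '__main__':
    assert Solution().findTheDifference("abcd", "abcde") == "e"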
| franklingu/leetcode-solutions | questions/find-the-difference/Solution.py | Python | mit | 760 | 0.001316 |
from codecs import open
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='serpy',
version='0.3.1',
description='ridiculously fast object serialization',
long_description=long_description,
url='https://github.com/clarkduvall/serpy',
author='Clark DuVall',
author_email='clark.duvall@gmail.com',
license='MIT',
install_requires=['six'],
test_suite='tests',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'validation', 'schema', 'fast'),
packages=find_packages(exclude=[
'contrib',
'docs',
'tests*',
'benchmarks'
]),
)
| clarkduvall/serpy | setup.py | Python | mit | 1,452 | 0 |
from __future__ import print_function
from pylab import *
import numpy as np
import cv2
import sys
def main(filenames):
for filename in filenames:
print("Processing %s" % filename)
img = cv2.imread(filename)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Using opencv
cv2.imshow('image %dx%d' % (img.shape[1],img.shape[0]),img)
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == '__main__':
if len(sys.argv) > 1:
filenames = sys.argv[1:]
else:
filenames = ['input/6.jpg']
main(filenames)
| Elucidation/ChessboardDetect | base_imgload.py | Python | mit | 538 | 0.01487 |
import unittest
import os
import flask
from model import *
from controller import controller
from Herkansing6B import views
from Herkansing6B import app
import tempfile
modelClass = Model()
class Test_Model_InputuserTest(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
#test InputuserTest function in combination with the /inputusertest route (source: views.py)
def test_model_InputuserTest(self):
self.assertTrue(modelClass.InputuserTest("A","B","C","D","E","f","g","h","i","j"), ["a","b","c","d","e","f","g","h","i","j"])
class Test_Model_ResultTest(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
#test ResultTest function in combination with the /resulttest route (source: views.py)
def test_model_ResultTest_score_is_100(self):
self.assertTrue(modelClass.ResultTest(["a","b","c","d","e","f","g","h","i","j"], ["a","b","c","d","e","f","g","h","i","j"]), 100)
#test ResultTest function in combination with the /resulttest route (source: views.py)
def test_model_ResultTest_score_is_60(self):
self.assertTrue(modelClass.ResultTest(["a","b","c","d","e","f","d","c","b","a"], ["a","b","c","d","e","f","g","h","i","j"]), 60)
class Test_Model_Easyanswers(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
#test Easyanswers function in combination with the /easytest route (source: views.py)
def test_model_Easyanswers(self):
response = self.app.post('/easytest', data = dict(), follow_redirects=True)
self.assertTrue(["to smoke", "laughing", "to go", "help", "to quit", "shouting", "to cook", "to open", "to change", "smoking"], response.data)
class Test_Model_Mediumanswers(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
#test Mediumanswers function in combination with the /mediumtest route (source: views.py)
def test_model_Mediumanswers(self):
response = self.app.post('/mediumtest', data = dict(), follow_redirects=True)
self.assertTrue(["getting", "stealing", "reading", "to go", "going", "to speak", "working", "to talk", "going", "playing"], response.data)
class Test_Model_Hardanswers(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
#test Hardanswers function in combination with the /hardtest route (source: views.py)
def test_model_Hardanswers(self):
response = self.app.post('/hardtest', data = dict(), follow_redirects=True)
self.assertTrue(["to help", "to do", "to become", "becoming", "to travel", "to be", "to speak", "seeing", "to call", "to go"], response.data)
if __name__ == '__main__':
unittest.main() | John078/DEV6B | Herkansing6B/test_unit.py | Python | mit | 2,881 | 0.03228 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 0, transform = "RelativeDifference", sigma = 0.0, exog_count = 0, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_0/ar_12/test_artificial_32_RelativeDifference_MovingAverage_0_12_0.py | Python | bsd-3-clause | 276 | 0.083333 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
PYPI_RST_FILTERS = (
# Replace code-blocks
(r'\.\.\s? code-block::\s*(\w|\+)+', '::'),
# Replace image
(r'\.\.\s? image::.*', ''),
# Remove travis ci badge
(r'.*travis-ci\.org/.*', ''),
# Remove pypip.in badges
(r'.*pypip\.in/.*', ''),
(r'.*crate\.io/.*', ''),
(r'.*coveralls\.io/.*', ''),
)
def rst(filename):
'''
Load rst file and sanitize it for PyPI.
Remove unsupported github tags:
- code-block directive
- travis ci build badge
'''
content = open(filename).read()
for regex, replacement in PYPI_RST_FILTERS:
content = re.sub(regex, replacement, content)
return content
def required(filename):
with open(filename) as f:
packages = f.read().splitlines()
return packages
setup(
name="serialkiller-plugins",
version="0.0.2",
description="Plugins for serialkiller project",
long_description=rst('README.rst') + rst('CHANGELOG.txt'),
author="Bruno Adelé",
author_email="Bruno Adelé <bruno@adele.im>",
url="https://github.com/badele/serialkiller-plugins",
license="GPL",
install_requires=required('requirements/base.txt'),
setup_requires=[],
tests_require=[
'pep8',
'coveralls'
],
test_suite='tests',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
classifiers=[
'Programming Language :: Python',
],
)
| badele/serialkiller-plugins | setup.py | Python | gpl-3.0 | 1,664 | 0 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2022, GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import io
import os
import ast
import json
import logging
import operator
import itertools
from collections import namedtuple, defaultdict
import toml
import numpy
from openquake.baselib import hdf5
from openquake.baselib.node import Node as N, context
from openquake.baselib.general import duplicated, BASE94, group_array
from openquake.hazardlib import valid, nrml, pmf, lt, InvalidFile
from openquake.hazardlib.gsim.mgmpe.avg_poe_gmpe import AvgPoeGMPE
from openquake.hazardlib.gsim.base import CoeffsTable
from openquake.hazardlib.imt import from_string
BranchTuple = namedtuple('BranchTuple', 'trt id gsim weight effective')
class InvalidLogicTree(Exception):
pass
# manage the legacy logicTreeBranchingLevel nodes
def bsnodes(fname, branchinglevel):
if branchinglevel.tag.endswith('logicTreeBranchingLevel'):
if len(branchinglevel) > 1:
raise InvalidLogicTree(
'%s: Branching level %s has multiple branchsets'
% (fname, branchinglevel['branchingLevelID']))
return branchinglevel.nodes
elif branchinglevel.tag.endswith('logicTreeBranchSet'):
return [branchinglevel]
else:
raise ValueError('Expected BranchingLevel/BranchSet, got %s' %
branchinglevel)
def fix_bytes(record):
# convert a record with bytes fields into a dictionary of strings
dic = {}
for n in record.dtype.names:
v = record[n]
dic[n] = v.decode('utf-8') if isinstance(v, bytes) else v
return dic
class ImtWeight(object):
"""
A composite weight by IMTs extracted from the gsim_logic_tree_file
"""
def __init__(self, branch, fname):
with context(fname, branch.uncertaintyWeight):
nodes = list(branch.getnodes('uncertaintyWeight'))
if 'imt' in nodes[0].attrib:
raise InvalidLogicTree('The first uncertaintyWeight has an imt'
' attribute')
self.dic = {'weight': float(nodes[0].text)}
imts = []
for n in nodes[1:]:
self.dic[n['imt']] = float(n.text)
imts.append(n['imt'])
if len(set(imts)) < len(imts):
raise InvalidLogicTree(
'There are duplicated IMTs in the weights')
def __mul__(self, other):
new = object.__new__(self.__class__)
if isinstance(other, self.__class__):
keys = set(self.dic) | set(other.dic)
new.dic = {k: self[k] * other[k] for k in keys}
else: # assume a float
new.dic = {k: self.dic[k] * other for k in self.dic}
return new
__rmul__ = __mul__
def __add__(self, other):
new = object.__new__(self.__class__)
if isinstance(other, self.__class__):
new.dic = {k: self.dic[k] + other[k] for k in self.dic}
else: # assume a float
new.dic = {k: self.dic[k] + other for k in self.dic}
return new
__radd__ = __add__
def __truediv__(self, other):
new = object.__new__(self.__class__)
if isinstance(other, self.__class__):
new.dic = {k: self.dic[k] / other[k] for k in self.dic}
else: # assume a float
new.dic = {k: self.dic[k] / other for k in self.dic}
return new
def is_one(self):
"""
Check that all the inner weights are 1 up to the precision
"""
return all(abs(v - 1.) < pmf.PRECISION for v in self.dic.values() if v)
def __getitem__(self, imt):
try:
return self.dic[imt]
except KeyError:
return self.dic['weight']
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.dic)
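# Illustrative behaviour (hypothetical values): for an ImtWeight w with
# w.dic == {'weight': 0.4, 'PGA': 0.5}, w['PGA'] is 0.5 while w['SA(0.1)']
# falls back to w['weight'], i.e. 0.4 (see ImtWeight.__getitem__ above).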
def keyno(branch_id, bsno, brno, fname='', chars=BASE94):
"""
:param branch_id: a branch ID string
:param bsno: number of the branchset (starting from 0)
:param brno: number of the branch in the branchset (starting from 0)
:returns: a short unique alias for the branch_id
"""
if not set(branch_id) <= set(chars):
        raise ValueError('invalid branch ID %r in %s' % (branch_id, fname))
return chars[brno] + str(bsno)
class GsimLogicTree(object):
"""
A GsimLogicTree instance is an iterable yielding `Realization`
tuples with attributes `value`, `weight` and `lt_path`, where
`value` is a dictionary {trt: gsim}, `weight` is a number in the
interval 0..1 and `lt_path` is a tuple with the branch ids of the
given realization.
:param str fname:
full path of the gsim_logic_tree file
:param tectonic_region_types:
a sequence of distinct tectonic region types
:param ltnode:
usually None, but it can also be a
:class:`openquake.hazardlib.nrml.Node` object describing the
GSIM logic tree XML file, to avoid reparsing it
"""
@classmethod
def from_(cls, gsim):
"""
Generate a trivial GsimLogicTree from a single GSIM instance.
"""
ltbranch = N('logicTreeBranch', {'branchID': 'b1'},
nodes=[N('uncertaintyModel', text=str(gsim)),
N('uncertaintyWeight', text='1.0')])
lt = N('logicTree', {'logicTreeID': 'lt1'},
nodes=[N('logicTreeBranchingLevel', {'branchingLevelID': 'bl1'},
nodes=[N('logicTreeBranchSet',
{'applyToTectonicRegionType': '*',
'branchSetID': 'bs1',
'uncertaintyType': 'gmpeModel'},
nodes=[ltbranch])])])
return cls('fake/' + gsim.__class__.__name__, ['*'], ltnode=lt)
def __init__(self, fname, tectonic_region_types=['*'], ltnode=None):
# tectonic_region_types usually comes from the source models
self.filename = fname
trts = sorted(tectonic_region_types)
if len(trts) > len(set(trts)):
raise ValueError(
'The given tectonic region types are not distinct: %s' %
','.join(trts))
self.values = defaultdict(list) # {trt: gsims}
self._ltnode = ltnode or nrml.read(fname).logicTree
self.bsetdict = {}
self.shortener = {}
self.branches = self._build_trts_branches(trts) # sorted by trt
if trts != ['*']:
# reduce self.values to the listed TRTs
values = {}
for trt in trts:
values[trt] = self.values[trt]
if not values[trt]:
raise InvalidLogicTree('%s is missing the TRT %r' %
(fname, trt))
self.values = values
if trts and not self.branches:
raise InvalidLogicTree(
'%s is missing in %s' % (set(tectonic_region_types), fname))
@property
def req_site_params(self):
site_params = set()
for trt in self.values:
for gsim in self.values[trt]:
site_params.update(gsim.REQUIRES_SITES_PARAMETERS)
return site_params
def check_imts(self, imts):
"""
Make sure the IMTs are recognized by all GSIMs in the logic tree
"""
for trt in self.values:
for gsim in self.values[trt]:
for attr in dir(gsim):
coeffs = getattr(gsim, attr)
if not isinstance(coeffs, CoeffsTable):
continue
for imt in imts:
if imt.startswith('SA'):
try:
coeffs[from_string(imt)]
except KeyError:
raise ValueError(
'%s is out of the period range defined '
'for %s' % (imt, gsim))
def __toh5__(self):
weights = set()
for branch in self.branches:
weights.update(branch.weight.dic)
dt = [('trt', hdf5.vstr), ('branch', hdf5.vstr),
('uncertainty', hdf5.vstr)] + [
(weight, float) for weight in sorted(weights)]
branches = [(b.trt, b.id, repr(b.gsim)) +
tuple(b.weight[weight] for weight in sorted(weights))
for b in self.branches if b.effective]
dic = {'bsetdict': json.dumps(self.bsetdict)}
if hasattr(self, 'filename'):
# missing in EventBasedRiskTestCase case_1f
dic['filename'] = self.filename
dirname = os.path.dirname(self.filename)
for gsims in self.values.values():
for gsim in gsims:
for k, v in gsim.kwargs.items():
if k.endswith(('_file', '_table')):
fname = os.path.join(dirname, v)
with open(fname, 'rb') as f:
dic[os.path.basename(v)] = f.read()
return numpy.array(branches, dt), dic
def __fromh5__(self, array, dic):
self.bsetdict = json.loads(dic['bsetdict'])
self.filename = dic['filename']
self.branches = []
self.shortener = {}
self.values = defaultdict(list)
dirname = os.path.dirname(dic['filename'])
for bsno, branches in enumerate(group_array(array, 'trt').values()):
for brno, branch in enumerate(branches):
branch = fix_bytes(branch)
br_id = branch['branch']
gsim = valid.gsim(branch['uncertainty'], dirname)
for k, v in gsim.kwargs.items():
if k.endswith(('_file', '_table')):
arr = numpy.asarray(dic[os.path.basename(v)][()])
gsim.kwargs[k] = io.BytesIO(bytes(arr))
self.values[branch['trt']].append(gsim)
weight = object.__new__(ImtWeight)
# branch dtype ('trt', 'branch', 'uncertainty', 'weight', ...)
weight.dic = {w: branch[w] for w in array.dtype.names[3:]}
if len(weight.dic) > 1:
gsim.weight = weight
bt = BranchTuple(branch['trt'], br_id, gsim, weight, True)
self.branches.append(bt)
self.shortener[br_id] = keyno(br_id, bsno, brno)
def reduce(self, trts):
"""
Reduce the GsimLogicTree.
:param trts: a subset of tectonic region types
:returns: a reduced GsimLogicTree instance
"""
new = object.__new__(self.__class__)
vars(new).update(vars(self))
if trts != {'*'}:
new.branches = []
for br in self.branches:
branch = BranchTuple(br.trt, br.id, br.gsim, br.weight,
br.trt in trts)
new.branches.append(branch)
return new
def collapse(self, branchset_ids):
"""
        Collapse the GsimLogicTree by using AvgPoeGMPE instances if needed
        :param branchset_ids: branchset ids to collapse
        :returns: a collapsed GsimLogicTree instance
"""
new = object.__new__(self.__class__)
vars(new).update(vars(self))
new.branches = []
for trt, grp in itertools.groupby(self.branches, lambda b: b.trt):
bs_id = self.bsetdict[trt]
brs = []
gsims = []
weights = []
for br in grp:
brs.append(br.id)
gsims.append(br.gsim)
weights.append(br.weight)
if len(gsims) > 1 and bs_id in branchset_ids:
kwargs = {}
for brid, gsim, weight in zip(brs, gsims, weights):
kw = gsim.kwargs.copy()
kw['weight'] = weight.dic['weight']
kwargs[brid] = {gsim.__class__.__name__: kw}
_toml = toml.dumps({'AvgPoeGMPE': kwargs})
gsim = AvgPoeGMPE(**kwargs)
gsim._toml = _toml
new.values[trt] = [gsim]
branch = BranchTuple(trt, bs_id, gsim, sum(weights), True)
new.branches.append(branch)
else:
new.branches.append(br)
return new
def get_num_branches(self):
"""
Return the number of effective branches for tectonic region type,
as a dictionary.
"""
num = {}
for trt, branches in itertools.groupby(
self.branches, operator.attrgetter('trt')):
num[trt] = sum(1 for br in branches if br.effective)
return num
def get_num_paths(self):
"""
Return the effective number of paths in the tree.
"""
num_branches = self.get_num_branches()
if not sum(num_branches.values()):
return 0
num = 1
for val in num_branches.values():
if val: # the branch is effective
num *= val
return num
def _build_trts_branches(self, tectonic_region_types):
# do the parsing, called at instantiation time to populate .values
trts = []
branches = []
branchids = []
branchsetids = set()
basedir = os.path.dirname(self.filename)
for bsno, blnode in enumerate(self._ltnode):
[branchset] = bsnodes(self.filename, blnode)
if branchset['uncertaintyType'] != 'gmpeModel':
raise InvalidLogicTree(
'%s: only uncertainties of type "gmpeModel" '
'are allowed in gmpe logic tree' % self.filename)
bsid = branchset['branchSetID']
if bsid in branchsetids:
raise InvalidLogicTree(
'%s: Duplicated branchSetID %s' %
(self.filename, bsid))
else:
branchsetids.add(bsid)
trt = branchset.get('applyToTectonicRegionType')
if trt: # missing in logictree_test.py
self.bsetdict[trt] = bsid
trts.append(trt)
self.bsetdict[trt] = bsid
# NB: '*' is used in scenario calculations to disable filtering
effective = (tectonic_region_types == ['*'] or
trt in tectonic_region_types)
weights = []
branch_ids = []
for brno, branch in enumerate(branchset):
weight = ImtWeight(branch, self.filename)
weights.append(weight)
branch_id = 'g' + BASE94[brno] + str(bsno)
branch_ids.append(branch_id)
try:
gsim = valid.gsim(branch.uncertaintyModel, basedir)
except Exception as exc:
raise ValueError(
"%s in file %s" % (exc, self.filename)) from exc
if gsim in self.values[trt]:
raise InvalidLogicTree('%s: duplicated gsim %s' %
(self.filename, gsim))
if len(weight.dic) > 1:
gsim.weight = weight
self.values[trt].append(gsim)
bt = BranchTuple(
branchset['applyToTectonicRegionType'],
branch_id, gsim, weight, effective)
if effective:
branches.append(bt)
self.shortener[branch_id] = keyno(
branch_id, bsno, brno, self.filename)
tot = sum(weights)
assert tot.is_one(), '%s in branch %s' % (tot, branch_id)
if duplicated(branch_ids):
raise InvalidLogicTree(
                    'There were duplicated branchIDs in %s' %
self.filename)
branchids.extend(branch_ids)
if len(trts) > len(set(trts)):
raise InvalidLogicTree(
'%s: Found duplicated applyToTectonicRegionType=%s' %
(self.filename, trts))
dupl = duplicated(branchids)
if dupl:
logging.debug(
'There are duplicated branchIDs %s in %s', dupl, self.filename)
branches.sort(key=lambda b: b.trt)
return branches
def get_weights(self, trt, imt='weight'):
"""
Branch weights for the given TRT
"""
weights = []
for br in self.branches:
if br.trt == trt:
weights.append(br.weight[imt])
return numpy.array(weights)
def sample(self, n, seed, sampling_method='early_weights'):
"""
:param n: number of samples
:param seed: random seed
:param sampling_method: by default 'early_weights'
:returns: n Realization objects
"""
m = len(self.values) # number of TRTs
probs = lt.random((n, m), seed, sampling_method)
brlists = [lt.sample([b for b in self.branches if b.trt == trt],
probs[:, i], sampling_method)
for i, trt in enumerate(self.values)]
rlzs = []
for i in range(n):
weight = 1
lt_path = []
lt_uid = []
value = []
for brlist in brlists: # there is branch list for each TRT
branch = brlist[i]
lt_path.append(branch.id)
lt_uid.append(branch.id if branch.effective else '@')
weight *= branch.weight
value.append(branch.gsim)
rlz = lt.Realization(tuple(value), weight, i, tuple(lt_uid))
rlzs.append(rlz)
return rlzs
def get_rlzs_by_gsim_trt(self, samples=0, seed=42,
sampling_method='early_weights'):
"""
:param samples:
number of realizations to sample (if 0, use full enumeration)
:param seed:
seed to use for the sampling
:param sampling_method:
sampling method, by default 'early_weights'
:returns:
dictionary trt -> gsim -> all_rlz_ordinals for each gsim in the trt
"""
if samples:
rlzs = self.sample(samples, seed, sampling_method)
else:
rlzs = list(self)
ddic = {}
for i, trt in enumerate(self.values):
ddic[trt] = {gsim: [rlz.ordinal for rlz in rlzs
if rlz.value[i] == gsim]
for gsim in self.values[trt]}
return ddic
def get_rlzs_by_g(self):
"""
:returns: an array of lists of g-indices
"""
lst = []
for rlzs_by_gsim in self.get_rlzs_by_gsim_trt().values():
lst.extend(rlzs_by_gsim.values())
return numpy.array(lst)
def __iter__(self):
"""
Yield :class:`openquake.commonlib.logictree.Realization` instances
"""
groups = []
# NB: branches are already sorted
for trt in self.values:
groups.append([b for b in self.branches if b.trt == trt])
# with T tectonic region types there are T groups and T branches
for i, branches in enumerate(itertools.product(*groups)):
weight = 1
lt_path = []
lt_uid = []
value = []
for trt, branch in zip(self.values, branches):
lt_path.append(branch.id)
lt_uid.append(branch.id if branch.effective else '@')
weight *= branch.weight
value.append(branch.gsim)
yield lt.Realization(tuple(value), weight, i, tuple(lt_uid))
def __repr__(self):
lines = ['%s,%s,%s,w=%s' %
(b.trt, b.id, b.gsim, b.weight['weight'])
for b in self.branches if b.effective]
return '<%s\n%s>' % (self.__class__.__name__, '\n'.join(lines))
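# Minimal usage sketch (illustrative; the file name and TRT below are
# assumptions, not part of this module):
#
#   gslt = GsimLogicTree('gmpe_logic_tree.xml', ['Active Shallow Crust'])
#   for rlz in gslt:  # full enumeration of the realizations
#       print(rlz.value, rlz.weight, rlz.lt_path)
#
# or, instead of enumerating, draw a sample: gslt.sample(10, seed=42)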
def rel_paths(toml):
# the paths inside the toml describing the gsim
paths = []
for line in toml.splitlines():
try:
name, path = line.split('=')
except ValueError:
pass
else:
if name.rstrip().endswith(('_file', '_table')):
paths.append(ast.literal_eval(path.strip()))
return paths
def abs_paths(smlt, fnames):
# relative -> absolute paths
base_path = os.path.dirname(smlt)
paths = []
for fname in fnames:
if os.path.isabs(fname):
raise InvalidFile('%s: %s must be a relative path' % (smlt, fname))
fname = os.path.abspath(os.path.join(base_path, fname))
if os.path.exists(fname): # consider only real paths
paths.append(fname)
return paths
def collect_files(gsim_lt_path):
"""
Given a path to a gsim logic tree, collect all of the
    path names it contains (relevant for tabular/file-dependent GSIMs).
"""
n = nrml.read(gsim_lt_path)
try:
blevels = n.logicTree
except Exception:
        raise InvalidFile('%s is not a valid gsim_logic_tree_file'
% gsim_lt_path)
paths = set()
for blevel in blevels:
for bset in bsnodes(gsim_lt_path, blevel):
assert bset['uncertaintyType'] == 'gmpeModel', bset
for br in bset:
with context(gsim_lt_path, br):
relpaths = rel_paths(br.uncertaintyModel.text)
paths.update(abs_paths(gsim_lt_path, relpaths))
return sorted(paths)
| gem/oq-engine | openquake/hazardlib/gsim_lt.py | Python | agpl-3.0 | 22,252 | 0 |
from copy import deepcopy
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.data_providers.case.utils import CaseSyncUpdate
from casexml.apps.phone.xml import get_case_element
from corehq.toggles import ENABLE_LOADTEST_USERS
def get_loadtest_factor(domain, user):
"""
Gets the loadtest factor for a domain and user. Is always 1 unless
both the toggle is enabled for the domain, and the user has a non-zero,
non-null factor set.
"""
if domain and ENABLE_LOADTEST_USERS.enabled(domain):
return getattr(user, 'loadtest_factor', 1) or 1
return 1
def transform_loadtest_update(update, factor):
"""
Returns a new CaseSyncUpdate object (from an existing one) with all the
case IDs and names mapped to have the factor appended.
"""
def _map_id(id, count):
return '{}-{}'.format(id, count)
case = CommCareCase.wrap(deepcopy(update.case._doc))
case._id = _map_id(case._id, factor)
for index in case.indices:
index.referenced_id = _map_id(index.referenced_id, factor)
case.name = '{} ({})'.format(case.name, factor)
return CaseSyncUpdate(case, update.sync_token, required_updates=update.required_updates)
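# For example (hypothetical values): with factor 2, a case with _id "abc" and
# name "Bob" is copied into one with _id "abc-2" and name "Bob (2)", and each
# index's referenced_id is remapped the same way.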
def append_update_to_response(response, update, restore_state):
"""
Adds the XML from the case_update to the restore response.
If factor is > 1 it will append that many updates to the response for load testing purposes.
"""
current_count = 0
original_update = update
while current_count < restore_state.loadtest_factor:
element = get_case_element(update.case, update.required_updates, restore_state.version)
response.append(element)
current_count += 1
if current_count < restore_state.loadtest_factor:
update = transform_loadtest_update(original_update, current_count)
| puttarajubr/commcare-hq | corehq/ex-submodules/casexml/apps/phone/data_providers/case/load_testing.py | Python | bsd-3-clause | 1,857 | 0.001616 |
import pygame
from utils import load_image, laserSound, rocketSound, explodeSound, \
missleExplosion, scream, load_sliced_sprites, chickenSound
import stats
class Shot(pygame.sprite.Sprite):
def __init__(self, x, y, image, enemies, power, speed, maxspeed=None):
pygame.sprite.Sprite.__init__(self)
screen = pygame.display.get_surface()
self.image, self.rect = load_image(image, -1)
self.area = screen.get_rect()
self.rect.centerx = x
self.enemies = enemies
self.rect.bottom = y
self.speed = speed
if maxspeed:
self.maxspeed = maxspeed
else:
self.maxspeed = speed / 2
self.power = power
p = min(stats.bonus['power'], 1.5)
width = int(p * self.image.get_width())
height = int(p * self.image.get_height())
self.image = pygame.transform.scale(self.image, (width, height))
def update(self):
s = min(self.speed * stats.bonus['bspeed'], self.maxspeed)
self.move(s)
def move(self, dy):
        newpos = self.rect.move((0, dy))  # use the clamped speed computed in update()
if not self.area.contains(newpos):
if self.rect.bottom < self.area.top:
self.kill()
self.check_collide()
self.rect = newpos
def check_collide(self):
for c in pygame.sprite.spritecollide(self, self.enemies, False):
self.kill()
stats.damage += self.power * stats.bonus['power']
c.hit(self.power * stats.bonus['power'])
class Rocket(Shot):
def __init__(self, x, y, enemies, power=5, speed=-5):
Shot.__init__(self, x, y, 'rocket.png', enemies, power, speed)
class Laser(Shot):
def __init__(self, x, y, enemies, power=2, speed=-10):
Shot.__init__(self, x, y, 'laser.png', enemies, power, speed)
class SuperChicken(Shot):
def __init__(self, x, y, enemies, power=200, speed=-25):
Shot.__init__(self, x, y, 'superchicken.png', enemies, power, speed)
class Fighter(pygame.sprite.Sprite):
def __init__(self, enemies, speed=8, maxspeed=20):
pygame.sprite.Sprite.__init__(self)
self.image, self.rect = load_image('fighter.png', -1)
self.screen = pygame.display.get_surface()
self.area = self.screen.get_rect()
self.speed = speed
self.maxspeed = maxspeed
self.dx = 0
self.enemies = enemies
self.shooting = False
self.rect.bottom = self.area.bottom
self.rect.centerx = self.area.centerx
self.shots = pygame.sprite.Group()
self._ticks = pygame.time.get_ticks()
self._rocketDelay = 500
self._last_update = 0
self._start = pygame.time.get_ticks()
self.shotType = None
def update(self):
if self.shooting:
self.shotType()
if self.dx:
self.move_side(self.dx)
self.shots.update()
self.shots.draw(self.screen)
def move_side(self, dx):
dx *= stats.bonus['speed']
if self.rect.left + dx <= self.area.left:
self.rect.left = self.area.left
elif self.rect.right + dx >= self.area.right:
self.rect.right = self.area.right
else:
self.rect.centerx += dx
def move(self, dx, dy):
self.rect.midtop = (self.rect.x + dx, self.rect.y + dy)
def shootRocket(self, delay=500, mindelay=200):
        d = max(delay / stats.bonus['rspeed'], mindelay)  # never fire faster than mindelay
if pygame.time.get_ticks() - self._last_update > d:
self._last_update = pygame.time.get_ticks()
rocket = Rocket(self.rect.centerx, self.rect.top, self.enemies)
self.shots.add(rocket)
stats.shots += 1
rocketSound.play()
def shootLaser(self, delay=200, mindelay=100):
        d = max(delay / stats.bonus['rspeed'], mindelay)
if pygame.time.get_ticks() - self._last_update > d:
self._last_update = pygame.time.get_ticks()
laser = Laser(self.rect.centerx, self.rect.top, self.enemies)
self.shots.add(laser)
stats.shots += 1
laserSound.play()
def shootSuperChicken(self, delay=1000, mindelay=750):
        d = max(delay / stats.bonus['rspeed'], mindelay)
if pygame.time.get_ticks() - self._last_update > d:
self._last_update = pygame.time.get_ticks()
chicken = SuperChicken(self.rect.centerx, self.rect.top,
self.enemies)
self.shots.add(chicken)
stats.shots += 1
chickenSound.play()
class Enemy(pygame.sprite.Sprite):
def __init__(self, image, life, speed, minspeed=None, bombdelay=None,
bomb=None, explodeimg='explode.png'):
        if not minspeed:
            minspeed = speed / 2
        self.minspeed = minspeed
pygame.sprite.Sprite.__init__(self)
self.image, self.rect = load_image(image, -1)
screen = pygame.display.get_surface()
self.area = screen.get_rect()
self.speed = speed
self.life = life
self.exploding = False
self._last_update = 0
self._frame = 0
self._start = pygame.time.get_ticks()
self._delay = 100
self.explodeSound = explodeSound
self.explodeImages = load_sliced_sprites(128, 128, explodeimg)
def update(self):
if self.exploding:
self.explode()
else:
self._move()
def _move(self):
if self.rect.left > self.area.right or \
self.rect.right < self.area.left:
if self.rect.bottom <= self.area.bottom - 128:
self.rect.centery += 40
else:
self._breakthrough()
self.speed *= -1.05
if self.rect.left > self.area.right:
self.rect.left = self.area.right
else:
self.rect.right = self.area.left
else:
            if self.speed > 0:
                # the 'slowdown' bonus divides the speed; keep the magnitude
                # at or above minspeed
                s = max(self.speed / stats.bonus['slowdown'], self.minspeed)
            else:
                s = min(self.speed / stats.bonus['slowdown'], -self.minspeed)
self.rect = self.rect.move((s, 0))
def _breakthrough(self):
stats.life -= 1
scream.play()
self.kill()
def hit(self, power):
self.life -= power
missleExplosion.play()
if self.life <= 0:
self.exploding = True
self._start = pygame.time.get_ticks()
def explode(self):
t = pygame.time.get_ticks()
if self._frame == 0:
self.explodeSound.play()
self.rect.centery -= 40
if t - self._last_update > self._delay:
self._frame += 1
if self._frame >= len(self.explodeImages):
self._frame = 0
self.kill()
stats.kills += 1
self.image = self.explodeImages[self._frame]
self._last_update = t
class Ufo(Enemy):
def __init__(self):
Enemy.__init__(self, 'ufo.png', 25, 8)
self.rect.topright = 0, 30
stats.spawned += 1
| kelvan/pyspaceshooter | objects.py | Python | gpl-3.0 | 7,112 | 0 |
###
#Various nose tests. If you want to adapt this for your own use, be aware that the start/end block list has a very specific formatting.
###
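#The start/end block list format (as used in the tests below): a list of
#[start, end] pairs of ISO-8601 timestamp strings, e.g.
#  [['2016-11-20T08:30:00', '2016-11-20T10:30:00'],
#   ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]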
import sys
import date_chopper
import arrow
from pymongo import MongoClient
import secrets.admin_secrets
import secrets.client_secrets
MONGO_CLIENT_URL = "mongodb://{}:{}@localhost:{}/{}".format(
secrets.client_secrets.db_user,
secrets.client_secrets.db_user_pw,
secrets.admin_secrets.port,
secrets.client_secrets.db)
try:
dbclient = MongoClient(MONGO_CLIENT_URL)
db = getattr(dbclient, secrets.client_secrets.db)
collection = db.dated
base_size = collection.count() #current size of the db, for comparison later
except:
print("Failure opening database. Is Mongo running? Correct password?")
sys.exit(1)
def test_overlap(): #Given a sample list, check to see if its dates overlap at all
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]
assert (date_chopper.date_overlap(ranges)) == False
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00'], ['2016-11-20T16:00:00', '2016-11-20T18:00:00'], ['2016-11-20T17:00:00', '2016-11-20T19:00:00']]
assert (date_chopper.date_overlap(ranges)) == True
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T10:30:00', '2016-11-20T13:30:00'], ['2016-11-20T13:00:00', '2016-11-20T14:00:00'], ['2016-11-20T14:10:00', '2016-11-20T16:00:00']]
assert (date_chopper.date_overlap(ranges)) == True
def test_underlap(): #tests if the program can detect start times that go out of bounds
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]
start = arrow.get('2016-11-20T05:00:00')
assert (date_chopper.date_underlap(ranges, start)) == True
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]
start = arrow.get('2016-11-20T10:00:00')
assert (date_chopper.date_underlap(ranges, start)) == False
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]
start = arrow.get('2016-11-20T16:00:00')
assert (date_chopper.date_underlap(ranges, start)) == False
ranges = [['2016-11-20T08:30:00', '2016-11-20T10:30:00'], ['2016-11-20T11:00:00', '2016-11-20T15:00:00']]
start = arrow.get('2016-11-20T10:00:00')
assert (date_chopper.date_underlap(ranges, start)) == False
def test_db(): #tests basic DB operation
assert collection != None
collection.insert({"type" : "freebusy", "entry" : [["entry 1"], ["entry 2"]]})
assert base_size < collection.count()
collection.remove({"entry" : [["entry 1"], ["entry 2"]]})
assert base_size == collection.count()
| zenranda/proj10-final | test_main.py | Python | artistic-2.0 | 2,958 | 0.014875 |
import os
import csv
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage, PDFTextExtractionNotAllowed
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfdevice import PDFDevice
from pdfminer.layout import LAParams, LTTextBox, LTLine, LTTextLine
from pdfminer.converter import PDFPageAggregator
#set the path for the pdf file of T2 sponsors list.
dir = os.path.dirname(os.path.realpath(__file__))
rel_path = "files/Tier_2_5_Register_of_Sponsors_2015-05-01.pdf"
pdfpath = os.path.join(dir,rel_path)
print pdfpath
field_names = ["Organisation Name","Town/City","Tier & Rating","Sub Tier"]
#open the pdf file of T2 sponsor list.
with open(pdfpath, "r") as pdf_file:
#create a parser object of the file.
parser = PDFParser(pdf_file)
#create a PDF document object to store the document structure.
doc = PDFDocument(parser)
#check whether the document allows text extraction. If not, abort.
if not doc.is_extractable:
raise PDFTextExtractionNotAllowed
#create a PDF resource manager object to store shared resources.
rsrcmgr = PDFResourceManager()
#set parameters for analysis.
laparams = LAParams(line_margin=0.2)
#create a PDF page aggregator object.
device = PDFPageAggregator(rsrcmgr,laparams=laparams)
    #create a PDF interpreter object to process each page.
interpreter = PDFPageInterpreter(rsrcmgr, device)
page_dict = {}
#set column locations of the table.
col1 = 24.0
col2 = 351.0
col3 = 576.0
col4 = 678.0
#set top margin of the table.
top_h1 = 396.0
top_h2 = 568.0
#set keys of each table column.
col1_key = int(col1)
col2_key = int(col2)
col3_key = int(col3)
col4_key = int(col4)
#initialise page_dict that stores columns of a row in the table.
page_dict[col1_key] = ""#field_names[0]
page_dict[col2_key] = ""#field_names[1]
page_dict[col3_key] = ""#field_names[2]
page_dict[col4_key] = ""#field_names[3]
#open and wrtie data.csv file.
with open("data.csv","wb") as data:
writer = csv.writer(data)
#process each page contained in the PDF document.
for i,page in enumerate(PDFPage.create_pages(doc)):
#page_content that stores table elements in current page.
page_content = []
#process each page.
interpreter.process_page(page)
#receive the LTPage object for the page.
layout = device.get_result()
print "page {}".format(i+1)
# if i == 2: break
#choose correct top margin for page 1.
if i == 0:
top_h = top_h1
else:
top_h = top_h2
#process each child objects within LTPage object.
for obj in layout:
#select only LTTextBox and LTLine objects.
if isinstance(obj,LTTextBox) or isinstance(obj,LTLine):
#get x0,y0 position.
x0 = obj.bbox[0]
y0 = obj.bbox[1]
#if col_key is table columns, store the object it in page_content.
col_key = int(x0)
if col_key in page_dict and y0 < top_h:
page_content.append(obj)
#sort page_content by y0 position.
page_content.sort(key=lambda x: x.bbox[1], reverse=True)
#iterate sorted page_content.
for obj in page_content:
#if it is a LTLine object.
if isinstance(obj,LTLine):
#combine columns into a row.
row=[page_dict[col1_key],page_dict[col2_key],
page_dict[col3_key],page_dict[col4_key]]
#write the row into csv file.
writer.writerow([s.encode("utf-8") for s in row])
# print "Line here {}".format(ob.bbox)
#reset page_dict to store columns of next row.
page_dict[col1_key] = ""
page_dict[col2_key] = ""
page_dict[col3_key] = ""
page_dict[col4_key] = ""
#if it is a LTTextBox object.
else:
# #store it to corresponding column.
page_dict[int(obj.bbox[0])] += obj.get_text()
# print ob.get_text()
| zzyxzz/T2-list-parser | t2-parser.py | Python | mit | 4,480 | 0.012723 |
import pyxb_114.binding.generate
import pyxb_114.binding.datatypes as xs
import pyxb_114.binding.basis
import pyxb_114.utils.domutils
import os.path
xsd='''<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:tns="urn:trac-0071" targetNamespace="urn:trac-0071">
<xs:element name="MetadataDocument" type="tns:MetadataType"/>
<xs:complexType name="MetadataType">
<xs:sequence maxOccurs="1" minOccurs="1">
<xs:element name="template" type="xs:string"/>
<xs:element name="timespan" maxOccurs="unbounded" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element name="field" maxOccurs="unbounded" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element name="name" type="xs:string"/>
<xs:element name="value" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="lang" type="xs:language"/>
<xs:attribute name="user" type="xs:string"/>
<xs:attribute name="timestamp" type="xs:dateTime"/>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attribute name="track" type="xs:string"/>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attribute name="start" type="xs:string"/>
<xs:attribute name="end" type="xs:string"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:schema>
'''
#file('schema.xsd', 'w').write(xsd)
code = pyxb_114.binding.generate.GeneratePython(schema_text=xsd)
#file('code.py', 'w').write(code)
#print code
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb_114.exceptions_ import *
import unittest
class TestTrac_0071 (unittest.TestCase):
def test (self):
newdoc = MetadataDocument()
metadatadoc_type = MetadataDocument.typeDefinition()
timespan_element = metadatadoc_type._ElementMap['timespan'].elementBinding()
timespan_type = timespan_element.typeDefinition()
field_element = timespan_type._ElementMap['field'].elementBinding()
field_type = field_element.typeDefinition()
value_element = field_type._ElementMap['value'].elementBinding()
value_type = value_element.typeDefinition()
newdoc.template = 'anewtemplate'
field = field_type('title', pyxb_114.BIND('foo', lang='ENG'), _element=field_element)
self.assertTrue(isinstance(field.value_, list))
self.assertEqual(1, len(field.value_))
self.assertTrue(isinstance(field.value_[0], value_type))
field.validateBinding()
self.assertEqual('<field><name>title</name><value lang="ENG">foo</value></field>', field.toxml("utf-8", root_only=True))
field = field_type(name='title', _element=field_element)
field.value_.append(pyxb_114.BIND('foo', lang='ENG'))
self.assertTrue(isinstance(field.value_, list))
self.assertEqual(1, len(field.value_))
self.assertTrue(isinstance(field.value_[0], pyxb_114.BIND))
field.validateBinding()
self.assertTrue(isinstance(field.value_[0], pyxb_114.BIND))
self.assertEqual('<field><name>title</name><value lang="ENG">foo</value></field>', field.toxml("utf-8", root_only=True))
'''
NOT YET FINISHED
newdoc.timespan.append(pyxb_114.BIND( # Single timespan
pyxb_114.BIND( # First field instance
'title',
pyxb_114.BIND('foo', lang='ENG')
),
start='-INF', end='+INF'))
newdoc.validateBinding()
timespan = newdoc.timespan[0]
#self.assertTrue(isinstance(timespan, timespan_type))
print newdoc.toxml("utf-8")
newdoc.timespan[:] = []
newdoc.timespan.append(pyxb_114.BIND( # Single timespan
pyxb_114.BIND( # First field instance
name='title',
value=pyxb_114.BIND('foo', lang='ENG')
),
start='-INF', end='+INF'))
newdoc.validateBinding()
timespan = newdoc.timespan[0]
#self.assertTrue(isinstance(timespan, timespan_type))
print newdoc.toxml("utf-8")
newdoc.timespan[:] = []
newdoc.timespan.append(pyxb_114.BIND( # Single timespan
pyxb_114.BIND( # First field instance
name='title',
value_=pyxb_114.BIND('foo', lang='ENG')
),
start='-INF', end='+INF'))
newdoc.validateBinding()
timespan = newdoc.timespan[0]
#self.assertTrue(isinstance(timespan, timespan_type))
print newdoc.toxml("utf-8")
newdoc.timespan[:] = []
'''
if __name__ == '__main__':
unittest.main()
| msherry/PyXB-1.1.4 | tests/trac/test-trac-0071.py | Python | apache-2.0 | 5,134 | 0.002922 |
from .backends import AzureActiveDirectoryBackend
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME, login
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
import requests
import urlparse
import uuid
import json
@never_cache
def auth(request):
backend = AzureActiveDirectoryBackend()
redirect_uri = request.build_absolute_uri(reverse(complete))
nonce = str(uuid.uuid4())
request.session['nonce'] = nonce
state = str(uuid.uuid4())
request.session['state'] = state
login_url = backend.login_url(
redirect_uri=redirect_uri,
nonce=nonce,
state=state
)
return HttpResponseRedirect(login_url)
@never_cache
@csrf_exempt
def complete(request):
backend = AzureActiveDirectoryBackend()
redirect_uri = request.build_absolute_uri(reverse(complete))
method = 'GET' if backend.RESPONSE_MODE == 'fragment' else 'POST'
original_state = request.session.get('state')
state = getattr(request, method).get('state')
if original_state == state:
code = getattr(request, method).get('code')
nonce = request.session.get('nonce')
if code is not None:
data = backend.token_params(
redirect_uri=redirect_uri,
code=code,
)
url = data.pop('endpoint', None)
token_response = requests.post(url, data=data)
payload = json.loads(token_response.text)
id_token = payload['id_token']
request.session['access_token'] = payload['access_token']
request.session['code'] = code
user = backend.authenticate(token=id_token, nonce=nonce)
if user is not None:
login(request, user)
return HttpResponseRedirect(get_login_success_url(request))
return HttpResponseRedirect('failure')
def get_login_success_url(request):
redirect_to = request.GET.get(REDIRECT_FIELD_NAME, '')
netloc = urlparse.urlparse(redirect_to)[1]
if not redirect_to:
redirect_to = settings.AAD_REDIRECT_URL
elif netloc and netloc != request.get_host():
redirect_to = settings.AAD_REDIRECT_URL
return redirect_to
| csabatini/deploy-azurerm-django | azure_ad_auth/views.py | Python | bsd-3-clause | 2,354 | 0.000425 |
"""
Settings and actions for the dummyrunner
This module defines dummyrunner settings and sets up
the actions available to dummy accounts.
The settings are global variables:
TIMESTEP - time in seconds between each 'tick'
CHANCE_OF_ACTION - chance 0-1 of action happening
CHANCE_OF_LOGIN - chance 0-1 of login happening
TELNET_PORT - port to use, defaults to settings.TELNET_PORT
ACTIONS - see below
ACTIONS is a tuple
```
(login_func, logout_func, (0.3, func1), (0.1, func2) ... )
```
where the first entry is the function to call on first connect, with a
chance of occurring given by CHANCE_OF_LOGIN. This function is usually
responsible for logging in the account. The second entry is always
called when the dummyrunner disconnects from the server and should
thus issue a logout command. The other entries are tuples (chance,
func). They are picked randomly, weighted by the chance given (the
chances are normalized across all options, so the setup still works
even if they don't add up to 1).
Since each function can return a list of game-command strings, each
function may result in multiple operations.
An action-function is called with a "client" argument which is a
reference to the dummy client currently performing the action. It
returns a string or a list of command strings to execute. Use the
client object for optionally saving data between actions.
The client object has the following relevant properties and methods:
- key - an optional client key. This is only used for dummyrunner output.
Default is "Dummy-<cid>"
- cid - client id
- gid - globally unique id, hashed with time stamp
- istep - the current step
- exits - an empty list. Can be used to store exit names
- objs - an empty list. Can be used to store object names
- counter() - returns a unique increasing id, hashed with time stamp
to make it unique also between dummyrunner instances.
The return should either be a single command string or a tuple of
command strings. This list of commands will always be executed every
TIMESTEP with a chance given by CHANCE_OF_ACTION, in the order given
(no randomness) and allows for setting up a more complex chain of
commands (such as creating an account and logging in).
---
"""
# Dummy runner settings
# Time between each dummyrunner "tick", in seconds. Each dummy
# will be called with this frequency.
TIMESTEP = 2
# Chance of a dummy actually performing an action on a given tick.
# This spreads out usage randomly, like it would be in reality.
CHANCE_OF_ACTION = 0.5
# Chance of a currently unlogged-in dummy performing its login
# action every tick. This emulates not all accounts logging in
# at exactly the same time.
CHANCE_OF_LOGIN = 1.0
# Which telnet port to connect to. If set to None, uses the first
# default telnet port of the running server.
TELNET_PORT = None
# Setup actions tuple
# some convenient templates
DUMMY_NAME = "Dummy-%s"
DUMMY_PWD = "password-%s"
START_ROOM = "testing_room_start_%s"
ROOM_TEMPLATE = "testing_room_%s"
EXIT_TEMPLATE = "exit_%s"
OBJ_TEMPLATE = "testing_obj_%s"
TOBJ_TEMPLATE = "testing_button_%s"
TOBJ_TYPECLASS = "contrib.tutorial_examples.red_button.RedButton"
# action function definitions (pick and choose from
# these to build a client "usage profile"
# login/logout
def c_login(client):
"logins to the game"
# we always use a new client name
cname = DUMMY_NAME % client.gid
cpwd = DUMMY_PWD % client.gid
# set up for digging a first room (to move to and keep the
# login room clean)
roomname = ROOM_TEMPLATE % client.counter()
exitname1 = EXIT_TEMPLATE % client.counter()
exitname2 = EXIT_TEMPLATE % client.counter()
client.exits.extend([exitname1, exitname2])
cmds = (
"create %s %s" % (cname, cpwd),
"connect %s %s" % (cname, cpwd),
"@dig %s" % START_ROOM % client.gid,
"@teleport %s" % START_ROOM % client.gid,
"@dig %s = %s, %s" % (roomname, exitname1, exitname2),
)
return cmds
def c_login_nodig(client):
"logins, don't dig its own room"
cname = DUMMY_NAME % client.gid
cpwd = DUMMY_PWD % client.gid
cmds = ("create %s %s" % (cname, cpwd), "connect %s %s" % (cname, cpwd))
return cmds
def c_logout(client):
"logouts of the game"
return "@quit"
# random commands
def c_looks(client):
"looks at various objects"
cmds = ["look %s" % obj for obj in client.objs]
if not cmds:
cmds = ["look %s" % exi for exi in client.exits]
if not cmds:
cmds = "look"
return cmds
def c_examines(client):
"examines various objects"
cmds = ["examine %s" % obj for obj in client.objs]
if not cmds:
cmds = ["examine %s" % exi for exi in client.exits]
if not cmds:
cmds = "examine me"
return cmds
def c_idles(client):
"idles"
cmds = ("idle", "idle")
return cmds
def c_help(client):
"reads help files"
cmds = ("help", "help @teleport", "help look", "help @tunnel", "help @dig")
return cmds
def c_digs(client):
"digs a new room, storing exit names on client"
roomname = ROOM_TEMPLATE % client.counter()
exitname1 = EXIT_TEMPLATE % client.counter()
exitname2 = EXIT_TEMPLATE % client.counter()
client.exits.extend([exitname1, exitname2])
return "@dig/tel %s = %s, %s" % (roomname, exitname1, exitname2)
def c_creates_obj(client):
"creates normal objects, storing their name on client"
objname = OBJ_TEMPLATE % client.counter()
client.objs.append(objname)
cmds = (
"@create %s" % objname,
'@desc %s = "this is a test object' % objname,
"@set %s/testattr = this is a test attribute value." % objname,
"@set %s/testattr2 = this is a second test attribute." % objname,
)
return cmds
def c_creates_button(client):
"creates example button, storing name on client"
objname = TOBJ_TEMPLATE % client.counter()
client.objs.append(objname)
cmds = ("@create %s:%s" % (objname, TOBJ_TYPECLASS), "@desc %s = test red button!" % objname)
return cmds
def c_socialize(client):
"socializechats on channel"
cmds = (
"ooc Hello!",
"ooc Testing ...",
"ooc Testing ... times 2",
"say Yo!",
"emote stands looking around.",
)
return cmds
def c_moves(client):
"moves to a previously created room, using the stored exits"
    cmds = client.exits  # try all exits - eventually one will work
return "look" if not cmds else cmds
def c_moves_n(client):
"move through north exit if available"
return "north"
def c_moves_s(client):
"move through south exit if available"
return "south"
# Action tuple (required)
#
# This is a tuple of client action functions. The first element is the
# function the client should use to log into the game and move to
# STARTROOM . The second element is the logout command, for cleanly
# exiting the mud. The following elements are 2-tuples of (probability,
# action_function). The probabilities should normally sum up to 1,
# otherwise the system will normalize them.
#
# "normal builder" definitionj
# ACTIONS = ( c_login,
# c_logout,
# (0.5, c_looks),
# (0.08, c_examines),
# (0.1, c_help),
# (0.01, c_digs),
# (0.01, c_creates_obj),
# (0.3, c_moves))
# "heavy" builder definition
# ACTIONS = ( c_login,
# c_logout,
# (0.2, c_looks),
# (0.1, c_examines),
# (0.2, c_help),
# (0.1, c_digs),
# (0.1, c_creates_obj),
# #(0.01, c_creates_button),
# (0.2, c_moves))
# "passive account" definition
# ACTIONS = ( c_login,
# c_logout,
# (0.7, c_looks),
# #(0.1, c_examines),
# (0.3, c_help))
# #(0.1, c_digs),
# #(0.1, c_creates_obj),
# #(0.1, c_creates_button),
# #(0.4, c_moves))
# "inactive account" definition
# ACTIONS = (c_login_nodig,
# c_logout,
# (1.0, c_idles))
# "normal account" definition
ACTIONS = (c_login, c_logout, (0.01, c_digs), (0.39, c_looks), (0.2, c_help), (0.4, c_moves))
# walking tester. This requires a pre-made
# "loop" of multiple rooms that ties back
# to limbo (using @tunnel and @open)
# ACTIONS = (c_login_nodig,
# c_logout,
# (1.0, c_moves_n))
# "socializing heavy builder" definition
# ACTIONS = (c_login,
# c_logout,
# (0.1, c_socialize),
# (0.1, c_looks),
# (0.2, c_help),
# (0.1, c_creates_obj),
# (0.2, c_digs),
# (0.3, c_moves))
# "heavy digger memory tester" definition
# ACTIONS = (c_login,
# c_logout,
# (1.0, c_digs))
| jamesbeebop/evennia | evennia/server/profiling/dummyrunner_settings.py | Python | bsd-3-clause | 8,806 | 0.000227 |
# Generated by Django 2.2.24 on 2021-11-11 08:49
import django.core.validators
from django.db import migrations, models
from django.db.models import Count
def add_shard_to_no_rule_configurations(apps, schema_editor):
Configuration = apps.get_model("santa", "Configuration")
for configuration in Configuration.objects.annotate(num_rules=Count('rule')).filter(num_rules=0):
configuration.allow_unknown_shard = 5
configuration.save()
class Migration(migrations.Migration):
dependencies = [
('santa', '0025_auto_20210921_1517'),
]
operations = [
migrations.AddField(
model_name='configuration',
name='allow_unknown_shard',
field=models.IntegerField(
default=100,
help_text="Restrict the reporting of 'Allow Unknown' events to a percentage (0-100) of hosts",
validators=[django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(100)]
),
),
migrations.RunPython(add_shard_to_no_rule_configurations),
]
| zentralopensource/zentral | zentral/contrib/santa/migrations/0026_configuration_allow_unknown_shard.py | Python | apache-2.0 | 1,128 | 0.001773 |
#!/usr/bin/env python
import argparse
import csv
import random
import sys
import math
from math import inf
from textwrap import dedent
from editCIGAR import CIGAR
# Don't throw an error if output is piped into a program that doesn't
# read all of its input, like `head`.
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
description = dedent("""
    Randomly insert insertions and deletions into nucleotide sequences
    received via standard input.
Expected input format: For every input line: A string of nucleotides
followed by a tab character, followed by a corresponding CIGAR
string. Example:
–––––––––––––––––––––––
seq cigar
GGTGACATAAAGGC 8M5I
TTCCGCAGGG 10M
CTCGTGGAGT 5M2D5M
....
–––––––––––––––––––––––
(white space stands for one tab character)
If no CIGAR strings are available for the nucleotides, then use the
parameter --cigar-new. In that case, only a nucleotide string per line is
expected. Every read is expected to have complete (mis)match to the
reference (CIGAR character M).
""")
def parse_arguments(argv):
p = argparse.ArgumentParser(description=description,
formatter_class= argparse.RawTextHelpFormatter)
for s,l in [('in','insert'),('del','deletion')]:
p.add_argument('--{}-prob'.format(s), default=0,
type = checkPositive(float), metavar='P', help=
"Per-base probability of a seqence {}".format(l))
p.add_argument('--{}-exp'.format(s), default=None, metavar = 'L',
type = float, help=dedent("""\
                Lengths of {}s shall be exponentially
                distributed, with 50%% of them longer than L""")\
.format(l))
p.add_argument('--cigar-new', default = False,
action='store_true', help=dedent("""\
Do not read CIGAR strings from standard input, but assume a
complete (mis)match (no indels, CIGAR character M) for every
nucleotide string."""))
p.add_argument('--col-seq', default = 'seq', type = str,
help = "Column name of the nucleotide strings")
p.add_argument('--col-cigar', default = 'cigar', type = str,
help = "Column name of the CIGAR strings")
# p.add_argument('--input-fmt', default = ['lines'],
# nargs='+', metavar='',
# help =dedent("""\
# Format of the file containing the CIGAR strings.
# Usage: --input-fmt lines
# --input-fmt tab COL-NUCL COL-CIGAR
# Choices:
# 'lines': One nucleotide and CIGAR string per input line
# (use --cigar-new to assume only M instead of giving
# CIGAR input)
#
# 'tab COL-NUCL COL-CIGAR': The file is in a tabular format.
# The first line contains the column names, the following
# files contain the content. Only the contents of the
# columns named COL-NUCL and COL-CIGAR are used. Columns
# are separated by a tab character (\\t), unless another
# character is specified by the --sep argument
# """))
# p.add_argument('--change-coords', nargs=2,
# help=dedent("""\
# If an output CIGAR string begins or ends with a deletion, change
# the true read coordinates instead. Else, such reads would not be
# assigned to their true position."""))
p.add_argument('--no-header',default = False, action = 'store_true',
help="""Do not expect a table header. Nucleotide strings
are expected as first input column and CIGAR strings as
second column. If no CIGAR strings are available, use
--cigar-new in addition to this option.""")
p.add_argument('--sep', default='\t', help=dedent("""\
Character separating the input columns if the input is
in tabular format (see --input-fmt). Common choices
are '\\t' (default), ',' or ';'."""))
p.add_argument('--seed', default = None, type=int,
help = dedent("""\
Set the random number generator seed to
this value. Calls with the same input files
and seed always produce the same result."""))
args = p.parse_args(argv)
if args.sep == '\\t':
args.sep = '\t'
if len(args.sep) != 1:
raise ValueError('--sep must be followed by only one character')
if args.in_exp is not None and (args.in_exp >= 1 or args.in_exp < 0):
raise ValueError('--in-exp must be >=0 and < 1')
if args.del_exp is not None and (args.del_exp >= 1 or args.del_exp < 0):
raise ValueError('--del-exp must be >=0 and < 1')
if args.in_prob < 0 or args.in_prob > 1:
raise ValueError('--in-prob must be >= 0 and <= 1')
if args.del_prob < 0 or args.del_prob > 1:
raise ValueError('--del-prob must be >= 0 and <= 1')
return args
def main(argv):
# --- Argument checks ----------------------------------------
args = parse_arguments(argv[1:])
if args.in_prob != 0:
if args.in_exp is None:
raise ValueError("--in-prob requires --in-exp")
if args.del_prob != 0:
if args.del_exp is None:
raise ValueError("--del-prob requires --del-exp")
if args.seed is not None:
random.seed(args.seed, version=2)
# --- Input parsing --------------------------------------------
input = sys.stdin
def safe_index(l, what):
try: return l.index(what)
except ValueError: return None
if not args.no_header:
header = next(input).rstrip().split(args.sep)
i_nucl, i_cigar = \
(safe_index(header, x) for x in \
(args.col_seq, args.col_cigar))#, cn_start, cn_stop))
if i_nucl is None:
raise ValueError(('The specified nucleotide column {} does '+
'not exist. Use the --col-seq parameter to set an '+
'existing column name.').format(args.col_seq))
if i_cigar is None and not args.cigar_new:
raise ValueError(('The specified CIGAR column {} does '+
'not exist. Use the --col-cigar parameter to set an '+
'existing column name or use the --cigar-new parameter '+
'to create new CIGAR strings.').format(args.col_cigar))
#i_rest = [i for i in range(0,len(header)) if i not in (i_nucl, i_cigar)]
else:
i_nucl, i_cigar = 0, 1
#i_rest = []
rows = (s.rstrip() for s in input)
fields = (s.split(args.sep) for s in rows if s != '')
if args.cigar_new:
if not args.no_header:
if not args.col_cigar in header:
header = header + [args.col_cigar]
else:
raise ValueError((
"The column name {} for the new CIGAR column "+
"already exists in the input. Choose a new column name "+
"using the --col-cigar option or omit the --cigar-new "+
"option if CIGAR strings exist already in the input.")
.format(args.col_cigar))
step1 = ( r + [addCIGAR(r[i_nucl])] for r in fields )
i_cigar = -1 # The last element of the row
else:
step1 = fields
step3 = mutatorExpLen(step1, args.in_prob
, args.in_exp, args.del_prob, args.del_exp, (i_nucl,i_cigar))
if not args.no_header:
print(args.sep.join(header))
for x in step3:
print(args.sep.join(x))
def splitter(tuples, idxs):
"""idxs: list of tuples of indices (integer). Returns a tuple with one
element for each element i in `idxs`. Return tuple i contains all tuples[j]
for all j in idxs[i].
>>> l = [(1,2,3), (4,5,6), (7,8,9)]
>>> list(splitter(l, [(1), (2,3)]))
[((1), (2, 3)), ((4), (5, 6)), ((7), (8, 9))]
"""
for n in tuples:
yield tuple(tuple(n[j] for j in idx) for idx in idxs)
def inputNormalizer(strings, sep):
    """Strip trailing newlines, split each line into columns, and check
    that every line has the same number of columns."""
    iLine = 0
    ncols = None
    for s in strings:
        iLine += 1
        s = s.rstrip("\n")
        s = s.split(sep)
        if ncols is None: ncols = len(s)
        if len(s) != ncols:
            raise ValueError(("Invalid input in line {}: {} columns "+
                "expected on every input line. Got: {}. (Wrong column separator?)")
                .format(iLine, ncols, len(s)))
        if any(c in x for c in [' ', '\t'] for x in s):
            raise ValueError(("Invalid input in line {}: Illegal whitespace "+
                "found in nucleotide or CIGAR strings.").format(iLine))
        yield s
def addCIGAR(nucl):
return str(len(nucl))+'M'
def mutatorExpLen(inputTuples, i_prob, i_len, d_prob, d_len, idxs=(0,1) ):
"""`inputTuples` is a interable on string tuples `(nucl, cigar)`.
`cigar` may also be of class CIGAR, this way unnessecary conversions
to and from strings can be avoided if this method is applied multiple
times on the same stream of data.
Insert/delete parts of the nucleotide strings `nucl` (the short read
nucleotide strings) and document the result by changing the CIGAR
strings provided by `cigar`. Operation starts with a constant
per-character (per-base) probability and the operation length is
exponentially distributed with rate `1/i_len` or `1/d_len`,
respectively.
idxs: use these elements of the inputTuples as (nucl, cigar). The
rest is copied to the output as-is.
"""
ln2 = math.log(2)
#rndPos = lambda p: math.floor(random.expovariate(p))+1 if p != 0 else inf
#rndLen = lambda s: math.floor(random.expovariate(ln2/s))+ 1 \
# if s is not None else None
rndPos = lambda p: rGeom(p)+1 if p != 0 else inf
rndLen = lambda s: rGeom(1-s)+1 if s is not None else None
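    # rndPos draws the gap (in bp) until the next operation: geometrically
    # distributed with per-base success probability p, shifted into
    # {1, 2, ...}. rndLen draws operation lengths the same way from the
    # --in-exp/--del-exp parameter s.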
toCIGAR = lambda x: CIGAR.fromString(x) if isinstance(x,str) else x
# State automaton:
# ================
# -- Possible states:
NEXT_STRING = 'next_str' # Optain a new nucleotide string
DET_NEXT_OP = 'det_op' # Determine if in/del takes place in this string
INSERT = 'insert' # Insert a sequence and get a new insert position
DELETE = 'delete' # Delete a sequence and get a new delete position
YIELD = 'yield' # Return current, possibly modified string to the caller
# -- Queue of next states (new states can be inserted at front and at
# end)
todo = [NEXT_STRING]
# -- Current state:
inputRecord = None
l = 0
nucl, cigar = "", ""
bpToNextIns = rndPos(i_prob)-1 # only for the first time may this also be 0
bpInsLen = rndLen(i_len)
bpToNextDel = rndPos(d_prob) # same as bpToNextIns
bpDelLen = rndLen(d_len)
# -- State transitions:
# The loop ends if StopIteration is thrown by next(.)
while True:
# Corner case: if all mutation probabilities are 0, return input
# unchanged
if bpToNextIns == inf and bpToNextDel == inf:
yield next(inputTuples)
continue
#print(",".join(todo))
do = todo.pop(0) if len(todo) > 0 else YIELD
# Tie break by random choice of one of the two actions (insert or
# delete. The other action is skipped => another exp-distributed
# number is added to it.
if do == NEXT_STRING:
bpToNextIns -= l
bpToNextDel -= l
inputRecord = next(inputTuples)
nucl = inputRecord[idxs[0]]
cigar = inputRecord[idxs[1]]
l = len(nucl)
todo.append(DET_NEXT_OP)
elif do == DET_NEXT_OP:
todo.clear()
if bpToNextIns < bpToNextDel:
# Check/queue insert operation first
if bpToNextIns < l: todo.append(INSERT)
if bpToNextDel < l: todo.append(DELETE)
elif bpToNextDel < bpToNextIns:
# Check/queue delete operation first
if bpToNextDel < l: todo.append(DELETE)
if bpToNextIns < l: todo.append(INSERT)
elif bpToNextIns == bpToNextDel:
assert not (bpToNextIns == inf and bpToNextDel == inf)
# Skip one of the two operations, randomly
if random.choice([True, False]):
bpToNextIns += rndPos(i_prob)
else:
bpToNextDel += rndPos(d_prob)
todo.insert(0, DET_NEXT_OP)
else: assert False
elif do == INSERT:
nucl = insertRandom(nucl, bpToNextIns, bpInsLen)
cigar = toCIGAR(cigar)
cigar.operationAt('I',bpInsLen, bpToNextIns)
l = len(nucl) # String gets longer
# Skip the insert when calculating the bp to the next operation
bpToNextDel += bpInsLen
bpToNextIns += rndPos(i_prob) + bpInsLen
bpInsLen = rndLen(i_len)
todo.insert(0, DET_NEXT_OP)
elif do == DELETE:
# Deletion stops at end of string if delete position is
# nearer at `nucl` string end than bpDelLen
nucl = nucl[:bpToNextDel] + nucl[(bpToNextDel+bpDelLen):]
cigar = toCIGAR(cigar)
effDelLen = min(l - bpToNextDel, bpDelLen)
cigar.operationAt('D',effDelLen, bpToNextDel)
l = len(nucl) # String gets shorter
# If an insert operation is pending, it must be recalculated if
# it is still on this nucleotide string, as that just got
# shorter.
bpToNextDel += rndPos(d_prob)
bpDelLen = rndLen(d_len)
todo.insert(0, DET_NEXT_OP)
elif do == YIELD:
inputRecord[idxs[0]] = nucl
inputRecord[idxs[1]] = str(cigar)
yield tuple(inputRecord)
todo.append(NEXT_STRING)
#print((nucl, str(cigar), f'I={bpToNextIns}/{bpInsLen}, D={bpToNextDel}/{bpDelLen}'))
def insertRandom(string, pos, length):
"""Insert a random sequence into a string
>>> s = insertRandom('AAAAA', 2, 3)
>>> [s[0:2], s[5:8]]
['AA', 'AAA']
>>> all(x in 'ATCG' for x in s[2:5])
True
"""
s = "".join(random.choice(['A','T','G','C']) for _ in range(0,length))
string = string[:pos] + s + string[pos:]
return string
def rGeom(p):
"""Generate a geometrically distributed random number. p is the success
probability. The numbers are in the range {0, 1, 2,...}"""
# CDF = 1-(1-p)^(k+1) (CDF of geometric distribution)
# (1-p)^(k+1) = 1-CDF (... solve for k ...)
# k+1 = log(1-CDF)/log(1-p)
# k = (log(1-CDF)/log(1-p)) - 1
# insert a uniform random number in [0;1] for CDF to
# obtain geometrically distributed numbers
u = random.random()
if p == 1 : return 0
return math.ceil( (math.log(1-u,1-p))-1 )
def zip_samelen(*streams):
"""`streams` are multiple iterables. Does the same as zip, except if
it throws ValueError if not all streams throw StopIteration at the
same time.
>>> list(zip_samelen([1,2,3],[4,5,6]))
[(1, 4), (2, 5), (3, 6)]
>>> list(zip_samelen([1,2,3],[4,5,6,7]))
Traceback (most recent call last):
...
ValueError: The supplied inputs are of different lengths
"""
iters = [iter(s) for s in streams]
sentinel = object()
while True:
ret = tuple(next(s,sentinel) for s in iters)
if all(s is sentinel for s in ret):
            return
elif any(s is sentinel for s in ret):
# Not all streams have ended
raise ValueError("The supplied inputs are of different lengths")
else:
yield ret
def checkPositive(expectedType):
"For use in parse_arguments"
def check(value):
try:
value = expectedType(value)
if value <= 0: raise ValueError()
except ValueError:
            raise argparse.ArgumentTypeError(
                "{} is an invalid positive {} value"
                .format(value, str(expectedType)))
else:
return value
return check
if __name__ == "__main__": main(sys.argv)
# vim:tw=75
| mlell/tapas | scripts/src/indel.py | Python | mit | 16,326 | 0.011889 |
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import print_function, unicode_literals
import argparse
import re
import sys
import os
import os.path as path
import copy
from collections import OrderedDict
from time import time
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase, call, check_call, host_triple
from wptrunner import wptcommandline
from update import updatecommandline
from servo_tidy import tidy
from servo_tidy_tests import test_tidy
SCRIPT_PATH = os.path.split(__file__)[0]
PROJECT_TOPLEVEL_PATH = os.path.abspath(os.path.join(SCRIPT_PATH, "..", ".."))
WEB_PLATFORM_TESTS_PATH = os.path.join("tests", "wpt", "web-platform-tests")
SERVO_TESTS_PATH = os.path.join("tests", "wpt", "mozilla", "tests")
TEST_SUITES = OrderedDict([
("tidy", {"kwargs": {"all_files": False, "no_progress": False, "self_test": False},
"include_arg": "include"}),
("wpt", {"kwargs": {"release": False},
"paths": [path.abspath(WEB_PLATFORM_TESTS_PATH),
path.abspath(SERVO_TESTS_PATH)],
"include_arg": "include"}),
("css", {"kwargs": {"release": False},
"paths": [path.abspath(path.join("tests", "wpt", "css-tests"))],
"include_arg": "include"}),
("unit", {"kwargs": {},
"paths": [path.abspath(path.join("tests", "unit"))],
"include_arg": "test_name"}),
("compiletest", {"kwargs": {"release": False},
"paths": [path.abspath(path.join("tests", "compiletest"))],
"include_arg": "test_name"})
])
TEST_SUITES_BY_PREFIX = {path: k for k, v in TEST_SUITES.iteritems() if "paths" in v for path in v["paths"]}
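# Example: a path under tests/wpt/web-platform-tests maps back to the "wpt"
# suite; this is how suite_for_path() resolves test paths given on the
# command line.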
def create_parser_wpt():
parser = wptcommandline.create_parser()
parser.add_argument('--release', default=False, action="store_true",
help="Run with a release build of servo")
parser.add_argument('--chaos', default=False, action="store_true",
help="Run under chaos mode in rr until a failure is captured")
parser.add_argument('--pref', default=[], action="append", dest="prefs",
help="Pass preferences to servo")
return parser
@CommandProvider
class MachCommands(CommandBase):
DEFAULT_RENDER_MODE = "cpu"
HELP_RENDER_MODE = "Value can be 'cpu', 'gpu' or 'both' (default " + DEFAULT_RENDER_MODE + ")"
def __init__(self, context):
CommandBase.__init__(self, context)
if not hasattr(self.context, "built_tests"):
self.context.built_tests = False
@Command('test',
description='Run specified Servo tests',
category='testing')
@CommandArgument('params', default=None, nargs="...",
help="Optionally select test based on "
"test file directory")
@CommandArgument('--render-mode', '-rm', default=DEFAULT_RENDER_MODE,
help="The render mode to be used on all tests. " +
HELP_RENDER_MODE)
@CommandArgument('--release', default=False, action="store_true",
help="Run with a release build of servo")
@CommandArgument('--tidy-all', default=False, action="store_true",
help="Check all files, and run the WPT lint in tidy, "
"even if unchanged")
@CommandArgument('--no-progress', default=False, action="store_true",
help="Don't show progress for tidy")
@CommandArgument('--self-test', default=False, action="store_true",
help="Run unit tests for tidy")
@CommandArgument('--all', default=False, action="store_true", dest="all_suites",
help="Run all test suites")
def test(self, params, render_mode=DEFAULT_RENDER_MODE, release=False, tidy_all=False,
no_progress=False, self_test=False, all_suites=False):
suites = copy.deepcopy(TEST_SUITES)
suites["tidy"]["kwargs"] = {"all_files": tidy_all, "no_progress": no_progress, "self_test": self_test}
suites["wpt"]["kwargs"] = {"release": release}
suites["css"]["kwargs"] = {"release": release}
suites["unit"]["kwargs"] = {}
suites["compiletest"]["kwargs"] = {"release": release}
selected_suites = OrderedDict()
if params is None:
if all_suites:
params = suites.keys()
else:
print("Specify a test path or suite name, or pass --all to run all test suites.\n\nAvailable suites:")
for s in suites:
print(" %s" % s)
return 1
for arg in params:
found = False
if arg in suites and arg not in selected_suites:
selected_suites[arg] = []
found = True
else:
suite = self.suite_for_path(arg)
if suite is not None:
if suite not in selected_suites:
selected_suites[suite] = []
selected_suites[suite].append(arg)
                    found = True
if not found:
print("%s is not a valid test path or suite name" % arg)
return 1
test_start = time()
for suite, tests in selected_suites.iteritems():
props = suites[suite]
kwargs = props.get("kwargs", {})
if tests:
kwargs[props["include_arg"]] = tests
Registrar.dispatch("test-%s" % suite, context=self.context, **kwargs)
elapsed = time() - test_start
print("Tests completed in %0.2fs" % elapsed)
# Helper to determine which test suite owns the path
def suite_for_path(self, path_arg):
if os.path.exists(path.abspath(path_arg)):
abs_path = path.abspath(path_arg)
for prefix, suite in TEST_SUITES_BY_PREFIX.iteritems():
if abs_path.startswith(prefix):
return suite
return None
@Command('test-geckolib',
description='Test geckolib sanity checks',
category='testing')
def test_geckolib(self):
self.ensure_bootstrapped()
env = self.build_env()
env["RUST_BACKTRACE"] = "1"
return call(["cargo", "test"], env=env, cwd=path.join("ports", "geckolib"))
@Command('test-unit',
description='Run unit tests',
category='testing')
@CommandArgument('--package', '-p', default=None, help="Specific package to test")
@CommandArgument('test_name', nargs=argparse.REMAINDER,
help="Only run tests that match this pattern or file path")
def test_unit(self, test_name=None, package=None):
if test_name is None:
test_name = []
self.ensure_bootstrapped()
if package:
packages = {package}
else:
packages = set()
test_patterns = []
for test in test_name:
# add package if 'tests/unit/<package>'
match = re.search("tests/unit/(\\w+)/?$", test)
if match:
packages.add(match.group(1))
# add package & test if '<package>/<test>', 'tests/unit/<package>/<test>.rs', or similar
elif re.search("\\w/\\w", test):
tokens = test.split("/")
packages.add(tokens[-2])
test_prefix = tokens[-1]
if test_prefix.endswith(".rs"):
test_prefix = test_prefix[:-3]
test_prefix += "::"
test_patterns.append(test_prefix)
# add test as-is otherwise
else:
test_patterns.append(test)
if not packages:
packages = set(os.listdir(path.join(self.context.topdir, "tests", "unit")))
args = ["cargo", "test"]
for crate in packages:
args += ["-p", "%s_tests" % crate]
args += test_patterns
features = self.servo_features()
if features:
args += ["--features", "%s" % ' '.join(features)]
env = self.build_env()
env["RUST_BACKTRACE"] = "1"
if sys.platform in ("win32", "msys"):
if "msvc" in host_triple():
# on MSVC, we need some DLLs in the path. They were copied
# in to the servo.exe build dir, so just point PATH to that.
env["PATH"] = "%s%s%s" % (path.dirname(self.get_binary_path(False, False)), os.pathsep, env["PATH"])
else:
env["RUSTFLAGS"] = "-C link-args=-Wl,--subsystem,windows"
result = call(args, env=env, cwd=self.servo_crate())
if result != 0:
return result
@Command('test-compiletest',
description='Run compiletests',
category='testing')
@CommandArgument('--package', '-p', default=None, help="Specific package to test")
@CommandArgument('test_name', nargs=argparse.REMAINDER,
help="Only run tests that match this pattern or file path")
@CommandArgument('--release', default=False, action="store_true",
help="Run with a release build of servo")
def test_compiletest(self, test_name=None, package=None, release=False):
if test_name is None:
test_name = []
self.ensure_bootstrapped()
if package:
packages = {package}
else:
packages = set()
test_patterns = []
for test in test_name:
# add package if 'tests/compiletest/<package>'
match = re.search("tests/compiletest/(\\w+)/?$", test)
if match:
packages.add(match.group(1))
# add package & test if '<package>/<test>', 'tests/compiletest/<package>/<test>.rs', or similar
elif re.search("\\w/\\w", test):
tokens = test.split("/")
packages.add(tokens[-2])
test_prefix = tokens[-1]
if test_prefix.endswith(".rs"):
test_prefix = test_prefix[:-3]
test_prefix += "::"
test_patterns.append(test_prefix)
# add test as-is otherwise
else:
test_patterns.append(test)
if not packages:
packages = set(os.listdir(path.join(self.context.topdir, "tests", "compiletest")))
packages.remove("helper")
args = ["cargo", "test"]
for crate in packages:
args += ["-p", "%s_compiletest" % crate]
args += test_patterns
env = self.build_env()
if release:
env["BUILD_MODE"] = "release"
args += ["--release"]
else:
env["BUILD_MODE"] = "debug"
result = call(args, env=env, cwd=self.servo_crate())
if result != 0:
return result
@Command('test-content',
description='Run the content tests',
category='testing')
def test_content(self):
print("Content tests have been replaced by web-platform-tests under "
"tests/wpt/mozilla/.")
return 0
@Command('test-tidy',
description='Run the source code tidiness check',
category='testing')
@CommandArgument('--all', default=False, action="store_true", dest="all_files",
help="Check all files, and run the WPT lint in tidy, "
"even if unchanged")
@CommandArgument('--no-progress', default=False, action="store_true",
help="Don't show progress for tidy")
@CommandArgument('--self-test', default=False, action="store_true",
help="Run unit tests for tidy")
def test_tidy(self, all_files, no_progress, self_test):
if self_test:
return test_tidy.do_tests()
else:
return tidy.scan(not all_files, not no_progress)
@Command('test-webidl',
description='Run the WebIDL parser tests',
category='testing')
@CommandArgument('--quiet', '-q', default=False, action="store_true",
help="Don't print passing tests.")
@CommandArgument('tests', default=None, nargs="...",
help="Specific tests to run, relative to the tests directory")
def test_webidl(self, quiet, tests):
self.ensure_bootstrapped()
test_file_dir = path.abspath(path.join(PROJECT_TOPLEVEL_PATH, "components", "script",
"dom", "bindings", "codegen", "parser"))
# For the `import WebIDL` in runtests.py
sys.path.insert(0, test_file_dir)
run_file = path.abspath(path.join(test_file_dir, "runtests.py"))
run_globals = {"__file__": run_file}
execfile(run_file, run_globals)
verbose = not quiet
return run_globals["run_tests"](tests, verbose)
@Command('test-wpt-failure',
description='Run the web platform tests',
category='testing')
def test_wpt_failure(self):
self.ensure_bootstrapped()
return not call([
"bash",
path.join("tests", "wpt", "run.sh"),
"--no-pause-after-test",
"--include",
"infrastructure/failing-test.html"
], env=self.build_env())
@Command('test-wpt',
description='Run the web platform tests',
category='testing',
parser=create_parser_wpt)
def test_wpt(self, **kwargs):
self.ensure_bootstrapped()
return self.run_test_list_or_dispatch(kwargs["test_list"], "wpt", self._test_wpt, **kwargs)
def _test_wpt(self, **kwargs):
hosts_file_path = path.join(self.context.topdir, 'tests', 'wpt', 'hosts')
os.environ["hosts_file_path"] = hosts_file_path
run_file = path.abspath(path.join(self.context.topdir, "tests", "wpt", "run_wpt.py"))
return self.wptrunner(run_file, **kwargs)
# Helper to ensure all specified paths are handled, otherwise dispatch to appropriate test suite.
def run_test_list_or_dispatch(self, requested_paths, correct_suite, correct_function, **kwargs):
if not requested_paths:
return correct_function(**kwargs)
else:
# Paths specified on command line. Ensure they can be handled, re-dispatch otherwise.
all_handled = True
for test_path in requested_paths:
suite = self.suite_for_path(test_path)
if suite is not None and correct_suite != suite:
all_handled = False
print("Warning: %s is not a %s test. Delegating to test-%s." % (test_path, correct_suite, suite))
if all_handled:
return correct_function(**kwargs)
else:
# Dispatch each test to the correct suite via test()
Registrar.dispatch("test", context=self.context, params=requested_paths)
# Helper for test_css and test_wpt:
def wptrunner(self, run_file, **kwargs):
os.environ["RUST_BACKTRACE"] = "1"
kwargs["debug"] = not kwargs["release"]
if kwargs.pop("chaos"):
kwargs["debugger"] = "rr"
kwargs["debugger_args"] = "record --chaos"
kwargs["repeat_until_unexpected"] = True
# TODO: Delete rr traces from green test runs?
prefs = kwargs.pop("prefs")
if prefs:
binary_args = []
for pref in prefs:
binary_args.append("--pref=" + pref)
kwargs["binary_args"] = binary_args
run_globals = {"__file__": run_file}
execfile(run_file, run_globals)
return run_globals["run_tests"](**kwargs)
@Command('update-manifest',
description='run test-wpt --manifest-update SKIP_TESTS to regenerate MANIFEST.json',
category='testing',
parser=create_parser_wpt)
def update_manifest(self, **kwargs):
kwargs['test_list'].append(str('SKIP_TESTS'))
kwargs['manifest_update'] = True
return self.test_wpt(**kwargs)
@Command('update-wpt',
description='Update the web platform tests',
category='testing',
parser=updatecommandline.create_parser())
@CommandArgument('--patch', action='store_true', default=False,
help='Create an mq patch or git commit containing the changes')
def update_wpt(self, patch, **kwargs):
self.ensure_bootstrapped()
run_file = path.abspath(path.join("tests", "wpt", "update.py"))
kwargs["no_patch"] = not patch
if kwargs["no_patch"] and kwargs["sync"]:
print("Are you sure you don't want a patch?")
return 1
run_globals = {"__file__": run_file}
execfile(run_file, run_globals)
return run_globals["update_tests"](**kwargs)
@Command('test-jquery',
description='Run the jQuery test suite',
category='testing')
@CommandArgument('--release', '-r', action='store_true',
help='Run the release build')
@CommandArgument('--dev', '-d', action='store_true',
help='Run the dev build')
def test_jquery(self, release, dev):
return self.jquery_test_runner("test", release, dev)
@Command('test-dromaeo',
description='Run the Dromaeo test suite',
category='testing')
@CommandArgument('tests', default=["recommended"], nargs="...",
help="Specific tests to run")
@CommandArgument('--release', '-r', action='store_true',
help='Run the release build')
@CommandArgument('--dev', '-d', action='store_true',
help='Run the dev build')
def test_dromaeo(self, tests, release, dev):
return self.dromaeo_test_runner(tests, release, dev)
@Command('update-jquery',
description='Update the jQuery test suite expected results',
category='testing')
@CommandArgument('--release', '-r', action='store_true',
help='Run the release build')
@CommandArgument('--dev', '-d', action='store_true',
help='Run the dev build')
def update_jquery(self, release, dev):
return self.jquery_test_runner("update", release, dev)
@Command('test-css',
description='Run the web platform tests',
category='testing',
parser=create_parser_wpt)
def test_css(self, **kwargs):
self.ensure_bootstrapped()
return self.run_test_list_or_dispatch(kwargs["test_list"], "css", self._test_css, **kwargs)
def _test_css(self, **kwargs):
run_file = path.abspath(path.join("tests", "wpt", "run_css.py"))
return self.wptrunner(run_file, **kwargs)
@Command('update-css',
description='Update the web platform tests',
category='testing',
parser=updatecommandline.create_parser())
@CommandArgument('--patch', action='store_true', default=False,
help='Create an mq patch or git commit containing the changes')
def update_css(self, patch, **kwargs):
self.ensure_bootstrapped()
run_file = path.abspath(path.join("tests", "wpt", "update_css.py"))
kwargs["no_patch"] = not patch
if kwargs["no_patch"] and kwargs["sync"]:
print("Are you sure you don't want a patch?")
return 1
run_globals = {"__file__": run_file}
execfile(run_file, run_globals)
return run_globals["update_tests"](**kwargs)
@Command('compare_dromaeo',
description='compare outputs of two runs of ./mach test-dromaeo command',
category='testing')
@CommandArgument('params', default=None, nargs="...",
help=" filepaths of output files of two runs of dromaeo test ")
def compare_dromaeo(self, params):
prev_op_filename = params[0]
cur_op_filename = params[1]
result = {'Test': [], 'Prev_Time': [], 'Cur_Time': [], 'Difference(%)': []}
with open(prev_op_filename, 'r') as prev_op, open(cur_op_filename, 'r') as cur_op:
l1 = prev_op.readline()
l2 = cur_op.readline()
while ((l1.find('[dromaeo] Saving...') and l2.find('[dromaeo] Saving...'))):
l1 = prev_op.readline()
l2 = cur_op.readline()
reach = 3
while (reach > 0):
l1 = prev_op.readline()
l2 = cur_op.readline()
reach -= 1
while True:
l1 = prev_op.readline()
l2 = cur_op.readline()
if not l1:
break
result['Test'].append(str(l1).split('|')[0].strip())
result['Prev_Time'].append(float(str(l1).split('|')[1].strip()))
result['Cur_Time'].append(float(str(l2).split('|')[1].strip()))
a = float(str(l1).split('|')[1].strip())
b = float(str(l2).split('|')[1].strip())
result['Difference(%)'].append(((b - a) / a) * 100)
width_col1 = max([len(x) for x in result['Test']])
width_col2 = max([len(str(x)) for x in result['Prev_Time']])
width_col3 = max([len(str(x)) for x in result['Cur_Time']])
width_col4 = max([len(str(x)) for x in result['Difference(%)']])
for p, q, r, s in zip(['Test'], ['First Run'], ['Second Run'], ['Difference(%)']):
print ("\033[1m" + "{}|{}|{}|{}".format(p.ljust(width_col1), q.ljust(width_col2), r.ljust(width_col3),
s.ljust(width_col4)) + "\033[0m" + "\n" + "--------------------------------------------------"
+ "-------------------------------------------------------------------------")
for a1, b1, c1, d1 in zip(result['Test'], result['Prev_Time'], result['Cur_Time'], result['Difference(%)']):
if d1 > 0:
print ("\033[91m" + "{}|{}|{}|{}".format(a1.ljust(width_col1),
str(b1).ljust(width_col2), str(c1).ljust(width_col3), str(d1).ljust(width_col4)) + "\033[0m")
elif d1 < 0:
print ("\033[92m" + "{}|{}|{}|{}".format(a1.ljust(width_col1),
str(b1).ljust(width_col2), str(c1).ljust(width_col3), str(d1).ljust(width_col4)) + "\033[0m")
else:
print ("{}|{}|{}|{}".format(a1.ljust(width_col1), str(b1).ljust(width_col2),
str(c1).ljust(width_col3), str(d1).ljust(width_col4)))
def jquery_test_runner(self, cmd, release, dev):
self.ensure_bootstrapped()
base_dir = path.abspath(path.join("tests", "jquery"))
jquery_dir = path.join(base_dir, "jquery")
run_file = path.join(base_dir, "run_jquery.py")
# Clone the jQuery repository if it doesn't exist
if not os.path.isdir(jquery_dir):
check_call(
["git", "clone", "-b", "servo", "--depth", "1", "https://github.com/servo/jquery", jquery_dir])
# Run pull in case the jQuery repo was updated since last test run
check_call(
["git", "-C", jquery_dir, "pull"])
# Check that a release servo build exists
bin_path = path.abspath(self.get_binary_path(release, dev))
return call([run_file, cmd, bin_path, base_dir])
def dromaeo_test_runner(self, tests, release, dev):
self.ensure_bootstrapped()
base_dir = path.abspath(path.join("tests", "dromaeo"))
dromaeo_dir = path.join(base_dir, "dromaeo")
run_file = path.join(base_dir, "run_dromaeo.py")
# Clone the Dromaeo repository if it doesn't exist
if not os.path.isdir(dromaeo_dir):
check_call(
["git", "clone", "-b", "servo", "--depth", "1", "https://github.com/notriddle/dromaeo", dromaeo_dir])
# Run pull in case the Dromaeo repo was updated since last test run
check_call(
["git", "-C", dromaeo_dir, "pull"])
# Compile test suite
check_call(
["make", "-C", dromaeo_dir, "web"])
# Check that a release servo build exists
bin_path = path.abspath(self.get_binary_path(release, dev))
return check_call(
[run_file, "|".join(tests), bin_path, base_dir])
def create_parser_create():
import argparse
p = argparse.ArgumentParser()
p.add_argument("--no-editor", action="store_true",
help="Don't try to open the test in an editor")
p.add_argument("-e", "--editor", action="store", help="Editor to use")
p.add_argument("--no-run", action="store_true",
help="Don't try to update the wpt manifest or open the test in a browser")
p.add_argument('--release', action="store_true",
help="Run with a release build of servo")
p.add_argument("--long-timeout", action="store_true",
help="Test should be given a long timeout (typically 60s rather than 10s,"
"but varies depending on environment)")
p.add_argument("--overwrite", action="store_true",
help="Allow overwriting an existing test file")
p.add_argument("-r", "--reftest", action="store_true",
help="Create a reftest rather than a testharness (js) test"),
p.add_argument("-ref", "--reference", dest="ref", help="Path to the reference file")
p.add_argument("--mismatch", action="store_true",
help="Create a mismatch reftest")
p.add_argument("--wait", action="store_true",
help="Create a reftest that waits until takeScreenshot() is called")
p.add_argument("path", action="store", help="Path to the test file")
return p
@CommandProvider
class WebPlatformTestsCreator(CommandBase):
template_prefix = """<!doctype html>
%(documentElement)s<meta charset="utf-8">
"""
template_long_timeout = "<meta name=timeout content=long>\n"
template_body_th = """<title></title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
</script>
"""
template_body_reftest = """<title></title>
<link rel="%(match)s" href="%(ref)s">
"""
template_body_reftest_wait = """<script src="/common/reftest-wait.js"></script>
"""
def make_test_file_url(self, absolute_file_path):
# Make the path relative to the project top-level directory so that
# we can more easily find the right test directory.
file_path = os.path.relpath(absolute_file_path, PROJECT_TOPLEVEL_PATH)
if file_path.startswith(WEB_PLATFORM_TESTS_PATH):
url = file_path[len(WEB_PLATFORM_TESTS_PATH):]
elif file_path.startswith(SERVO_TESTS_PATH):
url = "/mozilla" + file_path[len(SERVO_TESTS_PATH):]
else: # This test file isn't in any known test directory.
return None
return url.replace(os.path.sep, "/")
def make_test_and_reference_urls(self, test_path, reference_path):
test_path = os.path.normpath(os.path.abspath(test_path))
test_url = self.make_test_file_url(test_path)
if test_url is None:
return (None, None)
if reference_path is None:
return (test_url, '')
reference_path = os.path.normpath(os.path.abspath(reference_path))
# If the reference is in the same directory, the URL can just be the
        # name of the reference file itself.
reference_path_parts = os.path.split(reference_path)
if reference_path_parts[0] == os.path.split(test_path)[0]:
return (test_url, reference_path_parts[1])
return (test_url, self.make_test_file_url(reference_path))
@Command("create-wpt",
category="testing",
parser=create_parser_create)
def run_create(self, **kwargs):
import subprocess
test_path = kwargs["path"]
reference_path = kwargs["ref"]
if reference_path:
kwargs["reftest"] = True
(test_url, reference_url) = self.make_test_and_reference_urls(
test_path, reference_path)
if test_url is None:
print("""Test path %s is not in wpt directories:
tests/wpt/web-platform-tests for tests that may be shared
tests/wpt/mozilla/tests for Servo-only tests""" % test_path)
return 1
if reference_url is None:
print("""Reference path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
return 1
if os.path.exists(test_path) and not kwargs["overwrite"]:
print("Test path already exists, pass --overwrite to replace")
return 1
if kwargs["mismatch"] and not kwargs["reftest"]:
print("--mismatch only makes sense for a reftest")
return 1
if kwargs["wait"] and not kwargs["reftest"]:
print("--wait only makes sense for a reftest")
return 1
args = {"documentElement": "<html class=\"reftest-wait\">\n" if kwargs["wait"] else ""}
template = self.template_prefix % args
if kwargs["long_timeout"]:
template += self.template_long_timeout
if kwargs["reftest"]:
args = {"match": "match" if not kwargs["mismatch"] else "mismatch",
"ref": reference_url}
template += self.template_body_reftest % args
if kwargs["wait"]:
template += self.template_body_reftest_wait
else:
template += self.template_body_th
with open(test_path, "w") as f:
f.write(template)
if kwargs["no_editor"]:
editor = None
elif kwargs["editor"]:
editor = kwargs["editor"]
elif "VISUAL" in os.environ:
editor = os.environ["VISUAL"]
elif "EDITOR" in os.environ:
editor = os.environ["EDITOR"]
else:
editor = None
if editor:
proc = subprocess.Popen("%s %s" % (editor, test_path), shell=True)
if not kwargs["no_run"]:
p = create_parser_wpt()
args = ["--manifest-update"]
if kwargs["release"]:
args.append("--release")
args.append(test_path)
wpt_kwargs = vars(p.parse_args(args))
self.context.commands.dispatch("test-wpt", self.context, **wpt_kwargs)
if editor:
proc.wait()
@Command('update-net-cookies',
description='Update the net unit tests with cookie tests from http-state',
category='testing')
def update_net_cookies(self):
cache_dir = path.join(self.config["tools"]["cache-dir"], "tests")
run_file = path.abspath(path.join(PROJECT_TOPLEVEL_PATH,
"tests", "unit", "net",
"cookie_http_state_utils.py"))
run_globals = {"__file__": run_file}
execfile(run_file, run_globals)
return run_globals["update_test_file"](cache_dir)
| amarant/servo | python/servo/testing_commands.py | Python | mpl-2.0 | 31,871 | 0.002134 |
from .. import DStateSpace
def extension_method(cls):
def decorator(func):
setattr(cls, func.__name__, func)
return func
return decorator
def extension_class(name, bases, namespace):
assert len(bases) == 1, "Exactly one base class required"
base = bases[0]
for name, value in namespace.iteritems():
if name != "__metaclass__":
setattr(base, name, value)
return base
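# Illustrative note: a class that sets `__metaclass__ = extension_class`
# does not become a new type of its own; its namespace is grafted onto its
# single base class, so IdentDStateSpace below effectively adds `fit` to
# DStateSpace itself.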
class IdentDStateSpace(DStateSpace):
__metaclass__ = extension_class
def fit(self, u, y):
pass | Ajoo/pySystems | pysystems/ident/systems.py | Python | mit | 549 | 0.009107 |
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tagging', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='tagavailablevalues',
options={'ordering': ('id',), 'verbose_name': 'available tag value'},
),
migrations.AlterModelOptions(
name='tagcategories',
options={'ordering': ('title',), 'verbose_name': 'tag category', 'verbose_name_plural': 'tag categories'},
),
]
| edx-solutions/edx-platform | cms/lib/xblock/tagging/migrations/0002_auto_20170116_1541.py | Python | agpl-3.0 | 571 | 0.003503 |
from typing import List, Set, Tuple, Dict
import numpy
from allennlp.common.checks import ConfigurationError
def decode_mst(
energy: numpy.ndarray, length: int, has_labels: bool = True
) -> Tuple[numpy.ndarray, numpy.ndarray]:
"""
Note: Counter to typical intuition, this function decodes the _maximum_
spanning tree.
Decode the optimal MST tree with the Chu-Liu-Edmonds algorithm for
maximum spanning arborescences on graphs.
# Parameters
energy : `numpy.ndarray`, required.
A tensor with shape (num_labels, timesteps, timesteps)
containing the energy of each edge. If has_labels is `False`,
the tensor should have shape (timesteps, timesteps) instead.
length : `int`, required.
The length of this sequence, as the energy may have come
from a padded batch.
has_labels : `bool`, optional, (default = `True`)
Whether the graph has labels or not.
"""
if has_labels and energy.ndim != 3:
raise ConfigurationError("The dimension of the energy array is not equal to 3.")
elif not has_labels and energy.ndim != 2:
raise ConfigurationError("The dimension of the energy array is not equal to 2.")
input_shape = energy.shape
max_length = input_shape[-1]
# Our energy matrix might have been batched -
# here we clip it to contain only non padded tokens.
if has_labels:
energy = energy[:, :length, :length]
# get best label for each edge.
label_id_matrix = energy.argmax(axis=0)
energy = energy.max(axis=0)
else:
energy = energy[:length, :length]
label_id_matrix = None
# get original score matrix
original_score_matrix = energy
# initialize score matrix to original score matrix
score_matrix = numpy.array(original_score_matrix, copy=True)
old_input = numpy.zeros([length, length], dtype=numpy.int32)
old_output = numpy.zeros([length, length], dtype=numpy.int32)
current_nodes = [True for _ in range(length)]
representatives: List[Set[int]] = []
for node1 in range(length):
original_score_matrix[node1, node1] = 0.0
score_matrix[node1, node1] = 0.0
representatives.append({node1})
for node2 in range(node1 + 1, length):
old_input[node1, node2] = node1
old_output[node1, node2] = node2
old_input[node2, node1] = node2
old_output[node2, node1] = node1
final_edges: Dict[int, int] = {}
    # The main algorithm operates in place.
chu_liu_edmonds(
length, score_matrix, current_nodes, final_edges, old_input, old_output, representatives
)
heads = numpy.zeros([max_length], numpy.int32)
if has_labels:
head_type = numpy.ones([max_length], numpy.int32)
else:
head_type = None
for child, parent in final_edges.items():
heads[child] = parent
if has_labels:
head_type[child] = label_id_matrix[parent, child]
return heads, head_type
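# Minimal usage sketch (illustrative values, kept as a comment so the module
# has no import-time side effects):
#
#     # Edge scores energy[i, j] for edges i -> j; node 0 is the root.
#     energy = numpy.array([[0.0, 9.0, 9.0],
#                           [0.0, 0.0, 3.0],
#                           [0.0, 0.0, 0.0]])
#     heads, _ = decode_mst(energy, length=3, has_labels=False)
#     # heads[1] == 0 and heads[2] == 0: both tokens attach to the root.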
def chu_liu_edmonds(
length: int,
score_matrix: numpy.ndarray,
current_nodes: List[bool],
final_edges: Dict[int, int],
old_input: numpy.ndarray,
old_output: numpy.ndarray,
representatives: List[Set[int]],
):
"""
Applies the chu-liu-edmonds algorithm recursively
to a graph with edge weights defined by score_matrix.
Note that this function operates in place, so variables
will be modified.
# Parameters
length : `int`, required.
The number of nodes.
score_matrix : `numpy.ndarray`, required.
The score matrix representing the scores for pairs
of nodes.
current_nodes : `List[bool]`, required.
The nodes which are representatives in the graph.
        A representative at its most basic represents a node,
but as the algorithm progresses, individual nodes will
represent collapsed cycles in the graph.
final_edges : `Dict[int, int]`, required.
An empty dictionary which will be populated with the
nodes which are connected in the maximum spanning tree.
old_input : `numpy.ndarray`, required.
old_output : `numpy.ndarray`, required.
representatives : `List[Set[int]]`, required.
A list containing the nodes that a particular node
is representing at this iteration in the graph.
# Returns
Nothing - all variables are modified in place.
"""
# Set the initial graph to be the greedy best one.
parents = [-1]
for node1 in range(1, length):
parents.append(0)
if current_nodes[node1]:
max_score = score_matrix[0, node1]
for node2 in range(1, length):
if node2 == node1 or not current_nodes[node2]:
continue
new_score = score_matrix[node2, node1]
if new_score > max_score:
max_score = new_score
parents[node1] = node2
# Check if this solution has a cycle.
has_cycle, cycle = _find_cycle(parents, length, current_nodes)
# If there are no cycles, find all edges and return.
if not has_cycle:
final_edges[0] = -1
for node in range(1, length):
if not current_nodes[node]:
continue
parent = old_input[parents[node], node]
child = old_output[parents[node], node]
final_edges[child] = parent
return
# Otherwise, we have a cycle so we need to remove an edge.
# From here until the recursive call is the contraction stage of the algorithm.
cycle_weight = 0.0
# Find the weight of the cycle.
index = 0
for node in cycle:
index += 1
cycle_weight += score_matrix[parents[node], node]
# For each node in the graph, find the maximum weight incoming
# and outgoing edge into the cycle.
cycle_representative = cycle[0]
for node in range(length):
if not current_nodes[node] or node in cycle:
continue
in_edge_weight = float("-inf")
in_edge = -1
out_edge_weight = float("-inf")
out_edge = -1
for node_in_cycle in cycle:
if score_matrix[node_in_cycle, node] > in_edge_weight:
in_edge_weight = score_matrix[node_in_cycle, node]
in_edge = node_in_cycle
# Add the new edge score to the cycle weight
# and subtract the edge we're considering removing.
score = (
cycle_weight
+ score_matrix[node, node_in_cycle]
- score_matrix[parents[node_in_cycle], node_in_cycle]
)
if score > out_edge_weight:
out_edge_weight = score
out_edge = node_in_cycle
score_matrix[cycle_representative, node] = in_edge_weight
old_input[cycle_representative, node] = old_input[in_edge, node]
old_output[cycle_representative, node] = old_output[in_edge, node]
score_matrix[node, cycle_representative] = out_edge_weight
old_output[node, cycle_representative] = old_output[node, out_edge]
old_input[node, cycle_representative] = old_input[node, out_edge]
# For the next recursive iteration, we want to consider the cycle as a
# single node. Here we collapse the cycle into the first node in the
    # cycle (first node is arbitrary) and mark all the other nodes so they
    # are not considered in the next iteration. We also keep track of which
# representatives we are considering this iteration because we need
# them below to check if we're done.
considered_representatives: List[Set[int]] = []
for i, node_in_cycle in enumerate(cycle):
considered_representatives.append(set())
if i > 0:
# We need to consider at least one
# node in the cycle, arbitrarily choose
# the first.
current_nodes[node_in_cycle] = False
for node in representatives[node_in_cycle]:
considered_representatives[i].add(node)
if i > 0:
representatives[cycle_representative].add(node)
chu_liu_edmonds(
length, score_matrix, current_nodes, final_edges, old_input, old_output, representatives
)
# Expansion stage.
# check each node in cycle, if one of its representatives
# is a key in the final_edges, it is the one we need.
found = False
key_node = -1
for i, node in enumerate(cycle):
for cycle_rep in considered_representatives[i]:
if cycle_rep in final_edges:
key_node = node
found = True
break
if found:
break
previous = parents[key_node]
while previous != key_node:
child = old_output[parents[previous], previous]
parent = old_input[parents[previous], previous]
final_edges[child] = parent
previous = parents[previous]
def _find_cycle(
parents: List[int], length: int, current_nodes: List[bool]
) -> Tuple[bool, List[int]]:
added = [False for _ in range(length)]
added[0] = True
cycle = set()
has_cycle = False
for i in range(1, length):
if has_cycle:
break
# don't redo nodes we've already
# visited or aren't considering.
if added[i] or not current_nodes[i]:
continue
# Initialize a new possible cycle.
this_cycle = set()
this_cycle.add(i)
added[i] = True
has_cycle = True
next_node = i
while parents[next_node] not in this_cycle:
next_node = parents[next_node]
# If we see a node we've already processed,
# we can stop, because the node we are
# processing would have been in that cycle.
if added[next_node]:
has_cycle = False
break
added[next_node] = True
this_cycle.add(next_node)
if has_cycle:
original = next_node
cycle.add(original)
next_node = parents[original]
while next_node != original:
cycle.add(next_node)
next_node = parents[next_node]
break
return has_cycle, list(cycle)
| allenai/allennlp | allennlp/nn/chu_liu_edmonds.py | Python | apache-2.0 | 10,283 | 0.000486 |
# -*- coding: utf-8 -*-
'''
Mepinta
Copyright (c) 2011-2012, Joaquin G. Duo
This file is part of Mepinta.
Mepinta is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Mepinta is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Mepinta. If not, see <http://www.gnu.org/licenses/>.
'''
def isiterable(instance):
    try:
        iter(instance)
        return True
    except TypeError:
        return False
if __name__ == "__main__":
    # Smoke test: lists and strings are iterable, plain ints are not.
    print(isiterable([]))     # True
    print(isiterable("foo"))  # True
    print(isiterable(10))     # False
| joaduo/mepinta | core/python_core/common/type_checking/isiterable.py | Python | gpl-3.0 | 972 | 0 |
#
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import threading
import uuid
import dcm.agent.utils as agent_utils
from dcm.agent.events.globals import global_space as dcm_events
_g_logger = logging.getLogger(__name__)
_g_message_uuid = str(uuid.uuid4()).split("-")[0]
_g_message_id_count = 0
_g_guid_lock = threading.RLock()
def new_message_id():
# note: using uuid here caused deadlock in tests
global _g_message_id_count
global _g_message_uuid
_g_guid_lock.acquire()
try:
_g_message_id_count = _g_message_id_count + 1
finally:
_g_guid_lock.release()
return _g_message_uuid + str(_g_message_id_count)
class MessageTimer(object):
def __init__(self, timeout, callback, message_doc):
self._send_doc = message_doc
self._timeout = timeout
self._cb = callback
self._timer = None
self._lock = threading.RLock()
self.message_id = message_doc['message_id']
def lock(self):
self._lock.acquire()
def unlock(self):
self._lock.release()
@agent_utils.class_method_sync
def send(self, conn):
_g_logger.info("Resending reply to %s" % self._send_doc["request_id"])
self._send_doc['entity'] = "timer"
conn.send(self._send_doc)
self._timer = dcm_events.register_callback(
self._cb, args=[self], delay=self._timeout)
@agent_utils.class_method_sync
def cancel(self):
if self._timer is None:
return
dcm_events.cancel_callback(self._timer)
self._timer = None
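# Illustrative sketch (not part of the original module): a typical
# MessageTimer lifecycle. `conn` stands for any object with a send(doc)
# method, which is all MessageTimer.send() relies on above.
#
#   doc = {"message_id": new_message_id(), "request_id": "req-1"}
#   timer = MessageTimer(timeout=30, callback=lambda t: t.cancel(),
#                        message_doc=doc)
#   timer.send(conn)    # resends the reply and schedules the callback
#   timer.cancel()      # cancels the pending callback, if any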
| JPWKU/unix-agent | src/dcm/agent/messaging/utils.py | Python | apache-2.0 | 2,118 | 0 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras core layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class DropoutLayersTest(keras_parameterized.TestCase):
def test_dropout(self):
testing_utils.layer_test(
keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Dropout,
kwargs={'rate': 0.5,
'noise_shape': [3, 1]},
input_shape=(3, 2))
def test_dropout_supports_masking(self):
dropout = keras.layers.Dropout(0.5)
self.assertEqual(True, dropout.supports_masking)
def test_spatial_dropout_1d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout1D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4))
def test_spatial_dropout_2d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 5))
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 5))
def test_spatial_dropout_3d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 4, 5))
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 4, 5))
@keras_parameterized.run_all_keras_modes
class LambdaLayerTest(keras_parameterized.TestCase):
def test_lambda(self):
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={'function': lambda x: x + 1},
input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={
'function': lambda x, a, b: x * a + b,
'arguments': {
'a': 0.6,
'b': 0.4
}
},
input_shape=(3, 2))
# test serialization with function
def f(x):
return x + 1
ld = keras.layers.Lambda(f)
config = ld.get_config()
ld = keras.layers.deserialize({
'class_name': 'Lambda',
'config': config
})
# test with lambda
ld = keras.layers.Lambda(
lambda x: keras.backend.concatenate([math_ops.square(x), x]))
config = ld.get_config()
ld = keras.layers.Lambda.from_config(config)
def test_lambda_multiple_inputs(self):
ld = keras.layers.Lambda(lambda x: x[0], output_shape=lambda x: x[0])
x1 = np.ones([3, 2], np.float32)
x2 = np.ones([3, 5], np.float32)
out = ld([x1, x2])
self.assertAllEqual(out.shape, [3, 2])
def test_lambda_output_shape(self):
l = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
l(keras.backend.variable(np.ones((1, 1))))
self.assertEqual((1, 1), l.get_config()['output_shape'])
def test_lambda_output_shape_function(self):
def get_output_shape(input_shape):
return 1 * input_shape
l = keras.layers.Lambda(lambda x: x + 1, output_shape=get_output_shape)
l(keras.backend.variable(np.ones((1, 1))))
self.assertEqual('lambda', l.get_config()['output_shape_type'])
def test_lambda_output_shape_autocalculate_multiple_inputs(self):
def lambda_fn(x):
return math_ops.matmul(x[0], x[1])
l = keras.layers.Lambda(lambda_fn)
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual((10, 20), output_shape)
def test_lambda_output_shape_list_multiple_outputs(self):
def lambda_fn(x):
return x
l = keras.layers.Lambda(lambda_fn, output_shape=[(10,), (20,)])
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual([(10, 10), (10, 20)], output_shape)
def test_lambda_output_shape_tuple_with_none(self):
def lambda_fn(x):
return x
l = keras.layers.Lambda(lambda_fn, output_shape=(None, 10))
output_shape = l.compute_output_shape((5, 10, 20))
self.assertAllEqual([5, None, 10], output_shape.as_list())
def test_lambda_output_shape_function_multiple_outputs(self):
def lambda_fn(x):
return x
def output_shape_fn(input_shape):
return input_shape
l = keras.layers.Lambda(lambda_fn, output_shape=output_shape_fn)
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual([(10, 10), (10, 20)], output_shape)
def test_lambda_config_serialization(self):
# Test serialization with output_shape and output_shape_type
layer = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
layer(keras.backend.variable(np.ones((1, 1))))
config = layer.get_config()
layer = keras.layers.deserialize({
'class_name': 'Lambda',
'config': config
})
layer = keras.layers.Lambda.from_config(config)
@keras_parameterized.run_all_keras_modes
class CoreLayersTest(keras_parameterized.TestCase):
def test_masking(self):
testing_utils.layer_test(
keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
def test_keras_mask(self):
x = np.ones((10, 10))
y = keras.layers.Masking(1.)(x)
self.assertTrue(hasattr(y, '_keras_mask'))
self.assertTrue(y._keras_mask is not None)
self.assertAllClose(self.evaluate(y._keras_mask), np.zeros((10,)))
def test_activation(self):
# with string argument
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': 'relu'},
input_shape=(3, 2))
# with function argument
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': keras.backend.relu},
input_shape=(3, 2))
def test_reshape(self):
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (8, 1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (1, -1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(None, None, 2))
def test_permute(self):
testing_utils.layer_test(
keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))
def test_permute_errors_on_invalid_starting_dims_index(self):
with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
testing_utils.layer_test(
keras.layers.Permute,
kwargs={'dims': (0, 1, 2)}, input_shape=(3, 2, 4))
def test_permute_errors_on_invalid_set_of_dims_indices(self):
with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
testing_utils.layer_test(
keras.layers.Permute,
kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4))
def test_flatten(self):
testing_utils.layer_test(
keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))
# Test channels_first
inputs = np.random.random((10, 3, 5, 5)).astype('float32')
outputs = testing_utils.layer_test(
keras.layers.Flatten,
kwargs={'data_format': 'channels_first'},
input_data=inputs)
target_outputs = np.reshape(
np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3))
self.assertAllClose(outputs, target_outputs)
def test_flatten_scalar_channels(self):
testing_utils.layer_test(
keras.layers.Flatten, kwargs={}, input_shape=(3,))
# Test channels_first
inputs = np.random.random((10,)).astype('float32')
outputs = testing_utils.layer_test(
keras.layers.Flatten,
kwargs={'data_format': 'channels_first'},
input_data=inputs)
target_outputs = np.expand_dims(inputs, -1)
self.assertAllClose(outputs, target_outputs)
def test_repeat_vector(self):
testing_utils.layer_test(
keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))
def test_dense(self):
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(None, None, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 5, 2))
def test_dense_dtype(self):
inputs = ops.convert_to_tensor(
np.random.randint(low=0, high=7, size=(2, 2)))
layer = keras.layers.Dense(5, dtype='float32')
outputs = layer(inputs)
self.assertEqual(outputs.dtype, 'float32')
def test_dense_with_policy(self):
inputs = ops.convert_to_tensor(
np.random.randint(low=0, high=7, size=(2, 2)), dtype='float16')
layer = keras.layers.Dense(5, dtype=policy.Policy('infer_float32_vars'))
outputs = layer(inputs)
self.assertEqual(outputs.dtype, 'float16')
self.assertEqual(layer.kernel.dtype, 'float32')
def test_dense_regularization(self):
layer = keras.layers.Dense(
3,
kernel_regularizer=keras.regularizers.l1(0.01),
bias_regularizer='l1',
activity_regularizer='l2',
name='dense_reg')
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(3, len(layer.losses))
def test_dense_constraints(self):
k_constraint = keras.constraints.max_norm(0.01)
b_constraint = keras.constraints.max_norm(0.01)
layer = keras.layers.Dense(
3, kernel_constraint=k_constraint, bias_constraint=b_constraint)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(layer.kernel.constraint, k_constraint)
self.assertEqual(layer.bias.constraint, b_constraint)
def test_activity_regularization(self):
layer = keras.layers.ActivityRegularization(l1=0.1)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(1, len(layer.losses))
config = layer.get_config()
self.assertEqual(config.pop('l1'), 0.1)
def test_numpy_inputs(self):
if context.executing_eagerly():
layer = keras.layers.RepeatVector(2)
x = np.ones((10, 10))
self.assertAllEqual(np.ones((10, 2, 10)), layer(x))
layer = keras.layers.Concatenate()
x, y = np.ones((10, 10)), np.ones((10, 10))
self.assertAllEqual(np.ones((10, 20)), layer([x, y]))
if __name__ == '__main__':
test.main()
| ageron/tensorflow | tensorflow/python/keras/layers/core_test.py | Python | apache-2.0 | 11,584 | 0.004575 |
'''
Created on Nov 26, 2014
@author: Yury Zhauniarovich <y.zhalnerovich{at}gmail.com>
'''
import os, time
from interfaces.adb_interface import AdbInterface
from bboxcoverage import BBoxCoverage
from running_strategies import IntentInvocationStrategy
import smtplib
import email.utils
from email.mime.text import MIMEText
APK_DIR_SOURCES = ["", ""]
SMTP_SERVER = 'smtp.gmail.com'
SMTP_PORT = 587
SENDER = ""
PASSWORD = ""
TO_EMAIL = ""
def sendMessage(subj, email_message):
msg = MIMEText(email_message)
msg['To'] = email.utils.formataddr(('Recipient', TO_EMAIL))
msg['From'] = email.utils.formataddr(('Author', SENDER))
msg['Subject'] = subj
server = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
try:
server.set_debuglevel(True)
# identify ourselves, prompting server for supported features
server.ehlo()
# If we can encrypt this session, do it
if server.has_extn('STARTTLS'):
server.starttls()
server.ehlo() # re-identify ourselves over TLS connection
server.login(SENDER, PASSWORD)
server.sendmail(SENDER, [TO_EMAIL], msg.as_string())
finally:
server.quit()
def getExecutionDevice():
'''
This method allows a user to select a device that is used to for further
analysis.
'''
dev_list = AdbInterface.getDeviceSerialsList()
devNum = len(dev_list)
if devNum <= 0:
print "No device has been detected! Connect your device and restart the application!"
return
if devNum == 1:
return dev_list[0]
choice = None
if devNum > 1:
print "Select the device to use for analysis:\n"
for i in xrange(0, devNum):
print "%d. %s\n" % ((i + 1), dev_list[i])
while not choice:
try:
choice = int(raw_input())
if choice not in range(1, devNum+1):
choice = None
                    print 'Invalid choice! Choose a valid number!'
except ValueError:
                print 'Invalid number! Enter a valid number!'
return dev_list[choice-1]
def getSubdirs(rootDir):
return [os.path.join(rootDir, name) for name in os.listdir(rootDir)
if os.path.isdir(os.path.join(rootDir, name))]
def getInstrApkInFolder(folder):
for f in os.listdir(folder):
if f.endswith("_aligned.apk"):
filepath = os.path.join(folder, f)
return filepath
return None
def runMainIntentsStrategy(adb, androidManifest, delay=10):
automaticTriggeringStrategy = IntentInvocationStrategy(adbDevice=adb, pathToAndroidManifest=androidManifest)
automaticTriggeringStrategy.run(delay=delay)
#main part
adb = AdbInterface()
device = getExecutionDevice()
if not device:
exit(1)
adb.setTargetSerial(device)
bboxcoverage = BBoxCoverage()
for apk_dir_source in APK_DIR_SOURCES:
print "\n\nStarting experiment for directory: [%s]" % apk_dir_source
result_directories = getSubdirs(apk_dir_source)
for directory in result_directories:
apk_file = getInstrApkInFolder(directory)
if apk_file:
print "Starting experiment for apk: [%s]" % apk_file
try:
bboxcoverage.initAlreadyInstrApkEnv(pathToInstrApk=apk_file, resultsDir=directory)
except:
print "Exception while initialization!"
continue
try:
bboxcoverage.installApkOnDevice()
except:
print "Exception while installation apk on device!"
bboxcoverage.uninstallPackage()
try:
bboxcoverage.installApkOnDevice()
except:
continue
package_name = bboxcoverage.getPackageName()
params = {}
params["strategy"] = "main_intents"
params["package_name"] = package_name
params["main_activity"] = bboxcoverage.androidManifest.getMainActivity()
try:
bboxcoverage.startTesting()
except:
print "Exception while startTesting!"
bboxcoverage.uninstallPackage()
continue
try:
runMainIntentsStrategy(adb=adb, androidManifest=bboxcoverage.androidManifestFile, delay=10)
except:
print "Exception while running strategy!"
bboxcoverage.uninstallPackage()
continue
try:
bboxcoverage.stopTesting("main_intents", paramsToWrite=params)
except:
print "Exception while running strategy!"
bboxcoverage.uninstallPackage()
continue
time.sleep(3)
bboxcoverage.uninstallPackage()
time.sleep(5)
sendMessage("[BBoxTester]", "Experiments done for directory [%s]!" % apk_dir_source)
| zyrikby/BBoxTester | BBoxTester/main_intents_strategy.py | Python | apache-2.0 | 5,189 | 0.007901 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import scipy.stats as osp_stats
from jax import lax
from jax._src.lax.lax import _const as _lax_const
from jax._src.numpy import lax_numpy as jnp
from jax._src.numpy.util import _wraps
from jax.scipy.special import xlog1py
@_wraps(osp_stats.geom.logpmf, update_doc=False)
def logpmf(k, p, loc=0):
k, p, loc = jnp._promote_args_inexact("geom.logpmf", k, p, loc)
zero = _lax_const(k, 0)
one = _lax_const(k, 1)
x = lax.sub(k, loc)
log_probs = xlog1py(lax.sub(x, one), -p) + lax.log(p)
return jnp.where(lax.le(x, zero), -jnp.inf, log_probs)
@_wraps(osp_stats.geom.pmf, update_doc=False)
def pmf(k, p, loc=0):
return jnp.exp(logpmf(k, p, loc))
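# Quick numeric check (illustrative, not part of the module): the geometric
# pmf at k=3 with p=0.5 is p * (1 - p)**2 = 0.125.
#
#   from jax.scipy.stats import geom
#   geom.pmf(3, 0.5)   # -> 0.125 as a JAX array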
| google/jax | jax/_src/scipy/stats/geom.py | Python | apache-2.0 | 1,244 | 0.002412 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet keypool and interaction with wallet encryption/locking."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class KeyPoolTest(BitcoinTestFramework):
def run_test(self):
nodes = self.nodes
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
# Encrypt wallet and wait to terminate
nodes[0].encryptwallet('test')
bitcoind_processes[0].wait()
# Restart node 0
nodes[0] = start_node(0, self.options.tmpdir)
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].validateaddress(addr)
wallet_info = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
assert_raises_jsonrpc(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
nodes[0].walletpassphrase('test', 12000)
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 6)
assert_equal(wi['keypoolsize'], 6)
# drain the internal keys
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
addr = set()
# the next one should fail
assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
assert(len(addr) == 6)
# the next one should fail
assert_raises_jsonrpc(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# refill keypool with three new addresses
nodes[0].walletpassphrase('test', 1)
nodes[0].keypoolrefill(3)
# test walletpassphrase timeout
time.sleep(1.1)
assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
# drain them by mining
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].generate, 1)
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 100)
assert_equal(wi['keypoolsize'], 100)
def __init__(self):
super().__init__()
self.setup_clean_chain = False
self.num_nodes = 1
if __name__ == '__main__':
KeyPoolTest().main()
| earonesty/bitcoin | test/functional/keypool.py | Python | mit | 3,529 | 0.002834 |
# vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""tests dicom.inspect() functionality"""
import unittest
from sparktkregtests.lib import sparktk_test
import os
import dicom
import numpy
from lxml import etree
class TakeDicomTest(sparktk_test.SparkTKTestCase):
def setUp(self):
"""import dicom data for testing"""
super(TakeDicomTest, self).setUp()
self.dataset = self.get_file("dicom_uncompressed")
self.dicom = self.context.dicom.import_dcm(self.dataset)
self.xml_directory = self.get_local_dataset("dicom_xml/")
self.image_directory = self.get_local_dataset("dicom_uncompressed/")
self.count = self.dicom.metadata.count()
def test_metadata_imagedata_row_count_same(self):
"""test metadata pixeldata row count"""
metadata_result = self.dicom.metadata.get_inspect(
self.dicom.metadata.count())
image_result = self.dicom.pixeldata.get_inspect(
self.dicom.pixeldata.count())
self.assertEqual(len(metadata_result.rows), len(image_result.rows))
def test_metadata_content_take_dcm_basic(self):
"""content test of dicom metadata import"""
# here we will get the files so we can generate the expected result
files = []
for filename in os.listdir(self.xml_directory):
if filename.endswith(".xml"):
with open(self.xml_directory + str(filename), 'rb') as xmlfile:
contents = xmlfile.read()
xml = etree.fromstring(contents)
bulk_data = xml.xpath("//BulkData")[0]
bulk_data.getparent().remove(bulk_data)
files.append(etree.tostring(xml))
        # the BulkData location element of the metadata xml will be different
        # since the dicom data may be loaded from a different location than
        # where we loaded our files. We will remove this element from the
        # metadata before we compare
metadata_take = self.dicom.metadata.take(self.count)
for dcm_file in metadata_take:
dcm_file = dcm_file[1].encode("ascii", "ignore")
dcm_xml_root = etree.fromstring(dcm_file)
dcm_bulk_data = dcm_xml_root.xpath("//BulkData")[0]
dcm_bulk_data.getparent().remove(dcm_bulk_data)
self.assertTrue(etree.tostring(dcm_xml_root) in files)
def test_image_content_take_dcm_basic(self):
"""content test of image data for dicom"""
# load the files so we can compare with the dicom result
files = []
for filename in os.listdir(self.image_directory):
pixel_data = dicom.read_file(
self.image_directory + filename).pixel_array
files.append(pixel_data)
# iterate through the data in the files and in the dicom frame
# and ensure that they match
image_inspect = self.dicom.pixeldata.take(self.count)
for dcm_image in image_inspect:
result = any(numpy.array_equal(
dcm_image[1], file_image) for file_image in files)
self.assertTrue(result)
if __name__ == "__main__":
unittest.main()
| grehx/spark-tk | regression-tests/sparktkregtests/testcases/dicom/take_dicom_test.py | Python | apache-2.0 | 3,835 | 0 |
import gtk
import pango
import math
from core.world import TheWorld
class TextEditor(object):
def __init__(self, text):
self.__text = text
self.cursorindex = 0
self.padding = 10.0
self.width = 0.0
self.height = 0.0
self.pixel_width = 0.0
self.pixel_height = 0.0
# create text layout
self.layout = pango.Layout(TheWorld.pango_context)
fontDescription = pango.FontDescription("Monospace 8")
self.layout.set_font_description(fontDescription)
#layout.set_markup(self.text)
self.layout.set_text(text)
# calc text metrics
self.recalc_text_size()
# -- properties
def __get_text(self):
return self.__text
def __set_text(self, text):
self.__text = text
self.layout.set_text(self.__text)
self.recalc_text_size() # recalc text size
text = property(__get_text, __set_text)
def recalc_text_size(self):
(self.pixel_width, self.pixel_height) = self.layout.get_pixel_size() # bogus when called from init() !?
self.width = self.pixel_width / float(TheWorld.width) #+ self.padding * 2
self.height = self.pixel_height / float(TheWorld.height) #+ self.padding
def draw(self, context, x, y):
# figure out scale factor
# TODO - Text should be independant of scale factor
scale_x = 1.0 / self.pixel_width
scale_y = 1.0 / self.pixel_height
# render the text
context.save()
#context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.move_to(0.0, 0.0)
context.scale(scale_x, scale_y)
# draw a background for the text
self.draw_background(context, 0.0, 0.0, self.pixel_width, self.pixel_height, 10) # ve vant square rounded corners :-)
context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.show_layout(self.layout)
context.restore()
# render cursor
self.draw_cursor(context)
def draw_cursor(self, context):
(strong, weak) = self.layout.get_cursor_pos(self.cursorindex)
(startx, starty, curx, cury) = strong
startx /= pango.SCALE * float(TheWorld.width)
starty /= pango.SCALE * float(TheWorld.height)
curx /= pango.SCALE * float(TheWorld.width)
cury /= pango.SCALE * float(TheWorld.height)
context.set_line_width(0.02)
context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.move_to(0.001 + (startx / self.width), starty / self.height)
context.line_to(0.001 + (startx / self.width), (starty + cury) / self.height)
context.stroke()
def draw_background(self, context, x, y, w, h, r):
x -= self.padding
y -= self.padding
w += self.padding * 2.0
h += self.padding * 2.0
# rounded box
context.move_to(x + r, y) # top left
context.line_to(x + w - r, y) # top right
context.arc(x + w - r, y + r, r, math.pi + math.pi / 2.0, 0.0)
context.line_to(x + w, y + h - r) # bottom right
context.arc(x + w - r, y + h - r, r, 0, math.pi / 2.0)
context.line_to(x + r, y + h) # bottom left
context.arc(x + r, y + h - r, r, math.pi / 2.0, math.pi)
context.line_to(x, y + r) # top left
context.arc(x + r, y + r, r, math.pi, math.pi + math.pi / 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_line_width(4.0)
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
# thought bubble
context.arc(x + w / 1.5, y + h * 1.1, self.pixel_height / 10.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
context.arc(x + w / 1.7, y + h * 1.2, self.pixel_height / 20.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
context.arc(x + w / 1.9, y + h * 1.3, self.pixel_height / 30.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
# -- key handling ---------------------------------------------------------
def do_key_press_event(self, event):
try :
{ gtk.keysyms.BackSpace : self.do_key_press_backspace,
gtk.keysyms.Delete : self.do_key_press_delete,
gtk.keysyms.Home : self.do_key_press_home,
gtk.keysyms.End : self.do_key_press_end,
gtk.keysyms.Left : self.do_key_press_left,
gtk.keysyms.Right : self.do_key_press_right,
gtk.keysyms.Up : self.do_key_press_up,
gtk.keysyms.Down : self.do_key_press_down } [event.keyval]()
except:
pass
if event.string:
left = self.text[ : self.cursorindex]
right = self.text[self.cursorindex : ]
if event.string == "\r":
self.text = left + "\n" + right
else:
self.text = left + event.string + right
self.cursorindex += 1
def do_key_press_backspace(self):
left = self.text[ : self.cursorindex - 1]
right = self.text[self.cursorindex : ]
self.text = left + right
if self.cursorindex > 0:
self.cursorindex -= 1
def do_key_press_delete(self):
left = self.text[ : self.cursorindex]
right = self.text[self.cursorindex + 1 : ]
self.text = left + right
def do_key_press_home(self):
lines = self.text.splitlines ()
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
self.cursorindex = loc - len(i) - 1
return
line += 1
def do_key_press_end(self):
lines = self.text.splitlines()
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
self.cursorindex = loc - 1
return
line += 1
def do_key_press_left(self):
if self.cursorindex > 0:
self.cursorindex -= 1
def do_key_press_right(self):
if self.cursorindex < len(self.text):
self.cursorindex += 1
def do_key_press_up(self):
lines = self.text.splitlines()
if len(lines) == 1:
return
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
loc -= len(i) + 1
line -= 1
break
line += 1
if line == -1:
return
elif line >= len(lines):
self.cursorindex -= len(lines[-1]) + 1
return
dist = self.cursorindex - loc -1
self.cursorindex = loc
if dist < len(lines[line]):
self.cursorindex -= (len(lines[line]) - dist)
else:
self.cursorindex -= 1
def do_key_press_down(self):
lines = self.text.splitlines()
if len(lines) == 1:
return
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
break
line += 1
if line >= len(lines) - 1:
return
dist = self.cursorindex - (loc - len(lines[line])) + 1
self.cursorindex = loc
if dist > len (lines[line + 1]):
self.cursorindex += len(lines[line + 1])
else:
self.cursorindex += dist
| antoinevg/survival | widgets/texteditor.py | Python | gpl-2.0 | 7,247 | 0.019594 |
def main():
pass
def generateBlock():
    # TODO: implement block generation.
    pass
| willybh11/python | tetris/tetris.py | Python | gpl-3.0 | 48 | 0.083333 |
#!/usr/bin/env python
import logging
import socket
import tornado
import maproxy.proxyserver
class Session(object):
"""
    The Session class is the heart of the system.
    - We create the session when a client connects to the server (proxy). This connection is "c2p"
    - We create a connection to the server (p2s)
- Each connection (c2p,p2s) has a state (Session.State) , can be CLOSED,CONNECTING,CONNECTED
- Initially, when c2p is created we :
- create the p-s connection
- start read from c2p
- Completion Routings:
- on_XXX_done_read:
When we get data from one side, we initiate a "start_write" to the other side .
Exception: if the target is not connected yet, we queue the data so we can send it later
- on_p2s_connected:
When p2s is connected ,we start read from the server .
if queued data is available (data that was sent from the c2p) we initiate a "start_write" immediately
- on_XXX_done_write:
When we're done "sending" data , we check if there's more data to send in the queue.
if there is - we initiate another "start_write" with the queued data
- on_XXX_close:
When one side closes the connection, we either initiate a "start_close" on the other side, or (if already closed) - remove the session
- I/O routings:
- XXX_start_read: simply start read from the socket (we assume and validate that only one read goes at a time)
- XXX_start_write: if currently writing , add data to queue. if not writing - perform io_write...
"""
class LoggerOptions:
"""
Logging options - which messages/notifications we would like to log...
The logging is for development&maintenance. In production set all to False
"""
        # Log characteristics
LOG_SESSION_ID=True # for each log, add the session-id
# Log different operations
LOG_NEW_SESSION_OP=False
LOG_READ_OP=False
LOG_WRITE_OP=False
LOG_CLOSE_OP=False
LOG_CONNECT_OP=False
LOG_REMOVE_SESSION=False
class State:
"""
Each socket has a state.
We will use the state to identify whether the connection is open or closed
"""
CLOSED,CONNECTING,CONNECTED=range(3)
def __init__(self):
pass
#def new_connection(self,stream : tornado.iostream.IOStream ,address,proxy):
def new_connection(self,stream ,address,proxy):
# First,validation
assert isinstance(proxy,maproxy.proxyserver.ProxyServer)
assert isinstance(stream,tornado.iostream.IOStream)
# Logging
self.logger_nesting_level=0 # logger_nesting_level is the current "nesting level"
if Session.LoggerOptions.LOG_NEW_SESSION_OP:
self.log("New Session")
# Remember our "parent" ProxyServer
self.proxy=proxy
# R/W flags for each socket
# Using the flags, we can tell if we're waiting for I/O completion
# NOTE: the "c2p" and "p2s" prefixes are NOT the direction of the IO,
# they represent the SOCKETS :
# c2p means the socket from the client to the proxy
# p2s means the socket from the proxy to the server
self.c2p_reading=False # whether we're reading from the client
self.c2p_writing=False # whether we're writing to the client
self.p2s_writing=False # whether we're writing to the server
self.p2s_reading=False # whether we're reading from the server
# Init the Client->Proxy stream
self.c2p_stream=stream
self.c2p_address=address
# Client->Proxy is connected
self.c2p_state=Session.State.CONNECTED
# Here we will put incoming data while we're still waiting for the target-server's connection
self.c2s_queued_data=[] # Data that was read from the Client, and needs to be sent to the Server
self.s2c_queued_data=[] # Data that was read from the Server , and needs to be sent to the client
# send data immediately to the client ... (Disable Nagle TCP algorithm)
self.c2p_stream.set_nodelay(True)
        # Let us know when the client disconnects (callback on_c2p_close)
self.c2p_stream.set_close_callback( self.on_c2p_close)
# Create the Proxy->Server socket and stream
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
if self.proxy.server_ssl_options is not None:
# if the "server_ssl_options" where specified, it means that when we connect, we need to wrap with SSL
# so we need to use the SSLIOStream stream
self.p2s_stream = tornado.iostream.SSLIOStream(s,ssl_options=self.proxy.server_ssl_options)
else:
# use the standard IOStream stream
self.p2s_stream = tornado.iostream.IOStream(s)
# send data immediately to the server... (Disable Nagle TCP algorithm)
self.p2s_stream.set_nodelay(True)
        # Let us know when the server disconnects (callback on_p2s_close)
self.p2s_stream.set_close_callback( self.on_p2s_close )
# P->S state is "connecting"
        self.p2s_state=Session.State.CONNECTING
self.p2s_stream.connect(( proxy.target_server, proxy.target_port), self.on_p2s_done_connect )
        # We can actually start reading immediately from the C->P socket
self.c2p_start_read()
# Each member-function can call this method to log data (currently to screen)
def log(self,msg):
prefix=str(id(self))+":" if Session.LoggerOptions.LOG_SESSION_ID else ""
prefix+=self.logger_nesting_level*" "*4
logging.debug(prefix + msg)
# Logging decorator (enter/exit)
def logger(enabled=True):
"""
        We use this decorator to wrap functions and log the input/output of each function
Since this decorator accepts a parameter, it must return an "inner" decorator....(Python stuff)
"""
def inner_decorator(func):
def log_wrapper(self,*args,**kwargs):
msg="%s (%s,%s)" % (func.__name__,args,kwargs)
self.log(msg)
self.logger_nesting_level+=1
r=func(self,*args,**kwargs)
self.logger_nesting_level-=1
self.log("%s -> %s" % (msg,str(r)) )
return r
return log_wrapper if enabled else func
return inner_decorator
################
## Start Read ##
################
@logger(LoggerOptions.LOG_READ_OP)
def c2p_start_read(self):
"""
Start read from client
"""
assert( not self.c2p_reading)
self.c2p_reading=True
try:
self.c2p_stream.read_until_close(lambda x: None,self.on_c2p_done_read)
except tornado.iostream.StreamClosedError:
self.c2p_reading=False
@logger(LoggerOptions.LOG_READ_OP)
def p2s_start_read(self):
"""
Start read from server
"""
assert( not self.p2s_reading)
self.p2s_reading=True
try:
self.p2s_stream.read_until_close(lambda x:None,self.on_p2s_done_read)
except tornado.iostream.StreamClosedError:
self.p2s_reading=False
##############################
## Read Completion Routines ##
##############################
@logger(LoggerOptions.LOG_READ_OP)
def on_c2p_done_read(self,data):
# # We got data from the client (C->P ) . Send data to the server
assert(self.c2p_reading)
assert(data)
self.p2s_start_write(data)
@logger(LoggerOptions.LOG_READ_OP)
def on_p2s_done_read(self,data):
# got data from Server to Proxy . if the client is still connected - send the data to the client
assert( self.p2s_reading)
assert(data)
self.c2p_start_write(data)
#####################
## Write to stream ##
#####################
@logger(LoggerOptions.LOG_WRITE_OP)
def _c2p_io_write(self,data):
if data is None:
# None means (gracefully) close-socket (a "close request" that was queued...)
self.c2p_state=Session.State.CLOSED
try:
self.c2p_stream.close()
except tornado.iostream.StreamClosedError:
self.c2p_writing=False
else:
self.c2p_writing=True
try:
self.c2p_stream.write(data,callback=self.on_c2p_done_write)
except tornado.iostream.StreamClosedError:
# Cancel the write, we will get on_close instead...
self.c2p_writing=False
@logger(LoggerOptions.LOG_WRITE_OP)
def _p2s_io_write(self,data):
if data is None:
# None means (gracefully) close-socket (a "close request" that was queued...)
self.p2s_state=Session.State.CLOSED
try:
self.p2s_stream.close()
except tornado.iostream.StreamClosedError:
# Cancel the write. we will get on_close instead
self.p2s_writing=False
else:
self.p2s_writing=True
try:
self.p2s_stream.write(data,callback=self.on_p2s_done_write)
except tornado.iostream.StreamClosedError:
# Cancel the write. we will get on_close instead
self.p2s_writing=False
#################
## Start Write ##
#################
@logger(LoggerOptions.LOG_WRITE_OP)
def c2p_start_write(self,data):
"""
Write to client.if there's a pending write-operation, add it to the S->C (s2c) queue
"""
# If not connected - do nothing...
if self.c2p_state != Session.State.CONNECTED: return
if not self.c2p_writing:
# If we're not currently writing
assert( not self.s2c_queued_data ) # we expect the queue to be empty
# Start the "real" write I/O operation
self._c2p_io_write(data)
else:
# Just add to the queue
self.s2c_queued_data.append(data)
@logger(LoggerOptions.LOG_WRITE_OP)
def p2s_start_write(self,data):
"""
Write to the server.
If not connected yet - queue the data
If there's a pending write-operation , add it to the C->S (c2s) queue
"""
# If still connecting to the server - queue the data...
if self.p2s_state == Session.State.CONNECTING:
self.c2s_queued_data.append(data) # TODO: is it better here to append (to list) or concatenate data (to buffer) ?
return
# If not connected - do nothing
if self.p2s_state == Session.State.CLOSED:
return
assert(self.p2s_state == Session.State.CONNECTED)
if not self.p2s_writing:
# Start the "real" write I/O operation
self._p2s_io_write(data)
else:
# Just add to the queue
self.c2s_queued_data.append(data)
    ###############################
    ## Write Completion Routines ##
    ###############################
@logger(LoggerOptions.LOG_WRITE_OP)
def on_c2p_done_write(self):
"""
A start_write C->P (write to client) is done .
if there is queued-data to send - send it
"""
assert(self.c2p_writing)
if self.s2c_queued_data:
# more data in the queue, write next item as well..
self._c2p_io_write( self.s2c_queued_data.pop(0))
return
self.c2p_writing=False
@logger(LoggerOptions.LOG_WRITE_OP)
def on_p2s_done_write(self):
"""
A start_write P->S (write to server) is done .
if there is queued-data to send - send it
"""
assert(self.p2s_writing)
if self.c2s_queued_data:
# more data in the queue, write next item as well..
self._p2s_io_write( self.c2s_queued_data.pop(0))
return
self.p2s_writing=False
######################
## Close Connection ##
######################
@logger(LoggerOptions.LOG_CLOSE_OP)
def c2p_start_close(self,gracefully=True):
"""
Close c->p connection
if gracefully is True then we simply add None to the queue, and start a write-operation
if gracefully is False then this is a "brutal" close:
- mark the stream is closed
- we "reset" (empty) the queued-data
- if the other side (p->s) already closed, remove the session
"""
if self.c2p_state == Session.State.CLOSED:
return
if gracefully:
self.c2p_start_write(None)
return
self.c2p_state = Session.State.CLOSED
self.s2c_queued_data=[]
self.c2p_stream.close()
if self.p2s_state == Session.State.CLOSED:
self.remove_session()
@logger(LoggerOptions.LOG_CLOSE_OP)
def p2s_start_close(self,gracefully=True):
"""
Close p->s connection
if gracefully is True then we simply add None to the queue, and start a write-operation
if gracefully is False then this is a "brutal" close:
- mark the stream is closed
- we "reset" (empty) the queued-data
- if the other side (p->s) already closed, remove the session
"""
if self.p2s_state == Session.State.CLOSED:
return
if gracefully:
self.p2s_start_write(None)
return
self.p2s_state = Session.State.CLOSED
self.c2s_queued_data=[]
self.p2s_stream.close()
if self.c2p_state == Session.State.CLOSED:
self.remove_session()
@logger(LoggerOptions.LOG_CLOSE_OP)
def on_c2p_close(self):
"""
Client closed the connection.
we need to:
1. update the c2p-state
2. if there's no more data to the server (c2s_queued_data is empty) - we can close the p2s connection
3. if p2s already closed - we can remove the session
"""
self.c2p_state=Session.State.CLOSED
if self.p2s_state == Session.State.CLOSED:
self.remove_session()
else:
self.p2s_start_close(gracefully=True)
@logger(LoggerOptions.LOG_CLOSE_OP)
def on_p2s_close(self):
"""
Server closed the connection.
        We need to update the state, and if the client closed as well - delete the session
"""
self.p2s_state=Session.State.CLOSED
if self.c2p_state == Session.State.CLOSED:
self.remove_session()
else:
self.c2p_start_close(gracefully=True)
########################
## Connect-Completion ##
########################
@logger(LoggerOptions.LOG_CONNECT_OP)
def on_p2s_done_connect(self):
assert(self.p2s_state==Session.State.CONNECTING)
self.p2s_state=Session.State.CONNECTED
# Start reading from the socket
self.p2s_start_read()
        assert(not self.p2s_writing) # We expect no current write-operation ...
# If we have pending-data to write, start writing...
if self.c2s_queued_data:
        # TRICKY: get the first item and write it...
# this is tricky since the "start-write" will
# write this item even if there are queued-items... (since self.p2s_writing=False)
self.p2s_start_write( self.c2s_queued_data.pop(0) )
###########
## UTILS ##
###########
@logger(LoggerOptions.LOG_REMOVE_SESSION)
def remove_session(self):
self.proxy.remove_session(self)
class SessionFactory(object):
"""
This is the default session-factory. it simply returns a "Session" object
"""
def __init__(self):
pass
def new(self,*args,**kwargs):
"""
The caller needs a Session objet (constructed with *args,**kwargs).
In this implementation we're simply creating a new object. you can enhance and create a pool or add logs..
"""
return Session(*args,**kwargs)
def delete(self,session):
"""
Delete a session object
"""
assert( isinstance(session,Session))
del session
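# Illustrative sketch (not part of the original module): wiring a proxy that
# uses these sessions. The ProxyServer constructor arguments are assumptions
# inferred from the attributes this module reads (target_server, target_port,
# server_ssl_options).
#
#   import tornado.ioloop
#   server = maproxy.proxyserver.ProxyServer("example.com", 80)
#   server.listen(8080)
#   tornado.ioloop.IOLoop.instance().start()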
| zferentz/maproxy | maproxy/session.py | Python | apache-2.0 | 16,890 | 0.013914 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.slicer.filtering.extractskeleton import ExtractSkeleton
def test_ExtractSkeleton_inputs():
input_map = dict(InputImageFileName=dict(argstr='%s',
position=-2,
),
OutputImageFileName=dict(argstr='%s',
hash_files=False,
position=-1,
),
args=dict(argstr='%s',
),
dontPrune=dict(argstr='--dontPrune ',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
numPoints=dict(argstr='--numPoints %d',
),
pointsFile=dict(argstr='--pointsFile %s',
),
terminal_output=dict(mandatory=True,
nohash=True,
),
type=dict(argstr='--type %s',
),
)
inputs = ExtractSkeleton.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_ExtractSkeleton_outputs():
output_map = dict(OutputImageFileName=dict(position=-1,
),
)
outputs = ExtractSkeleton.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| mick-d/nipype_source | nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py | Python | bsd-3-clause | 1,344 | 0.022321 |
Experiment(description='Synthetic data sets of interest',
data_dir='../data/synth/',
max_depth=9,
random_order=False,
k=1,
debug=False,
local_computation=False,
n_rand=9,
sd=2,
jitter_sd=0.1,
max_jobs=300,
verbose=False,
make_predictions=False,
skip_complete=False,
results_dir='../results/synth/',
iters=250,
base_kernels='SE,Per,Lin,Const,Noise',
random_seed=1,
period_heuristic=5,
period_heuristic_type='min',
subset=True,
subset_size=250,
full_iters=10,
bundle_size=2,
additive_form=True,
mean='ff.MeanZero()', # Starting mean
kernel='ff.NoiseKernel()', # Starting kernel
lik='ff.LikGauss(sf=-np.Inf)', # Starting likelihood
score='pl2',
search_operators=[('A', ('+', 'A', 'B'), {'A': 'kernel', 'B': 'base'}),
('A', ('*', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
('A', ('*-const', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
('A', 'B', {'A': 'kernel', 'B': 'base'}),
('A', ('CP', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('CW', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('B', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('BL', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('None',), {'A': 'kernel'})])
| ekamioka/gpss-research | experiments/synth/synth.py | Python | mit | 1,731 | 0.010976 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from pootle_format.models import Format
@pytest.mark.django_db
def test_fs_filetypes_changed_receiver(project_fs, project1, po2):
project1.filetype_tool.add_filetype(po2)
assert not project1.revisions.filter(key="pootle.fs.sync").count()
project1.treestyle = "pootle_fs"
project1.save()
xliff = Format.objects.get(name="xliff")
project1.filetypes.add(xliff)
# still not configured
assert not project1.revisions.filter(key="pootle.fs.sync").count()
sync_start = project_fs.project.revisions.filter(
key="pootle.fs.sync").first()
project_fs.project.filetype_tool.add_filetype(po2)
assert (
sync_start
!= project_fs.project.revisions.filter(
key="pootle.fs.sync").first())
| ta2-1/pootle | tests/pootle_fs/receivers.py | Python | gpl-3.0 | 1,046 | 0 |
"""
Revision ID: 0212_remove_caseworking
Revises: 0211_email_branding_update
Create Date: 2018-07-31 18:00:20.457755
"""
from alembic import op
revision = '0212_remove_caseworking'
down_revision = '0211_email_branding_update'
PERMISSION_NAME = "caseworking"
def upgrade():
op.execute("delete from service_permissions where permission = '{}'".format(PERMISSION_NAME))
op.execute("delete from service_permission_types where name = '{}'".format(PERMISSION_NAME))
def downgrade():
op.execute("insert into service_permission_types values('{}')".format(PERMISSION_NAME))
| alphagov/notifications-api | migrations/versions/0212_remove_caseworking.py | Python | mit | 586 | 0.005119 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""LSTM layer."""
from kws_streaming.layers import modes
from kws_streaming.layers.compat import tf
from kws_streaming.layers.compat import tf1
class LSTM(tf.keras.layers.Layer):
"""LSTM with support of streaming inference with internal/external state.
In training mode we use LSTM.
It receives input data [batch, time, feature] and
returns [batch, time, units] if return_sequences==True or
returns [batch, 1, units] if return_sequences==False
In inference mode we use LSTMCell
In streaming mode with internal state
it receives input data [batch, 1, feature]
In streaming mode with internal state it returns: [batch, 1, units]
In streaming mode with external state it receives input data with states:
[batch, 1, feature] + state1[batch, units] + state2[batch, units]
In streaming mode with external state it returns:
(output[batch, 1, units], state1[batch, units], state2[batch, units])
We use layer and parameter description from:
https://www.tensorflow.org/api_docs/python/tf/keras/layers/LSTM
https://www.tensorflow.org/api_docs/python/tf/compat/v1/nn/rnn_cell/LSTMCell
https://www.tensorflow.org/api_docs/python/tf/keras/layers/RNN
Attributes:
units: dimensionality of the output space.
mode: Training or inference modes: non streaming, streaming.
inference_batch_size: batch size for inference mode
    return_sequences: Whether to return the last output in the output
      sequence, or the full sequence.
use_peepholes: True to enable diagonal/peephole connections
num_proj: The output dimensionality for the projection matrices. If None, no
projection is performed. It will be used only if use_peepholes is True.
unroll: If True, the network will be unrolled, else a symbolic loop will be
used. For any inference mode it will be set True inside.
    stateful: If True, the last state for each sample at index i in a batch
      will be used as initial state for the sample of index i in the
      following batch. If the model will be run in streaming mode, it is
      better to train it with stateful=True. This flag concerns stateful
      training only and is applied during training.
"""
def __init__(self,
units=64,
mode=modes.Modes.TRAINING,
inference_batch_size=1,
return_sequences=False,
use_peepholes=False,
num_proj=128,
unroll=False,
stateful=False,
name='LSTM',
**kwargs):
super(LSTM, self).__init__(**kwargs)
self.mode = mode
self.inference_batch_size = inference_batch_size
self.units = units
self.return_sequences = return_sequences
self.num_proj = num_proj
self.use_peepholes = use_peepholes
self.stateful = stateful
if mode != modes.Modes.TRAINING: # in any inference mode
# let's unroll lstm, so there is no symbolic loops / control flow
unroll = True
self.unroll = unroll
if self.mode in (modes.Modes.TRAINING, modes.Modes.NON_STREAM_INFERENCE):
if use_peepholes:
self.lstm_cell = tf1.nn.rnn_cell.LSTMCell(
num_units=units, use_peepholes=True, num_proj=num_proj, name='cell')
self.lstm = tf.keras.layers.RNN(
cell=self.lstm_cell,
return_sequences=return_sequences,
unroll=self.unroll,
stateful=self.stateful)
else:
self.lstm = tf.keras.layers.LSTM(
units=units,
return_sequences=return_sequences,
name='cell',
unroll=self.unroll,
stateful=self.stateful)
if self.mode == modes.Modes.STREAM_INTERNAL_STATE_INFERENCE:
      # create state variable for stateful streamable inference
self.input_state1 = self.add_weight(
name='input_state1',
shape=[inference_batch_size, units],
trainable=False,
initializer=tf.zeros_initializer)
if use_peepholes:
        # second state in peepholes LSTM has different dimensions from
        # the first state due to the projection layer with dim num_proj
self.input_state2 = self.add_weight(
name='input_state2',
shape=[inference_batch_size, num_proj],
trainable=False,
initializer=tf.zeros_initializer)
self.lstm_cell = tf1.nn.rnn_cell.LSTMCell(
num_units=units, use_peepholes=True, num_proj=num_proj, name='cell')
else:
        # second state in the standard LSTM has the same dimensions as
        # the first state
self.input_state2 = self.add_weight(
name='input_state2',
shape=[inference_batch_size, units],
trainable=False,
initializer=tf.zeros_initializer)
self.lstm_cell = tf.keras.layers.LSTMCell(units=units, name='cell')
self.lstm = None
elif self.mode == modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE:
      # in streaming mode with external state,
      # the state becomes input/output placeholders
self.input_state1 = tf.keras.layers.Input(
shape=(units,),
batch_size=inference_batch_size,
name=self.name + 'input_state1')
if use_peepholes:
self.input_state2 = tf.keras.layers.Input(
shape=(num_proj,),
batch_size=inference_batch_size,
name=self.name + 'input_state2')
self.lstm_cell = tf1.nn.rnn_cell.LSTMCell(
num_units=units, use_peepholes=True, num_proj=num_proj)
else:
self.input_state2 = tf.keras.layers.Input(
shape=(units,),
batch_size=inference_batch_size,
name=self.name + 'input_state2')
self.lstm_cell = tf.keras.layers.LSTMCell(units=units, name='cell')
self.lstm = None
self.output_state1 = None
self.output_state2 = None
def call(self, inputs):
if inputs.shape.rank != 3: # [batch, time, feature]
raise ValueError('inputs.shape.rank:%d must be 3' % inputs.shape.rank)
if self.mode == modes.Modes.STREAM_INTERNAL_STATE_INFERENCE:
# run streamable inference on input [batch, 1, features]
# returns output [batch, 1, units]
return self._streaming_internal_state(inputs)
elif self.mode == modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE:
# in streaming mode with external state
# in addition to output we return output state
output, self.output_state1, self.output_state2 = self._streaming_external_state(
inputs, self.input_state1, self.input_state2)
return output
elif self.mode in (modes.Modes.TRAINING, modes.Modes.NON_STREAM_INFERENCE):
# run non streamable training or non streamable inference
# on input [batch, time, features], returns [batch, time, units]
return self._non_streaming(inputs)
else:
raise ValueError(f'Encountered unexpected mode `{self.mode}`.')
def get_config(self):
config = {
'mode': self.mode,
'inference_batch_size': self.inference_batch_size,
'units': self.units,
'return_sequences': self.return_sequences,
'unroll': self.unroll,
'num_proj': self.num_proj,
'use_peepholes': self.use_peepholes,
'stateful': self.stateful,
}
base_config = super(LSTM, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def get_input_state(self):
# input state is used only for STREAM_EXTERNAL_STATE_INFERENCE mode
if self.mode == modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE:
return [self.input_state1, self.input_state2]
else:
raise ValueError('Expected the layer to be in external streaming mode, '
f'not `{self.mode}`.')
def get_output_state(self):
# output state is used only for STREAM_EXTERNAL_STATE_INFERENCE mode
if self.mode == modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE:
return [self.output_state1, self.output_state2]
else:
raise ValueError('Expected the layer to be in external streaming mode, '
f'not `{self.mode}`.')
def _streaming_internal_state(self, inputs):
# first dimension is batch size
if inputs.shape[0] != self.inference_batch_size:
raise ValueError(
'inputs.shape[0]:%d must be = self.inference_batch_size:%d' %
(inputs.shape[0], self.inference_batch_size))
# receive inputs: [batch, 1, feature]
# convert it for lstm cell to inputs1: [batch, feature]
inputs1 = tf.keras.backend.squeeze(inputs, axis=1)
output, states = self.lstm_cell(inputs1,
[self.input_state1, self.input_state2])
# update internal states
assign_state1 = self.input_state1.assign(states[0])
assign_state2 = self.input_state2.assign(states[1])
with tf.control_dependencies([assign_state1, assign_state2]):
# output [batch, 1, feature]
output = tf.keras.backend.expand_dims(output, axis=1)
return output
def _streaming_external_state(self, inputs, state1, state2):
# first dimension is batch size
if inputs.shape[0] != self.inference_batch_size:
raise ValueError(
'inputs.shape[0]:%d must be = self.inference_batch_size:%d' %
(inputs.shape[0], self.inference_batch_size))
# receive inputs: [batch, 1, feature]
# convert it for lstm cell to inputs1: [batch, feature]
inputs1 = tf.keras.backend.squeeze(inputs, axis=1)
output, states = self.lstm_cell(inputs1, [state1, state2])
# output [batch, 1, feature]
output = tf.keras.backend.expand_dims(output, axis=1)
return output, states[0], states[1]
def _non_streaming(self, inputs):
# inputs [batch, time, feature]
output = self.lstm(inputs) # [batch, time, units]
if not self.return_sequences:
# if we do not return sequence the output will be [batch, units]
# for consistency make it [batch, 1, units]
output = tf.keras.backend.expand_dims(output, axis=1)
return output
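# A minimal usage sketch (not part of the original module; layer and tensor
# names are assumptions). It shows how the external-state streaming mode is
# driven frame by frame: the two state tensors returned by get_output_state()
# are carried over and fed back on the next call.
#
#   lstm = LSTM(units=64, mode=modes.Modes.STREAM_EXTERNAL_STATE_INFERENCE,
#               inference_batch_size=1)
#   output = lstm(frame)                      # frame: [batch=1, 1, features]
#   state1, state2 = lstm.get_output_state()  # carry over to the next frame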
| google-research/google-research | kws_streaming/layers/lstm.py | Python | apache-2.0 | 10,610 | 0.005467 |
"""SCons.Tool.sunlink
Tool-specific initialization for the Sun Solaris (Forte) linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sunlink.py 4577 2009/12/27 19:43:56 scons"
import os
import os.path
import SCons.Util
import link
ccLinker = None
# search for the acc compiler and linker front end
try:
dirs = os.listdir('/opt')
except (IOError, OSError):
# Not being able to read the directory because it doesn't exist
# (IOError) or isn't readable (OSError) is okay.
dirs = []
for d in dirs:
linker = '/opt/' + d + '/bin/CC'
if os.path.exists(linker):
ccLinker = linker
break
def generate(env):
"""Add Builders and construction variables for Forte to an Environment."""
link.generate(env)
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G')
env.Append(LINKFLAGS=['$__RPATH'])
env['RPATHPREFIX'] = '-R'
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
def exists(env):
return ccLinker
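# A hedged usage sketch (not part of the original module): an SConstruct
# would typically request this tool by name and rely on the RPATH variables
# defined in generate() above. The paths shown are assumptions.
#
#   env = Environment(tools=['default', 'sunlink'])
#   env.Append(RPATH=['/opt/SUNWspro/lib'])  # emitted as -R/opt/SUNWspro/lib
#   env.SharedLibrary('foo', ['foo.c'])      # links with $LINKFLAGS -G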
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| Eigenlabs/EigenD | tools/packages/SCons/Tool/sunlink.py | Python | gpl-3.0 | 2,437 | 0.002052 |
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from operator import itemgetter
import re
from datetime import datetime
from random import randrange
from django import forms
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.forms import widgets
from django.utils.safestring import mark_safe
from localflavor.us.forms import USStateField
from localflavor.us.us_states import STATE_CHOICES
import basket
from basket.base import request
from lib.l10n_utils.dotlang import _
from lib.l10n_utils.dotlang import _lazy
from product_details import product_details
from .email_contribute import INTEREST_CHOICES
FORMATS = (('H', _lazy('HTML')), ('T', _lazy('Text')))
LANGS_TO_STRIP = ['en-US', 'es']
PARENTHETIC_RE = re.compile(r' \([^)]+\)$')
LANG_FILES = ['firefox/partners/index', 'mozorg/contribute', 'mozorg/contribute/index']
def strip_parenthetical(lang_name):
"""
Remove the parenthetical from the end of the language name string.
"""
return PARENTHETIC_RE.sub('', lang_name, 1)
class SideRadios(widgets.RadioFieldRenderer):
"""Render radio buttons as labels"""
def render(self):
radios = [unicode(w) for idx, w in enumerate(self)]
return mark_safe(''.join(radios))
class PrivacyWidget(widgets.CheckboxInput):
"""Render a checkbox with privacy text. Lots of pages need this so
it should be standardized"""
def render(self, name, value, attrs=None):
attrs['required'] = 'required'
input_txt = super(PrivacyWidget, self).render(name, value, attrs)
policy_txt = _(u'I’m okay with Mozilla handling my info as explained '
u'in <a href="%s">this Privacy Policy</a>')
return mark_safe(
'<label for="%s" class="privacy-check-label">'
'%s '
'<span class="title">%s</span></label>'
% (attrs['id'], input_txt,
policy_txt % reverse('privacy'))
)
class HoneyPotWidget(widgets.TextInput):
"""Render a text field to (hopefully) trick bots. Will be used on many pages."""
def render(self, name, value, attrs=None):
honeypot_txt = _(u'Leave this field empty.')
# semi-randomized in case we have more than one per page.
# this is maybe/probably overthought
honeypot_id = 'office-fax-' + str(randrange(1001)) + '-' + str(datetime.now().strftime("%Y%m%d%H%M%S%f"))
return mark_safe(
'<div class="super-priority-field">'
'<label for="%s">%s</label>'
'<input type="text" name="office_fax" id="%s">'
'</div>' % (honeypot_id, honeypot_txt, honeypot_id))
class URLInput(widgets.TextInput):
input_type = 'url'
class EmailInput(widgets.TextInput):
input_type = 'email'
class DateInput(widgets.DateInput):
input_type = 'date'
class TimeInput(widgets.TimeInput):
input_type = 'time'
class TelInput(widgets.TextInput):
input_type = 'tel'
class NumberInput(widgets.TextInput):
input_type = 'number'
class L10nSelect(forms.Select):
def render_option(self, selected_choices, option_value, option_label):
if option_value == '':
option_label = u'-- {0} --'.format(_('select'))
return super(L10nSelect, self).render_option(selected_choices, option_value, option_label)
class ContributeSignupForm(forms.Form):
required_attr = {'required': 'required'}
empty_choice = ('', '')
category_choices = (
('coding', _lazy('Coding')),
('testing', _lazy('Testing')),
('writing', _lazy('Writing')),
('teaching', _lazy('Teaching')),
('helping', _lazy('Helping')),
('translating', _lazy('Translating')),
('activism', _lazy('Activism')),
('dontknow', _lazy(u'I don’t know')),
)
coding_choices = (
empty_choice,
('coding-firefox', _lazy('Firefox')),
('coding-firefoxos', _lazy('Firefox OS')),
('coding-websites', _lazy('Websites')),
('coding-addons', _lazy('Firefox add-ons')),
('coding-marketplace', _lazy('HTML5 apps')),
('coding-webcompat', _lazy('Diagnosing Web compatibility issues')),
('coding-cloud', _lazy('Online services')),
)
testing_choices = (
empty_choice,
('testing-firefox', _lazy('Firefox and Firefox OS')),
('testing-addons', _lazy('Firefox add-ons')),
('testing-marketplace', _lazy('HTML5 apps')),
('testing-websites', _lazy('Websites')),
('testing-webcompat', _lazy('Web compatibility')),
)
translating_choices = (
empty_choice,
('translating-products', _lazy('Products')),
('translating-websites', _lazy('Websites')),
('translating-tools', _lazy(u'I’d like to work on localization tools')),
)
writing_choices = (
empty_choice,
('writing-journalism', _lazy('Journalism')),
('writing-techusers', _lazy('Technical docs for users')),
('writing-techdevs', _lazy('Technical docs for developers')),
('writing-addons', _lazy('Technical docs for Firefox add-ons')),
('writing-marketplace', _lazy('Technical docs for HTML5 apps')),
)
teaching_choices = (
empty_choice,
('teaching-webmaker', _lazy('Teach the Web (Webmaker)')),
('teaching-fellowships', _lazy('Open News fellowships')),
('teaching-hive', _lazy('Hive - Community networks of educators/mentors')),
('teaching-science', _lazy('Open Web science research')),
)
email = forms.EmailField(widget=EmailInput(attrs=required_attr))
privacy = forms.BooleanField(widget=PrivacyWidget)
category = forms.ChoiceField(choices=category_choices,
widget=forms.RadioSelect(attrs=required_attr))
area_coding = forms.ChoiceField(choices=coding_choices, required=False, widget=L10nSelect)
area_testing = forms.ChoiceField(choices=testing_choices, required=False, widget=L10nSelect)
area_translating = forms.ChoiceField(choices=translating_choices, required=False,
widget=L10nSelect)
area_writing = forms.ChoiceField(choices=writing_choices, required=False, widget=L10nSelect)
area_teaching = forms.ChoiceField(choices=teaching_choices, required=False, widget=L10nSelect)
name = forms.CharField(widget=forms.TextInput(attrs=required_attr))
message = forms.CharField(widget=forms.Textarea, required=False)
newsletter = forms.BooleanField(required=False)
format = forms.ChoiceField(widget=forms.RadioSelect(attrs=required_attr), choices=(
('H', _lazy('HTML')),
('T', _lazy('Text')),
))
def __init__(self, locale, *args, **kwargs):
regions = product_details.get_regions(locale)
regions = sorted(regions.iteritems(), key=itemgetter(1))
regions.insert(0, self.empty_choice)
super(ContributeSignupForm, self).__init__(*args, **kwargs)
self.locale = locale
self.fields['country'] = forms.ChoiceField(choices=regions,
widget=L10nSelect(attrs={'required': 'required'}))
def clean(self):
cleaned_data = super(ContributeSignupForm, self).clean()
category = cleaned_data.get('category')
# only bother if category was supplied
if category:
area_name = 'area_' + category
if area_name in cleaned_data and not cleaned_data[area_name]:
required_message = self.fields[area_name].error_messages['required']
self._errors[area_name] = self.error_class([required_message])
del cleaned_data[area_name]
return cleaned_data
class ContributeForm(forms.Form):
email = forms.EmailField(widget=EmailInput(attrs={'required': 'required'}))
privacy = forms.BooleanField(widget=PrivacyWidget)
newsletter = forms.BooleanField(required=False)
interest = forms.ChoiceField(
choices=INTEREST_CHOICES,
widget=forms.Select(attrs={'required': 'required'}))
comments = forms.CharField(
widget=forms.widgets.Textarea(attrs={'rows': '4',
'cols': '30'}))
# honeypot
office_fax = forms.CharField(widget=HoneyPotWidget, required=False)
class ContributeTasksForm(forms.Form):
required_attr = {'required': 'required'}
empty_choice = ('', '')
email = forms.EmailField(widget=EmailInput(attrs=required_attr))
name = forms.CharField(widget=forms.TextInput(attrs=required_attr))
privacy = forms.BooleanField(widget=PrivacyWidget)
def __init__(self, locale, *args, **kwargs):
self.locale = locale
regions = product_details.get_regions(locale)
regions = sorted(regions.iteritems(), key=itemgetter(1))
regions.insert(0, self.empty_choice)
super(ContributeTasksForm, self).__init__(*args, **kwargs)
self.fields['country'] = forms.ChoiceField(choices=regions, widget=L10nSelect)
class WebToLeadForm(forms.Form):
interests_standard = (
('Firefox for Desktop', _lazy(u'Firefox for Desktop')),
('Firefox for Android', _lazy(u'Firefox for Android')),
('Firefox Marketplace', _lazy(u'Firefox Marketplace')),
('Firefox OS', _lazy(u'Firefox OS')),
('Persona', _lazy(u'Persona')),
('Marketing and Co-promotions', _lazy(u'Marketing and Co-promotions')),
('Promoted Content ("Tiles")', _lazy(u'Promoted Content ("Tiles")')),
('Other', _lazy(u'Other')),
)
interests_fx = (
('Firefox for Android', _lazy(u'Firefox for Android')),
('Firefox Marketplace', _lazy(u'Firefox Marketplace')),
('Firefox OS', _lazy(u'Firefox OS')),
('Other', _lazy(u'Other')),
)
industries = (
('', 'Select Industry'),
('Agriculture', _lazy(u'Agriculture')),
('Apparel', _lazy(u'Apparel')),
('Banking', _lazy(u'Banking')),
('Biotechnology', _lazy(u'Biotechnology')),
('Chemicals', _lazy(u'Chemicals')),
('Communications', _lazy(u'Communications')),
('Construction', _lazy(u'Construction')),
('Consulting', _lazy(u'Consulting')),
('Education', _lazy(u'Education')),
('Electronics', _lazy(u'Electronics')),
('Energy', _lazy(u'Energy')),
('Engineering', _lazy(u'Engineering')),
('Entertainment', _lazy(u'Entertainment')),
('Environmental', _lazy(u'Environmental')),
('Finance', _lazy(u'Finance')),
('Food & Beverage', _lazy(u'Food & Beverage')),
('Government', _lazy(u'Government')),
('Healthcare', _lazy(u'Healthcare')),
('Hospitality', _lazy(u'Hospitality')),
('Insurance', _lazy(u'Insurance')),
('Machinery', _lazy(u'Machinery')),
('Manufacturing', _lazy(u'Manufacturing')),
('Media', _lazy(u'Media')),
('Not For Profit', _lazy(u'Not For Profit')),
('Other', _lazy(u'Other')),
('Recreation', _lazy(u'Recreation')),
('Retail', _lazy(u'Retail')),
('Shipping', _lazy(u'Shipping')),
('Technology', _lazy(u'Technology')),
('Telecommunications', _lazy(u'Telecommunications')),
('Transportation', _lazy(u'Transportation')),
('Utilities', _lazy(u'Utilities')),
)
first_name = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your first name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'First Name'),
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
last_name = forms.CharField(
max_length=80,
required=True,
error_messages={
            'required': _lazy(u'Please enter your last name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Last Name'),
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
title = forms.CharField(
max_length=40,
required=False,
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Title')
}
)
)
company = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your company name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Company'),
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
URL = forms.URLField(
max_length=80,
required=False,
error_messages={
'invalid': _lazy(u'Please supply a valid URL.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Website')
}
)
)
email = forms.EmailField(
max_length=80,
required=True,
error_messages={
'required': _lazy(u'Please enter your email address.'),
'invalid': _lazy(u'Please enter a valid email address')
},
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Email'),
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
phone = forms.CharField(
max_length=40,
required=False,
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Phone')
}
)
)
mobile = forms.CharField(
max_length=40,
required=False,
widget=forms.TextInput(
attrs={
'size': 20,
'placeholder': _lazy(u'Mobile')
}
)
)
street = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'placeholder': _lazy(u'Address'),
'rows': '',
'cols': ''
}
)
)
city = forms.CharField(
required=False,
max_length=40,
widget=forms.TextInput(
attrs={
'placeholder': _lazy(u'City')
}
)
)
state = forms.CharField(
required=False,
max_length=40,
widget=forms.TextInput(
attrs={
'placeholder': _lazy(u'State/Province')
}
)
)
country = forms.CharField(
required=False,
max_length=40,
widget=forms.TextInput(
attrs={
'placeholder': _lazy(u'Country')
}
)
)
zip = forms.CharField(
required=False,
max_length=40,
widget=forms.TextInput(
attrs={
'placeholder': _lazy(u'Zip')
}
)
)
description = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'placeholder': _lazy(u'Description'),
'rows': '',
'cols': ''
}
)
)
interested_countries = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'placeholder': _lazy(u'Countries of Interest'),
'rows': '',
'cols': ''
}
)
)
interested_languages = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'placeholder': _lazy(u'Languages of Interest'),
'rows': '',
'cols': ''
}
)
)
industry = forms.ChoiceField(
choices=industries,
required=False,
widget=forms.Select(
attrs={
'title': _lazy('Industry'),
'size': 1
}
)
)
campaign_type = forms.ChoiceField(
choices=(
('', _lazy(u'Select Campaign Type')),
('Brand', _lazy(u'Brand')),
('Direct Response', _lazy(u'Direct Response'))
),
required=False,
widget=forms.Select(
attrs={
'title': _lazy('Campaign Type')
}
)
)
# honeypot
office_fax = forms.CharField(widget=HoneyPotWidget, required=False)
# uncomment below to debug salesforce
# debug = forms.IntegerField(required=False)
# debugEmail = forms.EmailField(required=False)
def __init__(self, *args, **kwargs):
interest_set = kwargs.pop('interest_set', 'standard')
interest_choices = self.interests_fx if (interest_set == 'fx') else self.interests_standard
kwargs.pop('lead_source', None)
super(WebToLeadForm, self).__init__(*args, **kwargs)
self.fields['interest'] = forms.MultipleChoiceField(
choices=interest_choices,
required=False,
widget=forms.SelectMultiple(
attrs={
'title': _lazy(u'Interest'),
'size': 8
}
)
)
class USStateSelectBlank(widgets.Select):
"""Version of USStateSelect widget with a blank first selection."""
def __init__(self, attrs=None, empty_msg=None):
if empty_msg is None:
empty_msg = ''
us_states_blank = (('', empty_msg),) + STATE_CHOICES
super(USStateSelectBlank, self).__init__(attrs, choices=us_states_blank)
class ContentServicesForm(forms.Form):
industries = (
('', 'Select Industry'),
('Agriculture', _lazy(u'Agriculture')),
('Apparel', _lazy(u'Apparel')),
('Banking', _lazy(u'Banking')),
('Biotechnology', _lazy(u'Biotechnology')),
('Chemicals', _lazy(u'Chemicals')),
('Communications', _lazy(u'Communications')),
('Construction', _lazy(u'Construction')),
('Consulting', _lazy(u'Consulting')),
('Education', _lazy(u'Education')),
('Electronics', _lazy(u'Electronics')),
('Energy', _lazy(u'Energy')),
('Engineering', _lazy(u'Engineering')),
('Entertainment', _lazy(u'Entertainment')),
('Environmental', _lazy(u'Environmental')),
('Finance', _lazy(u'Finance')),
('Food & Beverage', _lazy(u'Food & Beverage')),
('Government', _lazy(u'Government')),
('Healthcare', _lazy(u'Healthcare')),
('Hospitality', _lazy(u'Hospitality')),
('Insurance', _lazy(u'Insurance')),
('Machinery', _lazy(u'Machinery')),
('Manufacturing', _lazy(u'Manufacturing')),
('Media', _lazy(u'Media')),
('Not For Profit', _lazy(u'Not For Profit')),
('Other', _lazy(u'Other')),
('Recreation', _lazy(u'Recreation')),
('Retail', _lazy(u'Retail')),
('Shipping', _lazy(u'Shipping')),
('Technology', _lazy(u'Technology')),
('Telecommunications', _lazy(u'Telecommunications')),
('Transportation', _lazy(u'Transportation')),
('Utilities', _lazy(u'Utilities')),
)
first_name = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your first name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
last_name = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your last name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
company = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your company name.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
email = forms.EmailField(
max_length=80,
required=True,
error_messages={
'required': _lazy(u'Please enter your email address.'),
'invalid': _lazy(u'Please enter a valid email address')
},
widget=forms.TextInput(
attrs={
'size': 20,
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
phone = forms.CharField(
max_length=40,
required=True,
error_messages={
'required': _lazy(u'Please enter your phone number.')
},
widget=forms.TextInput(
attrs={
'size': 20,
'class': 'required',
'required': 'required',
'aria-required': 'true'
}
)
)
mobile = forms.CharField(
max_length=40,
required=False,
widget=forms.TextInput(
attrs={
'size': 20
}
)
)
street = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'rows': '',
'cols': ''
}
)
)
city = forms.CharField(
required=False,
max_length=40
)
state = USStateField(
required=False,
initial='',
widget=USStateSelectBlank()
)
province = forms.CharField(
required=False,
max_length=40
)
country = forms.ChoiceField(
required=True,
)
zip = forms.CharField(
required=False,
max_length=40
)
campaign_type_description = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'rows': '',
'cols': ''
}
)
)
interested_countries = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'rows': '',
'cols': ''
}
)
)
interested_languages = forms.CharField(
required=False,
widget=forms.Textarea(
attrs={
'rows': '',
'cols': ''
}
)
)
industry = forms.ChoiceField(
choices=industries,
required=False,
widget=forms.Select(
attrs={
'title': _lazy('Industry'),
'size': 1
}
)
)
campaign_type = forms.ChoiceField(
choices=(
('', _lazy(u'Select Campaign Type')),
('Brand', _lazy(u'Brand')),
('Direct Response', _lazy(u'Direct Response')),
('Other', _lazy(u'Other'))
),
required=False,
widget=forms.Select(
attrs={
'title': _lazy('Campaign Type')
}
)
)
# honeypot
office_fax = forms.CharField(widget=HoneyPotWidget, required=False)
# uncomment below to debug salesforce
# debug = forms.IntegerField(required=False)
# debugEmail = forms.EmailField(required=False)
    def __init__(self, *args, **kwargs):
        kwargs.pop('lead_source', None)
        # Pop 'locale' before calling the parent constructor, which would
        # otherwise reject it as an unexpected keyword argument.
        locale = kwargs.pop('locale', 'en-US')
        super(ContentServicesForm, self).__init__(*args, **kwargs)
country_list = product_details.get_regions(locale).items()
country_list = sorted(country_list, key=lambda country: country[1])
country_list.insert(0, ('', ''))
self.fields['country'].choices = country_list
def clean(self):
data = super(ContentServicesForm, self).clean()
if data.get('country') == 'us' and not data.get('state'):
raise ValidationError(self.fields['state'].error_messages['invalid'])
return data
class ContributeStudentAmbassadorForm(forms.Form):
first_name = forms.CharField(max_length=50)
last_name = forms.CharField(max_length=50)
email = forms.EmailField(max_length=100)
status = forms.ChoiceField(
choices=(('', ''),
('student', _lazy('Student')), ('teacher', _lazy('Teacher')),
('administrator', _lazy('Administrator')),
('other', _lazy('Other'))))
school = forms.CharField(max_length=100)
grad_year = forms.ChoiceField(
required=False,
choices=([('', _lazy('Expected Graduation Year'))] +
[(i, str(i)) for i in range(datetime.now().year,
datetime.now().year + 8)]))
major = forms.ChoiceField(
required=False,
choices=[('', ''),
('computer science', _lazy('Computer Science')),
('computer engineering', _lazy('Computer Engineering')),
('engineering', _lazy('Engineering (other)')),
('social science', _lazy('Social Science')),
('science', _lazy('Science (other)')),
('business/marketing', _lazy('Business/Marketing')),
('education', _lazy('Education')),
('mathematics', _lazy('Mathematics')),
('other', _lazy('Other'))])
major_free_text = forms.CharField(max_length=100, required=False)
city = forms.CharField(max_length=100)
country = forms.ChoiceField()
fmt = forms.ChoiceField(widget=forms.RadioSelect(renderer=SideRadios),
label=_lazy('Email format preference:'),
choices=FORMATS, initial='H')
age_confirmation = forms.BooleanField(
widget=widgets.CheckboxInput(),
label=_lazy(u'I’m 18 years old and eligible to participate in '
'the program'))
share_information = forms.BooleanField(
required=False,
widget=widgets.CheckboxInput(),
label=_lazy(u'Please share my contact information and interests with '
'related Mozilla contributors for the purpose of '
'collaborating on Mozilla projects'))
privacy = forms.BooleanField(widget=PrivacyWidget)
nl_mozilla_and_you = forms.BooleanField(
required=False,
widget=widgets.CheckboxInput(),
label=_lazy(u'Firefox & You: A monthly newsletter packed with tips to'
' improve your browsing experience'))
nl_mobile = forms.BooleanField(
required=False,
widget=widgets.CheckboxInput(),
label=_lazy(u'Firefox for Android: Get the power of Firefox in the'
' palm of your hand'))
nl_firefox_flicks = forms.BooleanField(
required=False,
widget=widgets.CheckboxInput(),
label=_lazy(u'Firefox Flicks'))
nl_about_mozilla = forms.BooleanField(
required=False,
widget=widgets.CheckboxInput(),
label=_lazy(u'About Mozilla: News from the Mozilla Project'))
# honeypot
office_fax = forms.CharField(widget=HoneyPotWidget, required=False)
source_url = forms.URLField(required=False)
    def __init__(self, *args, **kwargs):
        # Pop 'locale' so it is not forwarded to forms.Form, which would
        # reject it as an unexpected keyword argument.
        locale = kwargs.pop('locale', 'en-US')
        super(ContributeStudentAmbassadorForm, self).__init__(*args, **kwargs)
country_list = product_details.get_regions(locale).items()
country_list = sorted(country_list, key=lambda country: country[1])
country_list.insert(0, ('', ''))
self.fields['country'].choices = country_list
def clean(self, *args, **kwargs):
super(ContributeStudentAmbassadorForm, self).clean(*args, **kwargs)
if (self.cleaned_data.get('status', '') == 'student' and
not self.cleaned_data.get('grad_year', '')):
self._errors['grad_year'] = (
self.error_class([_('This field is required.')]))
return self.cleaned_data
def clean_grad_year(self):
return self.cleaned_data.get('grad_year', '')
    def clean_major(self):
        # The free-text field is declared as 'major_free_text' above; the
        # original lookup used the misspelled key 'major_free_field' and so
        # always fell back to the dropdown value.
        return (self.cleaned_data.get('major_free_text') or
                self.cleaned_data['major'])
def clean_share_information(self):
if self.cleaned_data.get('share_information', False):
return 'Y'
return 'N'
def clean_office_fax(self):
honeypot = self.cleaned_data.pop('office_fax', None)
if honeypot:
raise forms.ValidationError(
_('Your submission could not be processed'))
def newsletters(self):
newsletters = ['ambassadors']
for newsletter in ['nl_mozilla_and_you', 'nl_mobile',
'nl_firefox_flicks', 'nl_about_mozilla']:
if self.cleaned_data.get(newsletter, False):
newsletters.append(newsletter[3:].replace('_', '-'))
return newsletters
def save(self):
data = self.cleaned_data
result = basket.subscribe(data['email'], self.newsletters(),
format=data['fmt'], country=data['country'],
welcome_message='Student_Ambassadors_Welcome',
source_url=data['source_url'], sync='Y')
data = {
'FIRST_NAME': data['first_name'],
'LAST_NAME': data['last_name'],
'STUDENTS_CURRENT_STATUS': data['status'],
'STUDENTS_SCHOOL': data['school'],
'STUDENTS_GRAD_YEAR': data['grad_year'],
'STUDENTS_MAJOR': data['major'],
'COUNTRY_': data['country'],
'STUDENTS_CITY': data['city'],
'STUDENTS_ALLOW_SHARE': data['share_information'],
}
request('post', 'custom_update_student_ambassadors',
token=result['token'], data=data)
| SujaySKumar/bedrock | bedrock/mozorg/forms.py | Python | mpl-2.0 | 30,635 | 0.00062 |
class IoConfig:
def __init__(self, i_path, mode, o_pin=None, set_temp=None, buffer_temp=None, direction=None):
self.i_path = i_path
self.mode = mode
self.o_pin = o_pin
self.set_temp = set_temp
self.buffer_temp = buffer_temp
self.on_fn = self.get_on_fn(direction)
self.off_fn = self.get_off_fn(direction)
def get_on_fn(self, direction):
if direction == '-':
return lambda t: self.set_temp < t
elif direction == '+':
return lambda t: self.set_temp > t
return None
def get_off_fn(self, direction):
if direction == '-':
return lambda t: self.set_temp - self.buffer_temp >= t
elif direction == '+':
return lambda t: self.set_temp + self.buffer_temp <= t
return None
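    # Illustrative example (values are assumptions): with set_temp=20.0,
    # buffer_temp=1.0 and direction '-', on_fn(t) is True once t rises above
    # 20.0 and off_fn(t) is True once t falls back to 19.0 or below, giving
    # a simple hysteresis band that prevents rapid on/off switching.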
def __repr__(self):
return 'IoConfig()'
def __str__(self):
str_lst = ['Input path:', str(self.i_path),
'Mode:', str(self.mode),
'Output pin:', str(self.o_pin),
'Set temp:', str(self.set_temp),
'Buffer temp:', str(self.buffer_temp)
]
return '\t'.join(str_lst)
class Config:
    def __init__(self, config_path, temp_path='/sys/bus/w1/devices/%(TEMP)s/w1_slave'):
        # A with-block guarantees the file is closed even if parsing fails.
        with open(config_path) as f:
            self.log_path = f.readline().strip()
            self.io = []
            for line in f:
                splat = line.strip().split()
                if len(splat) == 1:
                    self.io.append(IoConfig(temp_path % {"TEMP": splat[0]},
                                            'MONITOR'))
                else:
                    self.io.append(IoConfig(temp_path % {"TEMP": splat[0]},
                                            'CONTROLLER',
                                            int(splat[1]),
                                            float(splat[2]),
                                            float(splat[3]),
                                            splat[4]))
def __repr__(self):
return 'Config()'
def __str__(self):
str_lst = [ 'Log path:',
self.log_path,
'IO Configs:',
'\n'.join([str(c) for c in self.io])
]
return '\n'.join(str_lst)
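# Example config file layout (inferred from the parser above; sensor ids,
# pin number and temperatures are assumptions). The first line is the log
# path; one-token lines are monitor-only sensors, five-token lines are
# controllers (sensor id, GPIO pin, set temp, buffer temp, direction):
#
#   /var/log/pitemp.log
#   28-000005e2fdc3
#   28-000005e2aabb 17 20.0 1.0 -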
if __name__ == "__main__":
import sys
config = Config(sys.argv[1])
    print(config)
| rj3d/PiTempController | config_reader.py | Python | gpl-2.0 | 1,798 | 0.05228 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006 Ali Sabil <ali.sabil@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""SOAP Messages structures."""
import papyon.util.element_tree as ElementTree
import papyon.util.string_io as StringIO
__all__ = ['SOAPRequest', 'SOAPResponse']
class NameSpace:
SOAP_ENVELOPE = "http://schemas.xmlsoap.org/soap/envelope/"
SOAP_ENCODING = "http://schemas.xmlsoap.org/soap/encoding/"
XML_SCHEMA = "http://www.w3.org/1999/XMLSchema"
XML_SCHEMA_INSTANCE = "http://www.w3.org/1999/XMLSchema-instance"
class Encoding:
SOAP = "http://schemas.xmlsoap.org/soap/encoding/"
class _SOAPSection:
ENVELOPE = "{" + NameSpace.SOAP_ENVELOPE + "}Envelope"
HEADER = "{" + NameSpace.SOAP_ENVELOPE + "}Header"
BODY = "{" + NameSpace.SOAP_ENVELOPE + "}Body"
class _SOAPElement(object):
def __init__(self, element):
self.element = element
def append(self, tag, namespace=None, type=None, attrib={}, value=None, **kwargs):
if namespace is not None:
tag = "{" + namespace + "}" + tag
if type:
if isinstance(type, str):
type = ElementTree.QName(type, NameSpace.XML_SCHEMA)
else:
type = ElementTree.QName(type[1], type[0])
attrib["{" + NameSpace.XML_SCHEMA_INSTANCE + "}type"] = type
child = ElementTree.SubElement(self.element, tag, attrib, **kwargs)
child.text = value
return _SOAPElement(child)
def __str__(self):
return ElementTree.tostring(self.element, "utf-8")
class SOAPRequest(object):
"""Abstracts a SOAP Request to be sent to the server"""
def __init__(self, method, namespace=None, encoding_style=Encoding.SOAP, **attr):
"""Initializer
@param method: the method to be called
@type method: string
@param namespace: the namespace that the method belongs to
@type namespace: URI
@param encoding_style: the encoding style for this method
@type encoding_style: URI
@param attr: attributes to be attached to the method"""
self.header = ElementTree.Element(_SOAPSection.HEADER)
if namespace is not None:
method = "{" + namespace + "}" + method
self.method = ElementTree.Element(method)
if encoding_style is not None:
self.method.set("{" + NameSpace.SOAP_ENVELOPE + "}encodingStyle", encoding_style)
for attr_key, attr_value in attr.iteritems():
self.method.set(attr_key, attr_value)
def add_argument(self, name, namespace=None, type=None, attrib=None, value=None, **kwargs):
if namespace is not None:
name = "{" + namespace + "}" + name
return self._add_element(self.method, name, type, attrib, value, **kwargs)
def add_header(self, name, namespace=None, attrib=None, value=None, **kwargs):
if namespace is not None:
name = "{" + namespace + "}" + name
return self._add_element(self.header, name, None, attrib, value, **kwargs)
def _add_element(self, parent, name, type=None, attributes=None, value=None, **kwargs):
elem = ElementTree.SubElement(parent, name)
if attributes is None:
attributes = {}
attributes.update(kwargs)
if type:
type = self._qname(type, NameSpace.XML_SCHEMA)
elem.set("{" + NameSpace.XML_SCHEMA_INSTANCE + "}type", type)
for attr_key, attr_value in attributes.iteritems():
elem.set(attr_key, attr_value)
elem.text = value
return _SOAPElement(elem)
def _qname(self, name, default_ns):
if name[0] != "{":
return ElementTree.QName(default_ns, name)
return ElementTree.QName(name)
def __str__(self):
envelope = ElementTree.Element(_SOAPSection.ENVELOPE)
if len(self.header) > 0:
envelope.append(self.header)
body = ElementTree.SubElement(envelope, _SOAPSection.BODY)
body.append(self.method)
return "<?xml version=\"1.0\" encoding=\"utf-8\"?>" +\
ElementTree.tostring(envelope, "utf-8")
def __repr__(self):
return "<SOAP request %s>" % self.method.tag
class SOAPResponse(object):
def __init__(self, data):
self.tree = self._parse(data)
self.header = self.tree.find(_SOAPSection.HEADER)
self.body = self.tree.find(_SOAPSection.BODY)
def find(self, path):
return self.tree.find(path)
def _parse(self, data):
events = ("start", "end", "start-ns", "end-ns")
ns = []
data = StringIO.StringIO(data)
context = ElementTree.iterparse(data, events=events)
for event, elem in context:
if event == "start-ns":
ns.append(elem)
elif event == "end-ns":
ns.pop()
elif event == "start":
elem.set("(xmlns)", tuple(ns))
data.close()
return context.root
| billiob/papyon | papyon/gnet/message/SOAP.py | Python | gpl-2.0 | 5,704 | 0.003857 |
# This file is part of MyPaint.
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2019 by the MyPaint Development Team.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Device specific settings and configuration"""
## Imports
from __future__ import division, print_function
import logging
import collections
import re
from lib.gettext import C_
from lib.gibindings import Gtk
from lib.gibindings import Gdk
from lib.gibindings import Pango
from lib.observable import event
import gui.application
import gui.mode
logger = logging.getLogger(__name__)
## Device prefs
# The per-device settings are stored in the prefs in a sub-dict whose
# string keys are formed from the device name and enough extra
# information to (hopefully) identify the device uniquely. Names are not
# unique, and IDs vary according to the order in which you plug devices
# in. So for now, our unique strings use a combination of the device's
# name, its source as presented by GDK, and the number of axes.
_PREFS_ROOT = "input.devices"
_PREFS_DEVICE_SUBKEY_FMT = "{name}:{source}:{num_axes}"
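# Illustrative example (device name is an assumption): a stylus reported as
# "Wacom Intuos4 Pen" with source "pen" and 6 axes is stored under
# input.devices["Wacom Intuos4 Pen:pen:6"] as {"usage": ..., "scroll": ...}.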
## Device type strings
_DEVICE_TYPE_STRING = {
Gdk.InputSource.CURSOR: C_(
"prefs: device's type label",
"Cursor/puck",
),
Gdk.InputSource.ERASER: C_(
"prefs: device's type label",
"Eraser",
),
Gdk.InputSource.KEYBOARD: C_(
"prefs: device's type label",
"Keyboard",
),
Gdk.InputSource.MOUSE: C_(
"prefs: device's type label",
"Mouse",
),
Gdk.InputSource.PEN: C_(
"prefs: device's type label",
"Pen",
),
Gdk.InputSource.TOUCHPAD: C_(
"prefs: device's type label",
"Touchpad",
),
Gdk.InputSource.TOUCHSCREEN: C_(
"prefs: device's type label",
"Touchscreen",
),
}
## Settings consts and classes
class AllowedUsage:
"""Consts describing how a device may interact with the canvas"""
ANY = "any" #: Device can be used for any tasks.
NOPAINT = "nopaint" #: No direct painting, but can manipulate objects.
NAVONLY = "navonly" #: Device can only be used for navigation.
IGNORED = "ignored" #: Device cannot interact with the canvas at all.
VALUES = (ANY, IGNORED, NOPAINT, NAVONLY)
DISPLAY_STRING = {
IGNORED: C_(
"device settings: allowed usage",
u"Ignore",
),
ANY: C_(
"device settings: allowed usage",
u"Any Task",
),
NOPAINT: C_(
"device settings: allowed usage",
u"Non-painting tasks",
),
NAVONLY: C_(
"device settings: allowed usage",
u"Navigation only",
),
}
BEHAVIOR_MASK = {
ANY: gui.mode.Behavior.ALL,
IGNORED: gui.mode.Behavior.NONE,
NOPAINT: gui.mode.Behavior.NON_PAINTING,
NAVONLY: gui.mode.Behavior.CHANGE_VIEW,
}
class ScrollAction:
"""Consts describing how a device's scroll events should be used.
The user can assign one of these values to a device to configure
whether they'd prefer panning or scrolling for unmodified scroll
events. This setting can be queried via the device monitor.
"""
ZOOM = "zoom" #: Alter the canvas scaling
PAN = "pan" #: Pan across the canvas
VALUES = (ZOOM, PAN)
DISPLAY_STRING = {
ZOOM: C_("device settings: unmodified scroll action", u"Zoom"),
PAN: C_("device settings: unmodified scroll action", u"Pan"),
}
class Settings (object):
"""A device's settings"""
DEFAULT_USAGE = AllowedUsage.VALUES[0]
DEFAULT_SCROLL = ScrollAction.VALUES[0]
def __init__(self, prefs, usage=DEFAULT_USAGE, scroll=DEFAULT_SCROLL):
super(Settings, self).__init__()
self._usage = self.DEFAULT_USAGE
self._update_usage_mask()
self._scroll = self.DEFAULT_SCROLL
self._prefs = prefs
self._load_from_prefs()
@property
def usage(self):
return self._usage
@usage.setter
def usage(self, value):
if value not in AllowedUsage.VALUES:
raise ValueError("Unrecognized usage value")
self._usage = value
self._update_usage_mask()
self._save_to_prefs()
@property
def usage_mask(self):
return self._usage_mask
@property
def scroll(self):
return self._scroll
@scroll.setter
def scroll(self, value):
if value not in ScrollAction.VALUES:
raise ValueError("Unrecognized scroll value")
self._scroll = value
self._save_to_prefs()
def _load_from_prefs(self):
usage = self._prefs.get("usage", self.DEFAULT_USAGE)
if usage not in AllowedUsage.VALUES:
usage = self.DEFAULT_USAGE
self._usage = usage
scroll = self._prefs.get("scroll", self.DEFAULT_SCROLL)
if scroll not in ScrollAction.VALUES:
scroll = self.DEFAULT_SCROLL
self._scroll = scroll
self._update_usage_mask()
def _save_to_prefs(self):
self._prefs.update({
"usage": self._usage,
"scroll": self._scroll,
})
def _update_usage_mask(self):
self._usage_mask = AllowedUsage.BEHAVIOR_MASK[self._usage]
## Main class defs
class Monitor (object):
"""Monitors device use & plugging, and manages their configuration
An instance resides in the main application. It is responsible for
monitoring known devices, determining their characteristics, and
storing their settings. Per-device settings are stored in the main
application preferences.
"""
def __init__(self, app):
"""Initializes, assigning initial input device uses
:param app: the owning Application instance.
:type app: gui.application.Application
"""
super(Monitor, self).__init__()
self._app = app
if app is not None:
self._prefs = app.preferences
else:
self._prefs = {}
if _PREFS_ROOT not in self._prefs:
self._prefs[_PREFS_ROOT] = {}
# Transient device information
self._device_settings = collections.OrderedDict() # {dev: settings}
self._last_event_device = None
self._last_pen_device = None
disp = Gdk.Display.get_default()
mgr = disp.get_device_manager()
mgr.connect("device-added", self._device_added_cb)
mgr.connect("device-removed", self._device_removed_cb)
self._device_manager = mgr
for physical_device in mgr.list_devices(Gdk.DeviceType.SLAVE):
self._init_device_settings(physical_device)
## Devices list
def get_device_settings(self, device):
"""Gets the settings for a device
:param Gdk.Device device: a physical ("slave") device
:returns: A settings object which can be manipulated, or None
:rtype: Settings
Changes to the returned object made via its API are saved to the
user preferences immediately.
If the device is a keyboard, or is otherwise unsuitable as a
pointing device, None is returned instead. The caller needs to
check this case.
"""
return (self._device_settings.get(device)
or self._init_device_settings(device))
def _init_device_settings(self, device):
"""Ensures that the device settings are loaded for a device"""
source = device.get_source()
if source == Gdk.InputSource.KEYBOARD:
return
num_axes = device.get_n_axes()
if num_axes < 2:
return
settings = self._device_settings.get(device)
if not settings:
try:
vendor_id = device.get_vendor_id()
product_id = device.get_product_id()
except AttributeError:
# New in GDK 3.16
vendor_id = "?"
product_id = "?"
logger.info(
"New device %r"
" (%s, axes:%d, class=%s, vendor=%r, product=%r)",
device.get_name(),
source.value_name,
num_axes,
device.__class__.__name__,
vendor_id,
product_id,
)
dev_prefs_key = _device_prefs_key(device)
dev_prefs = self._prefs[_PREFS_ROOT].setdefault(dev_prefs_key, {})
settings = Settings(dev_prefs)
self._device_settings[device] = settings
self.devices_updated()
assert settings is not None
return settings
def _device_added_cb(self, mgr, device):
"""Informs that a device has been plugged in"""
logger.debug("device-added %r", device.get_name())
self._init_device_settings(device)
def _device_removed_cb(self, mgr, device):
"""Informs that a device has been unplugged"""
logger.debug("device-removed %r", device.get_name())
self._device_settings.pop(device, None)
self.devices_updated()
@event
def devices_updated(self):
"""Event: the devices list was changed"""
def get_devices(self):
"""Yields devices and their settings, for UI stuff
:rtype: iterator
:returns: ultimately a sequence of (Gdk.Device, Settings) pairs
"""
for device, settings in self._device_settings.items():
yield (device, settings)
## Current device
@event
def current_device_changed(self, old_device, new_device):
"""Event: the current device has changed
:param Gdk.Device old_device: Previous device used
:param Gdk.Device new_device: New device used
"""
def device_used(self, device):
"""Informs about a device being used, for use by controllers
:param Gdk.Device device: the device being used
        :returns: True if the device is usable and was already current
        If the device has changed, this method notifies interested
        parties via the current_device_changed observable @event and
        then switches the active brush to the one remembered for the
        new device.
"""
if not self.get_device_settings(device):
return False
if device == self._last_event_device:
return True
self.current_device_changed(self._last_event_device, device)
old_device = self._last_event_device
new_device = device
self._last_event_device = device
# small problem with this code: it doesn't work well with brushes that
# have (eraser not in [1.0, 0.0])
new_device.name = new_device.props.name
new_device.source = new_device.props.input_source
logger.debug(
"Device change: name=%r source=%s",
new_device.name, new_device.source.value_name,
)
# When editing brush settings, it is often more convenient to use the
# mouse. Because of this, we don't restore brushsettings when switching
# to/from the mouse. We act as if the mouse was identical to the last
# active pen device.
if (new_device.source == Gdk.InputSource.MOUSE and
self._last_pen_device):
new_device = self._last_pen_device
if new_device.source == Gdk.InputSource.PEN:
self._last_pen_device = new_device
if (old_device and old_device.source == Gdk.InputSource.MOUSE and
self._last_pen_device):
old_device = self._last_pen_device
bm = self._app.brushmanager
if old_device:
# Clone for saving
old_brush = bm.clone_selected_brush(name=None)
bm.store_brush_for_device(old_device.name, old_brush)
if new_device.source == Gdk.InputSource.MOUSE:
# Avoid fouling up unrelated devbrushes at stroke end
self._prefs.pop('devbrush.last_used', None)
else:
# Select the brush and update the UI.
# Use a sane default if there's nothing associated
# with the device yet.
brush = bm.fetch_brush_for_device(new_device.name)
if brush is None:
if device_is_eraser(new_device):
brush = bm.get_default_eraser()
else:
brush = bm.get_default_brush()
self._prefs['devbrush.last_used'] = new_device.name
bm.select_brush(brush)
class SettingsEditor (Gtk.Grid):
"""Per-device settings editor"""
## Class consts
_USAGE_CONFIG_COL = 0
_USAGE_STRING_COL = 1
_SCROLL_CONFIG_COL = 0
_SCROLL_STRING_COL = 1
__gtype_name__ = "MyPaintDeviceSettingsEditor"
## Initialization
def __init__(self, monitor=None):
"""Initialize
:param Monitor monitor: monitor instance (for testing)
By default, the central app's `device_monitor` is used to permit
parameterless construction.
"""
super(SettingsEditor, self).__init__()
if monitor is None:
app = gui.application.get_app()
monitor = app.device_monitor
self._monitor = monitor
self._devices_store = Gtk.ListStore(object)
self._devices_view = Gtk.TreeView(model=self._devices_store)
col = Gtk.TreeViewColumn(C_(
"prefs: devices table: column header",
# TRANSLATORS: Column's data is the device's name
"Device",
))
col.set_min_width(200)
col.set_expand(True)
col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
self._devices_view.append_column(col)
cell = Gtk.CellRendererText()
cell.set_property("ellipsize", Pango.EllipsizeMode.MIDDLE)
col.pack_start(cell, True)
col.set_cell_data_func(cell, self._device_name_datafunc)
col = Gtk.TreeViewColumn(C_(
"prefs: devices table: column header",
# TRANSLATORS: Column's data is the number of axes (an integer)
"Axes",
))
col.set_min_width(30)
col.set_resizable(True)
col.set_expand(False)
col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
self._devices_view.append_column(col)
cell = Gtk.CellRendererText()
col.pack_start(cell, True)
col.set_cell_data_func(cell, self._device_axes_datafunc)
col = Gtk.TreeViewColumn(C_(
"prefs: devices table: column header",
# TRANSLATORS: Column shows type labels ("Touchscreen", "Pen" etc.)
"Type",
))
col.set_min_width(120)
col.set_resizable(True)
col.set_expand(False)
col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
self._devices_view.append_column(col)
cell = Gtk.CellRendererText()
cell.set_property("ellipsize", Pango.EllipsizeMode.END)
col.pack_start(cell, True)
col.set_cell_data_func(cell, self._device_type_datafunc)
# Usage config value => string store (dropdowns)
store = Gtk.ListStore(str, str)
for conf_val in AllowedUsage.VALUES:
string = AllowedUsage.DISPLAY_STRING[conf_val]
store.append([conf_val, string])
self._usage_store = store
col = Gtk.TreeViewColumn(C_(
"prefs: devices table: column header",
# TRANSLATORS: Column's data is a dropdown allowing the allowed
# TRANSLATORS: tasks for the row's device to be configured.
u"Use for…",
))
col.set_min_width(100)
col.set_resizable(True)
col.set_expand(False)
self._devices_view.append_column(col)
cell = Gtk.CellRendererCombo()
cell.set_property("model", self._usage_store)
cell.set_property("text-column", self._USAGE_STRING_COL)
cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
cell.set_property("editable", True)
cell.set_property("has-entry", False)
cell.set_property("ellipsize", Pango.EllipsizeMode.END)
cell.connect("changed", self._usage_cell_changed_cb)
col.pack_start(cell, True)
col.set_cell_data_func(cell, self._device_usage_datafunc)
# Scroll action config value => string store (dropdowns)
store = Gtk.ListStore(str, str)
for conf_val in ScrollAction.VALUES:
string = ScrollAction.DISPLAY_STRING[conf_val]
store.append([conf_val, string])
self._scroll_store = store
col = Gtk.TreeViewColumn(C_(
"prefs: devices table: column header",
# TRANSLATORS: Column's data is a dropdown for how the device's
# TRANSLATORS: scroll wheel or scroll-gesture events are to be
# TRANSLATORS: interpreted normally.
u"Scroll…",
))
col.set_min_width(100)
col.set_resizable(True)
col.set_expand(False)
self._devices_view.append_column(col)
cell = Gtk.CellRendererCombo()
cell.set_property("model", self._scroll_store)
cell.set_property("text-column", self._USAGE_STRING_COL)
cell.set_property("mode", Gtk.CellRendererMode.EDITABLE)
cell.set_property("editable", True)
cell.set_property("has-entry", False)
cell.set_property("ellipsize", Pango.EllipsizeMode.END)
cell.connect("changed", self._scroll_cell_changed_cb)
col.pack_start(cell, True)
col.set_cell_data_func(cell, self._device_scroll_datafunc)
# Pretty borders
view_scroll = Gtk.ScrolledWindow()
view_scroll.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
pol = Gtk.PolicyType.AUTOMATIC
view_scroll.set_policy(pol, pol)
view_scroll.add(self._devices_view)
view_scroll.set_hexpand(True)
view_scroll.set_vexpand(True)
self.attach(view_scroll, 0, 0, 1, 1)
self._update_devices_store()
self._monitor.devices_updated += self._update_devices_store
## Display and sort funcs
def _device_name_datafunc(self, column, cell, model, iter_, *data):
device = model.get_value(iter_, 0)
cell.set_property("text", device.get_name())
def _device_axes_datafunc(self, column, cell, model, iter_, *data):
device = model.get_value(iter_, 0)
n_axes = device.get_n_axes()
cell.set_property("text", "%d" % (n_axes,))
def _device_type_datafunc(self, column, cell, model, iter_, *data):
device = model.get_value(iter_, 0)
source = device.get_source()
text = _DEVICE_TYPE_STRING.get(source, source.value_nick)
cell.set_property("text", text)
def _device_usage_datafunc(self, column, cell, model, iter_, *data):
device = model.get_value(iter_, 0)
settings = self._monitor.get_device_settings(device)
if not settings:
return
text = AllowedUsage.DISPLAY_STRING[settings.usage]
cell.set_property("text", text)
def _device_scroll_datafunc(self, column, cell, model, iter_, *data):
device = model.get_value(iter_, 0)
settings = self._monitor.get_device_settings(device)
if not settings:
return
text = ScrollAction.DISPLAY_STRING[settings.scroll]
cell.set_property("text", text)
## Updates
def _usage_cell_changed_cb(self, combo, device_path_str,
usage_iter, *etc):
config = self._usage_store.get_value(
usage_iter,
self._USAGE_CONFIG_COL,
)
device_iter = self._devices_store.get_iter(device_path_str)
device = self._devices_store.get_value(device_iter, 0)
settings = self._monitor.get_device_settings(device)
if not settings:
return
settings.usage = config
self._devices_view.columns_autosize()
def _scroll_cell_changed_cb(self, conf_combo, device_path_str,
conf_iter, *etc):
conf_store = self._scroll_store
conf_col = self._SCROLL_CONFIG_COL
conf_value = conf_store.get_value(conf_iter, conf_col)
device_store = self._devices_store
device_iter = device_store.get_iter(device_path_str)
device = device_store.get_value(device_iter, 0)
settings = self._monitor.get_device_settings(device)
if not settings:
return
settings.scroll = conf_value
self._devices_view.columns_autosize()
def _update_devices_store(self, *_ignored):
"""Repopulates the displayed list"""
updated_list = list(self._monitor.get_devices())
updated_list_map = dict(updated_list)
paths_for_removal = []
devices_retained = set()
for row in self._devices_store:
device, = row
if device not in updated_list_map:
paths_for_removal.append(row.path)
continue
devices_retained.add(device)
for device, config in updated_list:
if device in devices_retained:
continue
self._devices_store.append([device])
for unwanted_row_path in reversed(paths_for_removal):
unwanted_row_iter = self._devices_store.get_iter(unwanted_row_path)
self._devices_store.remove(unwanted_row_iter)
self._devices_view.queue_draw()
## Helper funcs
def _device_prefs_key(device):
"""Returns the subkey to use in the app prefs for a device"""
source = device.get_source()
name = device.get_name()
n_axes = device.get_n_axes()
return u"%s:%s:%d" % (name, source.value_nick, n_axes)
def device_is_eraser(device):
"""Tests whether a device appears to be an eraser"""
if device is None:
return False
if device.get_source() == Gdk.InputSource.ERASER:
return True
    # \b is Python's word-boundary escape; the original pattern used the
    # GNU-style \< \> escapes, which Python's re module treats as literal
    # angle brackets and so never matched real device names.
    if re.search(r'\beraser\b', device.get_name(), re.I):
return True
return False
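# Illustrative example (device name is an assumption): a device whose GDK
# source is Gdk.InputSource.ERASER, or one named e.g. "Wacom Intuos4 Eraser",
# is treated as an eraser tip by device_is_eraser().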
## Testing
def _test():
"""Interactive UI testing for SettingsEditor and Monitor"""
logging.basicConfig(level=logging.DEBUG)
win = Gtk.Window()
win.set_title("gui.device.SettingsEditor")
win.set_default_size(500, 400)
win.connect("destroy", Gtk.main_quit)
monitor = Monitor(app=None)
editor = SettingsEditor(monitor)
win.add(editor)
win.show_all()
Gtk.main()
print(monitor._prefs)
if __name__ == '__main__':
_test()
| mypaint/mypaint | gui/device.py | Python | gpl-2.0 | 22,624 | 0.000575 |
#!/usr/bin/env python
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import grdab
class qa_measure_processing_rate(gr_unittest.TestCase):
"""
@brief QA for measure processing rate sink.
This class implements a test bench to verify the corresponding C++ class.
"""
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001_measure_processing_rate(self):
src = blocks.null_source(gr.sizeof_gr_complex)
throttle = blocks.throttle(gr.sizeof_gr_complex, 1000000)
head = blocks.head(gr.sizeof_gr_complex, 200000)
sink = grdab.measure_processing_rate(gr.sizeof_gr_complex,100000)
self.tb.connect(src, throttle, head, sink)
self.tb.run()
rate = sink.processing_rate()
        self.assertTrue(900000 < rate < 1100000)
def test_002_measure_processing_rate(self):
src = blocks.null_source(gr.sizeof_char)
throttle = blocks.throttle(gr.sizeof_char, 10000000)
head = blocks.head(gr.sizeof_char, 1000000)
sink = grdab.measure_processing_rate(gr.sizeof_char,1000000)
self.tb.connect(src, throttle, head, sink)
self.tb.run()
rate = sink.processing_rate()
        self.assertTrue(8000000 < rate < 12000000)
if __name__ == '__main__':
gr_unittest.main()
| andrmuel/gr-dab | python/qa/qa_measure_processing_rate.py | Python | gpl-3.0 | 1,233 | 0.030819 |
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, unicode_literals
import operator
from datetime import datetime
from functools import reduce
import extra_views
from django.conf import settings
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import (PasswordChangeForm,
AdminPasswordChangeForm)
from django.contrib.auth.views import (logout as auth_logout,
login as auth_login)
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse, reverse_lazy
from django.db import models, router
from django.db.models.fields import FieldDoesNotExist
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.encoding import force_text
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy
from django.views import generic
from . import permissions, utils
from .filters import build_list_filter, build_date_filter
from .forms import AdminAuthenticationForm
from .models import LogEntry
from .viewmixins import Admin2Mixin, Admin2ModelMixin, Admin2ModelFormMixin
class AdminView(object):
def __init__(self, url, view, name=None):
self.url = url
self.view = view
self.name = name
def get_view_kwargs(self):
return {
'app_label': self.model_admin.app_label,
'model': self.model_admin.model,
'model_name': self.model_admin.model_name,
'model_admin': self.model_admin,
}
class IndexView(Admin2Mixin, generic.TemplateView):
"""Context Variables
:apps: A dictionary of apps, each app being a dictionary with keys
being models and the value being djadmin2.types.ModelAdmin2
objects.
:app_verbose_names: A dictionary containing the app verbose names,
each item has a key being the `app_label` and
the value being a string, (or even a lazy
translation object), with the custom app name.
"""
default_template_name = "index.html"
registry = None
apps = None
app_verbose_names = None
def get_context_data(self, **kwargs):
data = super(IndexView, self).get_context_data(**kwargs)
data.update({
'apps': self.apps,
'app_verbose_names': self.app_verbose_names,
})
return data
class AppIndexView(Admin2Mixin, generic.TemplateView):
"""Context Variables
:app_label: Name of your app
:registry: A dictionary of registered models for a given app, each
item has a key being the model and the value being
djadmin2.types.ModelAdmin2 objects.
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
default_template_name = "app_index.html"
registry = None
apps = None
app_verbose_names = None
def get_context_data(self, **kwargs):
data = super(AppIndexView, self).get_context_data(**kwargs)
app_label = self.kwargs['app_label']
registry = self.apps[app_label]
data.update({
'app_label': app_label,
'registry': registry,
'app_verbose_names': self.app_verbose_names,
})
return data
class ModelListView(Admin2ModelMixin, generic.ListView):
"""Context Variables
:is_paginated: If the page is paginated (page has a next button)
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
default_template_name = "model_list.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelViewPermission)
def post(self, request):
action_name = request.POST['action']
action_callable = self.get_actions()[action_name]['action_callable']
selected_model_pks = request.POST.getlist('selected_model_pk')
if getattr(action_callable, "only_selected", True):
queryset = self.model.objects.filter(pk__in=selected_model_pks)
else:
queryset = self.model.objects.all()
# If action_callable is a class subclassing from
# actions.BaseListAction then we generate the callable object.
if hasattr(action_callable, "process_queryset"):
response = action_callable.as_view(queryset=queryset, model_admin=self.model_admin)(request)
else:
            # generate the response if the action is a plain function.
response = action_callable(request, queryset)
if response is None:
return HttpResponseRedirect(self.get_success_url())
else:
return response
def get_search_results(self, queryset, search_term):
# Lifted from django.contrib.admin
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
orm_lookups = [construct_search(str(search_field))
for search_field in self.model_admin.search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
opts = utils.model_options(self.get_model())
if utils.lookup_needs_distinct(opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
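    # Illustrative lookups produced by construct_search() above (field names
    # are hypothetical):
    #   '^name'  -> 'name__istartswith'
    #   '=email' -> 'email__iexact'
    #   '@body'  -> 'body__search'
    #   'title'  -> 'title__icontains'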
def get_queryset(self):
queryset = super(ModelListView, self).get_queryset()
search_term = self.request.GET.get('q', None)
search_use_distinct = False
if self.model_admin.search_fields and search_term:
queryset, search_use_distinct = self.get_search_results(
queryset, search_term)
queryset = self._modify_queryset_for_ordering(queryset)
if self.model_admin.list_filter:
queryset = self.build_list_filter(queryset).qs
if self.model_admin.date_hierarchy:
queryset = self.build_date_filter(queryset, self.model_admin.date_hierarchy).qs
queryset = self._modify_queryset_for_sort(queryset)
if search_use_distinct:
return queryset.distinct()
else:
return queryset
def _modify_queryset_for_ordering(self, queryset):
ordering = self.model_admin.get_ordering(self.request)
if ordering:
queryset = queryset.order_by(*ordering)
return queryset
def _modify_queryset_for_sort(self, queryset):
# If we are sorting AND the field exists on the model
sort_by = self.request.GET.get('sort', None)
if sort_by:
            # Special case when we are not explicitly displaying fields
if sort_by == '-__str__':
queryset = queryset[::-1]
try:
# If we sort on '-' remove it before looking for that field
field_exists = sort_by
if field_exists[0] == '-':
field_exists = field_exists[1:]
options = utils.model_options(self.model)
options.get_field(field_exists)
queryset = queryset.order_by(sort_by)
except FieldDoesNotExist:
                # If the field does not exist then we don't sort on it
pass
return queryset
def build_list_filter(self, queryset=None):
if not hasattr(self, '_list_filter'):
if queryset is None:
queryset = self.get_queryset()
self._list_filter = build_list_filter(
self.request,
self.model_admin,
queryset,
)
return self._list_filter
def build_date_filter(self, queryset=None, field_name=None):
if not hasattr(self, "_date_filter"):
if queryset is None:
queryset = self.get_queryset()
self._date_filter = build_date_filter(
self.request,
self.model_admin,
queryset,
field_name
)
return self._date_filter
def get_context_data(self, **kwargs):
context = super(ModelListView, self).get_context_data(**kwargs)
context['model'] = self.get_model()
context['actions'] = self.get_actions().values()
context['search_fields'] = self.get_search_fields()
context['search_term'] = self.request.GET.get('q', '')
context['list_filter'] = self.build_list_filter()
context['sort_term'] = self.request.GET.get('sort', '')
if self.model_admin.date_hierarchy:
year = self.request.GET.get("year", False)
month = self.request.GET.get("month", False)
day = self.request.GET.get("day", False)
if year and month and day:
new_date = datetime.strptime(
"%s %s %s" % (month, day, year),
"%m %d %Y",
)
context["previous_date"] = {
"link": "?year=%s&month=%s" % (year, month),
"text": "‹ %s" % new_date.strftime("%B %Y")
}
context["active_day"] = new_date.strftime("%B %d")
context["dates"] = self._format_days(self.get_queryset())
elif year and month:
context["previous_date"] = {
"link": "?year=%s" % (year),
"text": "‹ %s" % year,
}
context["dates"] = self._format_days(self.get_queryset())
elif year:
context["previous_date"] = {
"link": "?",
"text": ugettext_lazy("‹ All dates"),
}
context["dates"] = self._format_months(self.get_queryset())
else:
context["dates"] = self._format_years(self.get_queryset())
return context
def _format_years(self, queryset):
years = self._qs_date_or_datetime(queryset, 'year')
if len(years) == 1:
return self._format_months(queryset)
else:
return [
(("?year=%s" % year.strftime("%Y")), year.strftime("%Y"))
for year in
self._qs_date_or_datetime(queryset, 'year')
]
def _format_months(self, queryset):
return [
(
"?year=%s&month=%s" % (
date.strftime("%Y"), date.strftime("%m")
),
date.strftime("%B %Y")
) for date in
self._qs_date_or_datetime(queryset, 'month')
]
def _format_days(self, queryset):
return [
(
"?year=%s&month=%s&day=%s" % (
date.strftime("%Y"),
date.strftime("%m"),
date.strftime("%d"),
),
date.strftime("%B %d")
) for date in
self._qs_date_or_datetime(queryset, 'day')
]
def _qs_date_or_datetime(self, object_list, type):
if isinstance(self.model._meta.get_field(self.model_admin.date_hierarchy), models.DateTimeField):
qs = object_list.datetimes(self.model_admin.date_hierarchy, type)
else:
qs = object_list.dates(self.model_admin.date_hierarchy, type)
return qs
def get_success_url(self):
view_name = 'admin2:{}_{}_index'.format(
self.app_label, self.model_name)
return reverse(view_name)
def get_actions(self):
return self.model_admin.get_list_actions()
def get_search_fields(self):
return self.model_admin.search_fields
class ModelDetailView(Admin2ModelMixin, generic.DetailView):
"""Context Variables
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
default_template_name = "model_detail.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelViewPermission)
class ModelEditFormView(Admin2ModelMixin, Admin2ModelFormMixin,
extra_views.UpdateWithInlinesView):
"""Context Variables
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
form_class = None
default_template_name = "model_update_form.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelChangePermission)
def get_context_data(self, **kwargs):
context = super(ModelEditFormView, self).get_context_data(**kwargs)
context['model'] = self.get_model()
context['action'] = "Change"
context['action_name'] = ugettext_lazy("Change")
return context
def forms_valid(self, form, inlines):
response = super(ModelEditFormView, self).forms_valid(form, inlines)
LogEntry.objects.log_action(
self.request.user.id,
self.object,
LogEntry.CHANGE,
self.construct_change_message(self.request, form, inlines))
return response
class ModelAddFormView(Admin2ModelMixin, Admin2ModelFormMixin,
extra_views.CreateWithInlinesView):
"""Context Variables
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
form_class = None
default_template_name = "model_update_form.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelAddPermission)
def get_context_data(self, **kwargs):
context = super(ModelAddFormView, self).get_context_data(**kwargs)
context['model'] = self.get_model()
context['action'] = "Add"
context['action_name'] = ugettext_lazy("Add")
return context
def forms_valid(self, form, inlines):
response = super(ModelAddFormView, self).forms_valid(form, inlines)
LogEntry.objects.log_action(
self.request.user.id,
self.object,
LogEntry.ADDITION,
'Object created.')
return response
class ModelDeleteView(Admin2ModelMixin, generic.DeleteView):
"""Context Variables
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:deletable_objects: Objects to delete
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
success_url = "../../" # TODO - fix this!
default_template_name = "model_confirm_delete.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelDeletePermission)
def get_context_data(self, **kwargs):
context = super(ModelDeleteView, self).get_context_data(**kwargs)
def _format_callback(obj):
opts = utils.model_options(obj)
return '%s: %s' % (force_text(capfirst(opts.verbose_name)),
force_text(obj))
using = router.db_for_write(self.get_object()._meta.model)
collector = utils.NestedObjects(using=using)
collector.collect([self.get_object()])
context.update({
'deletable_objects': collector.nested(_format_callback)
})
return context
def delete(self, request, *args, **kwargs):
LogEntry.objects.log_action(
request.user.id,
self.get_object(),
LogEntry.DELETION,
'Object deleted.')
return super(ModelDeleteView, self).delete(request, *args, **kwargs)
class ModelHistoryView(Admin2ModelMixin, generic.ListView):
"""Context Variables
:model: Type of object you are editing
:model_name: Name of the object you are editing
:app_label: Name of your app
:app_verbose_names: A dictionary containing the app verbose name for
a given app, the item has a key being the
`app_label` and the value being a string, (or
even a lazy translation object), with the custom
app name.
"""
default_template_name = "model_history.html"
permission_classes = (
permissions.IsStaffPermission,
permissions.ModelChangePermission
)
def get_context_data(self, **kwargs):
context = super(ModelHistoryView, self).get_context_data(**kwargs)
context['model'] = self.get_model()
context['object'] = self.get_object()
return context
def get_object(self):
return get_object_or_404(self.get_model(), pk=self.kwargs.get('pk'))
def get_queryset(self):
content_type = ContentType.objects.get_for_model(self.get_object())
return LogEntry.objects.filter(
content_type=content_type,
object_id=self.get_object().id
)
class PasswordChangeView(Admin2Mixin, generic.UpdateView):
default_template_name = 'auth/password_change_form.html'
form_class = AdminPasswordChangeForm
admin_form_class = PasswordChangeForm
model = settings.AUTH_USER_MODEL
success_url = reverse_lazy('admin2:password_change_done')
def get_form_kwargs(self, **kwargs):
data = {'user': self.get_object()}
if self.request.method in ('POST', 'PUT'):
data.update({
'data': self.request.POST
})
return data
def get_form_class(self):
if self.request.user == self.get_object():
return self.admin_form_class
return super(PasswordChangeView, self).get_form_class()
def get_queryset(self):
from django.contrib.auth import get_user_model
return get_user_model()._default_manager.all()
def form_valid(self, form):
self.object = form.save()
if self.request.user == self.get_object():
update_session_auth_hash(self.request, form.user)
return HttpResponseRedirect(self.get_success_url())
class PasswordChangeDoneView(Admin2Mixin, generic.TemplateView):
default_template_name = 'auth/password_change_done.html'
class LoginView(Admin2Mixin, generic.TemplateView):
"""Context Variables
:site_name: Name of the site
"""
default_template_name = 'auth/login.html'
authentication_form = AdminAuthenticationForm
def dispatch(self, request, *args, **kwargs):
return auth_login(request,
authentication_form=self.authentication_form,
template_name=self.get_template_names(),
*args, **kwargs)
class LogoutView(Admin2Mixin, generic.TemplateView):
"""Context Variables
:site_name: Name of the site
"""
default_template_name = 'auth/logout.html'
def get(self, request, *args, **kwargs):
return auth_logout(request, template_name=self.get_template_names(),
*args, **kwargs)
| andrewsmedina/django-admin2 | djadmin2/views.py | Python | bsd-3-clause | 21,359 | 0.00014 |
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: i18stxm_loader
:platform: Unix
:synopsis: A class for loading I18's stxm data
.. moduleauthor:: Aaron Parsons <scientificsoftware@diamond.ac.uk>
"""
from savu.plugins.loaders.multi_modal_loaders.base_i18_multi_modal_loader import BaseI18MultiModalLoader
from savu.plugins.utils import register_plugin
@register_plugin
class I18stxmLoader(BaseI18MultiModalLoader):
"""
A class to load tomography data from an NXstxm file
:param stxm_detector: path to stxm. Default:'entry1/raster_counterTimer01/It'.
"""
def __init__(self, name='I18stxmLoader'):
super(I18stxmLoader, self).__init__(name)
    def setup(self):
        """Define the input NeXus file and set up the stxm data object."""
data_str = self.parameters['stxm_detector']
data_obj = self.multi_modal_setup('stxm')
data_obj.data = data_obj.backing_file[data_str]
data_obj.set_shape(data_obj.data.shape)
self.set_motors(data_obj, 'stxm')
self.add_patterns_based_on_acquisition(data_obj, 'stxm')
self.set_data_reduction_params(data_obj)
| rcatwood/Savu | savu/plugins/loaders/multi_modal_loaders/i18_loaders/i18stxm_loader.py | Python | gpl-3.0 | 1,776 | 0.001689 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
TEST_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
TMP_DIR = os.path.join(TEST_DIR, '..', '..', 'tmp')
def root_path(*args):
return os.path.join(TEST_DIR, *args)
def test_path(*args):
return os.path.join(TEST_DIR, *args)
def tmp_path(*args):
return os.path.join(TMP_DIR, *args)
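# Illustrative results, assuming this module lives directly in TEST_DIR:
#   test_path('kds', 'fixtures') -> <TEST_DIR>/kds/fixtures
#   tmp_path('cache.db')         -> <TEST_DIR>/../../tmp/cache.db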
| dsiddharth/access-keys | keystone/tests/contrib/kds/paths.py | Python | apache-2.0 | 915 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT | AlphaCluster/NewsBlur | vendor/paypal/standard/pdt/forms.py | Python | mit | 240 | 0.004167 |
from identifier import Result
RPM_PATTERNS = [
'ED AB EE DB'
]
class RpmResolver:
def identify(self, stream):
return Result('RPM')
def load(hound):
hound.add_matches(RPM_PATTERNS, RpmResolver())
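# Illustrative check (hypothetical file name): every RPM package opens with
# the four lead magic bytes ED AB EE DB matched above.
#   with open('package.rpm', 'rb') as f:
#       assert f.read(4) == b'\xed\xab\xee\xdb'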
| Bindernews/TheHound | identifiers/rpm_identifier.py | Python | mit | 205 | 0.04878 |
from .mutationstester import MutationsTester
| Lydwen/Mr.Statamutation | Mr.Statapython/statapython/stataspoon/__init__.py | Python | mit | 45 | 0 |
"""Ansible integration test infrastructure."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import contextlib
import json
import os
import shutil
import tempfile
from .. import types as t
from ..target import (
analyze_integration_target_dependencies,
walk_integration_targets,
)
from ..config import (
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
WindowsIntegrationConfig,
)
from ..util import (
ApplicationError,
display,
make_dirs,
COVERAGE_CONFIG_NAME,
MODE_DIRECTORY,
MODE_DIRECTORY_WRITE,
MODE_FILE,
to_bytes,
)
from ..util_common import (
named_temporary_file,
write_text_file,
ResultType,
)
from ..coverage_util import (
generate_coverage_config,
)
from ..cache import (
CommonCache,
)
from ..cloud import (
CloudEnvironmentConfig,
)
from ..data import (
data_context,
)
def setup_common_temp_dir(args, path):
"""
:type args: IntegrationConfig
:type path: str
"""
if args.explain:
return
os.mkdir(path)
os.chmod(path, MODE_DIRECTORY)
if args.coverage:
coverage_config_path = os.path.join(path, COVERAGE_CONFIG_NAME)
coverage_config = generate_coverage_config(args)
write_text_file(coverage_config_path, coverage_config)
os.chmod(coverage_config_path, MODE_FILE)
coverage_output_path = os.path.join(path, ResultType.COVERAGE.name)
os.mkdir(coverage_output_path)
os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
def generate_dependency_map(integration_targets):
"""
:type integration_targets: list[IntegrationTarget]
:rtype: dict[str, set[IntegrationTarget]]
"""
targets_dict = dict((target.name, target) for target in integration_targets)
target_dependencies = analyze_integration_target_dependencies(integration_targets)
dependency_map = {}
invalid_targets = set()
for dependency, dependents in target_dependencies.items():
dependency_target = targets_dict.get(dependency)
if not dependency_target:
invalid_targets.add(dependency)
continue
for dependent in dependents:
if dependent not in dependency_map:
dependency_map[dependent] = set()
dependency_map[dependent].add(dependency_target)
if invalid_targets:
raise ApplicationError('Non-existent target dependencies: %s' % ', '.join(sorted(invalid_targets)))
return dependency_map
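# Shape of the result (illustrative, hypothetical target names): each
# dependent maps to the set of IntegrationTarget objects it requires, e.g.
#   {'test_mysql': {<IntegrationTarget 'setup_mysql'>}}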
def get_files_needed(target_dependencies):
"""
:type target_dependencies: list[IntegrationTarget]
:rtype: list[str]
"""
files_needed = []
for target_dependency in target_dependencies:
files_needed += target_dependency.needs_file
files_needed = sorted(set(files_needed))
invalid_paths = [path for path in files_needed if not os.path.isfile(path)]
if invalid_paths:
raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(invalid_paths))
return files_needed
def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None
"""Check the given inventory for issues."""
if args.docker or args.remote:
if os.path.exists(inventory_path):
with open(inventory_path) as inventory_file:
inventory = inventory_file.read()
if 'ansible_ssh_private_key_file' in inventory:
display.warning('Use of "ansible_ssh_private_key_file" in inventory with the --docker or --remote option is unsupported and will likely fail.')
def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
"""Return the inventory path used for the given integration configuration relative to the content root."""
inventory_names = {
PosixIntegrationConfig: 'inventory',
WindowsIntegrationConfig: 'inventory.winrm',
NetworkIntegrationConfig: 'inventory.networking',
} # type: t.Dict[t.Type[IntegrationConfig], str]
return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None
"""Make the given inventory available during delegation."""
if isinstance(args, PosixIntegrationConfig):
return
def inventory_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
"""
Add the inventory file to the payload file list.
This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
"""
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''
inventory_path = os.path.join(working_path, get_inventory_relative_path(args))
if os.path.isfile(inventory_path_src) and os.path.relpath(inventory_path_src, data_context().content.root) != inventory_path:
originals = [item for item in files if item[1] == inventory_path]
if originals:
for original in originals:
files.remove(original)
display.warning('Overriding inventory file "%s" with "%s".' % (inventory_path, inventory_path_src))
else:
display.notice('Sourcing inventory file "%s" from "%s".' % (inventory_path, inventory_path_src))
files.append((inventory_path_src, inventory_path))
data_context().register_payload_callback(inventory_callback)
@contextlib.contextmanager
def integration_test_environment(args, target, inventory_path_src):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
:type inventory_path_src: str
"""
ansible_config_src = args.get_ansible_config()
ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
targets_dir = os.path.join(data_context().content.root, data_context().content.integration_targets_path)
inventory_path = inventory_path_src
ansible_config = ansible_config_src
vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
return
# When testing a collection, the temporary directory must reside within the collection.
# This is necessary to enable support for the default collection for non-collection content (playbooks and roles).
root_temp_dir = os.path.join(ResultType.TMP.path, 'integration')
prefix = '%s-' % target.name
suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8'
if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases:
display.warning('Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
suffix = '-ansible'
if args.explain:
temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix))
else:
make_dirs(root_temp_dir)
temp_dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
try:
display.info('Preparing temporary directory: %s' % temp_dir, verbosity=2)
inventory_relative_path = get_inventory_relative_path(args)
inventory_path = os.path.join(temp_dir, inventory_relative_path)
cache = IntegrationCache(args)
target_dependencies = sorted([target] + list(cache.dependency_map.get(target.name, set())))
files_needed = get_files_needed(target_dependencies)
integration_dir = os.path.join(temp_dir, data_context().content.integration_path)
targets_dir = os.path.join(temp_dir, data_context().content.integration_targets_path)
ansible_config = os.path.join(temp_dir, ansible_config_relative)
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path)
file_copies = [
(ansible_config_src, ansible_config),
(inventory_path_src, inventory_path),
]
if os.path.exists(vars_file_src):
file_copies.append((vars_file_src, vars_file))
file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed]
integration_targets_relative_path = data_context().content.integration_targets_path
directory_copies = [
(
os.path.join(integration_targets_relative_path, target.relative_path),
os.path.join(temp_dir, integration_targets_relative_path, target.relative_path)
)
for target in target_dependencies
]
directory_copies = sorted(set(directory_copies))
file_copies = sorted(set(file_copies))
if not args.explain:
make_dirs(integration_dir)
for dir_src, dir_dst in directory_copies:
display.info('Copying %s/ to %s/' % (dir_src, dir_dst), verbosity=2)
if not args.explain:
shutil.copytree(to_bytes(dir_src), to_bytes(dir_dst), symlinks=True)
for file_src, file_dst in file_copies:
display.info('Copying %s to %s' % (file_src, file_dst), verbosity=2)
if not args.explain:
make_dirs(os.path.dirname(file_dst))
shutil.copy2(file_src, file_dst)
yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
finally:
if not args.explain:
shutil.rmtree(temp_dir)
@contextlib.contextmanager
def integration_test_config_file(args, env_config, integration_dir):
"""
:type args: IntegrationConfig
:type env_config: CloudEnvironmentConfig
:type integration_dir: str
"""
if not env_config:
yield None
return
config_vars = (env_config.ansible_vars or {}).copy()
config_vars.update(dict(
ansible_test=dict(
environment=env_config.env_vars,
module_defaults=env_config.module_defaults,
)
))
config_file = json.dumps(config_vars, indent=4, sort_keys=True)
with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path:
filename = os.path.relpath(path, integration_dir)
display.info('>>> Config File: %s\n%s' % (filename, config_file), verbosity=3)
yield path
class IntegrationEnvironment:
"""Details about the integration environment."""
def __init__(self, integration_dir, targets_dir, inventory_path, ansible_config, vars_file):
self.integration_dir = integration_dir
self.targets_dir = targets_dir
self.inventory_path = inventory_path
self.ansible_config = ansible_config
self.vars_file = vars_file
class IntegrationCache(CommonCache):
"""Integration cache."""
@property
def integration_targets(self):
"""
:rtype: list[IntegrationTarget]
"""
return self.get('integration_targets', lambda: list(walk_integration_targets()))
@property
def dependency_map(self):
"""
:rtype: dict[str, set[IntegrationTarget]]
"""
return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
| kvar/ansible | test/lib/ansible_test/_internal/integration/__init__.py | Python | gpl-3.0 | 11,813 | 0.00364 |
import web
import uuid
from jinja2 import Environment, FileSystemLoader
import os
files_directory = "db/data"
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
urls = (
'/', 'Index',
'/upload', 'Upload'
)
class Index:
def GET(self):
j2_env = Environment(loader=FileSystemLoader(THIS_DIR+"/template/"),
trim_blocks=True)
template_values = {"things_to_show": ["one", "two", "three"]}
return j2_env.get_template('index.html').render(template_values)
class Upload:
def GET(self):
return """<html><head></head><body>
<form method="POST" enctype="multipart/form-data" action="">
<input type="file" name="myfile" />
<br/>
<input type="submit" />
</form>
</body></html>"""
def POST(self):
x = web.input(myfile={})
filepath = x.myfile.filename.replace('\\','/') # replaces the windows-style slashes with linux ones.
filename=filepath.split('/')[-1]
        fout = open(files_directory + '/' + uuid.uuid4().hex + filename, 'wb') # creates the destination file; binary mode so non-text uploads survive intact
fout.write(x.myfile.file.read()) # writes the uploaded file to the newly created file.
fout.close() # closes the file, upload complete.
raise web.seeother('/upload')
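# Illustrative upload from the command line (hypothetical file name); the
# form field must be named 'myfile' to match web.input() above:
#   curl -F "myfile=@photo.jpg" http://localhost:8080/upload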
if __name__ == '__main__':
app = web.application(urls, globals())
app.internalerror = web.debugerror
app.run()
| tiagoft/sampleseeker | sampleseeker.py | Python | gpl-3.0 | 1,431 | 0.01188 |
#!/usr/bin/env python
"""Script to read the libphonenumber per-prefix metadata and generate Python code.
Invocation:
buildprefixdata.py [options] indir outfile module_prefix
Processes all of the per-prefix data under the given input directory and emit
generated Python code.
Options:
--var XXX : use this prefix for variable names in generated code
--flat : don't do per-locale processing
--sep C : expect metadata to be a list with C as separator
"""
# Based on original metadata data files from libphonenumber:
# resources/geocoding/*/*.txt, resources/carrier/*/*.txt
# Copyright (C) 2011 The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import glob
import re
import getopt
import datetime
# Use the local code in preference to any pre-installed version
sys.path.insert(0, '../../python')
from phonenumbers.util import prnt, rpr
PREFIXDATA_SUFFIX = ".txt"
BLANK_LINE_RE = re.compile(r'^\s*$', re.UNICODE)
COMMENT_LINE_RE = re.compile(r'^\s*#.*$', re.UNICODE)
DATA_LINE_RE = re.compile(r'^\+?(?P<prefix>\d+)\|(?P<stringdata>.*)$', re.UNICODE)
# Boilerplate header
PREFIXDATA_LOCALE_FILE_PROLOG = '''"""Per-prefix data, mapping each prefix to a dict of locale:name.
Auto-generated file, do not edit by hand.
"""
from %(module)s.util import u
'''
PREFIXDATA_FILE_PROLOG = '''"""Per-prefix data, mapping each prefix to a name.
Auto-generated file, do not edit by hand.
"""
from %(module)s.util import u
'''
# Copyright notice covering the XML metadata; include current year.
COPYRIGHT_NOTICE = """# Copyright (C) 2011-%s The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" % datetime.datetime.now().year
def load_locale_prefixdata_file(prefixdata, filename, locale=None, overall_prefix=None, separator=None):
"""Load per-prefix data from the given file, for the given locale and prefix.
We assume that this file:
- is encoded in UTF-8
- may have comment lines (starting with #) and blank lines
- has data lines of the form '<prefix>|<stringdata>'
- contains only data for prefixes that are extensions of the filename.
If overall_prefix is specified, lines are checked to ensure their prefix falls within this value.
If locale is specified, prefixdata[prefix][locale] is filled in; otherwise, just prefixdata[prefix].
If separator is specified, the string data will be split on this separator, and the output values
in the dict will be tuples of strings rather than strings.
"""
with open(filename, "rb") as infile:
lineno = 0
for line in infile:
uline = line.decode('utf-8')
lineno += 1
dm = DATA_LINE_RE.match(uline)
if dm:
prefix = dm.group('prefix')
stringdata = dm.group('stringdata')
if stringdata != stringdata.rstrip():
print ("%s:%d: Warning: stripping trailing whitespace" % (filename, lineno))
stringdata = stringdata.rstrip()
if overall_prefix is not None and not prefix.startswith(overall_prefix):
raise Exception("%s:%d: Prefix %s is not within %s" %
(filename, lineno, prefix, overall_prefix))
if separator is not None:
stringdata = tuple(stringdata.split(separator))
if prefix not in prefixdata:
prefixdata[prefix] = {}
if locale is not None:
prefixdata[prefix][locale] = stringdata
else:
prefixdata[prefix] = stringdata
elif BLANK_LINE_RE.match(uline):
pass
elif COMMENT_LINE_RE.match(uline):
pass
else:
raise Exception("%s:%d: Unexpected line format: %s" %
(filename, lineno, line))
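# A per-prefix data file parsed above looks like this (illustrative prefixes
# and names):
#   # comments and blank lines are ignored
#   1201|New Jersey
#   1202|Washington, DC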
def load_locale_prefixdata(indir, separator=None):
"""Load per-prefix data from the given top-level directory.
Prefix data is assumed to be held in files <indir>/<locale>/<prefix>.txt.
The same prefix may occur in multiple files, giving the prefix's description
in different locales.
"""
prefixdata = {} # prefix => dict mapping locale to description
for locale in os.listdir(indir):
if not os.path.isdir(os.path.join(indir, locale)):
continue
for filename in glob.glob(os.path.join(indir, locale, "*%s" % PREFIXDATA_SUFFIX)):
overall_prefix, ext = os.path.splitext(os.path.basename(filename))
load_locale_prefixdata_file(prefixdata, filename, locale, overall_prefix, separator)
return prefixdata
def _stable_dict_repr(strdict):
"""Return a repr() for a dict keyed by a string, in sorted key order"""
lines = []
for key in sorted(strdict.keys()):
lines.append("'%s': %s" % (key, rpr(strdict[key])))
return "{%s}" % ", ".join(lines)
def _tuple_repr(data):
"""Return a repr() for a list/tuple"""
if len(data) == 1:
return "(%s,)" % rpr(data[0])
else:
return "(%s)" % ", ".join([rpr(x) for x in data])
def output_prefixdata_code(prefixdata, outfilename, module_prefix, varprefix, per_locale):
"""Output the per-prefix data in Python form to the given file """
with open(outfilename, "w") as outfile:
longest_prefix = 0
if per_locale:
prnt(PREFIXDATA_LOCALE_FILE_PROLOG % {'module': module_prefix}, file=outfile)
else:
prnt(PREFIXDATA_FILE_PROLOG % {'module': module_prefix}, file=outfile)
prnt(COPYRIGHT_NOTICE, file=outfile)
prnt("%s_DATA = {" % varprefix, file=outfile)
for prefix in sorted(prefixdata.keys()):
if len(prefix) > longest_prefix:
longest_prefix = len(prefix)
if per_locale:
prnt(" '%s':%s," % (prefix, _stable_dict_repr(prefixdata[prefix])), file=outfile)
else:
prnt(" '%s':%s," % (prefix, _tuple_repr(prefixdata[prefix])), file=outfile)
prnt("}", file=outfile)
prnt("%s_LONGEST_PREFIX = %d" % (varprefix, longest_prefix), file=outfile)
def _standalone(argv):
"""Parse the given input directory and emit generated code."""
varprefix = "GEOCODE"
per_locale = True
separator = None
try:
opts, args = getopt.getopt(argv, "hv:fs:", ("help", "var=", "flat", "sep="))
except getopt.GetoptError:
prnt(__doc__, file=sys.stderr)
sys.exit(1)
for opt, arg in opts:
if opt in ("-h", "--help"):
prnt(__doc__, file=sys.stderr)
sys.exit(1)
elif opt in ("-v", "--var"):
varprefix = arg
elif opt in ("-f", "--flat"):
per_locale = False
elif opt in ("-s", "--sep"):
separator = arg
else:
prnt("Unknown option %s" % opt, file=sys.stderr)
prnt(__doc__, file=sys.stderr)
sys.exit(1)
if len(args) != 3:
prnt(__doc__, file=sys.stderr)
sys.exit(1)
if per_locale:
prefixdata = load_locale_prefixdata(args[0], separator=separator)
else:
prefixdata = {}
load_locale_prefixdata_file(prefixdata, args[0], separator=separator)
output_prefixdata_code(prefixdata, args[1], args[2], varprefix, per_locale)
if __name__ == "__main__":
_standalone(sys.argv[1:])
| Mitali-Sodhi/CodeLingo | Dataset/python/buildprefixdata.py | Python | mit | 8,450 | 0.002604 |
import os
from setuptools import setup
def read(fname):
with open(fname) as fhandle:
return fhandle.read()
def readMD(fname):
# Utility function to read the README file.
full_fname = os.path.join(os.path.dirname(__file__), fname)
if 'PANDOC_PATH' in os.environ:
import pandoc
pandoc.core.PANDOC_PATH = os.environ['PANDOC_PATH']
doc = pandoc.Document()
with open(full_fname) as fhandle:
doc.markdown = fhandle.read()
return doc.rst
else:
return read(fname)
required = [req.strip() for req in read('requirements.txt').splitlines() if req.strip()]
version = '1.0.1'
setup(
name='GridWalker',
version=version,
author='Matthew Seal',
author_email='mseal@opengov.com',
description='A multi-dimensional grid used for state space searching',
long_description=readMD('README.md'),
install_requires=required,
license='New BSD',
packages=['gridwalker'],
test_suite='tests',
zip_safe=False,
url='https://github.com/OpenGov/grid_walker',
download_url='https://github.com/OpenGov/grid_walker/tarball/v' + version,
keywords=['grids', 'data', 'iterator', 'multi-dimensional'],
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2 :: Only'
]
)
| OpenGov/grid_walker | setup.py | Python | bsd-3-clause | 1,495 | 0.002676 |
import copy
from types import GeneratorType
class MergeDict(object):
"""
A simple class for creating new "virtual" dictionaries that actually look
up values in more than one dictionary, passed in the constructor.
If a key appears in more than one of the given dictionaries, only the
first occurrence will be used.
"""
def __init__(self, *dicts):
self.dicts = dicts
def __getitem__(self, key):
for dict_ in self.dicts:
try:
return dict_[key]
except KeyError:
pass
raise KeyError
def __copy__(self):
return self.__class__(*self.dicts)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def getlist(self, key):
for dict_ in self.dicts:
if key in dict_.keys():
return dict_.getlist(key)
return []
def iteritems(self):
seen = set()
for dict_ in self.dicts:
for item in dict_.iteritems():
k, v = item
if k in seen:
continue
seen.add(k)
yield item
def iterkeys(self):
for k, v in self.iteritems():
yield k
def itervalues(self):
for k, v in self.iteritems():
yield v
def items(self):
return list(self.iteritems())
def keys(self):
return list(self.iterkeys())
def values(self):
return list(self.itervalues())
def has_key(self, key):
for dict_ in self.dicts:
if key in dict_:
return True
return False
__contains__ = has_key
__iter__ = iterkeys
def copy(self):
"""Returns a copy of this object."""
return self.__copy__()
def __str__(self):
'''
Returns something like
"{'key1': 'val1', 'key2': 'val2', 'key3': 'val3'}"
instead of the generic "<object meta-data>" inherited from object.
'''
return str(dict(self.items()))
def __repr__(self):
'''
Returns something like
MergeDict({'key1': 'val1', 'key2': 'val2'}, {'key3': 'val3'})
instead of generic "<object meta-data>" inherited from object.
'''
dictreprs = ', '.join(repr(d) for d in self.dicts)
return '%s(%s)' % (self.__class__.__name__, dictreprs)
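# Example: lookups fall through the dicts in order, so the first dict that
# contains a key wins.
#   md = MergeDict({'a': 1}, {'a': 2, 'b': 3})
#   md['a']  ->  1
#   md['b']  ->  3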
class SortedDict(dict):
"""
A dictionary that keeps its keys in the order in which they're inserted.
"""
def __new__(cls, *args, **kwargs):
instance = super(SortedDict, cls).__new__(cls, *args, **kwargs)
instance.keyOrder = []
return instance
def __init__(self, data=None):
if data is None:
data = {}
elif isinstance(data, GeneratorType):
# Unfortunately we need to be able to read a generator twice. Once
# to get the data into self with our super().__init__ call and a
# second time to setup keyOrder correctly
data = list(data)
super(SortedDict, self).__init__(data)
if isinstance(data, dict):
self.keyOrder = data.keys()
else:
self.keyOrder = []
seen = set()
for key, value in data:
if key not in seen:
self.keyOrder.append(key)
seen.add(key)
def __deepcopy__(self, memo):
return self.__class__([(key, copy.deepcopy(value, memo))
for key, value in self.iteritems()])
def __setitem__(self, key, value):
if key not in self:
self.keyOrder.append(key)
super(SortedDict, self).__setitem__(key, value)
def __delitem__(self, key):
super(SortedDict, self).__delitem__(key)
self.keyOrder.remove(key)
def __iter__(self):
return iter(self.keyOrder)
def pop(self, k, *args):
result = super(SortedDict, self).pop(k, *args)
try:
self.keyOrder.remove(k)
except ValueError:
# Key wasn't in the dictionary in the first place. No problem.
pass
return result
def popitem(self):
result = super(SortedDict, self).popitem()
self.keyOrder.remove(result[0])
return result
def items(self):
return zip(self.keyOrder, self.values())
def iteritems(self):
for key in self.keyOrder:
yield key, self[key]
def keys(self):
return self.keyOrder[:]
def iterkeys(self):
return iter(self.keyOrder)
def values(self):
return map(self.__getitem__, self.keyOrder)
def itervalues(self):
for key in self.keyOrder:
yield self[key]
def update(self, dict_):
for k, v in dict_.iteritems():
self[k] = v
def setdefault(self, key, default):
if key not in self:
self.keyOrder.append(key)
return super(SortedDict, self).setdefault(key, default)
def value_for_index(self, index):
"""Returns the value of the item at the given zero-based index."""
return self[self.keyOrder[index]]
def insert(self, index, key, value):
"""Inserts the key, value pair before the item with the given index."""
if key in self.keyOrder:
n = self.keyOrder.index(key)
del self.keyOrder[n]
if n < index:
index -= 1
self.keyOrder.insert(index, key)
super(SortedDict, self).__setitem__(key, value)
def copy(self):
"""Returns a copy of this object."""
# This way of initializing the copy means it works for subclasses, too.
obj = self.__class__(self)
obj.keyOrder = self.keyOrder[:]
return obj
def __repr__(self):
"""
Replaces the normal dict.__repr__ with a version that returns the keys
in their sorted order.
"""
return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in self.items()])
def clear(self):
super(SortedDict, self).clear()
self.keyOrder = []
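# Example: iteration follows insertion order rather than key order.
#   sd = SortedDict()
#   sd['b'] = 1
#   sd['a'] = 2
#   sd.keys()  ->  ['b', 'a']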
class MultiValueDictKeyError(KeyError):
pass
class MultiValueDict(dict):
"""
A subclass of dictionary customized to handle multiple values for the
same key.
>>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
>>> d['name']
'Simon'
>>> d.getlist('name')
['Adrian', 'Simon']
>>> d.get('lastname', 'nonexistent')
'nonexistent'
>>> d.setlist('lastname', ['Holovaty', 'Willison'])
This class exists to solve the irritating problem raised by cgi.parse_qs,
which returns a list for every key, even though most Web forms submit
single name-value pairs.
"""
def __init__(self, key_to_list_mapping=()):
super(MultiValueDict, self).__init__(key_to_list_mapping)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__,
super(MultiValueDict, self).__repr__())
def __getitem__(self, key):
"""
Returns the last data value for this key, or [] if it's an empty list;
raises KeyError if not found.
"""
try:
list_ = super(MultiValueDict, self).__getitem__(key)
except KeyError:
raise MultiValueDictKeyError("Key %r not found in %r" % (key, self))
try:
return list_[-1]
except IndexError:
return []
def __setitem__(self, key, value):
super(MultiValueDict, self).__setitem__(key, [value])
def __copy__(self):
return self.__class__([
(k, v[:])
for k, v in self.lists()
])
def __deepcopy__(self, memo=None):
if memo is None:
memo = {}
result = self.__class__()
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo),
copy.deepcopy(value, memo))
return result
def __getstate__(self):
obj_dict = self.__dict__.copy()
obj_dict['_data'] = dict([(k, self.getlist(k)) for k in self])
return obj_dict
def __setstate__(self, obj_dict):
data = obj_dict.pop('_data', {})
for k, v in data.items():
self.setlist(k, v)
self.__dict__.update(obj_dict)
def get(self, key, default=None):
"""
Returns the last data value for the passed key. If key doesn't exist
or value is an empty list, then default is returned.
"""
try:
val = self[key]
except KeyError:
return default
if val == []:
return default
return val
def getlist(self, key):
"""
Returns the list of values for the passed key. If key doesn't exist,
then an empty list is returned.
"""
try:
return super(MultiValueDict, self).__getitem__(key)
except KeyError:
return []
def setlist(self, key, list_):
super(MultiValueDict, self).__setitem__(key, list_)
def setdefault(self, key, default=None):
if key not in self:
self[key] = default
return self[key]
def setlistdefault(self, key, default_list=()):
if key not in self:
self.setlist(key, default_list)
return self.getlist(key)
def appendlist(self, key, value):
"""Appends an item to the internal list associated with key."""
self.setlistdefault(key, [])
super(MultiValueDict, self).__setitem__(key, self.getlist(key) + [value])
def items(self):
"""
Returns a list of (key, value) pairs, where value is the last item in
the list associated with the key.
"""
return [(key, self[key]) for key in self.keys()]
def iteritems(self):
"""
Yields (key, value) pairs, where value is the last item in the list
associated with the key.
"""
for key in self.keys():
yield (key, self[key])
def lists(self):
"""Returns a list of (key, list) pairs."""
return super(MultiValueDict, self).items()
def iterlists(self):
"""Yields (key, list) pairs."""
return super(MultiValueDict, self).iteritems()
def values(self):
"""Returns a list of the last value on every key list."""
return [self[key] for key in self.keys()]
def itervalues(self):
"""Yield the last value on every key list."""
for key in self.iterkeys():
yield self[key]
def copy(self):
"""Returns a shallow copy of this object."""
return copy.copy(self)
def update(self, *args, **kwargs):
"""
update() extends rather than replaces existing key lists.
Also accepts keyword args.
"""
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
if args:
other_dict = args[0]
if isinstance(other_dict, MultiValueDict):
for key, value_list in other_dict.lists():
self.setlistdefault(key, []).extend(value_list)
else:
try:
for key, value in other_dict.items():
self.setlistdefault(key, []).append(value)
except TypeError:
raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary")
for key, value in kwargs.iteritems():
self.setlistdefault(key, []).append(value)
class DotExpandedDict(dict):
"""
A special dictionary constructor that takes a dictionary in which the keys
may contain dots to specify inner dictionaries. It's confusing, but this
example should make sense.
>>> d = DotExpandedDict({'person.1.firstname': ['Simon'], \
'person.1.lastname': ['Willison'], \
'person.2.firstname': ['Adrian'], \
'person.2.lastname': ['Holovaty']})
>>> d
{'person': {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}}
>>> d['person']
{'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}
>>> d['person']['1']
{'lastname': ['Willison'], 'firstname': ['Simon']}
# Gotcha: Results are unpredictable if the dots are "uneven":
>>> DotExpandedDict({'c.1': 2, 'c.2': 3, 'c': 1})
{'c': 1}
"""
def __init__(self, key_to_list_mapping):
for k, v in key_to_list_mapping.items():
current = self
bits = k.split('.')
for bit in bits[:-1]:
current = current.setdefault(bit, {})
# Now assign value to current position
try:
current[bits[-1]] = v
except TypeError: # Special-case if current isn't a dict.
current = {bits[-1]: v}
class ImmutableList(tuple):
"""
A tuple-like object that raises useful errors when it is asked to mutate.
Example::
>>> a = ImmutableList(range(5), warning="You cannot mutate this.")
>>> a[3] = '4'
Traceback (most recent call last):
...
AttributeError: You cannot mutate this.
"""
def __new__(cls, *args, **kwargs):
if 'warning' in kwargs:
warning = kwargs['warning']
del kwargs['warning']
else:
warning = 'ImmutableList object is immutable.'
self = tuple.__new__(cls, *args, **kwargs)
self.warning = warning
return self
def complain(self, *wargs, **kwargs):
if isinstance(self.warning, Exception):
raise self.warning
else:
raise AttributeError(self.warning)
# All list mutation functions complain.
__delitem__ = complain
__delslice__ = complain
__iadd__ = complain
__imul__ = complain
__setitem__ = complain
__setslice__ = complain
append = complain
extend = complain
insert = complain
pop = complain
remove = complain
sort = complain
reverse = complain
class DictWrapper(dict):
"""
Wraps accesses to a dictionary so that certain values (those starting with
the specified prefix) are passed through a function before being returned.
The prefix is removed before looking up the real value.
Used by the SQL construction code to ensure that values are correctly
quoted before being used.
"""
def __init__(self, data, func, prefix):
super(DictWrapper, self).__init__(data)
self.func = func
self.prefix = prefix
def __getitem__(self, key):
"""
Retrieves the real value after stripping the prefix string (if
present). If the prefix is present, pass the value through self.func
before returning, otherwise return the raw value.
"""
if key.startswith(self.prefix):
use_func = True
key = key[len(self.prefix):]
else:
use_func = False
value = super(DictWrapper, self).__getitem__(key)
if use_func:
return self.func(value)
return value
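# Example: keys carrying the prefix are stripped and their values passed
# through func; other keys come back untouched.
#   dw = DictWrapper({'a': 1}, lambda v: v * 2, 'fn_')
#   dw['fn_a']  ->  2
#   dw['a']     ->  1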
| skevy/django | django/utils/datastructures.py | Python | bsd-3-clause | 15,444 | 0.001684 |
#!/usr/bin/env python
import re
import glob
class FileReader(object):
    OPTION_LINE_RE = re.compile(r'^#!\s*(.+?)\s*=\s*(.+?)\s*$')
def __init__(self):
self.opts = {
'sep': ' ',
'cols': 'sum',
}
def read_option_line(self, l):
m = self.OPTION_LINE_RE.match(l)
if not m:
raise ValueError
k = m.group(1)
v = m.group(2)
| amiraliakbari/grade-analyzer | pan/pan.py | Python | mit | 414 | 0.007246 |
"""Mixin classes for custom array types that don't inherit from ndarray."""
from numpy.core import umath as um
__all__ = ['NDArrayOperatorsMixin']
def _disables_array_ufunc(obj):
"""True when __array_ufunc__ is set to None."""
try:
return obj.__array_ufunc__ is None
except AttributeError:
return False
def _binary_method(ufunc, name):
"""Implement a forward binary method with a ufunc, e.g., __add__."""
def func(self, other):
if _disables_array_ufunc(other):
return NotImplemented
return ufunc(self, other)
func.__name__ = '__{}__'.format(name)
return func
def _reflected_binary_method(ufunc, name):
"""Implement a reflected binary method with a ufunc, e.g., __radd__."""
def func(self, other):
if _disables_array_ufunc(other):
return NotImplemented
return ufunc(other, self)
func.__name__ = '__r{}__'.format(name)
return func
def _inplace_binary_method(ufunc, name):
"""Implement an in-place binary method with a ufunc, e.g., __iadd__."""
def func(self, other):
return ufunc(self, other, out=(self,))
func.__name__ = '__i{}__'.format(name)
return func
def _numeric_methods(ufunc, name):
"""Implement forward, reflected and inplace binary methods with a ufunc."""
return (_binary_method(ufunc, name),
_reflected_binary_method(ufunc, name),
_inplace_binary_method(ufunc, name))
def _unary_method(ufunc, name):
"""Implement a unary special method with a ufunc."""
def func(self):
return ufunc(self)
func.__name__ = '__{}__'.format(name)
return func
class NDArrayOperatorsMixin:
"""Mixin defining all operator special methods using __array_ufunc__.
This class implements the special methods for almost all of Python's
builtin operators defined in the `operator` module, including comparisons
(``==``, ``>``, etc.) and arithmetic (``+``, ``*``, ``-``, etc.), by
deferring to the ``__array_ufunc__`` method, which subclasses must
implement.
It is useful for writing classes that do not inherit from `numpy.ndarray`,
but that should support arithmetic and numpy universal functions like
arrays as described in `A Mechanism for Overriding Ufuncs
<https://numpy.org/neps/nep-0013-ufunc-overrides.html>`_.
    As a trivial example, consider this implementation of an ``ArrayLike``
class that simply wraps a NumPy array and ensures that the result of any
arithmetic operation is also an ``ArrayLike`` object::
class ArrayLike(np.lib.mixins.NDArrayOperatorsMixin):
def __init__(self, value):
self.value = np.asarray(value)
# One might also consider adding the built-in list type to this
# list, to support operations like np.add(array_like, list)
_HANDLED_TYPES = (np.ndarray, numbers.Number)
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
out = kwargs.get('out', ())
for x in inputs + out:
# Only support operations with instances of _HANDLED_TYPES.
# Use ArrayLike instead of type(self) for isinstance to
# allow subclasses that don't override __array_ufunc__ to
# handle ArrayLike objects.
if not isinstance(x, self._HANDLED_TYPES + (ArrayLike,)):
return NotImplemented
# Defer to the implementation of the ufunc on unwrapped values.
inputs = tuple(x.value if isinstance(x, ArrayLike) else x
for x in inputs)
if out:
kwargs['out'] = tuple(
x.value if isinstance(x, ArrayLike) else x
for x in out)
result = getattr(ufunc, method)(*inputs, **kwargs)
if type(result) is tuple:
# multiple return values
return tuple(type(self)(x) for x in result)
elif method == 'at':
# no return value
return None
else:
# one return value
return type(self)(result)
def __repr__(self):
return '%s(%r)' % (type(self).__name__, self.value)
In interactions between ``ArrayLike`` objects and numbers or numpy arrays,
the result is always another ``ArrayLike``:
>>> x = ArrayLike([1, 2, 3])
>>> x - 1
ArrayLike(array([0, 1, 2]))
>>> 1 - x
ArrayLike(array([ 0, -1, -2]))
>>> np.arange(3) - x
ArrayLike(array([-1, -1, -1]))
>>> x - np.arange(3)
ArrayLike(array([1, 1, 1]))
Note that unlike ``numpy.ndarray``, ``ArrayLike`` does not allow operations
with arbitrary, unrecognized types. This ensures that interactions with
ArrayLike preserve a well-defined casting hierarchy.
.. versionadded:: 1.13
"""
# Like np.ndarray, this mixin class implements "Option 1" from the ufunc
# overrides NEP.
# comparisons don't have reflected and in-place versions
__lt__ = _binary_method(um.less, 'lt')
__le__ = _binary_method(um.less_equal, 'le')
__eq__ = _binary_method(um.equal, 'eq')
__ne__ = _binary_method(um.not_equal, 'ne')
__gt__ = _binary_method(um.greater, 'gt')
__ge__ = _binary_method(um.greater_equal, 'ge')
# numeric methods
__add__, __radd__, __iadd__ = _numeric_methods(um.add, 'add')
__sub__, __rsub__, __isub__ = _numeric_methods(um.subtract, 'sub')
__mul__, __rmul__, __imul__ = _numeric_methods(um.multiply, 'mul')
__matmul__, __rmatmul__, __imatmul__ = _numeric_methods(
um.matmul, 'matmul')
# Python 3 does not use __div__, __rdiv__, or __idiv__
__truediv__, __rtruediv__, __itruediv__ = _numeric_methods(
um.true_divide, 'truediv')
__floordiv__, __rfloordiv__, __ifloordiv__ = _numeric_methods(
um.floor_divide, 'floordiv')
__mod__, __rmod__, __imod__ = _numeric_methods(um.remainder, 'mod')
__divmod__ = _binary_method(um.divmod, 'divmod')
__rdivmod__ = _reflected_binary_method(um.divmod, 'divmod')
# __idivmod__ does not exist
# TODO: handle the optional third argument for __pow__?
__pow__, __rpow__, __ipow__ = _numeric_methods(um.power, 'pow')
__lshift__, __rlshift__, __ilshift__ = _numeric_methods(
um.left_shift, 'lshift')
__rshift__, __rrshift__, __irshift__ = _numeric_methods(
um.right_shift, 'rshift')
__and__, __rand__, __iand__ = _numeric_methods(um.bitwise_and, 'and')
__xor__, __rxor__, __ixor__ = _numeric_methods(um.bitwise_xor, 'xor')
__or__, __ror__, __ior__ = _numeric_methods(um.bitwise_or, 'or')
# unary methods
__neg__ = _unary_method(um.negative, 'neg')
__pos__ = _unary_method(um.positive, 'pos')
__abs__ = _unary_method(um.absolute, 'abs')
__invert__ = _unary_method(um.invert, 'invert')
| charris/numpy | numpy/lib/mixins.py | Python | bsd-3-clause | 7,052 | 0 |
"""AsciiTable is the main table class. To be inherited by other tables. Define convenience methods here."""
from terminaltables.base_table import BaseTable
from terminaltables.terminal_io import terminal_size
from terminaltables.width_and_alignment import column_max_width, max_dimensions, table_width
class AsciiTable(BaseTable):
"""Draw a table using regular ASCII characters, such as ``+``, ``|``, and ``-``.
:ivar iter table_data: List (empty or list of lists of strings) representing the table.
:ivar str title: Optional title to show within the top border of the table.
:ivar bool inner_column_border: Separates columns.
:ivar bool inner_footing_row_border: Show a border before the last row.
:ivar bool inner_heading_row_border: Show a border after the first row.
:ivar bool inner_row_border: Show a border in between every row.
:ivar bool outer_border: Show the top, left, right, and bottom border.
:ivar dict justify_columns: Horizontal justification. Keys are column indexes (int). Values are right/left/center.
:ivar int padding_left: Number of spaces to pad on the left side of every cell.
:ivar int padding_right: Number of spaces to pad on the right side of every cell.
"""
def column_max_width(self, column_number):
"""Return the maximum width of a column based on the current terminal width.
:param int column_number: The column number to query.
:return: The max width of the column.
:rtype: int
"""
inner_widths = max_dimensions(self.table_data)[0]
outer_border = 2 if self.outer_border else 0
inner_border = 1 if self.inner_column_border else 0
padding = self.padding_left + self.padding_right
return column_max_width(inner_widths, column_number, outer_border, inner_border, padding)
@property
def column_widths(self):
"""Return a list of integers representing the widths of each table column without padding."""
if not self.table_data:
return list()
return max_dimensions(self.table_data)[0]
@property
def ok(self): # Too late to change API. # pylint: disable=invalid-name
"""Return True if the table fits within the terminal width, False if the table breaks."""
return self.table_width <= terminal_size()[0]
@property
def table_width(self):
"""Return the width of the table including padding and borders."""
outer_widths = max_dimensions(self.table_data, self.padding_left, self.padding_right)[2]
outer_border = 2 if self.outer_border else 0
inner_border = 1 if self.inner_column_border else 0
return table_width(outer_widths, outer_border, inner_border)
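if __name__ == '__main__':
    # Illustrative usage sketch, not part of the original module; the demo
    # data below is made up.
    demo = AsciiTable([['Name', 'Color'], ['Avocado', 'green'], ['Cherry', 'red']],
                      title='Fruit')
    demo.justify_columns[1] = 'right'
    print(demo.table)
    print('Fits terminal: {0}'.format(demo.ok))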
| Robpol86/terminaltables | terminaltables/ascii_table.py | Python | mit | 2,734 | 0.004389 |
# -*- coding: utf-8 -*-
# Copyright (C) 2007, 2008, 2014 Toms Bauģis <toms.baugis at gmail.com>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk as gtk
from gi.repository import Gdk as gdk
from gi.repository import GObject as gobject
import datetime as dt
from gettext import ngettext
from hamster.lib.configuration import Controller
def get_prev(selection, model):
(model, iter) = selection.get_selected()
#previous item
path = model.get_path(iter)[0] - 1
if path >= 0:
return model.get_iter_from_string(str(path))
else:
return None
class CategoryStore(gtk.ListStore):
def __init__(self):
#id, name, color_code, order
gtk.ListStore.__init__(self, int, str)
def load(self):
category_list = runtime.storage.get_categories()
for category in category_list:
self.append([category['id'], category['name']])
self.unsorted_category = self.append([-1, _("Unsorted")]) # all activities without category
class ActivityStore(gtk.ListStore):
def __init__(self):
#id, name, category_id, order
gtk.ListStore.__init__(self, int, str, int)
def load(self, category_id):
self.clear()
if category_id is None:
return
activity_list = runtime.storage.get_category_activities(category_id)
for activity in activity_list:
self.append([activity['id'],
activity['name'],
activity['category_id']])
formats = ["fixed", "symbolic", "minutes"]
appearances = ["text", "icon", "both"]
from hamster.lib.configuration import runtime, conf
import widgets
from lib import stuff, trophies
class PreferencesEditor(Controller):
TARGETS = [
('MY_TREE_MODEL_ROW', gtk.TargetFlags.SAME_WIDGET, 0),
('MY_TREE_MODEL_ROW', gtk.TargetFlags.SAME_APP, 0),
]
def __init__(self, parent = None):
Controller.__init__(self, parent, ui_file="preferences.ui")
# Translators: 'None' refers here to the Todo list choice in Hamster preferences (Tracking tab)
self.activities_sources = [("", _("None")),
("evo", "Evolution"),
("gtg", "Getting Things Gnome")]
self.todo_combo = gtk.ComboBoxText()
for code, label in self.activities_sources:
self.todo_combo.append_text(label)
self.todo_combo.connect("changed", self.on_todo_combo_changed)
self.get_widget("todo_pick").add(self.todo_combo)
# create and fill activity tree
self.activity_tree = self.get_widget('activity_list')
self.get_widget("activities_label").set_mnemonic_widget(self.activity_tree)
self.activity_store = ActivityStore()
self.external_listeners = []
self.activityColumn = gtk.TreeViewColumn(_("Name"))
self.activityColumn.set_expand(True)
self.activityCell = gtk.CellRendererText()
self.external_listeners.extend([
(self.activityCell, self.activityCell.connect('edited', self.activity_name_edited_cb, self.activity_store))
])
self.activityColumn.pack_start(self.activityCell, True)
self.activityColumn.set_attributes(self.activityCell, text=1)
self.activityColumn.set_sort_column_id(1)
self.activity_tree.append_column(self.activityColumn)
self.activity_tree.set_model(self.activity_store)
self.selection = self.activity_tree.get_selection()
self.external_listeners.extend([
(self.selection, self.selection.connect('changed', self.activity_changed, self.activity_store))
])
# create and fill category tree
self.category_tree = self.get_widget('category_list')
self.get_widget("categories_label").set_mnemonic_widget(self.category_tree)
self.category_store = CategoryStore()
self.categoryColumn = gtk.TreeViewColumn(_("Category"))
self.categoryColumn.set_expand(True)
self.categoryCell = gtk.CellRendererText()
self.external_listeners.extend([
(self.categoryCell, self.categoryCell.connect('edited', self.category_edited_cb, self.category_store))
])
self.categoryColumn.pack_start(self.categoryCell, True)
self.categoryColumn.set_attributes(self.categoryCell, text=1)
self.categoryColumn.set_sort_column_id(1)
self.categoryColumn.set_cell_data_func(self.categoryCell, self.unsorted_painter)
self.category_tree.append_column(self.categoryColumn)
self.category_store.load()
self.category_tree.set_model(self.category_store)
selection = self.category_tree.get_selection()
self.external_listeners.extend([
(selection, selection.connect('changed', self.category_changed_cb, self.category_store))
])
self.day_start = widgets.TimeInput(dt.time(5,30))
self.get_widget("day_start_placeholder").add(self.day_start)
self.load_config()
# Allow enable drag and drop of rows including row move
self.activity_tree.enable_model_drag_source(gdk.ModifierType.BUTTON1_MASK,
self.TARGETS,
gdk.DragAction.DEFAULT|
gdk.DragAction.MOVE)
self.category_tree.enable_model_drag_dest(self.TARGETS,
gdk.DragAction.MOVE)
self.activity_tree.connect("drag_data_get", self.drag_data_get_data)
self.category_tree.connect("drag_data_received", self.on_category_drop)
#select first category
selection = self.category_tree.get_selection()
selection.select_path((0,))
self.prev_selected_activity = None
self.prev_selected_category = None
self.external_listeners.extend([
(self.day_start, self.day_start.connect("time-entered", self.on_day_start_changed))
])
self.show()
def show(self):
self.get_widget("notebook1").set_current_page(0)
self.window.show_all()
def on_todo_combo_changed(self, combo):
conf.set("activities_source", self.activities_sources[combo.get_active()][0])
def load_config(self, *args):
self.get_widget("shutdown_track").set_active(conf.get("stop_on_shutdown"))
self.get_widget("idle_track").set_active(conf.get("enable_timeout"))
self.get_widget("notify_interval").set_value(conf.get("notify_interval"))
self.get_widget("notify_on_idle").set_active(conf.get("notify_on_idle"))
self.get_widget("notify_on_idle").set_sensitive(conf.get("notify_interval") <=120)
day_start = conf.get("day_start_minutes")
day_start = dt.time(day_start / 60, day_start % 60)
self.day_start.set_time(day_start)
self.tags = [tag["name"] for tag in runtime.storage.get_tags(only_autocomplete=True)]
self.get_widget("autocomplete_tags").set_text(", ".join(self.tags))
current_source = conf.get("activities_source")
for i, (code, label) in enumerate(self.activities_sources):
if code == current_source:
self.todo_combo.set_active(i)
def on_autocomplete_tags_view_focus_out_event(self, view, event):
buf = self.get_widget("autocomplete_tags")
updated_tags = buf.get_text(buf.get_start_iter(), buf.get_end_iter(), 0) \
.decode("utf-8")
if updated_tags == self.tags:
return
self.tags = updated_tags
runtime.storage.update_autocomplete_tags(updated_tags)
def drag_data_get_data(self, treeview, context, selection, target_id,
etime):
treeselection = treeview.get_selection()
model, iter = treeselection.get_selected()
data = model.get_value(iter, 0) #get activity ID
selection.set(selection.target, 0, str(data))
def select_activity(self, id):
model = self.activity_tree.get_model()
i = 0
for row in model:
if row[0] == id:
self.activity_tree.set_cursor((i, ))
i += 1
def select_category(self, id):
model = self.category_tree.get_model()
i = 0
for row in model:
if row[0] == id:
self.category_tree.set_cursor((i, ))
i += 1
def on_category_list_drag_motion(self, treeview, drag_context, x, y, eventtime):
self.prev_selected_category = None
try:
target_path, drop_position = treeview.get_dest_row_at_pos(x, y)
model, source = treeview.get_selection().get_selected()
except:
return
drop_yes = ("drop_yes", gtk.TARGET_SAME_APP, 0)
drop_no = ("drop_no", gtk.TARGET_SAME_APP, 0)
if drop_position != gtk.TREE_VIEW_DROP_AFTER and \
drop_position != gtk.TREE_VIEW_DROP_BEFORE:
treeview.enable_model_drag_dest(self.TARGETS, gdk.DragAction.MOVE)
else:
treeview.enable_model_drag_dest([drop_no], gdk.DragAction.MOVE)
def on_category_drop(self, treeview, context, x, y, selection,
info, etime):
model = self.category_tree.get_model()
data = selection.data
drop_info = treeview.get_dest_row_at_pos(x, y)
if drop_info:
path, position = drop_info
iter = model.get_iter(path)
changed = runtime.storage.change_category(int(data), model[iter][0])
context.finish(changed, True, etime)
else:
context.finish(False, True, etime)
return
# callbacks
def category_edited_cb(self, cell, path, new_text, model):
new_text = new_text.decode("utf-8")
id = model[path][0]
if id == -1:
return False #ignoring unsorted category
#look for dupes
categories = runtime.storage.get_categories()
for category in categories:
if category['name'].lower() == new_text.lower():
if id == -2: # that was a new category
self.category_store.remove(model.get_iter(path))
self.select_category(category['id'])
return False
if id == -2: #new category
id = runtime.storage.add_category(new_text)
model[path][0] = id
else:
runtime.storage.update_category(id, new_text)
model[path][1] = new_text
def activity_name_edited_cb(self, cell, path, new_text, model):
new_text = new_text.decode("utf-8")
id = model[path][0]
category_id = model[path][2]
activities = runtime.storage.get_category_activities(category_id)
prev = None
for activity in activities:
if id == activity['id']:
prev = activity['name']
else:
# avoid two activities in same category with same name
if activity['name'].lower() == new_text.lower():
if id == -1: # that was a new activity
self.activity_store.remove(model.get_iter(path))
self.select_activity(activity['id'])
return False
if id == -1: #new activity -> add
model[path][0] = runtime.storage.add_activity(new_text, category_id)
else: #existing activity -> update
new = new_text
runtime.storage.update_activity(id, new, category_id)
# size matters - when editing activity name just changed the case (bar -> Bar)
if prev != new and prev.lower() == new.lower():
trophies.unlock("size_matters")
model[path][1] = new_text
return True
def category_changed_cb(self, selection, model):
""" enables and disables action buttons depending on selected item """
(model, iter) = selection.get_selected()
id = 0
if iter is None:
self.activity_store.clear()
else:
self.prev_selected_activity = None
id = model[iter][0]
self.activity_store.load(model[iter][0])
#start with nothing
self.get_widget('activity_edit').set_sensitive(False)
self.get_widget('activity_remove').set_sensitive(False)
return True
def _get_selected_category(self):
selection = self.get_widget('category_list').get_selection()
(model, iter) = selection.get_selected()
return model[iter][0] if iter else None
def activity_changed(self, selection, model):
""" enables and disables action buttons depending on selected item """
(model, iter) = selection.get_selected()
# treat any selected case
unsorted_selected = self._get_selected_category() == -1
self.get_widget('activity_edit').set_sensitive(iter != None)
self.get_widget('activity_remove').set_sensitive(iter != None)
def _del_selected_row(self, tree):
selection = tree.get_selection()
(model, iter) = selection.get_selected()
next_row = model.iter_next(iter)
if next_row:
selection.select_iter(next_row)
else:
path = model.get_path(iter)[0] - 1
if path > 0:
selection.select_path(path)
removable_id = model[iter][0]
model.remove(iter)
return removable_id
def unsorted_painter(self, column, cell, model, iter, data):
cell_id = model.get_value(iter, 0)
cell_text = model.get_value(iter, 1)
if cell_id == -1:
text = '<span color="#555" style="italic">%s</span>' % cell_text # TODO - should get color from theme
cell.set_property('markup', text)
else:
cell.set_property('text', cell_text)
return
def on_activity_list_button_pressed(self, tree, event):
self.activityCell.set_property("editable", False)
def on_activity_list_button_released(self, tree, event):
if event.button == 1 and tree.get_path_at_pos(int(event.x), int(event.y)):
# Get treeview path.
path, column, x, y = tree.get_path_at_pos(int(event.x), int(event.y))
if self.prev_selected_activity == path:
self.activityCell.set_property("editable", True)
tree.set_cursor_on_cell(path, self.activityColumn, self.activityCell, True)
self.prev_selected_activity = path
def on_category_list_button_pressed(self, tree, event):
self.activityCell.set_property("editable", False)
def on_category_list_button_released(self, tree, event):
if event.button == 1 and tree.get_path_at_pos(int(event.x), int(event.y)):
# Get treeview path.
path, column, x, y = tree.get_path_at_pos(int(event.x), int(event.y))
if self.prev_selected_category == path and \
self._get_selected_category() != -1: #do not allow to edit unsorted
self.categoryCell.set_property("editable", True)
tree.set_cursor_on_cell(path, self.categoryColumn, self.categoryCell, True)
else:
self.categoryCell.set_property("editable", False)
self.prev_selected_category = path
def on_activity_edit_clicked(self, button):
self.activityCell.set_property("editable", True)
selection = self.activity_tree.get_selection()
(model, iter) = selection.get_selected()
path = model.get_path(iter)[0]
self.activity_tree.set_cursor_on_cell(path, focus_column = self.activityColumn, start_editing = True)
"""keyboard events"""
def on_activity_list_key_pressed(self, tree, event_key):
key = event_key.keyval
selection = tree.get_selection()
(model, iter) = selection.get_selected()
if (event_key.keyval == gdk.KEY_Delete):
self.remove_current_activity()
elif key == gdk.KEY_F2 :
self.activityCell.set_property("editable", True)
path = model.get_path(iter)[0]
tree.set_cursor_on_cell(path, focus_column = self.activityColumn, start_editing = True)
def remove_current_activity(self):
selection = self.activity_tree.get_selection()
(model, iter) = selection.get_selected()
runtime.storage.remove_activity(model[iter][0])
self._del_selected_row(self.activity_tree)
def on_category_remove_clicked(self, button):
self.remove_current_category()
def on_category_edit_clicked(self, button):
self.categoryCell.set_property("editable", True)
selection = self.category_tree.get_selection()
(model, iter) = selection.get_selected()
path = model.get_path(iter)[0]
self.category_tree.set_cursor_on_cell(path, focus_column = self.categoryColumn, start_editing = True)
def on_category_list_key_pressed(self, tree, event_key):
key = event_key.keyval
if self._get_selected_category() == -1:
return #ignoring unsorted category
selection = tree.get_selection()
(model, iter) = selection.get_selected()
if key == gdk.KEY_Delete:
self.remove_current_category()
elif key == gdk.KEY_F2:
self.categoryCell.set_property("editable", True)
path = model.get_path(iter)[0]
tree.set_cursor_on_cell(path, focus_column = self.categoryColumn, start_editing = True)
def remove_current_category(self):
selection = self.category_tree.get_selection()
(model, iter) = selection.get_selected()
id = model[iter][0]
if id != -1:
runtime.storage.remove_category(id)
self._del_selected_row(self.category_tree)
def on_preferences_window_key_press(self, widget, event):
# ctrl+w means close window
if (event.keyval == gdk.KEY_w \
and event.state & gdk.ModifierType.CONTROL_MASK):
self.close_window()
# escape can mean several things
if event.keyval == gdk.KEY_Escape:
#check, maybe we are editing stuff
if self.activityCell.get_property("editable"):
self.activityCell.set_property("editable", False)
return
if self.categoryCell.get_property("editable"):
self.categoryCell.set_property("editable", False)
return
self.close_window()
"""button events"""
def on_category_add_clicked(self, button):
""" appends row, jumps to it and allows user to input name """
new_category = self.category_store.insert_before(self.category_store.unsorted_category,
[-2, _(u"New category")])
self.categoryCell.set_property("editable", True)
self.category_tree.set_cursor_on_cell((len(self.category_tree.get_model()) - 2, ),
focus_column = self.category_tree.get_column(0),
focus_cell = None,
start_editing = True)
def on_activity_add_clicked(self, button):
""" appends row, jumps to it and allows user to input name """
category_id = self._get_selected_category()
new_activity = self.activity_store.append([-1, _(u"New activity"), category_id])
(model, iter) = self.selection.get_selected()
self.activityCell.set_property("editable", True)
self.activity_tree.set_cursor_on_cell(model.get_path(new_activity),
focus_column = self.activity_tree.get_column(0),
focus_cell = None,
start_editing = True)
def on_activity_remove_clicked(self, button):
removable_id = self._del_selected_row(self.activity_tree)
runtime.storage.remove_activity(removable_id)
def on_shutdown_track_toggled(self, checkbox):
conf.set("stop_on_shutdown", checkbox.get_active())
def on_idle_track_toggled(self, checkbox):
conf.set("enable_timeout", checkbox.get_active())
def on_notify_on_idle_toggled(self, checkbox):
conf.set("notify_on_idle", checkbox.get_active())
def on_notify_interval_format_value(self, slider, value):
if value <=120:
# notify interval slider value label
label = ngettext("%(interval_minutes)d minute",
"%(interval_minutes)d minutes",
value) % {'interval_minutes': value}
else:
# notify interval slider value label
label = _(u"Never")
return label
def on_notify_interval_value_changed(self, scale):
value = int(scale.get_value())
conf.set("notify_interval", value)
self.get_widget("notify_on_idle").set_sensitive(value <= 120)
def on_day_start_changed(self, widget):
day_start = self.day_start.get_time()
if day_start is None:
return
day_start = day_start.hour * 60 + day_start.minute
conf.set("day_start_minutes", day_start)
def on_close_button_clicked(self, button):
self.close_window()
def close_window(self):
if self.parent:
for obj, handler in self.external_listeners:
obj.disconnect(handler)
self._gui = None
self.wNameColumn = None
self.categoryColumn = None
Controller.close_window(self)
| wreckJ/hamster | src/hamster/preferences.py | Python | gpl-3.0 | 22,492 | 0.006269 |
#!/usr/bin/python3
#
# Copyright: Conor O'Callghan 2016
# Version: v1.1.3
#
# Please feel free to fork this project, modify the code and improve
# it on the github repo https://github.com/brioscaibriste/iarnrod
#
# Powered by TfL Open Data
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import json
import sys
import tempfile
import time
import os
from urllib.request import urlopen
'''
ParseArgs
A simple function to parse the command line arguments passed to the function.
The function does very little sanitisation on the input variables. The
argument passed is then returned from the function.
'''
def ParseArgs():
# Parse our command line argument for the line name
parser = argparse.ArgumentParser()
parser.add_argument('--line',dest='LineName',help='Specify the London line you want to report on')
args = parser.parse_args()
# Check if the value is blank
Line = (args.LineName)
if not Line:
print ("\nError, you must specify a line name! e.g. --line district\n")
sys.exit(1)
# Convert the line name to lower case for easy comparison
Line = Line.lower()
# If the line isn't in the line list, fail badly
if Line not in ('district','circle','victoria','central','northern',
'bakerloo','hammersmith-city','jubilee','metropolitan',
'piccadilly','waterloo-city','dlr',):
print ("\nError, you have specified " + Line + " as your line. You must specify one of the following: "
"\n\tDistrict"
"\n\tCircle"
"\n\tVictora"
"\n\tCentral"
"\n\tNorthern"
"\n\tPiccadilly"
"\n\tBakerloo"
"\n\thammersmith-city"
"\n\twaterloo-city"
"\n\tDLR"
"\n\tMetropolitan"
"\n\tJubilee\n")
sys.exit(1)
# Convert the tube line back to upper case for nice display
Line = Line.upper()
return Line
'''
RetrieveTFLData
Inputs:
Line - Which line to retrieve information on
Run - Should the data retrieval be run or should the cache file be used
SFileName - The file in which to store the line status cache
This function takes the Line variable (a name of a Transport For London line
name) and polls the TFL API. The function then returns the current line
status for the specified line.
'''
def RetrieveTFLData(Line,Run,SFileName):
# TFL Unified API URL
TFLDataURL = "https://api.tfl.gov.uk/Line/" + Line + ("/Status?detail=False"
"&app_id=&app_key=")
if Run:
# Read all the information from JSON at the specified URL, can be re-done with requests?
        # Read all the information from JSON at the specified URL, can be re-done with requests?
        RawData = urlopen(TFLDataURL).read().decode('utf8')
        if not RawData:
            sys.exit("Error, failed to retrieve the data from the TFL website")
TFLData = json.loads(RawData)
# Sanitize the data to get the line status
Scratch = (TFLData[0]['lineStatuses'])
LineStatusData = (Scratch[0]['statusSeverityDescription'])
# Cache the staus in a file
with open(SFileName, 'w+') as SFile:
SFile.write(LineStatusData)
SFile.closed
else:
with open(SFileName, 'r+') as SFile:
LineStatusData = SFile.read()
SFile.closed
return LineStatusData
'''
Throttle
Inputs
PollIntervalMinutes - Polling interval in minutes
Throttle - Should we throttle the connection or not?
TFileName - The file where the timestamp for throttling usage is stored
This function is used to determine whether or not the next run of the retrieval of data should run.
It retrieves the previously run time from a file in /tmp if it exists, if the file does not exist
the run status will return as 1 and the current time stamp will be written into a new file.
If throttling is disabled, the file will be removed from /tmp and run will be set to 1.
'''
def Throttle(PollIntervalMinutes,Throttling,TFileName):
if Throttling == "True":
# Current epoch time
# CurrentStamp = str(time.time()).split('.')[0]
CurrentStamp = int(time.time())
# Does the temporary file exist or not
if os.path.isfile(TFileName):
# Open the temp file and read the time stamp
with open(TFileName, 'r+') as TFile:
TimeFile = TFile.read()
Remainder = CurrentStamp - int(TimeFile)
else:
# Get the current time stamp and write it to the temp file
with open(TFileName, 'w') as TFile:
TFile.write(str(CurrentStamp))
# Set the Remainder high to force the next run
Remainder = 1000000
# If the remainder is less than the poll interval don't run the command, if it isn't run the command
if ( Remainder < (PollIntervalMinutes * 60) ):
Run = 0
else:
Run = 1
# Set the command to run and re-write the poll time to file
with open(TFileName, 'w') as TFile:
TFile.write(str(CurrentStamp))
return Run
else:
# Remove the time file if it exists
try:
os.remove(TFileName)
except OSError:
pass
Run = 1
return Run
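if __name__ == '__main__':
    # Illustrative driver, not part of the original file: it wires together
    # the functions above. The cache file locations and the 3-minute poll
    # interval are assumptions.
    TFileName = os.path.join(tempfile.gettempdir(), 'iarnrod-poll-time')
    SFileName = os.path.join(tempfile.gettempdir(), 'iarnrod-line-status')
    Line = ParseArgs()
    Run = Throttle(3, "True", TFileName)
    LineStatus = RetrieveTFLData(Line, Run, SFileName)
    print("{0} line status: {1}".format(Line, LineStatus))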
| brioscaibriste/iarnrod | coire.py | Python | gpl-3.0 | 6,003 | 0.01316 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import logging
from django.utils.translation import ugettext_lazy as _
from saharaclient.api import base as api_base
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard.contrib.sahara.api import sahara as saharaclient
from openstack_dashboard.contrib.sahara.content.data_processing. \
utils import helpers as helpers
from openstack_dashboard.contrib.sahara.content.data_processing. \
utils import anti_affinity as aa
import openstack_dashboard.contrib.sahara.content.data_processing. \
utils.workflow_helpers as whelpers
LOG = logging.getLogger(__name__)
class SelectPluginAction(workflows.Action):
hidden_create_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
def __init__(self, request, *args, **kwargs):
super(SelectPluginAction, self).__init__(request, *args, **kwargs)
try:
plugins = saharaclient.plugin_list(request)
except Exception:
plugins = []
exceptions.handle(request,
_("Unable to fetch plugin list."))
plugin_choices = [(plugin.name, plugin.title) for plugin in plugins]
self.fields["plugin_name"] = forms.ChoiceField(
label=_("Plugin name"),
choices=plugin_choices,
widget=forms.Select(attrs={"class": "plugin_name_choice"}))
for plugin in plugins:
field_name = plugin.name + "_version"
choice_field = forms.ChoiceField(
label=_("Version"),
choices=[(version, version) for version in plugin.versions],
widget=forms.Select(
attrs={"class": "plugin_version_choice "
+ field_name + "_choice"})
)
self.fields[field_name] = choice_field
class Meta(object):
name = _("Select plugin and hadoop version for cluster template")
help_text_template = ("project/data_processing.cluster_templates/"
"_create_general_help.html")
class SelectPlugin(workflows.Step):
action_class = SelectPluginAction
class CreateClusterTemplate(workflows.Workflow):
slug = "create_cluster_template"
name = _("Create Cluster Template")
finalize_button_name = _("Next")
success_message = _("Created")
failure_message = _("Could not create")
success_url = "horizon:project:data_processing.cluster_templates:index"
default_steps = (SelectPlugin,)
class GeneralConfigAction(workflows.Action):
hidden_configure_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"}))
hidden_to_delete_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_to_delete_field"}))
cluster_template_name = forms.CharField(label=_("Template Name"))
description = forms.CharField(label=_("Description"),
required=False,
widget=forms.Textarea(attrs={'rows': 4}))
anti_affinity = aa.anti_affinity_field()
def __init__(self, request, *args, **kwargs):
super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
self.fields["plugin_name"] = forms.CharField(
widget=forms.HiddenInput(),
initial=plugin
)
self.fields["hadoop_version"] = forms.CharField(
widget=forms.HiddenInput(),
initial=hadoop_version
)
populate_anti_affinity_choices = aa.populate_anti_affinity_choices
def get_help_text(self):
extra = dict()
plugin, hadoop_version = whelpers\
.get_plugin_and_hadoop_version(self.request)
extra["plugin_name"] = plugin
extra["hadoop_version"] = hadoop_version
return super(GeneralConfigAction, self).get_help_text(extra)
def clean(self):
cleaned_data = super(GeneralConfigAction, self).clean()
if cleaned_data.get("hidden_configure_field", None) \
== "create_nodegroup":
self._errors = dict()
return cleaned_data
class Meta(object):
name = _("Details")
help_text_template = ("project/data_processing.cluster_templates/"
"_configure_general_help.html")
class GeneralConfig(workflows.Step):
action_class = GeneralConfigAction
contributes = ("hidden_configure_field", )
def contribute(self, data, context):
for k, v in data.items():
context["general_" + k] = v
post = self.workflow.request.POST
context['anti_affinity_info'] = post.getlist("anti_affinity")
return context
class ConfigureNodegroupsAction(workflows.Action):
hidden_nodegroups_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_nodegroups_field"}))
forms_ids = forms.CharField(
required=False,
widget=forms.HiddenInput())
def __init__(self, request, *args, **kwargs):
super(ConfigureNodegroupsAction, self). \
__init__(request, *args, **kwargs)
plugin = request.REQUEST.get("plugin_name")
version = request.REQUEST.get("hadoop_version")
if plugin and not version:
version_name = plugin + "_version"
version = request.REQUEST.get(version_name)
if not plugin or not version:
self.templates = saharaclient.nodegroup_template_find(request)
else:
self.templates = saharaclient.nodegroup_template_find(
request, plugin_name=plugin, hadoop_version=version)
deletable = request.REQUEST.get("deletable", dict())
request_source = None
if 'forms_ids' in request.POST:
request_source = request.POST
elif 'forms_ids' in request.REQUEST:
request_source = request.REQUEST
if request_source:
self.groups = []
for id in json.loads(request_source['forms_ids']):
group_name = "group_name_" + str(id)
template_id = "template_id_" + str(id)
count = "count_" + str(id)
serialized = "serialized_" + str(id)
self.groups.append({"name": request_source[group_name],
"template_id": request_source[template_id],
"count": request_source[count],
"id": id,
"deletable": deletable.get(
request_source[group_name], "true"),
"serialized": request_source[serialized]})
whelpers.build_node_group_fields(self,
group_name,
template_id,
count,
serialized)
def clean(self):
cleaned_data = super(ConfigureNodegroupsAction, self).clean()
if cleaned_data.get("hidden_nodegroups_field", None) \
== "create_nodegroup":
self._errors = dict()
return cleaned_data
class Meta(object):
name = _("Node Groups")
class ConfigureNodegroups(workflows.Step):
action_class = ConfigureNodegroupsAction
contributes = ("hidden_nodegroups_field", )
template_name = ("project/data_processing.cluster_templates/"
"cluster_node_groups_template.html")
def contribute(self, data, context):
for k, v in data.items():
context["ng_" + k] = v
return context
class ConfigureClusterTemplate(whelpers.ServiceParametersWorkflow,
whelpers.StatusFormatMixin):
slug = "configure_cluster_template"
name = _("Create Cluster Template")
finalize_button_name = _("Create")
success_message = _("Created Cluster Template %s")
name_property = "general_cluster_template_name"
success_url = "horizon:project:data_processing.cluster_templates:index"
default_steps = (GeneralConfig,
ConfigureNodegroups)
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
ConfigureClusterTemplate._cls_registry = set([])
hlps = helpers.Helpers(request)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
general_parameters = hlps.get_cluster_general_configs(
plugin,
hadoop_version)
service_parameters = hlps.get_targeted_cluster_configs(
plugin,
hadoop_version)
self._populate_tabs(general_parameters, service_parameters)
super(ConfigureClusterTemplate, self).__init__(request,
context_seed,
entry_point,
*args, **kwargs)
def is_valid(self):
steps_valid = True
for step in self.steps:
if not step.action.is_valid():
steps_valid = False
step.has_errors = True
errors_fields = list(step.action.errors.keys())
step.action.errors_fields = errors_fields
if not steps_valid:
return steps_valid
return self.validate(self.context)
def handle(self, request, context):
try:
node_groups = []
configs_dict = whelpers.parse_configs_from_context(context,
self.defaults)
ids = json.loads(context['ng_forms_ids'])
for id in ids:
name = context['ng_group_name_' + str(id)]
template_id = context['ng_template_id_' + str(id)]
count = context['ng_count_' + str(id)]
raw_ng = context.get("ng_serialized_" + str(id))
if raw_ng and raw_ng != 'null':
ng = json.loads(base64.urlsafe_b64decode(str(raw_ng)))
else:
ng = dict()
ng["name"] = name
ng["count"] = count
if template_id and template_id != u'None':
ng["node_group_template_id"] = template_id
node_groups.append(ng)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
# TODO(nkonovalov): Fix client to support default_image_id
saharaclient.cluster_template_create(
request,
context["general_cluster_template_name"],
plugin,
hadoop_version,
context["general_description"],
configs_dict,
node_groups,
context["anti_affinity_info"],
)
hlps = helpers.Helpers(request)
if hlps.is_from_guide():
request.session["guide_cluster_template_name"] = (
context["general_cluster_template_name"])
self.success_url = (
"horizon:project:data_processing.wizard:cluster_guide")
return True
except api_base.APIException as e:
self.error_description = str(e)
return False
except Exception:
exceptions.handle(request,
_("Cluster template creation failed"))
return False
| Metaswitch/horizon | openstack_dashboard/contrib/sahara/content/data_processing/cluster_templates/workflows/create.py | Python | apache-2.0 | 12,426 | 0.000161 |
__author__ = 'george'
from baseclass import Plugin
import time
from apscheduler.scheduler import Scheduler
class AreWeDone(Plugin):
def __init__(self, skype):
super(AreWeDone, self).__init__(skype)
self.command = "arewedoneyet"
self.sched = Scheduler()
self.sched.start()
        self.sched.add_cron_job(self.set_topic, hour="*", minute=2, day_of_week="mon-sun")
def message_received(self, args, status, msg):
cur_time = time.localtime()
if cur_time.tm_mday == 31 or cur_time.tm_mday == 1:
time_left = 1 - cur_time.tm_mday % 31
hours_left = 23 - cur_time.tm_hour
mins_left = 59 - cur_time.tm_min
msg.Chat.SendMessage("%d days, %d hours and %d mins left until we are done" % (time_left, hours_left, mins_left))
print "%d days, %d hours and %d mins left until we are done" % (time_left, hours_left, mins_left)
else:
msg.Chat.SendMessage("You are now done. Please visit http://www.nav.no for more information")
def set_topic(self):
channel = "#stigrk85/$jvlomax;b43a0c90a2592b9b"
chat = self.skype.Chat(channel)
cur_time = time.localtime()
days_left = 1 - cur_time.tm_mday % 31
time_left = 24 - cur_time.tm_hour + days_left * 24
if cur_time.tm_hour >= 21 or cur_time.tm_hour < 6:
tod = "night"
else:
tod= "day"
if days_left > 0:
left = "second"
else:
left = "final"
if cur_time.tm_mday == 1:
chat.SendMessage("/topic {} of the {} day - {} hours remain".format(tod, left, time_left))
else:
chat.SendMessage("Congratulations, You have survived. Please visit http://www.nav.no for more information".format(tod, left, time_left))
| jvlomax/Beaker-bot | plugins/arewedone.py | Python | gpl-2.0 | 1,837 | 0.006532 |
# DO NOT EDIT THIS FILE. This file will be overwritten when re-running go-raml.
from app import app
import gevent
from gevent.pywsgi import WSGIServer
from gevent.pool import Pool
from gevent import monkey
import signal
monkey.patch_all()
server = WSGIServer(('', 5000), app, spawn=Pool(None))
def stop():
server.stop()
gevent.signal(signal.SIGINT, stop)
if __name__ == "__main__":
server.serve_forever()
| Jumpscale/go-raml | codegen/fixtures/congo/python_server/server.py | Python | bsd-2-clause | 425 | 0 |
from django.utils.translation import ugettext, ugettext_lazy as _
from django.conf import settings
STATE_CHOICES = [
('AN', 'Ancona'),
('AP', 'Ascoli Piceno'),
('FM', 'Fermo'),
('MC', 'Macerata'),
('PU', 'Pesaro Urbino')
]
SUPPLIER_FLAVOUR_LIST = [
('COMPANY', _('Company')),
('COOPERATING', _('Cooperating')),
('FREELANCE', _('Freelance')),
]
MU_CHOICES = [('Km', 'Km')]
ALWAYS_AVAILABLE = 1000000000
PHONE = 'PHONE'
EMAIL = 'EMAIL'
FAX = 'FAX'
#WWW = 'WWW'
CONTACT_CHOICES = [
(PHONE, _('PHONE')),
(EMAIL, _('EMAIL')),
(FAX, _('FAX')),
]
# (WWW, _('WWW')),
DAY_CHOICES = [
('MONDAY', _('Monday')),
('TUESDAY', _('Tuesday')),
('WEDNESDAY', _('Wednesday')),
('THURSDAY', _('Thursday')),
('FRIDAY', _('Friday')),
('SATURDAY', _('Saturday')),
('SUNDAY', _('Sunday')),
]
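# Illustrative usage sketch (not part of the original module): these pairs
# are meant to feed Django field choices, e.g.
#
#     from django.db import models
#
#     class ContactInfo(models.Model):
#         kind = models.CharField(max_length=16, choices=CONTACT_CHOICES,
#                                 default=EMAIL)
#         value = models.CharField(max_length=256)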
| michelesr/gasistafelice | gasistafelice/gf/base/const.py | Python | agpl-3.0 | 851 | 0.00235 |
# This file is part of the Fluggo Media Library for high-quality
# video and audio processing.
#
# Copyright 2010-2 Brian J. Crowell <brian@fluggo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections, itertools
from .items import *
from fluggo import logging
from PyQt4.QtCore import *
from PyQt4.QtGui import *
logger = logging.getLogger(__name__)
_Placement = collections.namedtuple('_Placement', 'min max index')
def _split_sequence_items_by_overlap(items):
'''Splits the *items* (which should all belong to the same sequence and be
sorted by index) into a list of lists of items, where items in each list overlap,
but do not overlap with items in the other lists (and thus each list can be
moved independently).'''
if not items:
return []
next_list = [items[0]]
result = [next_list]
for item in items[1:]:
if item.index != next_list[-1].index + 1 or next_list[-1].transition_length >= 0:
next_list = [item]
result.append(next_list)
else:
next_list.append(item)
return result
def _split_sequence_items_by_adjacency(items):
'''Splits the *items* (which should all belong to the same sequence and be
sorted by index) into a list of lists of items, where items in each list are
adjacent (have indexes that differ by one)'''
if not items:
return []
next_list = [items[0]]
result = [next_list]
start_offset = items[0].x
for item in items[1:]:
if item.index != next_list[-1].index + 1:
next_list = [item]
result.append(next_list)
else:
next_list.append(item)
return result
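# Illustrative sketch (stand-in items, not part of the original module):
# given sequence items at indexes [0, 1, 3, 4], both helpers start a new
# group at the index gap between 1 and 3, so each returns two lists:
#
#     _split_sequence_items_by_adjacency(items)  # -> [[i0, i1], [i3, i4]]
#
# _split_sequence_items_by_overlap additionally starts a new group wherever
# the transition between neighbours does not lock them together, so each of
# its groups can be moved independently of the others.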
class SequenceItemsMover:
'''Class for moving any group of sequence items.
The items should either all belong to the same sequence or not belong to a
sequence at all. If they don't belong to a sequence, they need to already be
in the right order.'''
def __init__(self, items):
if items[0].sequence:
# Sort and set positions by index and x
items = sorted(items, key=lambda a: a.index)
base_x = items[0].x
self.overlap_movers = list(
SequenceOverlapItemsMover(group, group[0].x - base_x) for group in
_split_sequence_items_by_overlap(items))
else:
# Update _x attributes before we go into this
x = 0
index = 0
for item in items:
if index != 0:
x -= item.transition_length
item._x = x
item._index = index
x += item.length
index += 1
self.overlap_movers = list(
SequenceOverlapItemsMover(group, group[0].x) for group in
_split_sequence_items_by_overlap(items))
def to_item(self, height=10.0, x=0, y=0):
'''Return a space Item for containing the items from this SequenceItemsMover.
If there is one item, this will be a Clip. Otherwise, it will be a Sequence.
The items will be cloned for moving to the new sequence.'''
if len(self.overlap_movers) == 1 and len(self.overlap_movers[0].items) == 1:
# Make a clip
item = self.overlap_movers[0].items[0]
return Clip(
x=x, y=y,
length=item.length,
height=height,
type=item.type(),
source=item.source,
offset=item.offset,
in_motion=item.in_motion,
anchor=item.anchor)
seq_items = []
last_x = 0
for group in self.overlap_movers:
items = group.clone_items()
items[0].update(transition_length=-(group.offset - last_x))
seq_items.extend(items)
last_x = group.offset + group.length
return Sequence(x=x, y=y, type=seq_items[0].type(), items=seq_items,
height=height, in_motion=self.overlap_movers[0].items[0].in_motion)
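# Illustrative sketch (names assumed): lifting selected sequence items out
# into a standalone space item.
#
#     mover = SequenceItemsMover(selected_items)
#     item = mover.to_item(height=20.0, x=300, y=2.0)
#     space.insert(0, item)  # a Clip for a single item, a Sequence otherwise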
class SequenceOverlapItemsMover:
'''Mover for overlapping items belonging to the same sequence.'''
def __init__(self, items, offset=None):
'''Creates an overlapping items mover with the given *items*.
*offset* can be used to store the group's offset from other groups.'''
self.items = items
self.offset = offset
# The items may not have x values, or they may not be accurate
# Calculate the long way
self.length = sum(items[i].length - (items[i].transition_length if i > 0 else 0)
for i in range(len(items)))
# max_fadeout_length: The maximum value for the next item's
# transition_length, which is the distance from the end of our last item
# to max(start of item, end of item's transition); since these items are
# overlapping, the last item must have a positive transition_length
self.max_fadeout_length = items[-1].length
# Ditto, but at the beginning of the items
self.max_fadein_length = items[0].length
if len(items) > 1:
self.max_fadeout_length -= items[-1].transition_length
self.max_fadein_length -= items[1].transition_length
def clone_items(self):
'''Clones all of this mover's items. The cloned items will be homeless.'''
return [item.clone() for item in self.items]
def clone(self):
'''Clones this mover and all of its items. The cloned items will be homeless.'''
return SequenceOverlapItemsMover(self.clone_items(), offset=self.offset)
@classmethod
def from_clip(cls, clip):
seq_item = SequenceItem(source=clip.source,
length=clip.length,
offset=clip.offset,
transition_length=0,
type=clip.type(),
in_motion=clip.in_motion)
return cls([seq_item])
class NoRoomError(Exception):
def __init__(self, message='There is no room for the item.', *args, **kw):
Exception.__init__(self, message, *args, **kw)
class AddOverlapItemsToSequenceCommand(QUndoCommand):
def __init__(self, sequence, mover, x, parent=None):
'''This is where the real add work is done. To add a clip to a sequence,
convert it to a sequence item and add it to a SequenceOverlapItemsMover.
*x* is space-relative. The clips should not belong to a sequence already.
If the given mover can be placed at two indexes, it it is put at the
lower index.'''
QUndoCommand.__init__(self, 'Add overlapping items to sequence', parent)
self.sequence = sequence
self.mover = mover
self.x = x
if self.sequence.type() != self.mover.items[0].type():
raise NoRoomError('The item type is incompatible with the sequence type.')
# We do a check here, but we'll be doing it again at the actual insert
if self.where_can_fit(x) is None:
raise NoRoomError
# BJC: Note that we don't calculate the new transition_lengths or keep
# the next item here; this is because the nearby items are allowed to
# change (in certain ways) between the creation of this command and its
# execution
self.orig_transition_length = self.mover.items[0].transition_length
def redo(self):
index = self.where_can_fit(self.x)
if index is None:
raise NoRoomError
self.index = index
x = self.x - self.sequence.x
self.orig_sequence_x = self.sequence.x
# at_index - Item at the insertion index
at_index = self.sequence[index] if index < len(self.sequence) else None
at_start = at_index and not at_index.previous_item()
prev_item = at_index.previous_item() if at_index else self.sequence[-1]
removed_x = 0
old_x = at_index.x if at_index else self.sequence.length
self.orig_next_item = index < len(self.sequence) and self.sequence[index] or None
self.orig_next_item_trans_length = self.orig_next_item and self.orig_next_item.transition_length
# Hit the mark "x"
self.mover.items[0].update(transition_length=
0 if at_start else old_x - x + (at_index.transition_length if at_index else 0))
self.sequence[index:index] = self.mover.items
if self.orig_next_item:
# Retain this item's position in spite of any removals/insertions in self.item.insert
self.orig_next_item.update(transition_length=self.mover.length - (old_x - x) - removed_x)
if at_start:
# Move the sequence to compensate for insertions at the beginning
self.sequence.update(x=self.sequence.x - (old_x - x) - removed_x)
def undo(self):
# Pop the items out of the sequence (they will be homeless)
del self.sequence[self.index:self.index + len(self.mover.items)]
if self.sequence.x != self.orig_sequence_x:
self.sequence.update(x=self.orig_sequence_x)
# Reset the before-and-after
self.mover.items[0].update(transition_length=self.orig_transition_length)
if self.orig_next_item:
self.orig_next_item.update(transition_length=self.orig_next_item_trans_length)
del self.index
del self.orig_next_item
del self.orig_next_item_trans_length
def determine_range(self, index):
'''Determine the range where a clip will fit. These are tuples of (min, max, mark).
*min* and *max* are offsets from the beginning of the scene. *mark* is a
left-gravity mark in the sequence at the index. If the
item can't fit at all at an index, None might be returned.'''
if index < 0 or index > len(self.sequence):
raise IndexError('index out of range')
if index < len(self.sequence):
seq_item = self.sequence[index]
if seq_item.transition_length > 0 and seq_item.index > 0:
# Previous item is locked in a transition with us and is here to stay
return None
# If the item before that is in motion, we have to ignore prev_item's
# transition_length (which would otherwise be zero or less)
prev_item = seq_item.previous_item()
prev_prev_item = prev_item and prev_item.previous_item()
# Find next_item so we can get its transition_length
next_item = seq_item.next_item()
_min = max(
# Previous item's position plus any transition_length it has
(prev_item.x +
(max(0, prev_item.transition_length) if prev_prev_item else 0))
# Or the space before the sequence if this item is first
# (but really, it could go as far back as it wants)
if prev_item else -self.mover.length,
# The beginning of this clip (or the gap before it)
seq_item.x + min(0, seq_item.transition_length)
- (self.mover.max_fadein_length if prev_item else self.mover.length))
_max = (
# At the item's start
seq_item.x
# But back to the beginning of the mover, so they don't overlap
- self.mover.length
# How much they can overlap
+ min(self.mover.max_fadeout_length,
seq_item.length - (next_item.transition_length if next_item else 0)))
_min += self.sequence.x
_max += self.sequence.x
if not prev_item:
_min = None
elif _max < _min:
return None
return _Placement(_min, _max, index)
else:
# Final index
prev_item = self.sequence[-1]
# If the item before that is in motion, we have to ignore prev_item's
# transition_length (which would otherwise be zero or less)
prev_prev_item = prev_item and prev_item.previous_item()
_min = max(
# Previous item's position plus any transition_length it has
prev_item.x +
(max(0, prev_item.transition_length) if prev_prev_item else 0),
# End of the sequence minus how much fadein we can give it
prev_item.x + prev_item.length - self.mover.max_fadein_length)
_min += self.sequence.x
return _Placement(_min, None, index)
def where_can_fit(self, x):
'''Returns index where the item would be inserted if it can fit, None if it won't.
"x" is space-relative.'''
# TODO: This would be faster as a binary search
# Or a simple optimization would be to skip indexes where X is too low
for _range in (self.determine_range(i) for i in range(len(self.sequence) + 1)):
if not _range:
continue
if (_range.min is None or x >= _range.min) and (_range.max is None or x <= _range.max):
return _range.index
return None
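# Illustrative sketch (names assumed): pushing the command above onto an
# undo stack; the constructor raises NoRoomError early when no index can
# accept the items at the given scene-relative x.
#
#     mover = SequenceOverlapItemsMover.from_clip(clip)
#     undo_stack.push(AddOverlapItemsToSequenceCommand(sequence, mover, x=120))
#     undo_stack.undo()  # pops the items back out and restores the timing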
class CompoundCommand(QUndoCommand):
'''A command consisting of other commands. This lets us create a compound
command (which QUndoCommand already supports) after its constituent commands
have already been created and done.'''
def __init__(self, text, commands, done=False, parent=None):
QUndoCommand.__init__(self, text, parent)
self._commands = commands
self._done = done
def redo(self):
if not self._done:
for command in self._commands:
command.redo()
self._done = True
def undo(self):
if self._done:
for command in reversed(self._commands):
command.undo()
self._done = False
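# Illustrative sketch (names assumed): wrapping commands that were already
# executed interactively, so pushing onto a QUndoStack does not re-run them.
#
#     cmds = [MoveItemCommand(item, x=10.0, y=2.0)]
#     for cmd in cmds:
#         cmd.redo()                    # perform the moves immediately
#     undo_stack.push(CompoundCommand('Move items', cmds, done=True))
#     # QUndoStack.push() calls redo(), a no-op here because done=True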
class UpdateItemPropertiesCommand(QUndoCommand):
'''Updates the given properties of an item. This can be used to move the item
around.'''
def __init__(self, item, parent=None, **properties):
QUndoCommand.__init__(self, 'Update item properties', parent)
self.item = item
self.orig_values = {name: getattr(item, name) for name in properties}
self.new_values = properties
self.done = False
def mergeWith(self, next):
'''This command *can* be merged, but only manually.'''
if not isinstance(next, UpdateItemPropertiesCommand):
return False
self.new_values.update(next.new_values)
return True
def redo(self):
if not self.done:
self.item.update(**self.new_values)
self.done = True
def undo(self):
if self.done:
self.item.update(**self.orig_values)
self.done = False
class MoveItemCommand(QUndoCommand):
# In recognition that moving an item is likely to get more complicated.
def __init__(self, item, x, y, parent=None):
QUndoCommand.__init__(self, 'Move item', parent)
self.item = item
self.command = UpdateItemPropertiesCommand(item, x=x, y=y, parent=self)
def mergeWith(self, next):
'''This command *can* be merged, but only manually.'''
if not isinstance(next, MoveItemCommand):
return False
self.command.mergeWith(next.command)
return True
def redo(self):
if self.item.space is None:
raise RuntimeError('Item must belong to a space to use MoveItemCommand.')
self.command.redo()
def undo(self):
self.command.undo()
class AddSequenceToSequenceCommand(QUndoCommand):
    def __init__(self, sequence, mover, x, parent=None):
        '''Adds a given SequenceItemsMover to a *sequence* at the given scene-relative *x*.
        The mover's items are added directly, and therefore should not belong to
        a sequence; if you don't want this, you should produce a copy first.
        If the constructor raises a NoRoomError, the addition isn't possible.'''
        QUndoCommand.__init__(self, 'Add sequence to sequence', parent)
        # Child commands are parented to this command so that QUndoCommand
        # executes them as a unit.
        for group in mover.overlap_movers:
            AddOverlapItemsToSequenceCommand(sequence, group, x + group.offset,
                parent=self)
class MoveSequenceOverlapItemsInPlaceCommand(QUndoCommand):
def __init__(self, mover, offset, parent=None):
'''Moves the given SequenceOverlapItemsMover back and forth in a sequence.
This command does not change the index of the items, just their distance
to the previous and next items. As such, you'll get a NoRoomError if you
try to move them too far. The NoRoomError does not occur until redo(),
but you can call check_room() early if you want.
This command can be merged with another MoveSequenceOverlapItemsInPlaceCommand,
provided they refer to the same *mover*.
'''
QUndoCommand.__init__(self, 'Move overlapping sequence items in place', parent)
self.mover = mover
self.offset = offset
self.sequence = self.mover.items[0].sequence
if not self.sequence:
raise ValueError('The given items are not in a sequence.')
def id(self):
return id(MoveSequenceOverlapItemsInPlaceCommand)
def mergeWith(self, command):
if not isinstance(command, MoveSequenceOverlapItemsInPlaceCommand):
return False
if self.mover is not command.mover:
return False
# For future reference-- not that it matters here-- the order of events
# is *this* command followed by the command given as a parameter.
        self.offset += command.offset
        return True
def check_room(self):
# TODO: We do not consider whether the items around us are in motion,
# leading to an inefficiency that we don't know if all the items *can*
# be moved until all the items are moved; this can be improved
next_item = self.mover.items[-1].next_item()
previous_item = self.mover.items[0].previous_item()
if self.offset > 0 and next_item:
next_next_item = next_item.next_item()
max_offset = min(
# How much room is left in the next item
next_item.length
- max(next_next_item.transition_length if next_next_item else 0, 0)
- next_item.transition_length,
# How much room is left in the max_fadeout_length
self.mover.max_fadeout_length - next_item.transition_length)
if self.offset > max_offset:
raise NoRoomError
if self.offset < 0 and previous_item:
min_offset = -min(
# How much room is left in the previous item
previous_item.length
- self.mover.items[0].transition_length
- max(previous_item.transition_length, 0),
# How much room is left in the max_fadein_length
self.mover.max_fadein_length - self.mover.items[0].transition_length)
if self.offset < min_offset:
raise NoRoomError
def redo(self):
self.check_room()
next_item = self.mover.items[-1].next_item()
if next_item:
next_item.update(transition_length=next_item.transition_length + self.offset)
if self.mover.items[0].index == 0:
# First index-- move the sequence
self.sequence.update(x=self.sequence.x + self.offset)
else:
# Update our own transition_length
self.mover.items[0].update(
transition_length=self.mover.items[0].transition_length - self.offset)
def undo(self):
next_item = self.mover.items[-1].next_item()
if next_item:
next_item.update(transition_length=next_item.transition_length - self.offset)
if self.mover.items[0].index == 0:
# First index-- move the sequence
self.sequence.update(x=self.sequence.x - self.offset)
else:
# Update our own transition_length
self.mover.items[0].update(
transition_length=self.mover.items[0].transition_length + self.offset)
class MoveSequenceItemsInPlaceCommand(QUndoCommand):
def __init__(self, mover, offset, parent=None):
'''Moves the given SequenceItemsMover back and forth in a sequence.
This command does not change the index of the items, just their distance
to the previous and next items. As such, you'll get a NoRoomError if you
try to move them too far. The NoRoomError does not occur until redo(),
but you can call check_room() early if you want.
This command can be merged with another MoveSequenceItemsInPlaceCommand, provided
they refer to the same *mover*.
'''
# This can be seen as just a series of MoveOverlapSequenceItemsInPlaceCommand,
# and that's just what we do, but there's a catch: without our original
# in_motion checker algorithm (which I'd rather not go back to), these
# commands must happen in the right order, and pretty much need to be
# executed to see if they'll work. Someone in the future can work out a
# shortcut algorithm to check the moves before we attempt them.
QUndoCommand.__init__(self, 'Move sequence items in place', parent)
self.mover = mover
self.offset = offset
self.sequence = self.mover.overlap_movers[0].items[0].sequence
if not self.sequence:
raise ValueError('The given items are not in a sequence.')
if offset < 0:
self.commands = [MoveSequenceOverlapItemsInPlaceCommand(overlap_mover, offset)
for overlap_mover in mover.overlap_movers]
else:
self.commands = [MoveSequenceOverlapItemsInPlaceCommand(overlap_mover, offset)
for overlap_mover in reversed(mover.overlap_movers)]
def id(self):
return id(MoveSequenceItemsInPlaceCommand)
def mergeWith(self, command):
if not isinstance(command, MoveSequenceItemsInPlaceCommand):
return False
if self.mover is not command.mover:
return False
# Combine commands
if (self.offset < 0) != (command.offset < 0):
for c1, c2 in zip(reversed(self.commands), command.commands):
c1.mergeWith(c2)
else:
for c1, c2 in zip(self.commands, command.commands):
c1.mergeWith(c2)
# Reverse our commands if we're now going the other way
if (self.offset < 0) != (self.offset + command.offset < 0):
self.commands.reverse()
        self.offset += command.offset
        return True
def check_room(self):
# If redo() fails, redo() will roll itself back and raise an exception.
# If redo() succeeds, we undo() to roll it back, and there is no exception.
# TODO: Really, there's probably an algorithm we can use here to avoid
# moving anything.
self.redo()
self.undo()
def redo(self):
cmd_index = -1
try:
for i in range(len(self.commands)):
self.commands[i].redo()
cmd_index = i
except:
for i in range(cmd_index, -1, -1):
self.commands[i].undo()
raise
def undo(self):
for command in reversed(self.commands):
command.undo()
class RemoveAdjacentItemsFromSequenceCommand(QUndoCommand):
'''Removes adjacent (or single) items from a sequence, trying not to disturb
the timing in the sequence.
This command may move the sequence or adjust the transition lengths of items
to retain the sequence's timing.'''
def __init__(self, items, parent=None):
# Items supplied to this command need to be adjacent in the same sequence
# TODO: How does this kind of command, which hangs onto old clips,
# interact with asset name changes?
# If the user does change an asset name as it is, at the very least when
# they undo over this step, the graph manager will look for an asset
# that's not there (or worse, a different asset with the same name!).
# (1) Perhaps this can be solved with a kind of "global" command, one that
# appears on all stacks and undoes the global action when traversed.
# (2) Or we can reach into the undo stack and commit name changes there,
# too? Certain commands will listen to the asset list and modify items
# they hold?
# (3) Maybe there is only one undo stack to begin with? That kind of
# stack could undo name changes. -- IXNAY, users won't like that.
# (4) Or, as above, accept that we can't track all asset name changes,
# and leave it up to the user to do something smart. This at least
# can hold until I get a better idea on what to do.
QUndoCommand.__init__(self, 'Delete adjacent item(s) from sequence', parent)
for i in range(0, len(items) - 1):
if items[i].index != items[i+1].index - 1:
raise ValueError('This operation is only supported on adjacent items.')
self.items = items
self.original_sequence = items[0].sequence
# Original position X in scene
self.original_x = items[0].x + self.original_sequence.x
self.length = items[-1].x + items[-1].length - items[0].x
self.original_sequence_index = items[0].index
self.original_next = items[-1].next_item()
self.original_next_trans_length = self.original_next and self.original_next.transition_length
self.orig_trans_length = items[0].transition_length
def redo(self):
del self.original_sequence[self.original_sequence_index:self.original_sequence_index + len(self.items)]
        if self.original_sequence_index == 0:
            self.original_sequence.update(x=self.original_sequence.x + self.length
                - (self.original_next.transition_length if self.original_next else 0))
if self.original_next:
self.original_next.update(transition_length=0 if self.original_sequence_index == 0 else (self.original_next_trans_length - self.length + self.orig_trans_length))
def undo(self):
self.original_sequence[self.original_sequence_index:self.original_sequence_index] = self.items
self.items[0].update(transition_length=self.orig_trans_length)
if self.original_sequence_index == 0:
self.original_sequence.update(x=self.original_x)
if self.original_next:
self.original_next.update(transition_length=self.original_next_trans_length)
class RemoveItemCommand(QUndoCommand):
'''Removes an item from its container.
This really works for any item in any mutable list, so long as the list's
index method can find it. But this means it can also work for spaces.
Sequences have special requirements as far as keeping items where they are.
Use the RemoveItemsFromSequenceCommand to handle those.'''
def __init__(self, list_, item, parent=None):
QUndoCommand.__init__(self, 'Delete item', parent)
self.list = list_
self.item = item
def redo(self):
self.index = self.list.index(self.item)
del self.list[self.index]
def undo(self):
self.list.insert(self.index, self.item)
class InsertItemCommand(QUndoCommand):
'''Inserts an item into a list.
This really works for any item in any mutable list, but it can also work for
spaces.
Sequences have special requirements as far as keeping items where they are.
    Use the AddOverlapItemsToSequenceCommand to handle those.'''
def __init__(self, list_, item, index, parent=None):
QUndoCommand.__init__(self, 'Insert item', parent)
self.list = list_
self.item = item
self.index = index
def redo(self):
self.list.insert(self.index, self.item)
def undo(self):
del self.list[self.index]
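# RemoveItemCommand and InsertItemCommand are composed below by
# BringItemForwardCommand and SendItemBackCommand: removing an item and
# re-inserting it at a different index expresses a z-order change as an
# undoable pair.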
class RemoveItemsFromSequenceCommand(QUndoCommand):
'''Removes any set of items from a sequence. Note that each item needs to
belong to the same sequence and must be specified only once.
If all the items of a sequence are specified, the whole sequence is removed.'''
def __init__(self, items, parent=None):
QUndoCommand.__init__(self, 'Delete item(s) from sequence', parent)
if len(items) == len(items[0].sequence):
# Just remove the whole sequence
RemoveItemCommand(items[0].sequence.space, items[0].sequence, self)
else:
items = sorted(items, key=lambda a: a.index)
for group in _split_sequence_items_by_adjacency(items):
RemoveAdjacentItemsFromSequenceCommand(group, parent=self)
class _AdjustClipHandleCommand(QUndoCommand):
def __init__(self, text, item, offset, command, parent=None):
QUndoCommand.__init__(self, text, parent)
self.item = item
self.offset = offset
self.command = command
def id(self):
return id(self.__class__)
def mergeWith(self, next):
'''This command *can* be merged, but only manually.'''
if not isinstance(next, self.__class__) or self.item != next.item:
return False
self.command.mergeWith(next.command)
self.offset += next.offset
return True
def redo(self):
if self.item.space is None:
raise RuntimeError('Item must belong to a space to use ' + str(self.__class__) + '.')
self.command.redo()
def undo(self):
self.command.undo()
class AdjustClipLengthCommand(_AdjustClipHandleCommand):
'''Adjusts the length of a clip.'''
def __init__(self, item, offset):
if item.length + offset <= 0:
raise NoRoomError
_AdjustClipHandleCommand.__init__(self,
'Adjust clip length', item, offset,
UpdateItemPropertiesCommand(item, length=item.length + offset))
class AdjustClipStartCommand(_AdjustClipHandleCommand):
'''Adjusts the start of a clip.'''
def __init__(self, item, offset):
if item.length - offset <= 0:
raise NoRoomError
_AdjustClipHandleCommand.__init__(self,
'Adjust clip start', item, offset,
UpdateItemPropertiesCommand(item,
x=item.x + offset,
offset=item.offset + offset,
length=item.length - offset))
class SlipBehindCommand(_AdjustClipHandleCommand):
'''Adjusts the offset of a clip.'''
def __init__(self, item, offset):
_AdjustClipHandleCommand.__init__(self,
'Slip behind clip', item, offset,
UpdateItemPropertiesCommand(item,
offset=item.offset + offset))
class AdjustClipTopCommand(_AdjustClipHandleCommand):
'''Adjusts the top of a clip.'''
def __init__(self, item, offset):
if item.height - offset <= 0.0:
raise NoRoomError
_AdjustClipHandleCommand.__init__(self,
'Adjust clip top', item, offset,
UpdateItemPropertiesCommand(item,
y=item.y + offset,
height=item.height - offset))
class AdjustClipHeightCommand(_AdjustClipHandleCommand):
'''Adjusts the height of a clip.'''
def __init__(self, item, offset):
if item.height + offset <= 0.0:
raise NoRoomError
_AdjustClipHandleCommand.__init__(self,
'Adjust clip height', item, offset,
UpdateItemPropertiesCommand(item,
height=item.height + offset))
class AdjustSequenceItemStartCommand(QUndoCommand):
'''Adjusts the start of a sequence item without affecting the timing of its
neighbors.'''
def __init__(self, item, offset):
if not item.sequence:
raise RuntimeError('Item needs to belong to a sequence.')
prev_item = item.previous_item()
next_item = item.next_item()
if item.length - offset < 1:
raise NoRoomError('Cannot set length to zero or less.')
if prev_item:
prev_room = (prev_item.length
# Room taken up by its own transition
- max(prev_item.transition_length, 0)
# Room taken up by ours
- max(item.transition_length - offset, 0))
if prev_room < 0:
raise NoRoomError
if next_item:
# Don't run past the start of the next item
if item.length - offset < next_item.transition_length:
raise NoRoomError('Cannot move point past start of next item.')
QUndoCommand.__init__(self, 'Adjust sequence clip start')
self.item = item
self.offset = offset
self.item_command = UpdateItemPropertiesCommand(item,
transition_length=item.transition_length - offset if prev_item else 0,
offset=item.offset + offset,
length=item.length - offset)
self.seq_command = not prev_item and UpdateItemPropertiesCommand(item.sequence,
x=item.sequence.x + offset)
def id(self):
return id(self.__class__)
def mergeWith(self, next):
if not isinstance(next, self.__class__) or self.item != next.item:
return False
self.item_command.mergeWith(next.item_command)
self.offset += next.offset
if self.seq_command:
self.seq_command.mergeWith(next.seq_command)
return True
def redo(self):
self.item_command.redo()
if self.seq_command:
self.seq_command.redo()
def undo(self):
if self.seq_command:
self.seq_command.undo()
self.item_command.undo()
class AdjustSequenceItemLengthCommand(QUndoCommand):
'''Adjusts the length of a sequence item without affecting the timing of its
neighbors.'''
def __init__(self, item, offset):
if not item.sequence:
raise RuntimeError('Item needs to belong to a sequence.')
next_item = item.next_item()
next_next_item = next_item and next_item.next_item()
if item.length + offset < 1:
raise NoRoomError('Cannot set length to zero or less.')
if next_item:
next_room = (next_item.length
# Room taken up by its own transition
- (next_item.transition_length + offset)
# Room taken up by the next
- max(next_next_item.transition_length if next_next_item else 0, 0))
if next_room < 0:
raise NoRoomError
QUndoCommand.__init__(self, 'Adjust sequence clip length')
self.item = item
self.offset = offset
self.item_command = UpdateItemPropertiesCommand(item,
length = item.length + offset)
self.next_command = next_item and UpdateItemPropertiesCommand(next_item,
transition_length=next_item.transition_length + offset)
def id(self):
return id(self.__class__)
def mergeWith(self, next):
if not isinstance(next, self.__class__) or self.item != next.item:
return False
self.item_command.mergeWith(next.item_command)
self.offset += next.offset
if self.next_command:
self.next_command.mergeWith(next.next_command)
return True
def redo(self):
self.item_command.redo()
if self.next_command:
self.next_command.redo()
def undo(self):
if self.next_command:
self.next_command.undo()
self.item_command.undo()
class BringItemForwardCommand(QUndoCommand):
def __init__(self, item):
QUndoCommand.__init__(self, 'Bring item forward')
self.item = item
self.remove_command = None
self.insert_command = None
def redo(self):
item = self.item
key = item.z
overlaps = item.overlap_items()
above_items = [x.z for x in overlaps if x.z < key]
if not above_items:
return
bottom_z = max(above_items)
self.remove_command = RemoveItemCommand(item.space, item)
self.insert_command = InsertItemCommand(item.space, item, bottom_z)
self.remove_command.redo()
self.insert_command.redo()
def undo(self):
if self.insert_command:
self.insert_command.undo()
self.insert_command = None
self.remove_command.undo()
self.remove_command = None
class SendItemBackCommand(QUndoCommand):
def __init__(self, item):
QUndoCommand.__init__(self, 'Send item back')
self.item = item
self.remove_command = None
self.insert_command = None
def redo(self):
item = self.item
key = item.z
overlaps = item.overlap_items()
below_items = [x.z for x in overlaps if x.z > key]
if not below_items:
return
top_z = min(below_items)
self.remove_command = RemoveItemCommand(item.space, item)
self.insert_command = InsertItemCommand(item.space, item, top_z)
self.remove_command.redo()
self.insert_command.redo()
def undo(self):
if self.insert_command:
self.insert_command.undo()
self.insert_command = None
self.remove_command.undo()
self.remove_command = None
| fluggo/Canvas | fluggo/editor/model/commands.py | Python | gpl-3.0 | 38,160 | 0.004271 |
#!/home/vincenzo/Development/RobHome/venv/bin/python
# $Id: rst2s5.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Chris Liechti <cliechti@gmx.net>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML slides using
the S5 template system.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates S5 (X)HTML slideshow documents from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='s5', description=description)
| burzillibus/RobHome | venv/bin/rst2s5.py | Python | mit | 671 | 0.00149 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for run_finetuning_lib."""
import tensorflow.compat.v1 as tf
from etcmodel.models.openkp import run_finetuning_lib
class RunFinetuningLibTest(tf.test.TestCase):
def test_dense_feature_scaler(self):
tensor = tf.constant([-15., -12., -20., 5., -100.])
expected = [0, 0.6, -1., 1., -1.]
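    # The scaler presumably maps [min_value, max_value] linearly onto [-1, 1]
    # and clips values outside that range: e.g. -15 -> 2*(-15 - -20)/10 - 1 = 0,
    # while 5 and -100 saturate at 1 and -1.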
scaler = run_finetuning_lib.DenseFeatureScaler(min_value=-20, max_value=-10)
self.assertAllClose(expected, scaler.transform(tensor))
def test_dense_feature_scaler_invalid_range(self):
with self.assertRaises(ValueError):
run_finetuning_lib.DenseFeatureScaler(min_value=10, max_value=5)
with self.assertRaises(ValueError):
run_finetuning_lib.DenseFeatureScaler(min_value=10, max_value=10)
def test_indicators_to_id(self):
indicator1 = tf.constant([0, 1, 0, 1], dtype=tf.int32)
indicator2 = tf.constant([1, 0, 0, 1], dtype=tf.int32)
indicator3 = tf.constant([0, 1, 1, 1], dtype=tf.int32)
expected = [2, 5, 1, 7]
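    # Each id reads the indicators as binary digits with indicator1 as the most
    # significant bit: e.g. position 1 gives (1, 0, 1) -> 0b101 = 5.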
self.assertAllEqual(
expected,
run_finetuning_lib.indicators_to_id(indicator1, indicator2, indicator3))
def test_gather_global_embeddings_to_long(self):
global_embeddings = [
[
[.1, -.1],
[.2, -.2],
[.3, -.3],
], #
[
[1.1, -1.1],
[1.2, -1.2],
[1.3, -1.3],
], #
[
[2.1, -2.1],
[2.2, -2.2],
[2.3, -2.3],
], #
]
long_vdom_idx = [
[0, 1, 1, 2, 2], #
[0, 0, 0, 1, 2], #
[0, 1, 2, 0, 0], # Padding can be 0 since their embedding is ignored.
]
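    # Row b of the result is a per-example gather along axis 1:
    # result[b][i] = global_embeddings[b][long_vdom_idx[b][i]].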
expected = [
[
[.1, -.1],
[.2, -.2],
[.2, -.2],
[.3, -.3],
[.3, -.3],
], #
[
[1.1, -1.1],
[1.1, -1.1],
[1.1, -1.1],
[1.2, -1.2],
[1.3, -1.3],
], #
[
[2.1, -2.1],
[2.2, -2.2],
[2.3, -2.3],
[2.1, -2.1],
[2.1, -2.1],
], #
]
self.assertAllClose(
expected,
run_finetuning_lib.gather_global_embeddings_to_long(
global_embeddings, long_vdom_idx))
def test_batch_segment_sum_embeddings(self):
# batch_size = 2
# long_max_length = 8
# hidden_size = 2
long_embeddings = tf.constant(
[
[
[0.1, -0.1],
[0.2, -0.2],
[0.3, -0.3],
[0.4, -0.4],
[0.5, -0.5],
[100.0, -100.0], # Padding embeddings may be arbitrary.
[200.0, -200.0],
[300.0, -300.0],
], #
[
[1.1, -1.1],
[1.2, -1.2],
[1.3, -1.3],
[1.4, -1.4],
[1.5, -1.5],
[1.6, -1.6],
[400.0, 400.0], # Padding embeddings may be arbitrary.
[500.0, 500.0],
], #
],
dtype=tf.float32)
long_word_idx = tf.constant(
[
[0, 1, 2, 2, 3, 0, 0, 0], # Padding indices can just be 0.
[0, 0, 0, 1, 2, 2, 0, 0], # Padding indices can just be 0.
],
dtype=tf.int32)
long_input_mask = tf.constant(
[
[1, 1, 1, 1, 1, 0, 0, 0], #
[1, 1, 1, 1, 1, 1, 0, 0], #
],
dtype=tf.int32)
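    # Long embeddings that share a word index are summed per example (a
    # batched segment-sum), and masked positions contribute nothing: in row 0,
    # positions 2 and 3 both map to word 2, giving [0.3 + 0.4, -0.3 - 0.4].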
expected = [
[
[0.1, -0.1],
[0.2, -0.2],
[0.7, -0.7],
[0.5, -0.5],
[0.0, 0.0],
[0.0, 0.0],
[0.0, 0.0],
[0.0, 0.0],
], #
[
[3.6, -3.6],
[1.4, -1.4],
[3.1, -3.1],
[0.0, 0.0],
[0.0, 0.0],
[0.0, 0.0],
[0.0, 0.0],
[0.0, 0.0],
], #
]
self.assertAllClose(
expected,
run_finetuning_lib.batch_segment_sum_embeddings(
long_embeddings=long_embeddings,
long_word_idx=long_word_idx,
long_input_mask=long_input_mask))
def test_make_ngram_labels(self):
label_start_idx = tf.constant([
[1, -1, -1],
[2, 3, 0],
])
label_phrase_len = tf.constant([
[3, -1, -1],
[2, 1, 2],
])
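    # Each (start, length) pair marks one cell of a [kp_max_length,
    # long_max_length] grid (row length-1, column start), with -1 entries
    # acting as padding; the label mass is split evenly across the pairs,
    # hence the 1/3 weights in the second example below.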
long_max_length = 4
kp_max_length = 5
reshaped_expected = [
[
[0, 0, 0, 0], # 1-grams
[0, 0, 0, 0], # 2-grams
[0, 1, 0, 0], # 3-grams
[0, 0, 0, 0], # 4-grams
[0, 0, 0, 0], # 5-grams
],
[
[0, 0, 0, 1 / 3], # 1-grams
[1 / 3, 0, 1 / 3, 0], # 2-grams
[0, 0, 0, 0], # 3-grams
[0, 0, 0, 0], # 4-grams
[0, 0, 0, 0], # 5-grams
],
]
batch_size = len(reshaped_expected)
expected = tf.reshape(reshaped_expected,
[batch_size, kp_max_length * long_max_length])
self.assertAllClose(
expected,
run_finetuning_lib.make_ngram_labels(
label_start_idx=label_start_idx,
label_phrase_len=label_phrase_len,
long_max_length=long_max_length,
kp_max_length=kp_max_length))
def test_make_ngram_labels_additive_smoothing(self):
label_start_idx = tf.constant([
[1, -1, -1],
[2, 3, 0],
])
label_phrase_len = tf.constant([
[3, -1, -1],
[2, 1, 2],
])
long_max_length = 4
kp_max_length = 5
additive_smoothing_mass = 1.0
smoothed_third = (1 / 3) + 0.05
reshaped_unnormalized_expected = [
[
[0.05, 0.05, 0.05, 0.05], # 1-grams
[0.05, 0.05, 0.05, 0.05], # 2-grams
[0.05, 1.05, 0.05, 0.05], # 3-grams
[0.05, 0.05, 0.05, 0.05], # 4-grams
[0.05, 0.05, 0.05, 0.05], # 5-grams
],
[
[0.05, 0.05, 0.05, smoothed_third], # 1-grams
[smoothed_third, 0.05, smoothed_third, 0.05], # 2-grams
[0.05, 0.05, 0.05, 0.05], # 3-grams
[0.05, 0.05, 0.05, 0.05], # 4-grams
[0.05, 0.05, 0.05, 0.05], # 5-grams
],
]
batch_size = len(reshaped_unnormalized_expected)
unnormalized_expected = tf.reshape(
reshaped_unnormalized_expected,
[batch_size, kp_max_length * long_max_length])
expected = (
unnormalized_expected /
tf.reduce_sum(unnormalized_expected, axis=-1, keepdims=True))
self.assertAllClose(
expected,
run_finetuning_lib.make_ngram_labels(
label_start_idx=label_start_idx,
label_phrase_len=label_phrase_len,
long_max_length=long_max_length,
kp_max_length=kp_max_length,
additive_smoothing_mass=additive_smoothing_mass))
if __name__ == '__main__':
tf.test.main()
| google-research/google-research | etcmodel/models/openkp/run_finetuning_lib_test.py | Python | apache-2.0 | 7,530 | 0.001726 |
"""
Tests for botogram/utils.py
Copyright (c) 2015 Pietro Albini <pietro@pietroalbini.io>
Released under the MIT license
"""
import botogram.decorators
def test_help_message_for(bot):
@bot.command("test")
def func():
"""docstring"""
pass
cmd = {cmd.name: cmd for cmd in bot.available_commands()}["test"]
assert cmd.raw_docstring == "docstring"
@botogram.decorators.help_message_for(func)
def help_func():
return "function"
assert cmd.raw_docstring == "function"
| thesharp/botogram | tests/test_decorators.py | Python | mit | 535 | 0 |
import galaxy.model
from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
class UploadData( TwillTestCase ):
def test_000_upload_files_from_disk( self ):
"""Test uploading data files from disk"""
self.logout()
self.login( email='tst@bx.psu.edu' )
history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.upload_file( '1.bed' )
hda1 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda1 is not None, "Problem retrieving hda1 from database"
self.verify_dataset_correctness( '1.bed', hid=str( hda1.hid ) )
self.upload_file( '2.bed', dbkey='hg17' )
hda2 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda2 is not None, "Problem retrieving hda2 from database"
self.verify_dataset_correctness( '2.bed', hid=str( hda2.hid ) )
self.upload_file( '3.bed', dbkey='hg17', ftype='bed' )
hda3 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda3 is not None, "Problem retrieving hda3 from database"
self.verify_dataset_correctness( '3.bed', hid=str( hda3.hid ) )
self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
hda4 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda4 is not None, "Problem retrieving hda4 from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda4.hid ) )
self.upload_file( '1.scf', ftype='scf' )
hda5 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda5 is not None, "Problem retrieving hda5 from database"
self.verify_dataset_correctness( '1.scf', hid=str( hda5.hid ) )
self.upload_file( '1.scf.zip', ftype='binseq.zip' )
hda6 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda6 is not None, "Problem retrieving hda6 from database"
self.verify_dataset_correctness( '1.scf.zip', hid=str( hda6.hid ) )
self.delete_history( id=str( history1.id ) )
def test_005_url_paste( self ):
"""Test url paste behavior"""
# Deleting the current history should have created a new history
self.check_history_for_string( 'Your history is empty' )
history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.upload_url_paste( 'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
self.upload_url_paste( u'hello world' )
self.check_history_for_string( 'Pasted Entry' )
self.check_history_for_string( 'hello world' )
self.delete_history( id=str( history2.id ) )
def test_010_upload_encode_data( self ):
"""Test uploading encode data"""
# Deleting the current history should have created a new history
self.check_history_for_string( 'Your history is empty' )
history3 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
self.run_tool( 'encode_import_chromatin_and_chromosomes1', hg17=['cc.EarlyRepSeg.20051216.bed'] )
self.wait()
hda7 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda7 is not None, "Problem retrieving hda7 from database"
self.verify_dataset_correctness( 'cc.EarlyRepSeg.20051216.bed', hid=str( hda7.hid ) )
self.run_tool('encode_import_gencode1', hg17=['gencode.CDS.20051206.bed'])
self.wait()
hda8 = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
assert hda8 is not None, "Problem retrieving hda8 from database"
self.verify_dataset_correctness( 'sc_3D_cds.bed', hid=str( hda8.hid ) )
self.delete_history( id=str( history3.id ) )
def test_015_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
self.logout()
| dbcls/dbcls-galaxy | test/functional/test_get_data.py | Python | mit | 4,761 | 0.031086 |
#! /usr/bin/python
# this module is for jenkins attacks
from core.webModule import webModule
from requests import session
import requests
import re
from argparse import ArgumentParser
from lxml import html
import random
class jenkinsBrute(webModule):
def __init__(self, config, display, lock):
super(jenkinsBrute, self).__init__(config, display, lock)
self.fingerprint="Jenkins"
self.response="Success"
self.protocol="web"
def somethingCool(self, config, payload, proxy):
#Do something cool here
print("Something Cool not yet implemented")
def connectTest(self, config, payload, proxy, submitLoc, submitType):
#Create a session and check if account is valid
with session() as c:
requests.packages.urllib3.disable_warnings()
cookie = {'JSESSIONID.d29cad0c':'14qy4wdxentt311fbwxdw85z7o'}
            resp1 = c.get(config["HOST"] + '/j_acegi_security_check', cookies=cookie, verify=False, data=payload, proxies=proxy)
            print(resp1.headers)
            cpost = c.post(config["HOST"] + '/employee/j_spring_security_check', cookies=cookie, data=payload, allow_redirects=True, verify=False, proxies=proxy)
            m = re.search('You are unauthorized to access this page.', cpost.text)
            if m:
                print("[+] User Credentials Successful: " + config["USERNAME"] + ":" + config["PASSWORD"])
                if not config["dry_run"]:
                    print("[!] Time to do something cool!")
                    self.somethingCool(config, payload, proxy)
else:
print("[-] Login Failed for: " + config["USERNAME"] + ":" + config["PASSWORD"])
| MooseDojo/myBFF | modules/jenkinsBrute.py | Python | mit | 1,691 | 0.009462 |
from google.appengine.ext import vendor
vendor.add('extensions')
from google.appengine.api import mail
import jinja2
import os
import premailer
_appid = os.getenv('APPLICATION_ID').replace('s~', '')
EMAIL_SENDER = 'noreply@{}.appspotmail.com'.format(_appid)
class Emailer(object):
def __init__(self, sender=None):
self.sender = sender or EMAIL_SENDER
def send(self, to, subject, template_path, kwargs=None):
html = self._render(template_path, kwargs=kwargs)
self._send(subject, to, html)
def _render(self, template_path, kwargs=None):
params = {}
if kwargs:
params.update(kwargs)
template = self.env.get_template(template_path)
html = template.render(params)
return premailer.transform(html)
def _send(self, subject, to, html):
message = mail.EmailMessage(sender=self.sender, subject=subject)
message.to = to
message.html = html
message.send()
@property
def env(self):
path = os.path.join(os.path.dirname(__file__), 'templates')
loader = jinja2.FileSystemLoader([path])
extensions = [
'jinja2.ext.autoescape',
'jinja2.ext.do',
'jinja2.ext.loopcontrols',
'jinja2.ext.with_',
]
return jinja2.Environment(
loader=loader, extensions=extensions, autoescape=True,
trim_blocks=True)
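# Illustrative usage (recipient and template name are assumptions):
#   Emailer().send('user@example.com', 'Welcome', 'welcome.html',
#                  kwargs={'name': 'Ada'})
# This renders templates/welcome.html through Jinja2, inlines the CSS with
# premailer, and sends the result via the App Engine mail API.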
| grow/grow-ext-build-server | grow_build_server/emailer.py | Python | mit | 1,438 | 0.002782 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('patchwork', '0002_fix_patch_state_default_values'),
]
operations = [
migrations.CreateModel(
name='Series',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'Series without cover letter', max_length=200)),
('submitted', models.DateTimeField(default=datetime.datetime.now)),
('last_updated', models.DateTimeField(auto_now=True)),
('version', models.IntegerField(default=1)),
('n_patches', models.IntegerField(default=0)),
('project', models.ForeignKey(to='patchwork.Project')),
('reviewer', models.ForeignKey(related_name='reviewers', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
('submitter', models.ForeignKey(related_name='submitters', to='patchwork.Person')),
],
),
migrations.CreateModel(
name='SeriesRevision',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('version', models.IntegerField(default=1)),
('root_msgid', models.CharField(max_length=255)),
('cover_letter', models.TextField(null=True, blank=True)),
],
options={
'ordering': ['version'],
},
),
migrations.CreateModel(
name='SeriesRevisionPatch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.IntegerField()),
('patch', models.ForeignKey(to='patchwork.Patch')),
('revision', models.ForeignKey(to='patchwork.SeriesRevision')),
],
options={
'ordering': ['order'],
},
),
migrations.AddField(
model_name='seriesrevision',
name='patches',
field=models.ManyToManyField(to='patchwork.Patch', through='patchwork.SeriesRevisionPatch'),
),
migrations.AddField(
model_name='seriesrevision',
name='series',
field=models.ForeignKey(to='patchwork.Series'),
),
migrations.AlterUniqueTogether(
name='seriesrevisionpatch',
unique_together=set([('revision', 'order'), ('revision', 'patch')]),
),
migrations.AlterUniqueTogether(
name='seriesrevision',
unique_together=set([('series', 'version')]),
),
]
| joselamego/patchwork | patchwork/migrations/0003_series.py | Python | gpl-2.0 | 2,993 | 0.003007 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
from __future__ import unicode_literals
from collections import namedtuple
from functools import partial
from pkg_resources import resource_stream # @UnresolvedImport
from .converters import ConverterManager
from . import basestr
COUNTRIES = {}
COUNTRY_MATRIX = []
#: The namedtuple used in the :data:`COUNTRY_MATRIX`
IsoCountry = namedtuple('IsoCountry', ['name', 'alpha2'])
f = resource_stream('babelfish', 'data/iso-3166-1.txt')
f.readline()
for l in f:
iso_country = IsoCountry(*l.decode('utf-8').strip().split(';'))
COUNTRIES[iso_country.alpha2] = iso_country.name
COUNTRY_MATRIX.append(iso_country)
f.close()
class CountryConverterManager(ConverterManager):
""":class:`~babelfish.converters.ConverterManager` for country converters"""
entry_point = 'babelfish.country_converters'
internal_converters = ['name = babelfish.converters.countryname:CountryNameConverter']
country_converters = CountryConverterManager()
class CountryMeta(type):
"""The :class:`Country` metaclass
Dynamically redirect :meth:`Country.frommycode` to :meth:`Country.fromcode` with the ``mycode`` `converter`
"""
def __getattr__(cls, name):
if name.startswith('from'):
return partial(cls.fromcode, converter=name[4:])
return type.__getattribute__(cls, name)
class Country(CountryMeta(str('CountryBase'), (object,), {})):
"""A country on Earth
A country is represented by a 2-letter code from the ISO-3166 standard
:param string country: 2-letter ISO-3166 country code
"""
def __init__(self, country):
if country not in COUNTRIES:
raise ValueError('%r is not a valid country' % country)
#: ISO-3166 2-letter country code
self.alpha2 = country
@classmethod
def fromcode(cls, code, converter):
"""Create a :class:`Country` by its `code` using `converter` to
:meth:`~babelfish.converters.CountryReverseConverter.reverse` it
:param string code: the code to reverse
:param string converter: name of the :class:`~babelfish.converters.CountryReverseConverter` to use
:return: the corresponding :class:`Country` instance
:rtype: :class:`Country`
"""
return cls(country_converters[converter].reverse(code))
def __getstate__(self):
return self.alpha2
def __setstate__(self, state):
self.alpha2 = state
def __getattr__(self, name):
try:
return country_converters[name].convert(self.alpha2)
except KeyError:
raise AttributeError(name)
def __hash__(self):
return hash(self.alpha2)
def __eq__(self, other):
if isinstance(other, basestr):
return str(self) == other
if not isinstance(other, Country):
return False
return self.alpha2 == other.alpha2
def __ne__(self, other):
return not self == other
def __repr__(self):
return '<Country [%s]>' % self
def __str__(self):
return self.alpha2
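# Illustrative usage: Country('US') is built directly from an alpha-2 code,
# while a call such as Country.fromname('Spain') is redirected by CountryMeta
# to Country.fromcode('Spain', converter='name').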
| clinton-hall/nzbToMedia | libs/common/babelfish/country.py | Python | gpl-3.0 | 3,242 | 0.001851 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# generate-diffs.py - generate changes and diff files for new packages
#
# Copyright © 2008 Canonical Ltd.
# Author: Scott James Remnant <scott@ubuntu.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
from momlib import *
from util import tree, run
from model.base import (Distro, PackageVersion)
import model.error
import config
def options(parser):
parser.add_option("-t", "--target", type="string", metavar="TARGET",
default=None,
help="Process only this distribution target")
logger = logging.getLogger('generate_diffs')
def main(options, args):
logger.info('Comparing current and previous versions in source distros...')
# For latest version of each package in the given distributions, iterate the pool in order
# and generate a diff from the previous version and a changes file
for target in config.targets(args):
d = target.distro
for source in d.newestSources(target.dist, target.component):
if options.package and source['Package'] not in options.package:
continue
if source['Package'] in target.blacklist:
logger.debug("%s is blacklisted, skipping", source['Package'])
continue
try:
pkg = d.package(target.dist, target.component, source['Package'])
except model.error.PackageNotFound, e:
logger.exception("Spooky stuff going on with %s.", d)
continue
sources = pkg.poolDirectory().getSourceStanzas()
version_sort(sources)
last = None
try:
for version in pkg.poolDirectory().getVersions():
pv = PackageVersion(pkg, version)
try:
generate_diff(last, pv)
except model.error.PackageNotFound:
logger.exception("Could not find a package to diff against.")
except ValueError:
logger.exception("Could not find a .dsc file, perhaps it moved components?")
finally:
if last is not None:
cleanup_source(last.getSources())
last = pv
finally:
if last is not None:
cleanup_source(last.getSources())
def generate_diff(last, this):
"""Generate the differences."""
changes_filename = changes_file(this.package.distro, this.getSources())
if last is None:
return
if not os.path.isfile(changes_filename) \
and not os.path.isfile(changes_filename + ".bz2"):
try:
unpack_source(this)
except ValueError:
logger.exception("Couldn't unpack %s.", this)
return
try:
save_changes_file(changes_filename, this.getSources(),
last.getSources())
logger.info("Saved changes file: %s",
tree.subdir(ROOT, changes_filename))
except (ValueError, OSError):
logger.error("dpkg-genchanges for %s failed",
tree.subdir(ROOT, changes_filename))
logger.debug("Producing diff from %s to %s", this, last)
diff_filename = diff_file(this.package.distro.name, this.getSources())
if not os.path.isfile(diff_filename) \
and not os.path.isfile(diff_filename + ".bz2"):
unpack_source(this)
unpack_source(last)
save_patch_file(diff_filename, last.getSources(), this.getSources())
save_basis(diff_filename, last.getSources()["Version"])
logger.info("Saved diff file: %s", tree.subdir(ROOT, diff_filename))
if __name__ == "__main__":
run(main, options, usage="%prog [DISTRO...]",
description="generate changes and diff files for new packages")
| dbnicholson/merge-our-misc | generate_diffs.py | Python | gpl-3.0 | 4,253 | 0.006115 |
#!/usr/bin/env python3
# export_to_pdf.py
#
# references
# - https://onesheep.org/scripting-libreoffice-python/
# - http://christopher5106.github.io/office/2015/12/06/openoffice-libreoffice-automate-your-office-tasks-with-python-macros.html
import uno
from com.sun.star.beans import PropertyValue
localContext = uno.getComponentContext()
resolver = localContext.ServiceManager.createInstanceWithContext(
"com.sun.star.bridge.UnoUrlResolver", localContext)
context = resolver.resolve(
"uno:socket,host=localhost,port=2002;urp;StarOffice.ComponentContext")
desktop = context.ServiceManager.createInstanceWithContext(
"com.sun.star.frame.Desktop", context)
model = desktop.getCurrentComponent()
# pdf
#properties=[]
#p=PropertyValue()
#p.Name='FilterName'
#p.Value='calc_pdf_Export'
#properties.append(p)
#model.storeToURL('file:///home/ubuntu/tmp/testout.pdf',tuple(properties))
# access the active sheet
active_sheet = model.CurrentController.ActiveSheet
# access cell C4
cell1 = active_sheet.getCellRangeByName("C4")
# set text inside
cell1.String = "Hello world"
# other example with a value
cell2 = active_sheet.getCellRangeByName("E6")
cell2.Value = cell2.Value + 1
# close the document only after the edits are done (closing it first would
# leave nothing to edit)
model.close(True)
import sys
sys.exit(0)
| pchaitat/invrcptexporter | sample-code/misc/hello_world_librecalc.py | Python | gpl-3.0 | 1,240 | 0.006452 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 (http://hl7.org/fhir/StructureDefinition/ValueSet) on 2017-03-22.
# 2017, SMART Health IT.
from . import domainresource
class ValueSet(domainresource.DomainResource):
""" A set of codes drawn from one or more code systems.
A value set specifies a set of codes drawn from one or more code systems.
"""
resource_type = "ValueSet"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.compose = None
""" Definition of the content of the value set (CLD).
Type `ValueSetCompose` (represented as `dict` in JSON). """
self.contact = None
""" Contact details for the publisher.
List of `ContactDetail` items (represented as `dict` in JSON). """
self.copyright = None
""" Use and/or publishing restrictions.
Type `str`. """
self.date = None
""" Date this was last changed.
Type `FHIRDate` (represented as `str` in JSON). """
self.description = None
""" Natural language description of the value set.
Type `str`. """
self.expansion = None
""" Used when the value set is "expanded".
Type `ValueSetExpansion` (represented as `dict` in JSON). """
self.experimental = None
""" For testing purposes, not real usage.
Type `bool`. """
self.extensible = None
""" Whether this is intended to be used with an extensible binding.
Type `bool`. """
self.identifier = None
""" Additional identifier for the value set.
List of `Identifier` items (represented as `dict` in JSON). """
self.immutable = None
""" Indicates whether or not any change to the content logical
definition may occur.
Type `bool`. """
self.jurisdiction = None
""" Intended jurisdiction for value set (if applicable).
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.name = None
""" Name for this value set (computer friendly).
Type `str`. """
self.publisher = None
""" Name of the publisher (organization or individual).
Type `str`. """
self.purpose = None
""" Why this value set is defined.
Type `str`. """
self.status = None
""" draft | active | retired | unknown.
Type `str`. """
self.title = None
""" Name for this value set (human friendly).
Type `str`. """
self.url = None
""" Logical URI to reference this value set (globally unique).
Type `str`. """
self.useContext = None
""" Context the content is intended to support.
List of `UsageContext` items (represented as `dict` in JSON). """
self.version = None
""" Business version of the value set.
Type `str`. """
super(ValueSet, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSet, self).elementProperties()
js.extend([
("compose", "compose", ValueSetCompose, False, None, False),
("contact", "contact", contactdetail.ContactDetail, True, None, False),
("copyright", "copyright", str, False, None, False),
("date", "date", fhirdate.FHIRDate, False, None, False),
("description", "description", str, False, None, False),
("expansion", "expansion", ValueSetExpansion, False, None, False),
("experimental", "experimental", bool, False, None, False),
("extensible", "extensible", bool, False, None, False),
("identifier", "identifier", identifier.Identifier, True, None, False),
("immutable", "immutable", bool, False, None, False),
("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
("name", "name", str, False, None, False),
("publisher", "publisher", str, False, None, False),
("purpose", "purpose", str, False, None, False),
("status", "status", str, False, None, True),
("title", "title", str, False, None, False),
("url", "url", str, False, None, False),
("useContext", "useContext", usagecontext.UsageContext, True, None, False),
("version", "version", str, False, None, False),
])
return js
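# Illustrative usage (the payload below is an assumption, not from this file):
#   vs = ValueSet({'resourceType': 'ValueSet', 'status': 'active'})
#   vs.status  # 'active'
# With strict=True (the default), an invalid dictionary raises
# FHIRValidationError during initialization.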
from . import backboneelement
class ValueSetCompose(backboneelement.BackboneElement):
""" Definition of the content of the value set (CLD).
A set of criteria that define the content logical definition of the value
    set by including or excluding codes from outside this value set. This is
    also known as the "Content Logical Definition" (CLD).
"""
resource_type = "ValueSetCompose"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.exclude = None
""" Explicitly exclude codes from a code system or other value sets.
List of `ValueSetComposeInclude` items (represented as `dict` in JSON). """
self.inactive = None
""" Whether inactive codes are in the value set.
Type `bool`. """
self.include = None
""" Include one or more codes from a code system or other value set(s).
List of `ValueSetComposeInclude` items (represented as `dict` in JSON). """
self.lockedDate = None
""" Fixed date for version-less references (transitive).
Type `FHIRDate` (represented as `str` in JSON). """
super(ValueSetCompose, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetCompose, self).elementProperties()
js.extend([
("exclude", "exclude", ValueSetComposeInclude, True, None, False),
("inactive", "inactive", bool, False, None, False),
("include", "include", ValueSetComposeInclude, True, None, True),
("lockedDate", "lockedDate", fhirdate.FHIRDate, False, None, False),
])
return js
class ValueSetComposeInclude(backboneelement.BackboneElement):
""" Include one or more codes from a code system or other value set(s).
"""
resource_type = "ValueSetComposeInclude"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.concept = None
""" A concept defined in the system.
List of `ValueSetComposeIncludeConcept` items (represented as `dict` in JSON). """
self.filter = None
""" Select codes/concepts by their properties (including relationships).
List of `ValueSetComposeIncludeFilter` items (represented as `dict` in JSON). """
self.system = None
""" The system the codes come from.
Type `str`. """
self.valueSet = None
""" Select only contents included in this value set.
List of `str` items. """
self.version = None
""" Specific version of the code system referred to.
Type `str`. """
super(ValueSetComposeInclude, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetComposeInclude, self).elementProperties()
js.extend([
("concept", "concept", ValueSetComposeIncludeConcept, True, None, False),
("filter", "filter", ValueSetComposeIncludeFilter, True, None, False),
("system", "system", str, False, None, False),
("valueSet", "valueSet", str, True, None, False),
("version", "version", str, False, None, False),
])
return js
class ValueSetComposeIncludeConcept(backboneelement.BackboneElement):
""" A concept defined in the system.
Specifies a concept to be included or excluded.
"""
resource_type = "ValueSetComposeIncludeConcept"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.code = None
""" Code or expression from system.
Type `str`. """
self.designation = None
""" Additional representations for this concept.
List of `ValueSetComposeIncludeConceptDesignation` items (represented as `dict` in JSON). """
self.display = None
""" Text to display for this code for this value set in this valueset.
Type `str`. """
super(ValueSetComposeIncludeConcept, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetComposeIncludeConcept, self).elementProperties()
js.extend([
("code", "code", str, False, None, True),
("designation", "designation", ValueSetComposeIncludeConceptDesignation, True, None, False),
("display", "display", str, False, None, False),
])
return js
class ValueSetComposeIncludeConceptDesignation(backboneelement.BackboneElement):
""" Additional representations for this concept.
Additional representations for this concept when used in this value set -
other languages, aliases, specialized purposes, used for particular
purposes, etc.
"""
resource_type = "ValueSetComposeIncludeConceptDesignation"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.language = None
""" Human language of the designation.
Type `str`. """
self.use = None
""" Details how this designation would be used.
Type `Coding` (represented as `dict` in JSON). """
self.value = None
""" The text value for this designation.
Type `str`. """
super(ValueSetComposeIncludeConceptDesignation, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetComposeIncludeConceptDesignation, self).elementProperties()
js.extend([
("language", "language", str, False, None, False),
("use", "use", coding.Coding, False, None, False),
("value", "value", str, False, None, True),
])
return js
class ValueSetComposeIncludeFilter(backboneelement.BackboneElement):
""" Select codes/concepts by their properties (including relationships).
    Select concepts by specifying matching criteria based on the properties
(including relationships) defined by the system. If multiple filters are
specified, they SHALL all be true.
"""
resource_type = "ValueSetComposeIncludeFilter"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.op = None
""" = | is-a | descendent-of | is-not-a | regex | in | not-in |
generalizes | exists.
Type `str`. """
self.property = None
""" A property defined by the code system.
Type `str`. """
self.value = None
""" Code from the system, or regex criteria, or boolean value for
exists.
Type `str`. """
super(ValueSetComposeIncludeFilter, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetComposeIncludeFilter, self).elementProperties()
js.extend([
("op", "op", str, False, None, True),
("property", "property", str, False, None, True),
("value", "value", str, False, None, True),
])
return js
class ValueSetExpansion(backboneelement.BackboneElement):
""" Used when the value set is "expanded".
A value set can also be "expanded", where the value set is turned into a
simple collection of enumerated codes. This element holds the expansion, if
it has been performed.
"""
resource_type = "ValueSetExpansion"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.contains = None
""" Codes in the value set.
List of `ValueSetExpansionContains` items (represented as `dict` in JSON). """
self.identifier = None
""" Uniquely identifies this expansion.
Type `str`. """
self.offset = None
""" Offset at which this resource starts.
Type `int`. """
self.parameter = None
""" Parameter that controlled the expansion process.
List of `ValueSetExpansionParameter` items (represented as `dict` in JSON). """
self.timestamp = None
""" Time ValueSet expansion happened.
Type `FHIRDate` (represented as `str` in JSON). """
self.total = None
""" Total number of codes in the expansion.
Type `int`. """
super(ValueSetExpansion, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetExpansion, self).elementProperties()
js.extend([
("contains", "contains", ValueSetExpansionContains, True, None, False),
("identifier", "identifier", str, False, None, True),
("offset", "offset", int, False, None, False),
("parameter", "parameter", ValueSetExpansionParameter, True, None, False),
("timestamp", "timestamp", fhirdate.FHIRDate, False, None, True),
("total", "total", int, False, None, False),
])
return js
class ValueSetExpansionContains(backboneelement.BackboneElement):
""" Codes in the value set.
The codes that are contained in the value set expansion.
"""
resource_type = "ValueSetExpansionContains"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.abstract = None
""" If user cannot select this entry.
Type `bool`. """
self.code = None
""" Code - if blank, this is not a selectable code.
Type `str`. """
self.contains = None
""" Codes contained under this entry.
List of `ValueSetExpansionContains` items (represented as `dict` in JSON). """
self.designation = None
""" Additional representations for this item.
List of `ValueSetComposeIncludeConceptDesignation` items (represented as `dict` in JSON). """
self.display = None
""" User display for the concept.
Type `str`. """
self.inactive = None
""" If concept is inactive in the code system.
Type `bool`. """
self.system = None
""" System value for the code.
Type `str`. """
self.version = None
""" Version in which this code/display is defined.
Type `str`. """
super(ValueSetExpansionContains, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetExpansionContains, self).elementProperties()
js.extend([
("abstract", "abstract", bool, False, None, False),
("code", "code", str, False, None, False),
("contains", "contains", ValueSetExpansionContains, True, None, False),
("designation", "designation", ValueSetComposeIncludeConceptDesignation, True, None, False),
("display", "display", str, False, None, False),
("inactive", "inactive", bool, False, None, False),
("system", "system", str, False, None, False),
("version", "version", str, False, None, False),
])
return js
class ValueSetExpansionParameter(backboneelement.BackboneElement):
""" Parameter that controlled the expansion process.
A parameter that controlled the expansion process. These parameters may be
used by users of expanded value sets to check whether the expansion is
suitable for a particular purpose, or to pick the correct expansion.
"""
resource_type = "ValueSetExpansionParameter"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.name = None
""" Name as assigned by the server.
Type `str`. """
self.valueBoolean = None
""" Value of the named parameter.
Type `bool`. """
self.valueCode = None
""" Value of the named parameter.
Type `str`. """
self.valueDecimal = None
""" Value of the named parameter.
Type `float`. """
self.valueInteger = None
""" Value of the named parameter.
Type `int`. """
self.valueString = None
""" Value of the named parameter.
Type `str`. """
self.valueUri = None
""" Value of the named parameter.
Type `str`. """
super(ValueSetExpansionParameter, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ValueSetExpansionParameter, self).elementProperties()
js.extend([
("name", "name", str, False, None, True),
("valueBoolean", "valueBoolean", bool, False, "value", False),
("valueCode", "valueCode", str, False, "value", False),
("valueDecimal", "valueDecimal", float, False, "value", False),
("valueInteger", "valueInteger", int, False, "value", False),
("valueString", "valueString", str, False, "value", False),
("valueUri", "valueUri", str, False, "value", False),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import coding
except ImportError:
coding = sys.modules[__package__ + '.coding']
try:
from . import contactdetail
except ImportError:
contactdetail = sys.modules[__package__ + '.contactdetail']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import usagecontext
except ImportError:
usagecontext = sys.modules[__package__ + '.usagecontext']
| all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_3_0_0/models/valueset.py | Python | bsd-3-clause | 21,281 | 0.007753 |
#!/usr/bin/env python
""" =================================================
execute.py - Top-level hybrid controller executor
=================================================
This module executes a hybrid controller for a robot in a simulated or real environment.
:Usage: ``execute.py [-hn] [-p listen_port] [-a automaton_file] [-s spec_file]``
* The controlling automaton is imported from the specified ``automaton_file``.
* The supporting handler modules (e.g. sensor, actuator, motion control, simulation environment initialization, etc)
are loaded according to the settings in the config file specified as current in the ``spec_file``.
* If no port to listen on is specified, an open one will be chosen randomly.
* Unless otherwise specified with the ``-n`` or ``--no_gui`` option, a status/control window
will also be opened for informational purposes.
"""
import sys, os, getopt, textwrap
import threading, subprocess, time
# Climb the tree to find out where we are
p = os.path.abspath(__file__)
t = ""
while t != "src":
(p, t) = os.path.split(p)
if p == "":
print "I have no idea where I am; this is ridiculous"
sys.exit(1)
sys.path.append(os.path.join(p,"src","lib"))
import fsa, project
import handlerSubsystem
import strategy
from copy import deepcopy
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import xmlrpclib
import socket
import random
import math
import traceback
from resynthesis import ExecutorResynthesisExtensions
from executeStrategy import ExecutorStrategyExtensions
import globalConfig, logging
####################
# HELPER FUNCTIONS #
####################
def usage(script_name):
""" Print command-line usage information. """
print textwrap.dedent("""\
Usage: %s [-hn] [-p listen_port] [-a automaton_file] [-s spec_file]
-h, --help:
Display this message
-n, --no-gui:
Do not show status/control window
-p PORT, --xmlrpc-listen-port PORT:
Listen on PORT for XML-RPC calls
-a FILE, --aut-file FILE:
Load automaton from FILE
-s FILE, --spec-file FILE:
Load experiment configuration from FILE """ % script_name)
class LTLMoPExecutor(ExecutorStrategyExtensions,ExecutorResynthesisExtensions, object):
"""
This is the main execution object, which combines the synthesized discrete automaton
with a set of handlers (as specified in a .config file) to create and run a hybrid controller
"""
def __init__(self):
"""
Create a new execution context object
"""
super(LTLMoPExecutor, self).__init__()
self.proj = project.Project() # this is the project that we are currently using to execute
self.strategy = None
# Choose a timer func with maximum accuracy for given platform
if sys.platform in ['win32', 'cygwin']:
self.timer_func = time.clock
else:
self.timer_func = time.time
self.externalEventTarget = None
self.externalEventTargetRegistered = threading.Event()
self.postEventLock = threading.Lock()
self.runStrategy = threading.Event() # Start out paused
self.alive = threading.Event()
self.alive.set()
self.current_outputs = {} # keep track on current outputs values (for actuations)
def postEvent(self, eventType, eventData=None):
""" Send a notice that an event occurred, if anyone wants it """
with self.postEventLock:
if self.externalEventTarget is None:
return
try:
self.externalEventTarget.handleEvent(eventType, eventData)
except socket.error as e:
logging.warning("Could not send event to remote event target: %s", e)
logging.warning("Forcefully unsubscribing target.")
self.externalEventTarget = None
def loadSpecFile(self, filename):
# Update with this new project
self.proj = project.Project()
self.proj.loadProject(filename)
self.hsub = handlerSubsystem.HandlerSubsystem(self, self.proj.project_root)
# Tell GUI to load the spec file
self.postEvent("SPEC", self.proj.getFilenamePrefix() + ".spec")
def loadAutFile(self, filename):
"""
This function loads the the .aut/.bdd file named filename and returns the strategy object.
filename (string): name of the file with path included
"""
region_domain = strategy.Domain("region", self.proj.rfi.regions, strategy.Domain.B0_IS_MSB)
strat = strategy.createStrategyFromFile(filename,
self.proj.enabled_sensors,
self.proj.enabled_actuators + self.proj.all_customs + [region_domain])
return strat
def _getCurrentRegionFromPose(self, rfi=None):
# TODO: move this to regions.py
if rfi is None:
rfi = self.proj.rfi
pose = self.hsub.coordmap_lab2map(self.hsub.getPose())
region = next((i for i, r in enumerate(rfi.regions) if r.name.lower() != "boundary" and \
r.objectContainsPoint(*pose)), None)
if region is None:
logging.warning("Pose of {} not inside any region!".format(pose))
return region
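        # Note: this returns the *index* of the matching region in rfi.regions
        # (or None), which callers such as initialize() below use to look up
        # the region object itself.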
def shutdown(self):
self.runStrategy.clear()
logging.info("QUITTING.")
all_handler_types = ['init', 'pose', 'locomotionCommand', 'drive', 'motionControl', 'sensor', 'actuator']
for htype in all_handler_types:
logging.info("Terminating {} handler...".format(htype))
if htype in self.proj.h_instance:
if isinstance(self.proj.h_instance[htype], dict):
handlers = [v for k,v in self.proj.h_instance[htype].iteritems()]
else:
handlers = [self.proj.h_instance[htype]]
for h in handlers:
if hasattr(h, "_stop"):
logging.debug("Calling _stop() on {}".format(h.__class__.__name__))
h._stop()
else:
logging.debug("{} does not have _stop() function".format(h.__class__.__name__))
else:
logging.debug("{} handler not found in h_instance".format(htype))
self.alive.clear()
def pause(self):
""" pause execution of the automaton """
self.runStrategy.clear()
time.sleep(0.1) # Wait for FSA to stop
self.postEvent("PAUSE")
def resume(self):
""" start/resume execution of the automaton """
self.runStrategy.set()
def isRunning(self):
""" return whether the automaton is currently executing """
return self.runStrategy.isSet()
def registerExternalEventTarget(self, address):
self.externalEventTarget = xmlrpclib.ServerProxy(address, allow_none=True)
# Redirect all output to the log
redir = RedirectText(self.externalEventTarget.handleEvent)
sys.stdout = redir
sys.stderr = redir
self.externalEventTargetRegistered.set()
def initialize(self, spec_file, strategy_file, firstRun=True):
"""
Prepare for execution, by loading and initializing all the relevant files (specification, map, handlers, strategy)
If `firstRun` is true, all handlers will be imported; otherwise, only the motion control handler will be reloaded.
"""
# load project only first time; otherwise self.proj is modified in-place
# TODO: make this less hacky
if firstRun:
self.loadSpecFile(spec_file)
            if self.proj.compile_options['decompose']:
                self.proj.rfiold = self.proj.rfi  # Save the undecomposed regions
                self.proj.rfi = self.proj.loadRegionFile(decomposed=True)
if self.proj.current_config == "":
logging.error("Can not simulate without a simulation configuration.")
logging.error("Please create one by going to [Run] > [Configure Simulation...] in SpecEditor and then try again.")
sys.exit(2)
logging.info("Setting current executing config...")
self.hsub.setExecutingConfig(self.proj.current_config)
# make sure the coord transformation function is ready
# get the main robot config
robot_config = self.hsub.executing_config.getRobotByName(self.hsub.executing_config.main_robot)
self.hsub.coordmap_map2lab, self.hsub.coordmap_lab2map = robot_config.getCoordMaps()
self.proj.coordmap_map2lab, self.proj.coordmap_lab2map = robot_config.getCoordMaps()
# Import the relevant handlers
if firstRun:
# Instantiate all handlers
logging.info("Instantiate all handlers...")
self.hsub.instantiateAllHandlers()
logging.info("Preparing proposition mapping...")
self.hsub.prepareMapping()
else:
#print "Reloading motion control handler..."
#self.proj.importHandlers(['motionControl'])
pass
# We are done initializing at this point if there is no aut file yet
if strategy_file is None:
return
# TODO: maybe an option for BDD here later
# Load automaton file
new_strategy = self.loadAutFile(strategy_file)
if firstRun:
### Wait for the initial start command
logging.info("Ready. Press [Start] to begin...")
self.runStrategy.wait()
### Figure out where we should start from by passing proposition assignments to strategy and search for initial state
### pass in sensor values, current actuator and custom proposition values, and current region object
## Region
# FIXME: make getcurrentregion return object instead of number, also fix the isNone check
init_region = self.proj.rfi.regions[self._getCurrentRegionFromPose()]
if init_region is None:
logging.error("Initial pose not inside any region!")
sys.exit(-1)
logging.info("Starting from initial region: " + init_region.name)
init_prop_assignments = {"region": init_region}
# initialize all sensor and actuator methods
logging.info("Initializing sensor and actuator methods...")
self.hsub.initializeAllMethods()
## outputs
if firstRun or self.strategy is None:
# save the initial values of the actuators and the custom propositions
for prop in self.proj.enabled_actuators + self.proj.all_customs:
self.current_outputs[prop] = (prop in self.hsub.executing_config.initial_truths)
init_prop_assignments.update(self.current_outputs)
## inputs
init_prop_assignments.update(self.hsub.getSensorValue(self.proj.enabled_sensors))
#search for initial state in the strategy
init_state = new_strategy.searchForOneState(init_prop_assignments)
if init_state is None:
logging.error("No suitable initial state found; unable to execute. Quitting...")
sys.exit(-1)
else:
logging.info("Starting from state %s." % init_state.state_id)
self.strategy = new_strategy
self.strategy.current_state = init_state
def run(self):
### Get everything moving
# Rate limiting is approximately 20Hz
avg_freq = 20
last_gui_update_time = 0
# FIXME: don't crash if no spec file is loaded initially
while self.alive.isSet():
# Idle if we're not running
if not self.runStrategy.isSet():
self.hsub.setVelocity(0,0)
# wait for either the FSA to unpause or for termination
while (not self.runStrategy.wait(0.1)) and self.alive.isSet():
pass
# Exit immediately if we're quitting
if not self.alive.isSet():
break
self.prev_outputs = self.strategy.current_state.getOutputs()
self.prev_z = self.strategy.current_state.goal_id
tic = self.timer_func()
self.runStrategyIteration()
toc = self.timer_func()
#self.checkForInternalFlags()
# Rate limiting of execution and GUI update
while (toc - tic) < 0.05:
time.sleep(0.005)
toc = self.timer_func()
# Update GUI
# If rate limiting is disabled in the future add in rate limiting here for the GUI:
# if show_gui and (timer_func() - last_gui_update_time > 0.05)
avg_freq = 0.9 * avg_freq + 0.1 * 1 / (toc - tic) # IIR filter
self.postEvent("FREQ", int(math.ceil(avg_freq)))
pose = self.hsub.getPose(cached=True)[0:2]
self.postEvent("POSE", tuple(map(int, self.hsub.coordmap_lab2map(pose))))
last_gui_update_time = self.timer_func()
logging.debug("execute.py quitting...")
# This function is necessary to prevent xmlrpcserver from catching
# exceptions and eating the tracebacks
def _dispatch(self, method, args):
try:
return getattr(self, method)(*args)
except:
traceback.print_exc()
raise
class RedirectText:
def __init__(self, event_handler):
self.event_handler = event_handler
def write(self, message):
if message.strip() != "":
self.event_handler("OTHER", message.strip())
def flush(self):
pass
####################################################
# Main function, run when called from command-line #
####################################################
def execute_main(listen_port=None, spec_file=None, aut_file=None, show_gui=False):
logging.info("Hello. Let's do this!")
# Create the XML-RPC server
if listen_port is None:
# Search for a port we can successfully bind to
while True:
listen_port = random.randint(10000, 65535)
try:
xmlrpc_server = SimpleXMLRPCServer(("127.0.0.1", listen_port), logRequests=False, allow_none=True)
except socket.error as e:
pass
else:
break
else:
xmlrpc_server = SimpleXMLRPCServer(("127.0.0.1", listen_port), logRequests=False, allow_none=True)
# Create the execution context object
e = LTLMoPExecutor()
# Register functions with the XML-RPC server
xmlrpc_server.register_instance(e)
# Kick off the XML-RPC server thread
XMLRPCServerThread = threading.Thread(target=xmlrpc_server.serve_forever)
XMLRPCServerThread.daemon = True
XMLRPCServerThread.start()
logging.info("Executor listening for XML-RPC calls on http://127.0.0.1:{} ...".format(listen_port))
# Start the GUI if necessary
if show_gui:
# Create a subprocess
logging.info("Starting GUI window...")
p_gui = subprocess.Popen([sys.executable, "-u", "-m", "lib.simGUI", str(listen_port)])
        # Wait for the GUI to fully load, to make sure all messages are redirected
e.externalEventTargetRegistered.wait()
if spec_file is not None:
# Tell executor to load spec & aut
#if aut_file is None:
# aut_file = spec_file.rpartition('.')[0] + ".aut"
e.initialize(spec_file, aut_file, firstRun=True)
# Start the executor's main loop in this thread
e.run()
# Clean up on exit
logging.info("Waiting for XML-RPC server to shut down...")
xmlrpc_server.shutdown()
XMLRPCServerThread.join()
logging.info("XML-RPC server shutdown complete. Goodbye.")
### Command-line argument parsing ###
if __name__ == "__main__":
### Check command-line arguments
aut_file = None
spec_file = None
show_gui = True
listen_port = None
try:
opts, args = getopt.getopt(sys.argv[1:], "hnp:a:s:", ["help", "no-gui", "xmlrpc-listen-port=", "aut-file=", "spec-file="])
except getopt.GetoptError:
logging.exception("Bad arguments")
usage(sys.argv[0])
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage(sys.argv[0])
sys.exit()
elif opt in ("-n", "--no-gui"):
show_gui = False
elif opt in ("-p", "--xmlrpc-listen-port"):
try:
listen_port = int(arg)
except ValueError:
logging.error("Invalid port '{}'".format(arg))
sys.exit(2)
elif opt in ("-a", "--aut-file"):
aut_file = arg
elif opt in ("-s", "--spec-file"):
spec_file = arg
execute_main(listen_port, spec_file, aut_file, show_gui)
| VerifiableRobotics/LTLMoP | src/lib/execute.py | Python | gpl-3.0 | 17,245 | 0.005683 |
#!/usr/bin/python
"""
Step file creator/editor.
:copyright: Red Hat Inc 2009
:author: mgoldish@redhat.com (Michael Goldish)
"""
import os
import glob
import shutil
import sys
import logging
import pygtk
import gtk
from virttest import ppm_utils
pygtk.require('2.0')
# General utilities
def corner_and_size_clipped(startpoint, endpoint, limits):
c0 = startpoint[:]
c1 = endpoint[:]
if c0[0] < 0:
c0[0] = 0
if c0[1] < 0:
c0[1] = 0
if c1[0] < 0:
c1[0] = 0
if c1[1] < 0:
c1[1] = 0
if c0[0] > limits[0] - 1:
c0[0] = limits[0] - 1
if c0[1] > limits[1] - 1:
c0[1] = limits[1] - 1
if c1[0] > limits[0] - 1:
c1[0] = limits[0] - 1
if c1[1] > limits[1] - 1:
c1[1] = limits[1] - 1
return ([min(c0[0], c1[0]),
min(c0[1], c1[1])],
[abs(c1[0] - c0[0]) + 1,
abs(c1[1] - c0[1]) + 1])
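# Worked example (hypothetical values): clipping a drag selection that starts
# off-screen against a 100x100 image returns the in-bounds corner plus an
# inclusive size:
#   corner_and_size_clipped([120, -5], [10, 40], [100, 100])
#   => ([10, 0], [90, 41])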
def key_event_to_qemu_string(event):
keymap = gtk.gdk.keymap_get_default()
keyvals = keymap.get_entries_for_keycode(event.hardware_keycode)
keyval = keyvals[0][0]
keyname = gtk.gdk.keyval_name(keyval)
keymap = {"Return": "ret",
"Tab": "tab",
"space": "spc",
"Left": "left",
"Right": "right",
"Up": "up",
"Down": "down",
"F1": "f1",
"F2": "f2",
"F3": "f3",
"F4": "f4",
"F5": "f5",
"F6": "f6",
"F7": "f7",
"F8": "f8",
"F9": "f9",
"F10": "f10",
"F11": "f11",
"F12": "f12",
"Escape": "esc",
"minus": "minus",
"equal": "equal",
"BackSpace": "backspace",
"comma": "comma",
"period": "dot",
"slash": "slash",
"Insert": "insert",
"Delete": "delete",
"Home": "home",
"End": "end",
"Page_Up": "pgup",
"Page_Down": "pgdn",
"Menu": "menu",
"semicolon": "0x27",
"backslash": "0x2b",
"apostrophe": "0x28",
"grave": "0x29",
"less": "0x2b",
"bracketleft": "0x1a",
"bracketright": "0x1b",
"Super_L": "0xdc",
"Super_R": "0xdb",
}
if ord('a') <= keyval <= ord('z') or ord('0') <= keyval <= ord('9'):
sr = keyname
    elif keyname in keymap:
sr = keymap[keyname]
else:
return ""
    if event.state & gtk.gdk.CONTROL_MASK:
        sr = "ctrl-" + sr
    if event.state & gtk.gdk.MOD1_MASK:
        sr = "alt-" + sr
    if event.state & gtk.gdk.SHIFT_MASK:
        sr = "shift-" + sr
return sr
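# For example, a Ctrl+S key event maps to "ctrl-s" and a Shift+F5 event to
# "shift-f5" (illustrative expected outputs, not an exhaustive mapping).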
class StepMakerWindow(object):
def __init__(self):
# Window
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.set_title("Step Maker Window")
self.window.connect("delete-event", self.delete_event)
self.window.connect("destroy", self.destroy)
self.window.set_default_size(600, 800)
# Main box (inside a frame which is inside a VBox)
self.menu_vbox = gtk.VBox()
self.window.add(self.menu_vbox)
self.menu_vbox.show()
frame = gtk.Frame()
frame.set_border_width(10)
frame.set_shadow_type(gtk.SHADOW_NONE)
self.menu_vbox.pack_end(frame)
frame.show()
self.main_vbox = gtk.VBox(spacing=10)
frame.add(self.main_vbox)
self.main_vbox.show()
# EventBox
self.scrolledwindow = gtk.ScrolledWindow()
self.scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC,
gtk.POLICY_AUTOMATIC)
self.scrolledwindow.set_shadow_type(gtk.SHADOW_NONE)
self.main_vbox.pack_start(self.scrolledwindow)
self.scrolledwindow.show()
table = gtk.Table(1, 1)
self.scrolledwindow.add_with_viewport(table)
table.show()
table.realize()
self.event_box = gtk.EventBox()
table.attach(self.event_box, 0, 1, 0, 1, gtk.EXPAND, gtk.EXPAND)
self.event_box.show()
self.event_box.realize()
# Image
self.image = gtk.Image()
self.event_box.add(self.image)
self.image.show()
# Data VBox
self.data_vbox = gtk.VBox(spacing=10)
self.main_vbox.pack_start(self.data_vbox, expand=False)
self.data_vbox.show()
# User VBox
self.user_vbox = gtk.VBox(spacing=10)
self.main_vbox.pack_start(self.user_vbox, expand=False)
self.user_vbox.show()
# Screendump ID HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
label = gtk.Label("Screendump ID:")
box.pack_start(label, False)
label.show()
self.entry_screendump = gtk.Entry()
self.entry_screendump.set_editable(False)
box.pack_start(self.entry_screendump)
self.entry_screendump.show()
label = gtk.Label("Time:")
box.pack_start(label, False)
label.show()
self.entry_time = gtk.Entry()
self.entry_time.set_editable(False)
self.entry_time.set_width_chars(10)
box.pack_start(self.entry_time, False)
self.entry_time.show()
# Comment HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
label = gtk.Label("Comment:")
box.pack_start(label, False)
label.show()
self.entry_comment = gtk.Entry()
box.pack_start(self.entry_comment)
self.entry_comment.show()
# Sleep HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
self.check_sleep = gtk.CheckButton("Sleep:")
self.check_sleep.connect("toggled", self.event_check_sleep_toggled)
box.pack_start(self.check_sleep, False)
self.check_sleep.show()
self.spin_sleep = gtk.SpinButton(gtk.Adjustment(0, 0, 50000, 1, 10, 0),
climb_rate=0.0)
box.pack_start(self.spin_sleep, False)
self.spin_sleep.show()
# Barrier HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
self.check_barrier = gtk.CheckButton("Barrier:")
self.check_barrier.connect("toggled", self.event_check_barrier_toggled)
box.pack_start(self.check_barrier, False)
self.check_barrier.show()
vbox = gtk.VBox()
box.pack_start(vbox)
vbox.show()
self.label_barrier_region = gtk.Label("Region:")
self.label_barrier_region.set_alignment(0, 0.5)
vbox.pack_start(self.label_barrier_region)
self.label_barrier_region.show()
self.label_barrier_md5sum = gtk.Label("MD5:")
self.label_barrier_md5sum.set_alignment(0, 0.5)
vbox.pack_start(self.label_barrier_md5sum)
self.label_barrier_md5sum.show()
self.label_barrier_timeout = gtk.Label("Timeout:")
box.pack_start(self.label_barrier_timeout, False)
self.label_barrier_timeout.show()
self.spin_barrier_timeout = gtk.SpinButton(gtk.Adjustment(0, 0, 50000,
1, 10, 0),
climb_rate=0.0)
box.pack_start(self.spin_barrier_timeout, False)
self.spin_barrier_timeout.show()
self.check_barrier_optional = gtk.CheckButton("Optional")
box.pack_start(self.check_barrier_optional, False)
self.check_barrier_optional.show()
# Keystrokes HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
label = gtk.Label("Keystrokes:")
box.pack_start(label, False)
label.show()
frame = gtk.Frame()
frame.set_shadow_type(gtk.SHADOW_IN)
box.pack_start(frame)
frame.show()
self.text_buffer = gtk.TextBuffer()
self.entry_keys = gtk.TextView(self.text_buffer)
self.entry_keys.set_wrap_mode(gtk.WRAP_WORD)
self.entry_keys.connect("key-press-event", self.event_key_press)
frame.add(self.entry_keys)
self.entry_keys.show()
self.check_manual = gtk.CheckButton("Manual")
self.check_manual.connect("toggled", self.event_manual_toggled)
box.pack_start(self.check_manual, False)
self.check_manual.show()
button = gtk.Button("Clear")
button.connect("clicked", self.event_clear_clicked)
box.pack_start(button, False)
button.show()
# Mouse click HBox
box = gtk.HBox(spacing=10)
self.data_vbox.pack_start(box)
box.show()
label = gtk.Label("Mouse action:")
box.pack_start(label, False)
label.show()
self.button_capture = gtk.Button("Capture")
box.pack_start(self.button_capture, False)
self.button_capture.show()
self.check_mousemove = gtk.CheckButton("Move: ...")
box.pack_start(self.check_mousemove, False)
self.check_mousemove.show()
self.check_mouseclick = gtk.CheckButton("Click: ...")
box.pack_start(self.check_mouseclick, False)
self.check_mouseclick.show()
self.spin_sensitivity = gtk.SpinButton(gtk.Adjustment(1, 1, 100, 1, 10,
0),
climb_rate=0.0)
box.pack_end(self.spin_sensitivity, False)
self.spin_sensitivity.show()
label = gtk.Label("Sensitivity:")
box.pack_end(label, False)
label.show()
self.spin_latency = gtk.SpinButton(
gtk.Adjustment(10, 1, 500, 1, 10, 0),
climb_rate=0.0)
box.pack_end(self.spin_latency, False)
self.spin_latency.show()
label = gtk.Label("Latency:")
box.pack_end(label, False)
label.show()
self.handler_event_box_press = None
self.handler_event_box_release = None
self.handler_event_box_scroll = None
self.handler_event_box_motion = None
self.handler_event_box_expose = None
self.window.realize()
self.window.show()
self.clear_state()
# Utilities
def message(self, text, title):
dlg = gtk.MessageDialog(self.window,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_INFO,
gtk.BUTTONS_CLOSE,
title)
dlg.set_title(title)
dlg.format_secondary_text(text)
dlg.run()
dlg.destroy()
def question_yes_no(self, text, title):
dlg = gtk.MessageDialog(self.window,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_QUESTION,
gtk.BUTTONS_YES_NO,
title)
dlg.set_title(title)
dlg.format_secondary_text(text)
response = dlg.run()
dlg.destroy()
if response == gtk.RESPONSE_YES:
return True
return False
def inputdialog(self, text, title, default_response=""):
# Define a little helper function
def inputdialog_entry_activated(entry):
dlg.response(gtk.RESPONSE_OK)
# Create the dialog
dlg = gtk.MessageDialog(self.window,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_QUESTION,
gtk.BUTTONS_OK_CANCEL,
title)
dlg.set_title(title)
dlg.format_secondary_text(text)
# Create an entry widget
entry = gtk.Entry()
entry.set_text(default_response)
entry.connect("activate", inputdialog_entry_activated)
dlg.vbox.pack_start(entry)
entry.show()
# Run the dialog
response = dlg.run()
dlg.destroy()
if response == gtk.RESPONSE_OK:
return entry.get_text()
return None
def filedialog(self, title=None, default_filename=None):
chooser = gtk.FileChooserDialog(title=title, parent=self.window,
action=gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,
gtk.RESPONSE_OK))
chooser.resize(700, 500)
if default_filename:
chooser.set_filename(os.path.abspath(default_filename))
filename = None
response = chooser.run()
if response == gtk.RESPONSE_OK:
filename = chooser.get_filename()
chooser.destroy()
return filename
def redirect_event_box_input(self, press=None, release=None, scroll=None,
motion=None, expose=None):
if self.handler_event_box_press is not None:
self.event_box.disconnect(self.handler_event_box_press)
if self.handler_event_box_release is not None:
self.event_box.disconnect(self.handler_event_box_release)
if self.handler_event_box_scroll is not None:
self.event_box.disconnect(self.handler_event_box_scroll)
if self.handler_event_box_motion is not None:
self.event_box.disconnect(self.handler_event_box_motion)
if self.handler_event_box_expose is not None:
self.event_box.disconnect(self.handler_event_box_expose)
self.handler_event_box_press = None
self.handler_event_box_release = None
self.handler_event_box_scroll = None
self.handler_event_box_motion = None
self.handler_event_box_expose = None
if press is not None:
self.handler_event_box_press = (
self.event_box.connect("button-press-event", press))
if release is not None:
self.handler_event_box_release = (
self.event_box.connect("button-release-event", release))
if scroll is not None:
self.handler_event_box_scroll = (
self.event_box.connect("scroll-event", scroll))
if motion is not None:
self.handler_event_box_motion = (
self.event_box.connect("motion-notify-event", motion))
if expose is not None:
self.handler_event_box_expose = (
self.event_box.connect_after("expose-event", expose))
def get_keys(self):
return self.text_buffer.get_text(
self.text_buffer.get_start_iter(),
self.text_buffer.get_end_iter())
def add_key(self, key):
text = self.get_keys()
if len(text) > 0 and text[-1] != ' ':
text += " "
text += key
self.text_buffer.set_text(text)
def clear_keys(self):
self.text_buffer.set_text("")
def update_barrier_info(self):
if self.barrier_selected:
self.label_barrier_region.set_text("Selected region: Corner: " +
str(tuple(self.barrier_corner)) +
" Size: " +
str(tuple(self.barrier_size)))
else:
self.label_barrier_region.set_text("No region selected.")
self.label_barrier_md5sum.set_text("MD5: " + self.barrier_md5sum)
def update_mouse_click_info(self):
if self.mouse_click_captured:
self.check_mousemove.set_label("Move: " +
str(tuple(self.mouse_click_coords)))
self.check_mouseclick.set_label("Click: button %d" %
self.mouse_click_button)
else:
self.check_mousemove.set_label("Move: ...")
self.check_mouseclick.set_label("Click: ...")
def clear_state(self, clear_screendump=True):
# Recording time
self.entry_time.set_text("unknown")
if clear_screendump:
# Screendump
self.clear_image()
# Screendump ID
self.entry_screendump.set_text("")
# Comment
self.entry_comment.set_text("")
# Sleep
self.check_sleep.set_active(True)
self.check_sleep.set_active(False)
self.spin_sleep.set_value(10)
# Barrier
self.clear_barrier_state()
# Keystrokes
self.check_manual.set_active(False)
self.clear_keys()
# Mouse actions
self.check_mousemove.set_sensitive(False)
self.check_mouseclick.set_sensitive(False)
self.check_mousemove.set_active(False)
self.check_mouseclick.set_active(False)
self.mouse_click_captured = False
self.mouse_click_coords = [0, 0]
self.mouse_click_button = 0
self.update_mouse_click_info()
def clear_barrier_state(self):
self.check_barrier.set_active(True)
self.check_barrier.set_active(False)
self.check_barrier_optional.set_active(False)
self.spin_barrier_timeout.set_value(10)
self.barrier_selection_started = False
self.barrier_selected = False
self.barrier_corner0 = [0, 0]
self.barrier_corner1 = [0, 0]
self.barrier_corner = [0, 0]
self.barrier_size = [0, 0]
self.barrier_md5sum = ""
self.update_barrier_info()
def set_image(self, w, h, data):
(self.image_width, self.image_height, self.image_data) = (w, h, data)
self.image.set_from_pixbuf(gtk.gdk.pixbuf_new_from_data(
data, gtk.gdk.COLORSPACE_RGB, False, 8,
w, h, w * 3))
hscrollbar = self.scrolledwindow.get_hscrollbar()
hscrollbar.set_range(0, w)
vscrollbar = self.scrolledwindow.get_vscrollbar()
vscrollbar.set_range(0, h)
def set_image_from_file(self, filename):
if not ppm_utils.image_verify_ppm_file(filename):
logging.warning("set_image_from_file: Warning: received invalid"
"screendump file")
return self.clear_image()
(w, h, data) = ppm_utils.image_read_from_ppm_file(filename)
self.set_image(w, h, data)
def clear_image(self):
self.image.clear()
self.image_width = 0
self.image_height = 0
self.image_data = ""
def update_screendump_id(self, data_dir):
if not self.image_data:
return
# Find a proper ID for the screendump
scrdump_md5sum = ppm_utils.image_md5sum(self.image_width,
self.image_height,
self.image_data)
scrdump_id = ppm_utils.find_id_for_screendump(scrdump_md5sum, data_dir)
if not scrdump_id:
# Not found; generate one
scrdump_id = ppm_utils.generate_id_for_screendump(scrdump_md5sum,
data_dir)
self.entry_screendump.set_text(scrdump_id)
def get_step_lines(self, data_dir=None):
if self.check_barrier.get_active() and not self.barrier_selected:
self.message("No barrier region selected.", "Error")
return
sr = "step"
# Add step recording time
if self.entry_time.get_text():
sr += " " + self.entry_time.get_text()
sr += "\n"
# Add screendump line
if self.image_data:
sr += "screendump %s\n" % self.entry_screendump.get_text()
# Add comment
if self.entry_comment.get_text():
sr += "# %s\n" % self.entry_comment.get_text()
# Add sleep line
if self.check_sleep.get_active():
sr += "sleep %d\n" % self.spin_sleep.get_value()
# Add barrier_2 line
if self.check_barrier.get_active():
sr += "barrier_2 %d %d %d %d %s %d" % (
self.barrier_size[0], self.barrier_size[1],
self.barrier_corner[0], self.barrier_corner[1],
self.barrier_md5sum, self.spin_barrier_timeout.get_value())
if self.check_barrier_optional.get_active():
sr += " optional"
sr += "\n"
# Add "Sending keys" comment
keys_to_send = self.get_keys().split()
if keys_to_send:
sr += "# Sending keys: %s\n" % self.get_keys()
# Add key and var lines
for key in keys_to_send:
if key.startswith("$"):
varname = key[1:]
sr += "var %s\n" % varname
else:
sr += "key %s\n" % key
# Add mousemove line
if self.check_mousemove.get_active():
sr += "mousemove %d %d\n" % (self.mouse_click_coords[0],
self.mouse_click_coords[1])
# Add mouseclick line
if self.check_mouseclick.get_active():
mapping = {1: 1,
2: 2,
3: 4}
sr += "mouseclick %d\n" % mapping[self.mouse_click_button]
# Write screendump and cropped screendump image files
if data_dir and self.image_data:
# Create the data dir if it doesn't exist
if not os.path.exists(data_dir):
os.makedirs(data_dir)
# Get the full screendump filename
scrdump_filename = os.path.join(data_dir,
self.entry_screendump.get_text())
# Write screendump file if it doesn't exist
if not os.path.exists(scrdump_filename):
try:
ppm_utils.image_write_to_ppm_file(scrdump_filename,
self.image_width,
self.image_height,
self.image_data)
except IOError:
self.message("Could not write screendump file.", "Error")
return sr
def set_state_from_step_lines(self, sr, data_dir, warn=True):
self.clear_state()
for line in sr.splitlines():
words = line.split()
if not words:
continue
if (line.startswith("#") and not
self.entry_comment.get_text() and not
line.startswith("# Sending keys:") and not
line.startswith("# ----")):
self.entry_comment.set_text(line.strip("#").strip())
elif words[0] == "step":
if len(words) >= 2:
self.entry_time.set_text(words[1])
elif words[0] == "screendump":
self.entry_screendump.set_text(words[1])
self.set_image_from_file(os.path.join(data_dir, words[1]))
elif words[0] == "sleep":
self.spin_sleep.set_value(int(words[1]))
self.check_sleep.set_active(True)
elif words[0] == "key":
self.add_key(words[1])
elif words[0] == "var":
self.add_key("$%s" % words[1])
elif words[0] == "mousemove":
self.mouse_click_captured = True
self.mouse_click_coords = [int(words[1]), int(words[2])]
self.update_mouse_click_info()
elif words[0] == "mouseclick":
self.mouse_click_captured = True
self.mouse_click_button = int(words[1])
self.update_mouse_click_info()
elif words[0] == "barrier_2":
# Get region corner and size from step lines
self.barrier_corner = [int(words[3]), int(words[4])]
self.barrier_size = [int(words[1]), int(words[2])]
# Get corner0 and corner1 from step lines
self.barrier_corner0 = self.barrier_corner
self.barrier_corner1 = [self.barrier_corner[0] +
self.barrier_size[0] - 1,
self.barrier_corner[1] +
self.barrier_size[1] - 1]
# Get the md5sum
self.barrier_md5sum = words[5]
# Pretend the user selected the region with the mouse
self.barrier_selection_started = True
self.barrier_selected = True
# Update label widgets according to region information
self.update_barrier_info()
# Check the barrier checkbutton
self.check_barrier.set_active(True)
# Set timeout value
self.spin_barrier_timeout.set_value(int(words[6]))
# Set 'optional' checkbutton state
self.check_barrier_optional.set_active(words[-1] == "optional")
# Update the image widget
self.event_box.queue_draw()
if warn:
# See if the computed md5sum matches the one recorded in
# the file
computed_md5sum = ppm_utils.get_region_md5sum(
self.image_width, self.image_height,
self.image_data, self.barrier_corner[0],
self.barrier_corner[1], self.barrier_size[0],
self.barrier_size[1])
if computed_md5sum != self.barrier_md5sum:
self.message("Computed MD5 sum (%s) differs from MD5"
" sum recorded in steps file (%s)" %
(computed_md5sum, self.barrier_md5sum),
"Warning")
# Events
def delete_event(self, widget, event):
pass
def destroy(self, widget):
gtk.main_quit()
def event_check_barrier_toggled(self, widget):
if self.check_barrier.get_active():
self.redirect_event_box_input(
self.event_button_press,
self.event_button_release,
None,
None,
self.event_expose)
self.event_box.queue_draw()
self.event_box.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.CROSSHAIR))
self.label_barrier_region.set_sensitive(True)
self.label_barrier_md5sum.set_sensitive(True)
self.label_barrier_timeout.set_sensitive(True)
self.spin_barrier_timeout.set_sensitive(True)
self.check_barrier_optional.set_sensitive(True)
else:
self.redirect_event_box_input()
self.event_box.queue_draw()
self.event_box.window.set_cursor(None)
self.label_barrier_region.set_sensitive(False)
self.label_barrier_md5sum.set_sensitive(False)
self.label_barrier_timeout.set_sensitive(False)
self.spin_barrier_timeout.set_sensitive(False)
self.check_barrier_optional.set_sensitive(False)
def event_check_sleep_toggled(self, widget):
if self.check_sleep.get_active():
self.spin_sleep.set_sensitive(True)
else:
self.spin_sleep.set_sensitive(False)
def event_manual_toggled(self, widget):
self.entry_keys.grab_focus()
def event_clear_clicked(self, widget):
self.clear_keys()
self.entry_keys.grab_focus()
def event_expose(self, widget, event):
if not self.barrier_selection_started:
return
(corner, size) = corner_and_size_clipped(self.barrier_corner0,
self.barrier_corner1,
self.event_box.size_request())
gc = self.event_box.window.new_gc(line_style=gtk.gdk.LINE_DOUBLE_DASH,
line_width=1)
gc.set_foreground(gc.get_colormap().alloc_color("red"))
gc.set_background(gc.get_colormap().alloc_color("dark red"))
gc.set_dashes(0, (4, 4))
self.event_box.window.draw_rectangle(
gc, False,
corner[0], corner[1],
size[0] - 1, size[1] - 1)
def event_drag_motion(self, widget, event):
old_corner1 = self.barrier_corner1
self.barrier_corner1 = [int(event.x), int(event.y)]
(corner, size) = corner_and_size_clipped(self.barrier_corner0,
self.barrier_corner1,
self.event_box.size_request())
(old_corner, old_size) = corner_and_size_clipped(self.barrier_corner0,
old_corner1,
self.event_box.size_request())
corner0 = [
min(corner[0], old_corner[0]), min(corner[1], old_corner[1])]
corner1 = [max(corner[0] + size[0], old_corner[0] + old_size[0]),
max(corner[1] + size[1], old_corner[1] + old_size[1])]
size = [corner1[0] - corner0[0] + 1,
corner1[1] - corner0[1] + 1]
self.event_box.queue_draw_area(
corner0[0], corner0[1], size[0], size[1])
def event_button_press(self, widget, event):
(corner, size) = corner_and_size_clipped(self.barrier_corner0,
self.barrier_corner1,
self.event_box.size_request())
self.event_box.queue_draw_area(corner[0], corner[1], size[0], size[1])
self.barrier_corner0 = [int(event.x), int(event.y)]
self.barrier_corner1 = [int(event.x), int(event.y)]
self.redirect_event_box_input(
self.event_button_press,
self.event_button_release,
None,
self.event_drag_motion,
self.event_expose)
self.barrier_selection_started = True
def event_button_release(self, widget, event):
self.redirect_event_box_input(
self.event_button_press,
self.event_button_release,
None,
None,
self.event_expose)
(self.barrier_corner, self.barrier_size) = \
corner_and_size_clipped(self.barrier_corner0, self.barrier_corner1,
self.event_box.size_request())
self.barrier_md5sum = ppm_utils.get_region_md5sum(
self.image_width, self.image_height, self.image_data,
self.barrier_corner[0], self.barrier_corner[1],
self.barrier_size[0], self.barrier_size[1])
self.barrier_selected = True
self.update_barrier_info()
def event_key_press(self, widget, event):
if self.check_manual.get_active():
return False
sr = key_event_to_qemu_string(event)
self.add_key(sr)
return True
class StepEditor(StepMakerWindow):
ui = '''<ui>
<menubar name="MenuBar">
<menu action="File">
<menuitem action="Open"/>
<separator/>
<menuitem action="Quit"/>
</menu>
<menu action="Edit">
<menuitem action="CopyStep"/>
<menuitem action="DeleteStep"/>
</menu>
<menu action="Insert">
<menuitem action="InsertNewBefore"/>
<menuitem action="InsertNewAfter"/>
<separator/>
<menuitem action="InsertStepsBefore"/>
<menuitem action="InsertStepsAfter"/>
</menu>
<menu action="Tools">
<menuitem action="CleanUp"/>
</menu>
</menubar>
</ui>'''
# Constructor
def __init__(self, filename=None):
StepMakerWindow.__init__(self)
self.steps_filename = None
self.steps = []
# Create a UIManager instance
uimanager = gtk.UIManager()
# Add the accelerator group to the toplevel window
accelgroup = uimanager.get_accel_group()
self.window.add_accel_group(accelgroup)
# Create an ActionGroup
actiongroup = gtk.ActionGroup('StepEditor')
# Create actions
actiongroup.add_actions([
('Quit', gtk.STOCK_QUIT, '_Quit', None, 'Quit the Program',
self.quit),
('Open', gtk.STOCK_OPEN, '_Open', None, 'Open steps file',
self.open_steps_file),
('CopyStep', gtk.STOCK_COPY, '_Copy current step...', "",
'Copy current step to user specified position', self.copy_step),
('DeleteStep', gtk.STOCK_DELETE, '_Delete current step', "",
'Delete current step', self.event_remove_clicked),
('InsertNewBefore', gtk.STOCK_ADD, '_New step before current', "",
'Insert new step before current step', self.insert_before),
('InsertNewAfter', gtk.STOCK_ADD, 'N_ew step after current', "",
'Insert new step after current step', self.insert_after),
('InsertStepsBefore', gtk.STOCK_ADD, '_Steps before current...',
"", 'Insert steps (from file) before current step',
self.insert_steps_before),
('InsertStepsAfter', gtk.STOCK_ADD, 'Steps _after current...', "",
'Insert steps (from file) after current step',
self.insert_steps_after),
('CleanUp', gtk.STOCK_DELETE, '_Clean up data directory', "",
'Move unused PPM files to a backup directory', self.cleanup),
('File', None, '_File'),
('Edit', None, '_Edit'),
('Insert', None, '_Insert'),
('Tools', None, '_Tools')
])
def create_shortcut(name, callback, keyname):
# Create an action
action = gtk.Action(name, None, None, None)
# Connect a callback to the action
action.connect("activate", callback)
actiongroup.add_action_with_accel(action, keyname)
# Have the action use accelgroup
action.set_accel_group(accelgroup)
# Connect the accelerator to the action
action.connect_accelerator()
create_shortcut("Next", self.event_next_clicked, "Page_Down")
create_shortcut("Previous", self.event_prev_clicked, "Page_Up")
# Add the actiongroup to the uimanager
uimanager.insert_action_group(actiongroup, 0)
# Add a UI description
uimanager.add_ui_from_string(self.ui)
# Create a MenuBar
menubar = uimanager.get_widget('/MenuBar')
self.menu_vbox.pack_start(menubar, False)
# Remember the Edit menu bar for future reference
self.menu_edit = uimanager.get_widget('/MenuBar/Edit')
self.menu_edit.set_sensitive(False)
# Remember the Insert menu bar for future reference
self.menu_insert = uimanager.get_widget('/MenuBar/Insert')
self.menu_insert.set_sensitive(False)
# Remember the Tools menu bar for future reference
self.menu_tools = uimanager.get_widget('/MenuBar/Tools')
self.menu_tools.set_sensitive(False)
# Next/Previous HBox
hbox = gtk.HBox(spacing=10)
self.user_vbox.pack_start(hbox)
hbox.show()
self.button_first = gtk.Button(stock=gtk.STOCK_GOTO_FIRST)
self.button_first.connect("clicked", self.event_first_clicked)
hbox.pack_start(self.button_first)
self.button_first.show()
#self.button_prev = gtk.Button("<< Previous")
self.button_prev = gtk.Button(stock=gtk.STOCK_GO_BACK)
self.button_prev.connect("clicked", self.event_prev_clicked)
hbox.pack_start(self.button_prev)
self.button_prev.show()
self.label_step = gtk.Label("Step:")
hbox.pack_start(self.label_step, False)
self.label_step.show()
self.entry_step_num = gtk.Entry()
self.entry_step_num.connect(
"activate", self.event_entry_step_activated)
self.entry_step_num.set_width_chars(3)
hbox.pack_start(self.entry_step_num, False)
self.entry_step_num.show()
#self.button_next = gtk.Button("Next >>")
self.button_next = gtk.Button(stock=gtk.STOCK_GO_FORWARD)
self.button_next.connect("clicked", self.event_next_clicked)
hbox.pack_start(self.button_next)
self.button_next.show()
self.button_last = gtk.Button(stock=gtk.STOCK_GOTO_LAST)
self.button_last.connect("clicked", self.event_last_clicked)
hbox.pack_start(self.button_last)
self.button_last.show()
# Save HBox
hbox = gtk.HBox(spacing=10)
self.user_vbox.pack_start(hbox)
hbox.show()
self.button_save = gtk.Button("_Save current step")
self.button_save.connect("clicked", self.event_save_clicked)
hbox.pack_start(self.button_save)
self.button_save.show()
self.button_remove = gtk.Button("_Delete current step")
self.button_remove.connect("clicked", self.event_remove_clicked)
hbox.pack_start(self.button_remove)
self.button_remove.show()
self.button_replace = gtk.Button("_Replace screendump")
self.button_replace.connect("clicked", self.event_replace_clicked)
hbox.pack_start(self.button_replace)
self.button_replace.show()
# Disable unused widgets
self.button_capture.set_sensitive(False)
self.spin_latency.set_sensitive(False)
self.spin_sensitivity.set_sensitive(False)
# Disable main vbox because no steps file is loaded
self.main_vbox.set_sensitive(False)
# Set title
self.window.set_title("Step Editor")
# Events
def delete_event(self, widget, event):
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
def event_first_clicked(self, widget):
if not self.steps:
return
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Go to first step
self.set_step(0)
def event_last_clicked(self, widget):
if not self.steps:
return
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Go to last step
self.set_step(len(self.steps) - 1)
def event_prev_clicked(self, widget):
if not self.steps:
return
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Go to previous step
index = self.current_step_index - 1
if self.steps:
index = index % len(self.steps)
self.set_step(index)
def event_next_clicked(self, widget):
if not self.steps:
return
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Go to next step
index = self.current_step_index + 1
if self.steps:
index = index % len(self.steps)
self.set_step(index)
def event_entry_step_activated(self, widget):
if not self.steps:
return
step_index = self.entry_step_num.get_text()
if not step_index.isdigit():
return
step_index = int(step_index) - 1
if step_index == self.current_step_index:
return
self.verify_save()
self.set_step(step_index)
def event_save_clicked(self, widget):
if not self.steps:
return
self.save_step()
def event_remove_clicked(self, widget):
if not self.steps:
return
if not self.question_yes_no("This will modify the steps file."
" Are you sure?", "Remove step?"):
return
# Remove step
del self.steps[self.current_step_index]
# Write changes to file
self.write_steps_file(self.steps_filename)
# Move to previous step
self.set_step(self.current_step_index)
def event_replace_clicked(self, widget):
if not self.steps:
return
# Let the user choose a screendump file
current_filename = os.path.join(self.steps_data_dir,
self.entry_screendump.get_text())
filename = self.filedialog("Choose PPM image file",
default_filename=current_filename)
if not filename:
return
if not ppm_utils.image_verify_ppm_file(filename):
self.message("Not a valid PPM image file.", "Error")
return
self.clear_image()
self.clear_barrier_state()
self.set_image_from_file(filename)
self.update_screendump_id(self.steps_data_dir)
# Menu actions
def open_steps_file(self, action):
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Let the user choose a steps file
current_filename = self.steps_filename
filename = self.filedialog("Open steps file",
default_filename=current_filename)
if not filename:
return
self.set_steps_file(filename)
def quit(self, action):
# Make sure the step is saved (if the user wants it to be)
self.verify_save()
# Quit
gtk.main_quit()
def copy_step(self, action):
if not self.steps:
return
self.verify_save()
self.set_step(self.current_step_index)
# Get the desired position
step_index = self.inputdialog("Copy step to position:",
"Copy step",
str(self.current_step_index + 2))
if not step_index:
return
step_index = int(step_index) - 1
# Get the lines of the current step
step = self.steps[self.current_step_index]
# Insert new step at position step_index
self.steps.insert(step_index, step)
# Go to new step
self.set_step(step_index)
# Write changes to disk
self.write_steps_file(self.steps_filename)
def insert_before(self, action):
if not self.steps_filename:
return
if not self.question_yes_no("This will modify the steps file."
" Are you sure?", "Insert new step?"):
return
self.verify_save()
step_index = self.current_step_index
# Get the lines of a blank step
self.clear_state()
step = self.get_step_lines()
# Insert new step at position step_index
self.steps.insert(step_index, step)
# Go to new step
self.set_step(step_index)
# Write changes to disk
self.write_steps_file(self.steps_filename)
def insert_after(self, action):
if not self.steps_filename:
return
if not self.question_yes_no("This will modify the steps file."
" Are you sure?", "Insert new step?"):
return
self.verify_save()
step_index = self.current_step_index + 1
# Get the lines of a blank step
self.clear_state()
step = self.get_step_lines()
# Insert new step at position step_index
self.steps.insert(step_index, step)
# Go to new step
self.set_step(step_index)
# Write changes to disk
self.write_steps_file(self.steps_filename)
def insert_steps(self, filename, index):
# Read the steps file
(steps, _) = self.read_steps_file(filename)
data_dir = ppm_utils.get_data_dir(filename)
for step in steps:
self.set_state_from_step_lines(step, data_dir, warn=False)
step = self.get_step_lines(self.steps_data_dir)
# Insert steps into self.steps
self.steps[index:index] = steps
# Write changes to disk
self.write_steps_file(self.steps_filename)
def insert_steps_before(self, action):
if not self.steps_filename:
return
# Let the user choose a steps file
current_filename = self.steps_filename
filename = self.filedialog("Choose steps file",
default_filename=current_filename)
if not filename:
return
self.verify_save()
step_index = self.current_step_index
# Insert steps at position step_index
self.insert_steps(filename, step_index)
# Go to new steps
self.set_step(step_index)
def insert_steps_after(self, action):
if not self.steps_filename:
return
# Let the user choose a steps file
current_filename = self.steps_filename
filename = self.filedialog("Choose steps file",
default_filename=current_filename)
if not filename:
return
self.verify_save()
step_index = self.current_step_index + 1
# Insert new steps at position step_index
self.insert_steps(filename, step_index)
# Go to new steps
self.set_step(step_index)
def cleanup(self, action):
if not self.steps_filename:
return
if not self.question_yes_no("All unused PPM files will be moved to a"
" backup directory. Are you sure?",
"Clean up data directory?"):
return
# Remember the current step index
current_step_index = self.current_step_index
# Get the backup dir
backup_dir = os.path.join(self.steps_data_dir, "backup")
# Create it if it doesn't exist
if not os.path.exists(backup_dir):
os.makedirs(backup_dir)
# Move all files to the backup dir
for filename in glob.glob(os.path.join(self.steps_data_dir,
"*.[Pp][Pp][Mm]")):
shutil.move(filename, backup_dir)
# Get the used files back
for step in self.steps:
self.set_state_from_step_lines(step, backup_dir, warn=False)
self.get_step_lines(self.steps_data_dir)
# Remove the used files from the backup dir
used_files = os.listdir(self.steps_data_dir)
for filename in os.listdir(backup_dir):
if filename in used_files:
os.unlink(os.path.join(backup_dir, filename))
# Restore step index
self.set_step(current_step_index)
# Inform the user
self.message("All unused PPM files may be found at %s." %
os.path.abspath(backup_dir),
"Clean up data directory")
# Methods
def read_steps_file(self, filename):
steps = []
header = ""
fileobj = open(filename, "r")
for line in fileobj.readlines():
words = line.split()
if not words:
continue
if line.startswith("# ----"):
continue
if words[0] == "step":
steps.append("")
if steps:
steps[-1] += line
else:
header += line
fileobj.close()
return (steps, header)
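    # For reference, one step record (as assembled by get_step_lines above)
    # looks roughly like this -- all values below are illustrative:
    #   step 20.51
    #   screendump 0001.ppm
    #   # Log in as root
    #   sleep 10
    #   barrier_2 100 50 10 10 <md5sum> 30
    #   key ctrl-alt-f1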
def set_steps_file(self, filename):
try:
(self.steps, self.header) = self.read_steps_file(filename)
except (TypeError, IOError):
self.message("Cannot read file %s." % filename, "Error")
return
self.steps_filename = filename
self.steps_data_dir = ppm_utils.get_data_dir(filename)
# Go to step 0
self.set_step(0)
def set_step(self, index):
# Limit index to legal boundaries
if index < 0:
index = 0
if index > len(self.steps) - 1:
index = len(self.steps) - 1
# Enable the menus
self.menu_edit.set_sensitive(True)
self.menu_insert.set_sensitive(True)
self.menu_tools.set_sensitive(True)
# If no steps exist...
if self.steps == []:
self.current_step_index = index
self.current_step = None
# Set window title
self.window.set_title("Step Editor -- %s" %
os.path.basename(self.steps_filename))
# Set step entry widget text
self.entry_step_num.set_text("")
# Clear the state of all widgets
self.clear_state()
# Disable the main vbox
self.main_vbox.set_sensitive(False)
return
self.current_step_index = index
self.current_step = self.steps[index]
# Set window title
self.window.set_title("Step Editor -- %s -- step %d" %
(os.path.basename(self.steps_filename),
index + 1))
# Set step entry widget text
self.entry_step_num.set_text(str(self.current_step_index + 1))
# Load the state from the step lines
self.set_state_from_step_lines(self.current_step, self.steps_data_dir)
# Enable the main vbox
self.main_vbox.set_sensitive(True)
# Make sure the step lines in self.current_step are identical to the
# output of self.get_step_lines
self.current_step = self.get_step_lines()
def verify_save(self):
if not self.steps:
return
# See if the user changed anything
if self.get_step_lines() != self.current_step:
if self.question_yes_no("Step contents have been modified."
" Save step?", "Save changes?"):
self.save_step()
def save_step(self):
lines = self.get_step_lines(self.steps_data_dir)
if lines is not None:
self.steps[self.current_step_index] = lines
self.current_step = lines
self.write_steps_file(self.steps_filename)
def write_steps_file(self, filename):
fileobj = open(filename, "w")
fileobj.write(self.header)
for step in self.steps:
fileobj.write("# " + "-" * 32 + "\n")
fileobj.write(step)
fileobj.close()
if __name__ == "__main__":
se = StepEditor()
if len(sys.argv) > 1:
se.set_steps_file(sys.argv[1])
gtk.main()
| lmr/avocado-vt | virttest/step_editor.py | Python | gpl-2.0 | 50,744 | 0.000158 |
import logging
from pprint import pprint
from flask_wtf import Form
from wtforms import IntegerField, BooleanField
from flask import Blueprint, send_from_directory
from flask import Flask, redirect, url_for, session, request, jsonify, g,\
make_response, Response, render_template
from werkzeug.utils import secure_filename
from sqlalchemy import Date, cast, func, desc, or_
from main import app
from controllers.helpers import crossdomain
from interaction_logger import StructuredEvent
services = Blueprint('services', __name__, url_prefix='/services')
from controllers.service_libraries import weather as weather_service
@services.route('/weather/', methods=['GET', "POST"])
@services.route('/weather', methods=['GET', 'POST'])
def weather():
function = request.args.get("function", "get_temperature")
city = request.args.get("city", "Blacksburg, VA")
weather_function = getattr(weather_service, function)
return jsonify(data=weather_function(city))
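# Example request against this endpoint (assuming service_libraries.weather
# defines get_temperature):
#   GET /services/weather?function=get_temperature&city=Blacksburg,%20VA
#   -> {"data": <whatever get_temperature("Blacksburg, VA") returns>}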
@services.route('/sheets/<path:sheet_url>', methods=['GET'])
def sheets(sheet_url):
    """Convert a Google Sheets URL into its public JSON feed URL.

    Minimal sketch: the ID extraction below simply follows the sample
    URLs in the comments.
    """
    sheet_id = ''
    if sheet_url.startswith('http') or sheet_url.startswith('docs'):
        # URLs look like .../spreadsheets/d/<sheet_id>/pubhtml
        parts = sheet_url.split('/')
        if 'd' in parts and parts.index('d') + 1 < len(parts):
            sheet_id = parts[parts.index('d') + 1]
    # sample:
    # https://docs.google.com/spreadsheets/d/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/pubhtml
    # =>
    # https://spreadsheets.google.com/feeds/list/___/od6/public/basic?alt=json
    # https://spreadsheets.google.com/feeds/list/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/od6/public/basic?alt=json
    return jsonify(url='https://spreadsheets.google.com/feeds/list/'
                       '%s/od6/public/basic?alt=json' % sheet_id)
@services.route('/log/', methods=['GET', 'POST', 'OPTIONS'])
@services.route('/log', methods=['GET', 'POST', 'OPTIONS'])
#@crossdomain(origin='*')
def log_event():
user_id = request.form.get('user_id', "")
if user_id == "":
user_id = str(request.remote_addr)
question_id = request.form.get('question_id', "")
event = request.form.get('event', "")
action = request.form.get('action', "")
body = request.form.get('body', "")
external_interactions_logger = logging.getLogger('ExternalInteractions')
external_interactions_logger.info(
StructuredEvent(user_id, question_id, event, action, body)
)
response = make_response('success')
response.headers['Access-Control-Allow-Origin'] = "*"
return response
| RealTimeWeb/Blockpy-Server | controllers/services.py | Python | mit | 2,386 | 0.003353 |
#!/usr/bin/env python
"""Parses language data from IANA subtag registry plus several other files,
and outputs JSON data in the following format:
[
{
'name': 'Ghotuo',
'code': { 'three': 'aaa' },
'country': ['Nigeria'],
'altNames': [],
},
{
'name': 'Alumu',
'code': { 'three': 'aab' },
'country': ['Nigeria'],
'altNames': ['Alumu', 'Tesu', 'Arum', 'Alumu-Tesu', 'Alumu', 'Arum-Cesu', 'Arum-Chessu', 'Arum-Tesu'],
},
# ...
{
'name': 'Waorani',
'code': { 'three': 'auc' },
'country': ['Brazil'],
'altNames': ['Huaorani', 'Sabela', 'Waodani', 'Auca'], # Pejorative names like Auca are *not* flagged as such
}
# ...
{
'name': 'English',
'code': { 'two': 'en', 'three': 'eng' },
'country': ['Australia', 'United Kingdom', 'United States', ...],
'altNames': ['Belfast', 'Birmingham', ...], # Dialects are *not* flagged as such
}
# ...
]"""
import os, sys
import re
from pprint import pprint, pformat
import codecs
import collections
import json
# Constants - mostly hardcoded filenames
SUBTAG_REGISTRY_FNAME = "ianaSubtagRegistry.txt"
COUNTRY_CODES_FNAME = "CountryCodes.txt"
LANGUAGE_CODES_FNAME = "LanguageCodes.txt"
LANGUAGE_INDEX_FNAME = "LanguageIndex.txt"
CONVERT_2_TO_3_FNAME = "TwoToThreeCodes.txt"
OUTPUT_FNAME = "inputSystems_languages.js"
OUTPUT_KEY_ORDER = ['name', 'code', 'country', 'altNames']
# OUTPUT_PREFIX is the text to write *before* the JSON output
OUTPUT_PREFIX = """\
'use strict';
// THIS FILE IS AUTOMATICALLY GENERATED.
// Do not make changes to this file; they will be overwritten.
// input systems languages data
var _inputSystems_languages = """
# OUTPUT_SUFFIX is the text to write *after* the JSON output
OUTPUT_SUFFIX = ";\n"
def read_file(fname):
with codecs.open(fname, 'rU', 'utf-8-sig') as f:
result = f.read() # utf-8-sig means strip BOM from start of file, if present
return result
def read_all_files():
try:
data = {
"subtags": read_file(SUBTAG_REGISTRY_FNAME),
"ccs": read_file(COUNTRY_CODES_FNAME),
"lcs": read_file(LANGUAGE_CODES_FNAME),
"lndx": read_file(LANGUAGE_INDEX_FNAME),
"2to3": read_file(CONVERT_2_TO_3_FNAME),
}
except IOError:
return None
else:
return data
def parse_subtag_registry(raw_text):
"""Returns data as a dict of lists, keyed by record type:
result['language'] = (list of language records)
result['extlang'] = (list of extended language records)
result['script'] = (list of script records)
And so on. Valid keys for result dict will be language, extlang, script,
region, variant, grandfathered, redundant."""
result = collections.defaultdict(list)
records = raw_text.split(u"%%\n")
for record in records:
data = {}
if record.startswith(u"File-Date:"):
continue # First "record" of file is only file-date
record = record.replace(u"\n ", u" ") # Line continuations: newline plus two spaces
record = re.sub(u" +", u" ", record) # Multiple spaces are collapsed into one, per spec
for line in record.splitlines():
key, val = line.split(": ", 1)
if key == 'Description':
# Descriptions can, and often do, appear more than once per record
data.setdefault(key, []).append(val)
else:
data[key] = val
result[data[u'Type']].append(data)
return result
def parse_tab_separated_file(raw_text, first_line_contains_field_names=True):
"""Returns data as either:
- a list of dicts, if first_line_contains_field_names is True
- a list of lists, if first_line_contains_field_names is False
"""
result = []
lines = raw_text.splitlines()
if first_line_contains_field_names:
field_names = lines[0].split('\t')
lines = lines[1:]
for line in lines:
fields = [field.strip() for field in line.split('\t') if line.strip()]
if first_line_contains_field_names:
result.append(dict(zip(field_names, fields)))
else:
result.append(fields)
return result
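# Tiny illustration (hypothetical input): for the two lines
#   CountryID<TAB>Name
#   US<TAB>United States
# passing first_line_contains_field_names=True yields
#   [{'CountryID': 'US', 'Name': 'United States'}]
# while False yields both rows as plain lists, header included.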
def parse_all_files(data):
result = {}
result['subtags'] = parse_subtag_registry(data['subtags'])
result['ccs'] = parse_tab_separated_file(data['ccs'], True)
result['lcs'] = parse_tab_separated_file(data['lcs'], True)
result['lndx'] = parse_tab_separated_file(data['lndx'], True)
result['2to3'] = parse_tab_separated_file(data['2to3'], False)
# Build lookup tables
result['2to3_lookup'] = {record[0]: record[1] for record in result['2to3']}
result['3to2_lookup'] = {record[1]: record[0] for record in result['2to3']}
result['country_lookup'] = {record['CountryID']: record['Name'] for record in result['ccs']}
return result
def build_language_data(data):
result = collections.OrderedDict()
for language_record in data['lndx']:
langid3 = language_record[u'LangID']
langid = data['3to2_lookup'].get(langid3, langid3) # 2-letter code preferred, 3-letter code is fallback
record = result.get(langid, {})
if not record.has_key('code'):
record['code'] = {}
if len(langid) == 2:
record['code']['two'] = langid
record['code']['three'] = langid3
country = data['country_lookup'].get(language_record[u'CountryID'])
if country:
record.setdefault('country', set()).add(country)
name = language_record['Name']
if language_record['NameType'] == 'L':
record['name'] = name
else:
record.setdefault('altNames', set()).add(name)
        if langid not in result:
result[langid] = record
return result
def build_regions_data(data):
result = collections.OrderedDict()
for record in data['ccs']:
result[record['CountryID']] = record['Name']
return result
def build_scripts_data(data):
result = collections.OrderedDict()
for record in data['subtags']['script']:
result[record['Subtag']] = record['Description']
return result
def write_json(final_result, out_fname, prefix, suffix, fix_records=False):
if fix_records:
records_for_output = []
for record in final_result.itervalues():
for key in ['country', 'altNames']:
                if key in record:
record[key] = list(sorted(record[key]))
else:
record[key] = [] # Ensure country and altNames lists exist, even if they're empty
# Rearrange output record so keys will be in predictable order in JSON file
new_record = collections.OrderedDict()
for key in OUTPUT_KEY_ORDER:
new_record[key] = record[key]
records_for_output.append(new_record)
else:
records_for_output = final_result
    with codecs.open(out_fname, 'w', 'utf-8') as f:
f.write(prefix)
json.dump(records_for_output, f, ensure_ascii=False, indent=4, separators=(',', ': '))
f.write(suffix)
def main():
sys.stderr.write('Reading files...\n')
data = read_all_files()
if not data:
sys.stderr.write("Error reading input data files\n")
sys.exit(2)
sys.stderr.write('Parsing files...\n')
data = parse_all_files(data)
sys.stderr.write('Preparing JSON output...\n')
langdata = build_language_data(data)
write_json(langdata, OUTPUT_FNAME, OUTPUT_PREFIX, OUTPUT_SUFFIX, fix_records=True)
regdata = build_regions_data(data)
write_json(regdata,
OUTPUT_FNAME.replace('languages', 'regions'),
OUTPUT_PREFIX.replace('languages', 'regions'),
OUTPUT_SUFFIX.replace('languages', 'regions'))
scriptdata = build_scripts_data(data)
write_json(scriptdata,
OUTPUT_FNAME.replace('languages', 'scripts'),
OUTPUT_PREFIX.replace('languages', 'scripts'),
OUTPUT_SUFFIX.replace('languages', 'scripts'))
if __name__ == '__main__':
main()
| sil-jterm-2015/sfwebchecks | src/scripts/language picker/build-json-language-data.py | Python | mit | 8,241 | 0.004004 |
func int suma(int a, int b)
return a+b
endfunc
func int absdif(int a, int b)
if(a>b)
return a-b
else
return b-a
endif
endfunc
int i, a, b,B[200], aux, A[100]
int z=suma(2*5+a,aux*A[0])-absdif(10000, 500)
int w=10, C[a/b**aux]
double x=0, y, z, pi=3.141592
a=0
b=1
A[0]=10
A[a+b]=pi**x
for(i=0,i<10,i+=1)
print("f(")
print(i)
print(")=")
println(a)
aux=a
a=a+b
b=aux
endfor
read(x)
while(a<b)
println(x)
y=x*pi
z=ln(y)
x+=z**0.5
endwhile
if(x!=0)
println(a)
else
println(b)
endif
if(x!=0)
println(a)
elseif(x<0)
println(b)
elseif(1==1)
println(pi)
else
x+=1
endif
int MAX=1000
int lista[MAX]
int j
for(i=0, i<MAX, i+=1)
read(lista[i])
endfor
for(i=0, i<MAX, i+=1)
for(j=1, j<MAX, j+=1)
if(lista[j]<lista[j-1])
int temp=lista[j]
lista[j]=lista[j-1]
lista[j-1]=temp
endif
endfor
endfor
i=0
while(not (i==10))
println(i)
i+=1
endwhile
| TachoMex/Compiladores-14b | Parser/compilador/programa.py | Python | gpl-2.0 | 888 | 0.108108 |
"""
Integration Tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
"""
from collections import namedtuple
import ddt
import json
import logging
from mock import patch
from nose.plugins.attrib import attr
import textwrap
from celery.states import SUCCESS, FAILURE
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from openedx.core.djangoapps.util.testing import TestConditionalContent
from capa.tests.response_xml_factory import (CodeResponseXMLFactory,
CustomResponseXMLFactory)
from xmodule.modulestore.tests.factories import ItemFactory
from xmodule.modulestore import ModuleStoreEnum
from courseware.model_data import StudentModule
from instructor_task.api import (submit_rescore_problem_for_all_students,
submit_rescore_problem_for_student,
submit_reset_problem_attempts_for_all_students,
submit_delete_problem_state_for_all_students)
from instructor_task.models import InstructorTask
from instructor_task.tasks_helper import upload_grades_csv
from instructor_task.tests.test_base import (
InstructorTaskModuleTestCase,
TestReportMixin,
OPTION_1,
OPTION_2,
)
from capa.responsetypes import StudentInputError
from lms.djangoapps.lms_xblock.runtime import quote_slashes
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
log = logging.getLogger(__name__)
class TestIntegrationTask(InstructorTaskModuleTestCase):
"""
Base class to provide general methods used for "integration" testing of particular tasks.
"""
def _assert_task_failure(self, entry_id, task_type, problem_url_name, expected_message):
"""Confirm that expected values are stored in InstructorTask on task failure."""
instructor_task = InstructorTask.objects.get(id=entry_id)
self.assertEqual(instructor_task.task_state, FAILURE)
self.assertEqual(instructor_task.requester.username, 'instructor')
self.assertEqual(instructor_task.task_type, task_type)
task_input = json.loads(instructor_task.task_input)
self.assertNotIn('student', task_input)
self.assertEqual(task_input['problem_url'], InstructorTaskModuleTestCase.problem_location(problem_url_name).to_deprecated_string())
status = json.loads(instructor_task.task_output)
self.assertEqual(status['exception'], 'ZeroDivisionError')
self.assertEqual(status['message'], expected_message)
# check status returned:
status = InstructorTaskModuleTestCase.get_task_status(instructor_task.task_id)
self.assertEqual(status['message'], expected_message)
@attr(shard=3)
@ddt.ddt
class TestRescoringTask(TestIntegrationTask):
"""
Integration-style tests for rescoring problems in a background task.
Exercises real problems with a minimum of patching.
"""
def setUp(self):
super(TestRescoringTask, self).setUp()
self.initialize_course()
self.create_instructor('instructor')
self.user1 = self.create_student('u1')
self.user2 = self.create_student('u2')
self.user3 = self.create_student('u3')
self.user4 = self.create_student('u4')
self.users = [self.user1, self.user2, self.user3, self.user4]
self.logout()
# set up test user for performing test operations
self.setup_user()
def render_problem(self, username, problem_url_name):
"""
Use ajax interface to request html for a problem.
"""
# make sure that the requested user is logged in, so that the ajax call works
# on the right problem:
self.login_username(username)
# make ajax call:
modx_url = reverse('xblock_handler', kwargs={
'course_id': self.course.id.to_deprecated_string(),
'usage_id': quote_slashes(InstructorTaskModuleTestCase.problem_location(problem_url_name).to_deprecated_string()),
'handler': 'xmodule_handler',
'suffix': 'problem_get',
})
resp = self.client.post(modx_url, {})
return resp
def check_state(self, user, descriptor, expected_score, expected_max_score, expected_attempts=1):
"""
Check that the StudentModule state contains the expected values.
The student module is found for the test course, given the `username` and problem `descriptor`.
Values checked include the number of attempts, the score, and the max score for a problem.
"""
module = self.get_student_module(user.username, descriptor)
self.assertEqual(module.grade, expected_score)
self.assertEqual(module.max_grade, expected_max_score)
state = json.loads(module.state)
attempts = state['attempts']
self.assertEqual(attempts, expected_attempts)
if attempts > 0:
self.assertIn('correct_map', state)
self.assertIn('student_answers', state)
self.assertGreater(len(state['correct_map']), 0)
self.assertGreater(len(state['student_answers']), 0)
# assume only one problem in the subsection and the grades
# are in sync.
expected_subsection_grade = expected_score
course_grade = CourseGradeFactory(user).create(self.course)
        self.assertEqual(
            course_grade.subsection_grade_totals_by_format['Homework'][0].earned,
            expected_subsection_grade,
        )
def submit_rescore_all_student_answers(self, instructor, problem_url_name):
"""Submits the particular problem for rescoring"""
return submit_rescore_problem_for_all_students(self.create_task_request(instructor),
InstructorTaskModuleTestCase.problem_location(problem_url_name))
def submit_rescore_one_student_answer(self, instructor, problem_url_name, student):
"""Submits the particular problem for rescoring for a particular student"""
return submit_rescore_problem_for_student(self.create_task_request(instructor),
InstructorTaskModuleTestCase.problem_location(problem_url_name),
student)
RescoreTestData = namedtuple('RescoreTestData', 'edit, new_expected_scores, new_expected_max')
@ddt.data(
RescoreTestData(edit=dict(correct_answer=OPTION_2), new_expected_scores=(0, 1, 1, 2), new_expected_max=2),
RescoreTestData(edit=dict(num_inputs=2), new_expected_scores=(2, 1, 1, 0), new_expected_max=4),
RescoreTestData(edit=dict(num_inputs=4), new_expected_scores=(2, 1, 1, 0), new_expected_max=8),
RescoreTestData(edit=dict(num_responses=4), new_expected_scores=(2, 1, 1, 0), new_expected_max=4),
RescoreTestData(edit=dict(num_inputs=2, num_responses=4), new_expected_scores=(2, 1, 1, 0), new_expected_max=8),
)
@ddt.unpack
def test_rescoring_option_problem(self, problem_edit, new_expected_scores, new_expected_max):
"""
Run rescore scenario on option problem.
Verify rescoring updates grade after content change.
Original problem definition has:
num_inputs = 1
num_responses = 2
correct_answer = OPTION_1
"""
# get descriptor:
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
descriptor = self.module_store.get_item(location)
# first store answers for each of the separate users:
self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
self.submit_student_answer('u2', problem_url_name, [OPTION_1, OPTION_2])
self.submit_student_answer('u3', problem_url_name, [OPTION_2, OPTION_1])
self.submit_student_answer('u4', problem_url_name, [OPTION_2, OPTION_2])
# verify each user's grade
expected_original_scores = (2, 1, 1, 0)
expected_original_max = 2
for i, user in enumerate(self.users):
self.check_state(user, descriptor, expected_original_scores[i], expected_original_max)
# update the data in the problem definition so the answer changes.
self.redefine_option_problem(problem_url_name, **problem_edit)
# confirm that simply rendering the problem again does not change the grade
self.render_problem('u1', problem_url_name)
self.check_state(self.user1, descriptor, expected_original_scores[0], expected_original_max)
# rescore the problem for only one student -- only that student's grade should change:
self.submit_rescore_one_student_answer('instructor', problem_url_name, self.user1)
self.check_state(self.user1, descriptor, new_expected_scores[0], new_expected_max)
for i, user in enumerate(self.users[1:], start=1): # everyone other than user1
self.check_state(user, descriptor, expected_original_scores[i], expected_original_max)
# rescore the problem for all students
self.submit_rescore_all_student_answers('instructor', problem_url_name)
for i, user in enumerate(self.users):
self.check_state(user, descriptor, new_expected_scores[i], new_expected_max)
def test_rescoring_failure(self):
"""Simulate a failure in rescoring a problem"""
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
expected_message = "bad things happened"
with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
mock_rescore.side_effect = ZeroDivisionError(expected_message)
instructor_task = self.submit_rescore_all_student_answers('instructor', problem_url_name)
self._assert_task_failure(instructor_task.id, 'rescore_problem', problem_url_name, expected_message)
def test_rescoring_bad_unicode_input(self):
"""Generate a real failure in rescoring a problem, with an answer including unicode"""
# At one point, the student answers that resulted in StudentInputErrors were being
# persisted (even though they were not counted as an attempt). That is not possible
# now, so it's harder to generate a test for how such input is handled.
problem_url_name = 'H1P1'
# set up an option problem -- doesn't matter really what problem it is, but we need
# it to have an answer.
self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
# return an input error as if it were a numerical response, with an embedded unicode character:
expected_message = u"Could not interpret '2/3\u03a9' as a number"
with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
mock_rescore.side_effect = StudentInputError(expected_message)
instructor_task = self.submit_rescore_all_student_answers('instructor', problem_url_name)
# check instructor_task returned
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
        self.assertEqual(instructor_task.task_state, SUCCESS)
self.assertEqual(instructor_task.requester.username, 'instructor')
self.assertEqual(instructor_task.task_type, 'rescore_problem')
task_input = json.loads(instructor_task.task_input)
self.assertNotIn('student', task_input)
self.assertEqual(task_input['problem_url'], InstructorTaskModuleTestCase.problem_location(problem_url_name).to_deprecated_string())
status = json.loads(instructor_task.task_output)
self.assertEqual(status['attempted'], 1)
self.assertEqual(status['succeeded'], 0)
self.assertEqual(status['total'], 1)
def define_code_response_problem(self, problem_url_name):
"""
Define an arbitrary code-response problem.
We'll end up mocking its evaluation later.
"""
factory = CodeResponseXMLFactory()
grader_payload = json.dumps({"grader": "ps04/grade_square.py"})
problem_xml = factory.build_xml(initial_display="def square(x):",
answer_display="answer",
grader_payload=grader_payload,
num_responses=2)
ItemFactory.create(parent_location=self.problem_section.location,
category="problem",
display_name=str(problem_url_name),
data=problem_xml)
def test_rescoring_code_problem(self):
"""Run rescore scenario on problem with code submission"""
problem_url_name = 'H1P2'
self.define_code_response_problem(problem_url_name)
# we fully create the CodeResponse problem, but just pretend that we're queuing it:
with patch('capa.xqueue_interface.XQueueInterface.send_to_queue') as mock_send_to_queue:
mock_send_to_queue.return_value = (0, "Successfully queued")
self.submit_student_answer('u1', problem_url_name, ["answer1", "answer2"])
instructor_task = self.submit_rescore_all_student_answers('instructor', problem_url_name)
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, FAILURE)
status = json.loads(instructor_task.task_output)
self.assertEqual(status['exception'], 'NotImplementedError')
self.assertEqual(status['message'], "Problem's definition does not support rescoring.")
status = InstructorTaskModuleTestCase.get_task_status(instructor_task.task_id)
self.assertEqual(status['message'], "Problem's definition does not support rescoring.")
def define_randomized_custom_response_problem(self, problem_url_name, redefine=False):
"""
Defines a custom response problem that uses a random value to determine correctness.
Generated answer is also returned as the `msg`, so that the value can be used as a
correct answer by a test.
If the `redefine` flag is set, then change the definition of correctness (from equals
to not-equals).
"""
factory = CustomResponseXMLFactory()
script = textwrap.dedent("""
def check_func(expect, answer_given):
expected = str(random.randint(0, 100))
return {'ok': answer_given %s expected, 'msg': expected}
""" % ('!=' if redefine else '=='))
problem_xml = factory.build_xml(script=script, cfn="check_func", expect="42", num_responses=1)
if redefine:
descriptor = self.module_store.get_item(
InstructorTaskModuleTestCase.problem_location(problem_url_name)
)
descriptor.data = problem_xml
with self.module_store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, descriptor.location.course_key):
self.module_store.update_item(descriptor, self.user.id)
self.module_store.publish(descriptor.location, self.user.id)
else:
# Use "per-student" rerandomization so that check-problem can be called more than once.
# Using "always" means we cannot check a problem twice, but we want to call once to get the
# correct answer, and call a second time with that answer to confirm it's graded as correct.
# Per-student rerandomization will at least generate different seeds for different users, so
# we get a little more test coverage.
ItemFactory.create(parent_location=self.problem_section.location,
category="problem",
display_name=str(problem_url_name),
data=problem_xml,
metadata={"rerandomize": "per_student"})
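        # Minimal standalone sketch of the generated checker (redefine=False case);
        # `random` and the fixed seed are assumptions here, used only to make the
        # illustration deterministic -- the real script runs inside the capa sandbox:
        #
        #     import random
        #     random.seed(0)
        #     def check_func(expect, answer_given):
        #         expected = str(random.randint(0, 100))
        #         return {'ok': answer_given == expected, 'msg': expected}
        #
        # Because 'msg' carries the expected value, a test can read it back out of
        # correct_map and resubmit it as the correct answer on a second attempt.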
def test_rescoring_randomized_problem(self):
"""Run rescore scenario on custom problem that uses randomize"""
# First define the custom response problem:
problem_url_name = 'H1P1'
self.define_randomized_custom_response_problem(problem_url_name)
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
descriptor = self.module_store.get_item(location)
# run with more than one user
for user in self.users:
# first render the problem, so that a seed will be created for this user
self.render_problem(user.username, problem_url_name)
# submit a bogus answer, in order to get the problem to tell us its real answer
dummy_answer = "1000"
self.submit_student_answer(user.username, problem_url_name, [dummy_answer, dummy_answer])
# we should have gotten the problem wrong, since we're way out of range:
self.check_state(user, descriptor, 0, 1, expected_attempts=1)
# dig the correct answer out of the problem's message
module = self.get_student_module(user.username, descriptor)
state = json.loads(module.state)
correct_map = state['correct_map']
log.info("Correct Map: %s", correct_map)
# only one response, so pull it out:
answer = correct_map.values()[0]['msg']
self.submit_student_answer(user.username, problem_url_name, [answer, answer])
# we should now get the problem right, with a second attempt:
self.check_state(user, descriptor, 1, 1, expected_attempts=2)
# redefine the problem (as stored in Mongo) so that the definition of correct changes
self.define_randomized_custom_response_problem(problem_url_name, redefine=True)
# confirm that simply rendering the problem again does not result in a change
# in the grade (or the attempts):
self.render_problem('u1', problem_url_name)
self.check_state(self.user1, descriptor, 1, 1, expected_attempts=2)
# rescore the problem for only one student -- only that student's grade should change
# (and none of the attempts):
self.submit_rescore_one_student_answer('instructor', problem_url_name, User.objects.get(username='u1'))
for user in self.users:
expected_score = 0 if user.username == 'u1' else 1
self.check_state(user, descriptor, expected_score, 1, expected_attempts=2)
# rescore the problem for all students
self.submit_rescore_all_student_answers('instructor', problem_url_name)
# all grades should change to being wrong (with no change in attempts)
for user in self.users:
self.check_state(user, descriptor, 0, 1, expected_attempts=2)
class TestResetAttemptsTask(TestIntegrationTask):
"""
Integration-style tests for resetting problem attempts in a background task.
Exercises real problems with a minimum of patching.
"""
userlist = ['u1', 'u2', 'u3', 'u4']
def setUp(self):
super(TestResetAttemptsTask, self).setUp()
self.initialize_course()
self.create_instructor('instructor')
for username in self.userlist:
self.create_student(username)
self.logout()
def get_num_attempts(self, username, descriptor):
"""returns number of attempts stored for `username` on problem `descriptor` for test course"""
module = self.get_student_module(username, descriptor)
state = json.loads(module.state)
return state['attempts']
def reset_problem_attempts(self, instructor, location):
"""Submits the current problem for resetting"""
return submit_reset_problem_attempts_for_all_students(self.create_task_request(instructor),
location)
def test_reset_attempts_on_problem(self):
"""Run reset-attempts scenario on option problem"""
# get descriptor:
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
descriptor = self.module_store.get_item(location)
num_attempts = 3
# first store answers for each of the separate users:
for _ in range(num_attempts):
for username in self.userlist:
self.submit_student_answer(username, problem_url_name, [OPTION_1, OPTION_1])
for username in self.userlist:
            self.assertEqual(self.get_num_attempts(username, descriptor), num_attempts)
self.reset_problem_attempts('instructor', location)
for username in self.userlist:
            self.assertEqual(self.get_num_attempts(username, descriptor), 0)
def test_reset_failure(self):
"""Simulate a failure in resetting attempts on a problem"""
problem_url_name = 'H1P1'
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
expected_message = "bad things happened"
with patch('courseware.models.StudentModule.save') as mock_save:
mock_save.side_effect = ZeroDivisionError(expected_message)
instructor_task = self.reset_problem_attempts('instructor', location)
self._assert_task_failure(instructor_task.id, 'reset_problem_attempts', problem_url_name, expected_message)
def test_reset_non_problem(self):
"""confirm that a non-problem can still be successfully reset"""
location = self.problem_section.location
instructor_task = self.reset_problem_attempts('instructor', location)
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, SUCCESS)
class TestDeleteProblemTask(TestIntegrationTask):
"""
Integration-style tests for deleting problem state in a background task.
Exercises real problems with a minimum of patching.
"""
userlist = ['u1', 'u2', 'u3', 'u4']
def setUp(self):
super(TestDeleteProblemTask, self).setUp()
self.initialize_course()
self.create_instructor('instructor')
for username in self.userlist:
self.create_student(username)
self.logout()
def delete_problem_state(self, instructor, location):
"""Submits the current problem for deletion"""
return submit_delete_problem_state_for_all_students(self.create_task_request(instructor), location)
def test_delete_problem_state(self):
"""Run delete-state scenario on option problem"""
# get descriptor:
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
descriptor = self.module_store.get_item(location)
# first store answers for each of the separate users:
for username in self.userlist:
self.submit_student_answer(username, problem_url_name, [OPTION_1, OPTION_1])
# confirm that state exists:
for username in self.userlist:
self.assertIsNotNone(self.get_student_module(username, descriptor))
# run delete task:
self.delete_problem_state('instructor', location)
# confirm that no state can be found:
for username in self.userlist:
with self.assertRaises(StudentModule.DoesNotExist):
self.get_student_module(username, descriptor)
def test_delete_failure(self):
"""Simulate a failure in deleting state of a problem"""
problem_url_name = 'H1P1'
location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
expected_message = "bad things happened"
with patch('courseware.models.StudentModule.delete') as mock_delete:
mock_delete.side_effect = ZeroDivisionError(expected_message)
instructor_task = self.delete_problem_state('instructor', location)
self._assert_task_failure(instructor_task.id, 'delete_problem_state', problem_url_name, expected_message)
def test_delete_non_problem(self):
"""confirm that a non-problem can still be successfully deleted"""
location = self.problem_section.location
instructor_task = self.delete_problem_state('instructor', location)
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, SUCCESS)
class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent, TestIntegrationTask):
"""
Test grade report in cases where there are problems contained within split tests.
"""
def verify_csv_task_success(self, task_result):
"""
Verify that all students were successfully graded by
`upload_grades_csv`.
Arguments:
task_result (dict): Return value of `upload_grades_csv`.
"""
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, task_result)
def verify_grades_in_csv(self, students_grades, ignore_other_columns=False):
"""
Verify that the grades CSV contains the expected grades data.
Arguments:
            students_grades (iterable): An iterable of dictionaries,
                where each dict maps a student to another dict
                representing the grades we expect to see for them in the CSV.
                For example: [{student_a: {'grade': 1.0, 'HW': 1.0}}]
"""
def merge_dicts(*dicts):
"""
Return the union of dicts
Arguments:
dicts: tuple of dicts
"""
return dict([item for d in dicts for item in d.items()])
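        # Example (illustrative): merge_dicts({'a': 1}, {'b': 2}) == {'a': 1, 'b': 2};
        # on key collisions the right-most dict wins: merge_dicts({'a': 1}, {'a': 2}) == {'a': 2}.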
def user_partition_group(user):
"""Return a dict having single key with value equals to students group in partition"""
group_config_hdr_tpl = 'Experiment Group ({})'
return {
group_config_hdr_tpl.format(self.partition.name): self.partition.scheme.get_group_for_user(
self.course.id, user, self.partition, track_function=None
).name
}
self.verify_rows_in_csv(
[
merge_dicts(
{'id': str(student.id), 'username': student.username, 'email': student.email},
grades,
user_partition_group(student)
)
for student_grades in students_grades for student, grades in student_grades.iteritems()
],
ignore_other_columns=ignore_other_columns
)
def test_both_groups_problems(self):
"""
Verify that grade export works when each user partition
receives (different) problems. Each user's grade on their
particular problem should show up in the grade report.
"""
problem_a_url = 'problem_a_url'
problem_b_url = 'problem_b_url'
self.define_option_problem(problem_a_url, parent=self.vertical_a)
self.define_option_problem(problem_b_url, parent=self.vertical_b)
# student A will get 100%, student B will get 50% because
# OPTION_1 is the correct option, and OPTION_2 is the
# incorrect option
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2])
with patch('instructor_task.tasks_helper._get_current_task'):
result = upload_grades_csv(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
{self.student_a: {'grade': '1.0', 'HW': '1.0'}},
{self.student_b: {'grade': '0.5', 'HW': '0.5'}}
],
ignore_other_columns=True
)
def test_one_group_problem(self):
"""
Verify that grade export works when only the Group A user
partition receives a problem. We expect to see a column for
the homework where student_a's entry includes their grade, and
student b's entry shows a 0.
"""
problem_a_url = 'problem_a_url'
self.define_option_problem(problem_a_url, parent=self.vertical_a)
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
with patch('instructor_task.tasks_helper._get_current_task'):
result = upload_grades_csv(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
{self.student_a: {'grade': '1.0', 'HW': '1.0'}},
{self.student_b: {'grade': '0.0', 'HW': '0.0'}}
],
ignore_other_columns=True
)
| deepsrijit1105/edx-platform | lms/djangoapps/instructor_task/tests/test_integration.py | Python | agpl-3.0 | 29,509 | 0.003728 |