| text | repo_name | path | language | license | size | score |
---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 (value) | stringclasses 15 (values) | int64 6–947k | float64 0–0.34 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio mail sending utilities. send_email() is the main API function
people should be using; just check out its docstring.
"""
__revision__ = "$Id$"
from time import sleep
import re
import os
import sys
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email import Encoders
from email.MIMEImage import MIMEImage
from email.Utils import formatdate
from cStringIO import StringIO
from flask import g
from formatter import DumbWriter, AbstractFormatter
from flask.ext.email.message import EmailMultiAlternatives, EmailMessage
from invenio.config import \
CFG_EMAIL_BACKEND, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_URL, \
CFG_SITE_LANG, \
CFG_SITE_ADMIN_EMAIL, \
CFG_MISCUTIL_SMTP_HOST, \
CFG_MISCUTIL_SMTP_PORT, \
CFG_VERSION, \
CFG_DEVEL_SITE, \
CFG_LOGDIR
try:
from invenio.config import \
CFG_MISCUTIL_SMTP_USER,\
CFG_MISCUTIL_SMTP_PASS,\
CFG_MISCUTIL_SMTP_TLS
except ImportError:
CFG_MISCUTIL_SMTP_USER = ''
CFG_MISCUTIL_SMTP_PASS = ''
CFG_MISCUTIL_SMTP_TLS = False
from invenio.errorlib import register_exception
from invenio.miscutil_config import InvenioMiscUtilError
from invenio.jinja2utils import render_template_to_string
from invenio.webinterface_handler_flask_utils import unicodifier
def initialize_email_backend(app):
"""
Prepare application config from Invenio configuration.
@see: https://flask-email.readthedocs.org/en/latest/#configuration
"""
app.config['DEFAULT_FROM_EMAIL'] = CFG_SITE_SUPPORT_EMAIL
app.config['SERVER_EMAIL'] = CFG_SITE_ADMIN_EMAIL
app.config['ADMINS'] = (CFG_SITE_ADMIN_EMAIL, )
app.config['MANAGERS'] = (CFG_SITE_SUPPORT_EMAIL, )
if app.config.get('EMAIL_BACKEND') is None:
if app.config.get('CFG_EMAIL_BACKEND') or CFG_EMAIL_BACKEND:
app.config['EMAIL_BACKEND'] = app.config.get('CFG_EMAIL_BACKEND',
CFG_EMAIL_BACKEND)
elif CFG_MISCUTIL_SMTP_HOST and CFG_MISCUTIL_SMTP_PORT:
app.config['EMAIL_BACKEND'] = 'flask.ext.email.backends.smtp.Mail'
# Defaults to 'flask.ext.email.backends.locmem.Mail'
app.config['EMAIL_HOST'] = CFG_MISCUTIL_SMTP_HOST
app.config['EMAIL_PORT'] = CFG_MISCUTIL_SMTP_PORT
app.config['EMAIL_HOST_USER'] = CFG_MISCUTIL_SMTP_USER
app.config['EMAIL_HOST_PASSWORD'] = CFG_MISCUTIL_SMTP_PASS
app.config['EMAIL_USE_TLS'] = CFG_MISCUTIL_SMTP_TLS
# app.config['EMAIL_USE_SSL']: defaults to False
app.config['EMAIL_FILE_PATH'] = CFG_LOGDIR
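# A minimal usage sketch (hedged: the Flask app below is a placeholder, not
# part of Invenio itself):
#
#     from flask import Flask
#     app = Flask('invenio_demo')
#     initialize_email_backend(app)
#     # EMAIL_BACKEND now points at the SMTP backend when
#     # CFG_MISCUTIL_SMTP_HOST/PORT are set, else at the configured default.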
def scheduled_send_email(fromaddr,
toaddr,
subject="",
content="",
header=None,
footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
user=None,
other_bibtasklet_arguments=None,
replytoaddr=""):
"""
Like send_email, but send an email via the bibsched
infrastructure.
@param fromaddr: sender
@type fromaddr: string
@param toaddr: list of receivers
@type toaddr: string (comma separated) or list of strings
@param subject: the subject
@param content: the body of the message
@param header: optional header, otherwise default is used
@param footer: optional footer, otherwise default is used
@param copy_to_admin: set to 1 in order to send email the admins
@param attempt_times: try at least n times before giving up sending
@param attempt_sleeptime: number of seconds to sleep between two attempts
@param user: the user name to user when scheduling the bibtasklet. If
None, the sender will be used
@param other_bibtasklet_arguments: other arguments to append to the list
of arguments to the call of task_low_level_submission
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@return: the scheduled bibtasklet
"""
from invenio.bibtask import task_low_level_submission
if not isinstance(toaddr, (unicode, str)):
toaddr = ','.join(toaddr)
if not isinstance(replytoaddr, (unicode, str)):
replytoaddr = ','.join(replytoaddr)
toaddr = remove_temporary_emails(toaddr)
if user is None:
user = fromaddr
if other_bibtasklet_arguments is None:
other_bibtasklet_arguments = []
else:
other_bibtasklet_arguments = list(other_bibtasklet_arguments)
    if header is not None:
        other_bibtasklet_arguments.extend(("-a", "header=%s" % header))
    if footer is not None:
        other_bibtasklet_arguments.extend(("-a", "footer=%s" % footer))
return task_low_level_submission(
"bibtasklet", user, "-T", "bst_send_email",
"-a", "fromaddr=%s" % fromaddr,
"-a", "toaddr=%s" % toaddr,
"-a", "replytoaddr=%s" % replytoaddr,
"-a", "subject=%s" % subject,
"-a", "content=%s" % content,
"-a", "copy_to_admin=%s" % copy_to_admin,
"-a", "attempt_times=%s" % attempt_times,
"-a", "attempt_sleeptime=%s" % attempt_sleeptime,
*other_bibtasklet_arguments)
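# A hedged call sketch for the scheduler above (addresses, subject and retry
# policy are placeholders, not Invenio defaults):
#
#     task_id = scheduled_send_email(
#         fromaddr='noreply@example.org',
#         toaddr=['alice@example.org', 'bob@example.org'],
#         subject='Weekly report',
#         content='See the attached summary.',
#         attempt_times=3,
#         attempt_sleeptime=30)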
def send_email(fromaddr,
toaddr,
subject="",
content="",
html_content='',
html_images=None,
header=None,
footer=None,
html_header=None,
html_footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
debug_level=0,
ln=CFG_SITE_LANG,
charset=None,
replytoaddr="",
attachments=None
):
"""Send a forged email to TOADDR from FROMADDR with message created from subjet, content and possibly
header and footer.
@param fromaddr: [string] sender
@param toaddr: [string or list-of-strings] list of receivers (if string, then
receivers are separated by ',')
@param subject: [string] subject of the email
@param content: [string] content of the email
@param html_content: [string] html version of the email
@param html_images: [dict] dictionary of image id, image path
@param header: [string] header to add, None for the Default
@param footer: [string] footer to add, None for the Default
@param html_header: [string] header to add to the html part, None for the Default
@param html_footer: [string] footer to add to the html part, None for the Default
@param copy_to_admin: [int] if 1 add CFG_SITE_ADMIN_EMAIL in receivers
@param attempt_times: [int] number of tries
@param attempt_sleeptime: [int] seconds in between tries
@param debug_level: [int] debug level
@param ln: [string] invenio language
@param charset: [string] the content charset. By default is None which means
to try to encode the email as ascii, then latin1 then utf-8.
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@param attachments: list of paths of files to be attached. Alternatively,
every element of the list could be a tuple: (filename, mimetype)
If sending fails, try to send it ATTEMPT_TIMES, and wait for
ATTEMPT_SLEEPTIME seconds in between tries.
e.g.:
    send_email('foo.bar@cern.ch', 'bar.foo@cern.ch', 'Let\'s try!', 'check 1234', '<strong>check</strong> <em>1234</em><img src="cid:image1">', {'image1': '/tmp/quantum.jpg'})
@return: [bool]: True if email was sent okay, False if it was not.
"""
if html_images is None:
html_images = {}
    if type(toaddr) is str:
        toaddr = toaddr.strip().split(',')
    # remove_temporary_emails() always returns a comma-separated string
    toaddr = remove_temporary_emails(toaddr)
    usebcc = len(toaddr.split(',')) > 1  # More than one address: use Bcc in place of To
    if copy_to_admin and CFG_SITE_ADMIN_EMAIL not in toaddr:
        toaddr += ',' + CFG_SITE_ADMIN_EMAIL
body = forge_email(fromaddr, toaddr, subject, content, html_content,
html_images, usebcc, header, footer, html_header,
html_footer, ln, charset, replytoaddr, attachments)
if attempt_times < 1 or not toaddr:
try:
raise InvenioMiscUtilError(g._('The system is not attempting to send an email from %s, to %s, with body %s.') % (fromaddr, toaddr, body))
except InvenioMiscUtilError:
register_exception()
return False
sent = False
while not sent and attempt_times > 0:
try:
sent = body.send()
except Exception:
register_exception()
if debug_level > 1:
try:
raise InvenioMiscUtilError(g._('Error in sending message. Waiting %s seconds. Exception is %s, while sending email from %s to %s with body %s.') % (attempt_sleeptime, sys.exc_info()[0], fromaddr, toaddr, body))
except InvenioMiscUtilError:
register_exception()
if not sent:
attempt_times -= 1
if attempt_times > 0: # sleep only if we shall retry again
sleep(attempt_sleeptime)
if not sent:
try:
raise InvenioMiscUtilError(g._('Error in sending email from %s to %s with body %s.') % (fromaddr, toaddr, body))
except InvenioMiscUtilError:
register_exception()
return sent
def attach_embed_image(email, image_id, image_path):
"""
Attach an image to the email.
"""
with open(image_path, 'rb') as image_data:
img = MIMEImage(image_data.read())
img.add_header('Content-ID', '<%s>' % image_id)
img.add_header('Content-Disposition', 'attachment', filename=os.path.split(image_path)[1])
email.attach(img)
def forge_email(fromaddr, toaddr, subject, content, html_content='',
html_images=None, usebcc=False, header=None, footer=None,
html_header=None, html_footer=None, ln=CFG_SITE_LANG,
charset=None, replytoaddr="", attachments=None):
"""Prepare email. Add header and footer if needed.
@param fromaddr: [string] sender
@param toaddr: [string or list-of-strings] list of receivers (if string, then
receivers are separated by ',')
@param usebcc: [bool] True for using Bcc in place of To
@param subject: [string] subject of the email
@param content: [string] content of the email
@param html_content: [string] html version of the email
@param html_images: [dict] dictionary of image id, image path
@param header: [string] None for the default header
@param footer: [string] None for the default footer
@param ln: language
@charset: [string] the content charset. By default is None which means
to try to encode the email as ascii, then latin1 then utf-8.
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@param attachments: list of paths of files to be attached. Alternatively,
every element of the list could be a tuple: (filename, mimetype)
@return: forged email as a string"""
if html_images is None:
html_images = {}
content = render_template_to_string('mail_text.tpl',
content=unicodifier(content),
header=unicodifier(header),
footer=unicodifier(footer)
).encode('utf8')
if type(toaddr) is not str:
toaddr = ','.join(toaddr)
if type(replytoaddr) is not str:
replytoaddr = ','.join(replytoaddr)
toaddr = remove_temporary_emails(toaddr)
headers = {}
kwargs = {'to': [], 'cc': [], 'bcc': []}
if replytoaddr:
headers['Reply-To'] = replytoaddr
if usebcc:
headers['Bcc'] = toaddr
kwargs['bcc'] = toaddr.split(',')
kwargs['to'] = ['Undisclosed.Recipients:']
else:
kwargs['to'] = toaddr.split(',')
headers['From'] = fromaddr
headers['Date'] = formatdate(localtime=True)
headers['User-Agent'] = 'Invenio %s at %s' % (CFG_VERSION, CFG_SITE_URL)
if html_content:
html_content = render_template_to_string(
'mail_html.tpl',
content=unicodifier(html_content),
header=unicodifier(html_header),
footer=unicodifier(html_footer)
).encode('utf8')
msg_root = EmailMultiAlternatives(subject=subject, body=content,
from_email=fromaddr,
headers=headers, **kwargs)
msg_root.attach_alternative(html_content, "text/html")
#if not html_images:
# # No image? Attach the HTML to the root
# msg_root.attach(msg_text)
#else:
if html_images:
# Image(s)? Attach the HTML and image(s) as children of a
# "related" block
msg_related = MIMEMultipart('related')
#msg_related.attach(msg_text)
for image_id, image_path in html_images.iteritems():
attach_embed_image(msg_related, image_id, image_path)
msg_root.attach(msg_related)
else:
msg_root = EmailMessage(subject=subject, body=content,
from_email=fromaddr, headers=headers, **kwargs)
if attachments:
from invenio.bibdocfile import _mimes, guess_format_from_url
#old_msg_root = msg_root
#msg_root = MIMEMultipart()
#msg_root.attach(old_msg_root)
for attachment in attachments:
try:
mime = None
if type(attachment) in (list, tuple):
attachment, mime = attachment
if mime is None:
## Automatic guessing of mimetype
mime = _mimes.guess_type(attachment)[0]
if mime is None:
ext = guess_format_from_url(attachment)
mime = _mimes.guess_type("foo" + ext)[0]
if not mime:
mime = 'application/octet-stream'
part = MIMEBase(*mime.split('/', 1))
                with open(attachment, 'rb') as attachment_file:
                    part.set_payload(attachment_file.read())
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(attachment))
msg_root.attach(part)
            except Exception:
register_exception(alert_admin=True, prefix="Can't attach %s" % attachment)
return msg_root
RE_NEWLINES = re.compile(r'<br\s*/?>|</p>', re.I)
RE_SPACES = re.compile(r'\s+')
RE_HTML_TAGS = re.compile(r'<.+?>')
def email_strip_html(html_content):
"""Strip html tags from html_content, trying to respect formatting."""
html_content = RE_SPACES.sub(' ', html_content)
html_content = RE_NEWLINES.sub('\n', html_content)
html_content = RE_HTML_TAGS.sub('', html_content)
html_content = html_content.split('\n')
out = StringIO()
out_format = AbstractFormatter(DumbWriter(out))
for row in html_content:
out_format.add_flowing_data(row)
out_format.end_paragraph(1)
return out.getvalue()
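# Illustrative behaviour sketch (assumed input/output, not a doctest):
#
#     email_strip_html('<p>Hello<br/>world</p>')
#     # -> "Hello\nworld" reflowed by DumbWriter: <br/> and </p> become
#     # newlines, every other tag is dropped, whitespace is collapsed.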
def remove_temporary_emails(emails):
"""
Removes the temporary emails (which are constructed randomly when user logs in
with an external authentication provider which doesn't supply an email
address) from an email list.
@param emails: email list (if string, then receivers are separated by ',')
@type emails: str|[str]
@rtype: str
"""
from invenio.access_control_config import CFG_TEMP_EMAIL_ADDRESS
if not isinstance(emails, (str, unicode)):
emails = ','.join(emails)
# Remove all of the spaces
emails = emails.replace(' ', '')
    # Remove all of the emails formatted like CFG_TEMP_EMAIL_ADDRESS.
    # Note: the fourth positional argument of re.sub() is ``count``, so the
    # flags must be passed by keyword.
    emails = re.sub((CFG_TEMP_EMAIL_ADDRESS % r'\w+') + '(,|$)', '', emails,
                    flags=re.IGNORECASE)
    # Remove all consecutive commas
    emails = re.sub(',+', ',', emails)
    # Remove any leading/trailing comma (also safe on an empty string)
    emails = emails.strip(',')
return emails
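# Illustrative sketch, assuming CFG_TEMP_EMAIL_ADDRESS expands to a pattern
# such as 'temporary+%s@invenio' (the real value lives in
# access_control_config):
#
#     remove_temporary_emails('a@cern.ch, temporary+x1@invenio,b@cern.ch')
#     # -> 'a@cern.ch,b@cern.ch'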
| EUDAT-B2SHARE/invenio-old | modules/miscutil/lib/mailutils.py | Python | gpl-2.0 | 17,746 | 0.00231 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'sign_in.ui'
#
# Created: Mon Jun 22 00:34:42 2015
# by: PyQt5 UI code generator 5.2.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_sign_inDialog(object):
def setupUi(self, sign_inDialog):
sign_inDialog.setObjectName("sign_inDialog")
sign_inDialog.setWindowModality(QtCore.Qt.ApplicationModal)
sign_inDialog.resize(400, 300)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(sign_inDialog.sizePolicy().hasHeightForWidth())
sign_inDialog.setSizePolicy(sizePolicy)
sign_inDialog.setStyleSheet("QDialog {\n"
" background: white;\n"
" border: 1px solid #29668f;\n"
"}\n"
"QPushButton {\n"
" background:-webkit-gradient(linear, left top, left bottom, color-stop(0.05, #599bb3), color-stop(1, #408c99));\n"
" background:-moz-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-webkit-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-o-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-ms-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:linear-gradient(to bottom, #599bb3 5%, #408c99 100%);\n"
" background-color:#599bb3;\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" color:#ffffff;\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
" text-decoration:none;\n"
"}\n"
"QPushButton:hover {\n"
" background:-webkit-gradient(linear, left top, left bottom, color-stop(0.05, #408c99), color-stop(1, #599bb3));\n"
" background:-moz-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-webkit-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-o-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-ms-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:linear-gradient(to bottom, #408c99 5%, #599bb3 100%);\n"
" background-color:#408c99;\n"
"}\n"
"QPushButton:active {\n"
" position:relative;\n"
" top:1px;\n"
"}\n"
"QLineEdit {\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
"}\n"
"QLineEdit:disabled {\n"
" background: #8C8C8C;\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
"}\n"
"QLabel{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:#599bb3;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"QLabel:disabled{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:grey;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"\n"
"QCheckBox{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:#599bb3;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"\n"
"\n"
"")
self.gridLayout_2 = QtWidgets.QGridLayout(sign_inDialog)
self.gridLayout_2.setObjectName("gridLayout_2")
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_2.addItem(spacerItem, 3, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 1, 2, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 0, 1, 1)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.txt_serial = QtWidgets.QLineEdit(sign_inDialog)
self.txt_serial.setEnabled(True)
self.txt_serial.setObjectName("txt_serial")
self.gridLayout.addWidget(self.txt_serial, 0, 1, 1, 1)
self.txt_id = QtWidgets.QLineEdit(sign_inDialog)
self.txt_id.setObjectName("txt_id")
self.gridLayout.addWidget(self.txt_id, 1, 1, 1, 1)
self.lb_serial = QtWidgets.QLabel(sign_inDialog)
self.lb_serial.setEnabled(True)
self.lb_serial.setObjectName("lb_serial")
self.gridLayout.addWidget(self.lb_serial, 0, 0, 1, 1)
self.lb_id = QtWidgets.QLabel(sign_inDialog)
self.lb_id.setObjectName("lb_id")
self.gridLayout.addWidget(self.lb_id, 1, 0, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 1, 1, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(sign_inDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout_2.addWidget(self.buttonBox, 4, 1, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_2.addItem(spacerItem3, 0, 1, 1, 1)
self.cbx_laptop = QtWidgets.QCheckBox(sign_inDialog)
self.cbx_laptop.setLayoutDirection(QtCore.Qt.RightToLeft)
self.cbx_laptop.setObjectName("cbx_laptop")
self.gridLayout_2.addWidget(self.cbx_laptop, 2, 1, 1, 1)
self.retranslateUi(sign_inDialog)
self.buttonBox.accepted.connect(sign_inDialog.accept)
self.buttonBox.rejected.connect(sign_inDialog.reject)
QtCore.QMetaObject.connectSlotsByName(sign_inDialog)
def retranslateUi(self, sign_inDialog):
_translate = QtCore.QCoreApplication.translate
sign_inDialog.setWindowTitle(_translate("sign_inDialog", "Sign In Dialog"))
self.lb_serial.setText(_translate("sign_inDialog", "Serial No."))
self.lb_id.setText(_translate("sign_inDialog", "ID No."))
self.cbx_laptop.setText(_translate("sign_inDialog", "No laptop?"))
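# A minimal launcher sketch for the generated class above (standard PyQt5
# boilerplate; this wiring is an assumption, not produced by pyuic5):
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    ui = Ui_sign_inDialog()
    ui.setupUi(dialog)
    dialog.show()
    sys.exit(app.exec_())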
| dita-programming/dita-access | view/ui_sign_in.py | Python | gpl-2.0 | 6,113 | 0.014068 |
"""Tests for the widgets of the ``django_libs`` app."""
from django.test import TestCase
from ..widgets import ColorPickerWidget
class ColorPickerWidgetTestCase(TestCase):
"""Tests for the ``ColorPickerWidget`` widget."""
longMessage = True
def setUp(self):
self.widget = ColorPickerWidget()
def test_render_tag(self):
self.assertIn('value="ffffff"', self.widget.render('field', 'ffffff'),
msg=('Should render the input form.'))
| bitmazk/django-libs | django_libs/tests/widget_tests.py | Python | mit | 488 | 0 |
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.utils import timezone
from django.http import HttpResponse, HttpRequest
from zilencer.models import Deployment, RemotePushDeviceToken, RemoteZulipServer
from zerver.decorator import has_request_variables, REQ
from zerver.lib.error_notify import do_report_error
from zerver.lib.push_notifications import send_android_push_notification, \
send_apple_push_notification
from zerver.lib.request import JsonableError
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_dict, check_int
from zerver.models import UserProfile, PushDeviceToken, Realm
from zerver.views.push_notifications import validate_token
from typing import Any, Dict, Optional, Union, Text, cast
def validate_entity(entity):
# type: (Union[UserProfile, RemoteZulipServer]) -> None
if not isinstance(entity, RemoteZulipServer):
raise JsonableError(_("Must validate with valid Zulip server API key"))
def validate_bouncer_token_request(entity, token, kind):
# type: (Union[UserProfile, RemoteZulipServer], bytes, int) -> None
if kind not in [RemotePushDeviceToken.APNS, RemotePushDeviceToken.GCM]:
raise JsonableError(_("Invalid token type"))
validate_entity(entity)
validate_token(token, kind)
@has_request_variables
def report_error(request, deployment, type=REQ(), report=REQ(validator=check_dict([]))):
# type: (HttpRequest, Deployment, Text, Dict[str, Any]) -> HttpResponse
return do_report_error(deployment.name, type, report)
@has_request_variables
def remote_server_register_push(request, entity, user_id=REQ(),
token=REQ(), token_kind=REQ(validator=check_int), ios_app_id=None):
# type: (HttpRequest, Union[UserProfile, RemoteZulipServer], int, bytes, int, Optional[Text]) -> HttpResponse
validate_bouncer_token_request(entity, token, token_kind)
server = cast(RemoteZulipServer, entity)
# If a user logged out on a device and failed to unregister,
# we should delete any other user associations for this token
# & RemoteServer pair
RemotePushDeviceToken.objects.filter(
token=token, kind=token_kind, server=server).exclude(user_id=user_id).delete()
# Save or update
remote_token, created = RemotePushDeviceToken.objects.update_or_create(
user_id=user_id,
server=server,
kind=token_kind,
token=token,
defaults=dict(
ios_app_id=ios_app_id,
last_updated=timezone.now()))
return json_success()
@has_request_variables
def remote_server_unregister_push(request, entity, token=REQ(),
token_kind=REQ(validator=check_int), ios_app_id=None):
# type: (HttpRequest, Union[UserProfile, RemoteZulipServer], bytes, int, Optional[Text]) -> HttpResponse
validate_bouncer_token_request(entity, token, token_kind)
server = cast(RemoteZulipServer, entity)
deleted = RemotePushDeviceToken.objects.filter(token=token,
kind=token_kind,
server=server).delete()
if deleted[0] == 0:
return json_error(_("Token does not exist"))
return json_success()
@has_request_variables
def remote_server_notify_push(request, # type: HttpRequest
entity, # type: Union[UserProfile, RemoteZulipServer]
payload=REQ(argument_type='body') # type: Dict[str, Any]
):
# type: (...) -> HttpResponse
validate_entity(entity)
server = cast(RemoteZulipServer, entity)
user_id = payload['user_id']
gcm_payload = payload['gcm_payload']
apns_payload = payload['apns_payload']
android_devices = list(RemotePushDeviceToken.objects.filter(
user_id=user_id,
kind=RemotePushDeviceToken.GCM,
server=server
))
apple_devices = list(RemotePushDeviceToken.objects.filter(
user_id=user_id,
kind=RemotePushDeviceToken.APNS,
server=server
))
if android_devices:
send_android_push_notification(android_devices, gcm_payload, remote=True)
# TODO: set badge count in a better way
if apple_devices:
send_apple_push_notification(user_id, apple_devices,
badge=1, zulip=apns_payload)
return json_success()
| vabs22/zulip | zilencer/views.py | Python | apache-2.0 | 4,479 | 0.003572 |
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Francois Gallard
# @author : Matthieu MEAUX
#
import numpy
from numpy import array, transpose, outer, ones, zeros, copy, divide, diag, dot
from numpy.linalg import norm, solve
class DLLMMesh:
"""
    Class that deals with geometry for the lifting-line wing solver
"""
def __init__(self, LLW,verbose = 0):
self.__LLW = LLW
self.__verbose = verbose
self.__ndv = self.get_geom().get_ndv()
self.__N = None
self.__K = None
self.__dK_dchi = None
self.recompute()
#-- Accessors
def get_airfoils(self):
return self.__LLW.get_airfoils()
def get_tag(self):
return self.__LLW.get_tag()
def get_geom(self):
return self.__LLW.get_geom()
def get_OC(self):
return self.__LLW.get_OC()
def get_grad_active(self):
return self.__LLW.get_grad_active()
def get_K(self):
return self.__K
def get_dK_dchi(self):
return self.__dK_dchi
#-- Methods
def recompute(self):
self.__N = self.get_geom().get_n_sect()
# Set computational geometry
self.__K = None
self.__dK_dchi = None
self.__setGeom()
def __setGeom(self):
'''
Sets the geometry of the wing, builds the lifting line metric matrix
'''
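        # Discretisation note (added commentary, matching the code below):
        # classical lifting-line theory gives the induced downwash
        #     w(y) = (1/(4*pi)) * integral over span of (dGamma/deta)/(y - eta) deta
        # Kmetric holds the 1/(4*pi*(y - eta)) kernel evaluated between the
        # section centres y and vortex edges eta, DdGammaDy_DGamma differences
        # the section circulations, and the product gives w = K . Gamma.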
#V = self.get_OC().get_V()
eta = self.get_geom().get_eta()[1,:]
y = self.get_geom().get_XYZ()[1,:]
YminEta=transpose(outer(ones([self.__N+1]),y))-outer(ones([self.__N]),eta)
Kmetric=divide(ones([self.__N,self.__N+1]),YminEta)
Kmetric/=4.*numpy.pi
DdGammaDy_DGamma = zeros([self.__N+1,self.__N])
DdGammaDy_DGamma[0:self.__N,:] = diag(ones([self.__N]))
DdGammaDy_DGamma[self.__N,:] = 0.0
DdGammaDy_DGamma[1:self.__N+1,:]-= diag(ones([self.__N]))
self.__K = - dot(Kmetric,DdGammaDy_DGamma)
if self.get_grad_active():
eta_grad = self.get_geom().get_eta_grad()[1,:,:]
y_grad = self.get_geom().get_XYZ_grad()[1,:,:]
YminEta_grad=zeros((self.__N,self.__N+1,self.__ndv))
for n in xrange(self.__ndv):
YminEta_grad[:,:,n] = transpose(outer(ones([self.__N+1]),y_grad[:,n]))-outer(ones([self.__N]),eta_grad[:,n])
dKmetric_dchi=zeros((self.__N,self.__N+1,self.__ndv))
for n in xrange(self.__ndv):
dKmetric_dchi[:,:,n]=-YminEta_grad[:,:,n]/YminEta[:,:]**2
dKmetric_dchi/=4.*numpy.pi
self.__dK_dchi = zeros((self.__N,self.__N,self.__ndv))
for n in xrange(self.__ndv):
self.__dK_dchi[:,:,n]=-dot(dKmetric_dchi[:,:,n],DdGammaDy_DGamma)
| matthieu-meaux/DLLM | modules/DLLM/DLLMKernel/DLLMMesh.py | Python | gpl-2.0 | 3,858 | 0.023587 |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import time
import pytest
import uamqp
from uamqp import authentication, errors, c_uamqp, compat
from azure.eventhub import (
EventData,
EventHubSharedKeyCredential,
EventHubProducerClient,
EventHubConsumerClient
)
from azure.eventhub.exceptions import OperationTimeoutError
@pytest.mark.liveTest
def test_send_with_long_interval_sync(live_eventhub, sleep):
test_partition = "0"
sender = EventHubProducerClient(live_eventhub['hostname'], live_eventhub['event_hub'],
EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
with sender:
batch = sender.create_batch(partition_id=test_partition)
batch.add(EventData(b"A single event"))
sender.send_batch(batch)
if sleep:
time.sleep(250)
else:
sender._producers[test_partition]._handler._connection._conn.destroy()
batch = sender.create_batch(partition_id=test_partition)
batch.add(EventData(b"A single event"))
sender.send_batch(batch)
received = []
uri = "sb://{}/{}".format(live_eventhub['hostname'], live_eventhub['event_hub'])
sas_auth = authentication.SASTokenAuth.from_shared_access_key(
uri, live_eventhub['key_name'], live_eventhub['access_key'])
source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
live_eventhub['hostname'],
live_eventhub['event_hub'],
live_eventhub['consumer_group'],
test_partition)
receiver = uamqp.ReceiveClient(source, auth=sas_auth, debug=False, timeout=5000, prefetch=500)
try:
receiver.open()
# receive_message_batch() returns immediately once it receives any messages before the max_batch_size
# and timeout reach. Could be 1, 2, or any number between 1 and max_batch_size.
# So call it twice to ensure the two events are received.
received.extend([EventData._from_message(x) for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)])
received.extend([EventData._from_message(x) for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)])
finally:
receiver.close()
assert len(received) == 2
assert list(received[0].body)[0] == b"A single event"
@pytest.mark.liveTest
def test_send_connection_idle_timeout_and_reconnect_sync(connstr_receivers):
connection_str, receivers = connstr_receivers
client = EventHubProducerClient.from_connection_string(conn_str=connection_str, idle_timeout=10)
with client:
ed = EventData('data')
sender = client._create_producer(partition_id='0')
with sender:
sender._open_with_retry()
time.sleep(11)
sender._unsent_events = [ed.message]
ed.message.on_send_complete = sender._on_outcome
with pytest.raises((uamqp.errors.ConnectionClose,
uamqp.errors.MessageHandlerError, OperationTimeoutError)):
# Mac may raise OperationTimeoutError or MessageHandlerError
sender._send_event_data()
sender._send_event_data_with_retry()
retry = 0
while retry < 3:
try:
messages = receivers[0].receive_message_batch(max_batch_size=10, timeout=10000)
if messages:
received_ed1 = EventData._from_message(messages[0])
assert received_ed1.body_as_str() == 'data'
break
except compat.TimeoutException:
retry += 1
@pytest.mark.liveTest
def test_receive_connection_idle_timeout_and_reconnect_sync(connstr_senders):
connection_str, senders = connstr_senders
client = EventHubConsumerClient.from_connection_string(
conn_str=connection_str,
consumer_group='$default',
idle_timeout=10
)
def on_event_received(event):
on_event_received.event = event
with client:
consumer = client._create_consumer("$default", "0", "-1", on_event_received)
with consumer:
consumer._open()
time.sleep(11)
ed = EventData("Event")
senders[0].send(ed)
consumer._handler.do_work()
assert consumer._handler._connection._state == c_uamqp.ConnectionState.DISCARDING
duration = 10
now_time = time.time()
end_time = now_time + duration
while now_time < end_time:
consumer.receive()
time.sleep(0.01)
now_time = time.time()
assert on_event_received.event.body_as_str() == "Event"
| Azure/azure-sdk-for-python | sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_reconnect.py | Python | mit | 4,935 | 0.003647 |
from pymongo import MongoClient
import multiprocessing
import threading
import datetime
import time
cache = MongoClient(host='10.8.8.111', port=27017, connect=False)['cache25']
db30 = MongoClient(host='10.8.8.111', port=27017, connect=False)['onionsBackupOnline']
events = db30['events']
userAttr = cache['userAttr']
deviceAttr = cache['deviceAttr']
eventFlow = cache['eventFlow']
tu = cache['tu']
td = cache['td']
NUM_OF_PROCESS = 6
NUM_OF_WORKERS = 10
START_DATE_BEIJING = datetime.datetime(2016, 1, 10, 0)
END_DATE_BEIJING = datetime.datetime(2016, 1, 17, 0)
START_DATE = START_DATE_BEIJING - datetime.timedelta(hours=8)
END_DATE = END_DATE_BEIJING - datetime.timedelta(hours=8)
n = 0
n_lock = threading.Lock()
def assign():
    # Hand out the next hour index to a worker thread; the lock keeps the
    # check-and-increment atomic across threads.
    global n
    with n_lock:
        if n < num_of_hours:
            n += 1
            return n
        else:
            return 'done'
def process(start, end):
global mobile
global num_of_hours
global beginning
mobile = ['android', 'iOS']
num_of_hours = int((end-start).total_seconds() / 3600)
print multiprocessing.current_process().name + ' total hours: %d' % num_of_hours
beginning = start
threads = []
for i in range(NUM_OF_WORKERS):
t = threading.Thread(target=worker)
threads.append(t)
t.start()
for t in threads:
t.join()
print multiprocessing.current_process().name + ' finished...'
def worker():
finish = False
while not finish:
res = assign()
if res == 'done':
finish = True
else:
start_time = beginning + datetime.timedelta(hours=res-1)
end_time = beginning + datetime.timedelta(hours=res)
# eventFlow: device, user, startTime, endTime, eventFlow
pipeline_event = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"device": {"$exists": True, "$nin": ["", None]}
}
},
{
"$sort": {
"serverTime": 1
}
},
{
"$group": {
"_id": {"device": "$device", "user": "$user"},
"startTime": {"$first": "$serverTime"},
"endTime": {"$last": "$serverTime"},
"eventFlow": {"$push": "$eventKey"},
"platform": {"$first": "$platform2"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id.device",
"user": "$_id.user",
"startTime": 1,
"endTime": 1,
"eventFlow": 1,
"platform": {"$toLower": "$platform"}
}
}
]
event_flow = list(events.aggregate(pipeline_event, allowDiskUse=True))
if len(event_flow):
eventFlow.insert_many(event_flow)
# deviceAttr: device, activateDate, recentSession, platform, users
pipeline_device = [
{
"$match": {
# "platform2": {"$in": mobile},
"platform": {"$in": ["web", "app"]},
"device": {"$exists": True, "$nin": ["", None]},
"serverTime": {"$gte": start_time, "$lt": end_time}
}
},
{
"$group": {
"_id": "$device",
"activateDate": {"$min": "$serverTime"},
"recentSession": {"$max": "$serverTime"},
"users": {"$addToSet": "$user"},
"platform": {"$first": "$platform2"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id",
"activateDate": 1,
"recentSession": 1,
"platform": {"$toLower": "$platform"},
"users": 1
}
}
]
device = list(events.aggregate(pipeline_device, allowDiskUse=True))
if device:
deviceAttr.insert_many(device)
# userAttr: user, activateDate, recentPCSession , recentMobileSession
pipeline_pc = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"platform2": 'PC',
"user": {"$exists": True}
}
},
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$serverTime"},
"recentPCSession": {"$max": "$serverTime"}
}
},
{
"$project": {
"_id": 0,
"user": "$_id",
"activateDate": 1,
"recentPCSession": 1,
}
}
]
pipeline_mobile = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"platform2": {"$in": mobile},
"user": {"$exists": True}
}
},
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$serverTime"},
"recentMobileSession": {"$max": "$serverTime"}
}
},
{
"$project": {
"_id": 0,
"user": "$_id",
"activateDate": 1,
"recentMobileSession": 1,
}
}
]
users_pc = list(events.aggregate(pipeline_pc, allowDiskUse=True))
users_mobile = list(events.aggregate(pipeline_mobile, allowDiskUse=True))
if users_pc:
userAttr.insert_many(users_pc)
if users_mobile:
userAttr.insert_many(users_mobile)
# print 'Finished processing data from ', start_time, ' to ', end_time
def merge_device():
print 'Start merge device......'
pipeline = [
{
"$match": {
"device": {"$exists": True, "$nin": ["", None]}
}
},
{
"$unwind": {
"path": "$users",
"preserveNullAndEmptyArrays": True
}
},
{
"$group": {
"_id": '$device',
"activateDate": {"$min": "$activateDate"},
"recentSession": {"$max": "$recentSession"},
"users": {"$addToSet": "$users"},
"platform": {"$first": "$platform"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id",
"activateDate": 1,
"recentSession": 1,
"users": 1,
"platform": 1
}
}
]
devices = list(deviceAttr.aggregate(pipeline, allowDiskUse=True))
td.insert_many(devices)
deviceAttr.drop()
deviceAttr.insert_many(devices)
print 'Finished merge device....'
print '----------------------------------'
def merge_user():
print 'Start merge user......'
pipeline_update = [
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$activateDate"},
"recentPCSession": {"$max": "$recentPCSession"},
"recentMobileSession": {"$max": "$recentMobileSession"}
}
},
{
"$project": {
"_id": 0,
"user": "$_id",
"activateDate": 1,
"recentPCSession": 1,
"recentMobileSession": 1
}
}
]
users = list(userAttr.aggregate(pipeline_update, allowDiskUse=True))
tu.insert_many(users)
userAttr.drop()
userAttr.insert_many(users)
print 'Finished merge user.....'
print '----------------------------------'
if __name__ == '__main__':
print 'Start running cache script for data from ', START_DATE, 'to ', END_DATE
s = time.time()
pool = multiprocessing.Pool(processes=NUM_OF_PROCESS)
delta = (END_DATE - START_DATE).total_seconds()
hours = int(delta/3600)
interval = hours / NUM_OF_PROCESS
for i in range(NUM_OF_PROCESS):
pool.apply_async(process, (START_DATE + datetime.timedelta(hours=i*interval), START_DATE + datetime.timedelta(hours=(i+1)*interval)))
pool.close()
pool.join()
print '----------------------------------'
td.drop()
tu.drop()
merge_device()
merge_user()
e = time.time()
print 'Done.......1.10-1.17'
print 'Total time: ', ((e-s)/60), ' min'
print 'endTime: ', datetime.datetime.now()
| summer-liu/events_cache_scripts | report/cache10.py | Python | mit | 9,568 | 0.000941 |
# coding:utf-8
# Test multithreading
import threading
import time
from utils import fn_timer
from multiprocessing.dummy import Pool
import requests
from utils import urls
# Time-consuming task: listen to music
def music(name):
print 'I am listening to music {0}'.format(name)
time.sleep(1)
# Time-consuming task: watch a movie
def movie(name):
print 'I am watching movie {0}'.format(name)
time.sleep(5)
# Single-threaded: sequentially listen to 10 songs and watch 2 movies
@fn_timer
def single_thread():
for i in range(10):
music(i)
for i in range(2):
movie(i)
# Multi-threaded: listen to 10 songs and watch 2 movies
@fn_timer
def multi_thread():
    # List of worker threads
    threads = []
    for i in range(10):
        # Create a thread: target is the task function, args is its argument tuple
        threads.append(threading.Thread(target = music,args = (i,)))
    for i in range(2):
        threads.append(threading.Thread(target = movie,args = (i,)))
    for t in threads:
        # Mark as a daemon thread
        t.setDaemon(True)
        # Start the thread
        t.start()
    for t in threads:
        t.join()
# Using a thread pool: listen to 10 songs and watch 2 movies
@fn_timer
def use_pool():
    # Set the pool size to 20; if omitted, it defaults to the number of CPU cores
    pool = Pool(20)
    pool.map(movie,range(2))
    pool.map(music,range(10))
    pool.close()
    pool.join()
# Application: download several web pages using a single thread
@fn_timer
def download_using_single_thread(urls):
resps = []
for url in urls:
resp = requests.get(url)
resps.append(resp)
return resps
# Application: download several web pages using multiple threads
@fn_timer
def download_using_multi_thread(urls):
threads = []
for url in urls:
threads.append(threading.Thread(target = requests.get,args = (url,)))
for t in threads:
t.setDaemon(True)
t.start()
for t in threads:
t.join()
# Application: download several web pages using a thread pool
@fn_timer
def download_using_pool(urls):
    pool = Pool(20)
    # The first argument is the function; the second is an iterable of its arguments
    resps = pool.map(requests.get,urls)
    pool.close()
    pool.join()
    return resps
def main():
    # Test the single-threaded version
    # single_thread()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:single_thread in 20.14s]
'''
    # Test the multi-threaded version
    # multi_thread()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:multi_thread in 5.02s]
'''
    # Test the thread pool
    # use_pool()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:use_pool in 6.12s]
'''
    # 1. Using a single thread
    # resps = download_using_single_thread(urls)
    # print len(resps)
    # Output:
'''
[finished function:download_using_single_thread in 6.18s]
20
'''
    # 2. Using multiple threads
    # download_using_multi_thread(urls)
    # Output:
'''
[finished function:download_using_multi_thread in 0.73s]
'''
    # 3. Using a thread pool
    resps = download_using_pool(urls)
    print len(resps)
    # Output:
'''
[finished function:download_using_pool in 0.84s]
20
'''
if __name__ == '__main__':
main()
| dnxbjyj/python-basic | concurrence/multi_threading.py | Python | mit | 4,446 | 0.007172 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sample_app.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| bianchimro/django-search-views | sample_app/manage.py | Python | mit | 808 | 0 |
from collections import namedtuple
from django.core.management.base import BaseCommand
from tqdm import tqdm
Progress = namedtuple(
'Progress',
[
'progress_fraction',
'message',
'extra_data',
'level',
]
)
class ProgressTracker():
def __init__(self, total=100, level=0, update_callback=None):
# set default values
self.progress = 0
self.message = ""
self.extra_data = None
# store provided arguments
self.total = total
self.level = level
self.update_callback = update_callback
# initialize the tqdm progress bar
self.progressbar = tqdm(total=total)
def update_progress(self, increment=1, message="", extra_data=None):
self.progressbar.update(increment)
self.progress += increment
self.message = message
self.extra_data = extra_data
if callable(self.update_callback):
p = self.get_progress()
self.update_callback(p.progress_fraction, p)
def get_progress(self):
return Progress(
progress_fraction=self.progress / float(self.total),
message=self.message,
extra_data=self.extra_data,
level=self.level,
)
def __enter__(self):
return self.update_progress
def __exit__(self, *exc_details):
if self.progressbar:
self.progressbar.close()
class AsyncCommand(BaseCommand):
"""A management command with added convenience functions for displaying
progress to the user.
    Rather than implementing handle() (as BaseCommand does), subclasses must
    implement handle_async(), which accepts the same arguments as handle().
    If run from the command line, AsyncCommand displays a progress bar to the
    user. If run asynchronously through kolibri.tasks.schedule_command(),
AsyncCommand sends results through the Progress class to the main Django
process. Anyone who knows the task id for the command instance can check
the intermediate progress by looking at the task's AsyncResult.result
variable.
"""
    def __init__(self, *args, **kwargs):
        self.progresstrackers = []
        super(AsyncCommand, self).__init__(*args, **kwargs)
def _update_all_progress(self, progress_fraction, progress):
if callable(self.update_progress):
progress_list = [p.get_progress() for p in self.progresstrackers]
self.update_progress(progress_list[0].progress_fraction, progress_list)
def handle(self, *args, **options):
self.update_progress = options.pop("update_state", None)
return self.handle_async(*args, **options)
def start_progress(self, total=100):
level = len(self.progresstrackers)
tracker = ProgressTracker(total=total, level=level, update_callback=self._update_all_progress)
self.progresstrackers.append(tracker)
return tracker
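# A hypothetical subclass sketch (command name and work loop are assumptions,
# not part of kolibri itself):
#
#     class Command(AsyncCommand):
#         def handle_async(self, *args, **options):
#             items = range(100)
#             with self.start_progress(total=len(items)) as update_progress:
#                 for item in items:
#                     do_work(item)  # placeholder for real work
#                     update_progress(1, message="processed %s" % item)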
| jamalex/kolibri | kolibri/tasks/management/commands/base.py | Python | mit | 2,905 | 0.000688 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ava.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| benhoff/ava | src/manage.py | Python | gpl-3.0 | 246 | 0 |
META = [{
'lookup': 'city',
'tag': 'city',
'path': ['names','en'],
},{
'lookup': 'continent',
'tag': 'continent',
'path': ['names','en'],
},{
'lookup': 'continent_code',
'tag': 'continent',
'path': ['code'],
},{
'lookup': 'country',
'tag': 'country',
'path': ['names','en'],
},{
'lookup': 'iso_code',
'tag': 'country',
'path': ['iso_code'],
},{
'lookup': 'latitude',
'tag': 'location',
'path': ['latitude'],
},{
'lookup': 'longitude',
'tag': 'location',
'path': ['longitude'],
},{
'lookup': 'metro_code',
'tag': 'location',
'path': ['metro_code'],
},{
'lookup': 'postal_code',
'tag': 'postal',
'path': ['code'],
}]
PORTMAP = {
0:"DoS", # Denial of Service
1:"ICMP", # ICMP
20:"FTP", # FTP Data
21:"FTP", # FTP Control
22:"SSH", # SSH
23:"TELNET", # Telnet
25:"EMAIL", # SMTP
43:"WHOIS", # Whois
53:"DNS", # DNS
80:"HTTP", # HTTP
88:"AUTH", # Kerberos
109:"EMAIL", # POP v2
110:"EMAIL", # POP v3
115:"FTP", # SFTP
118:"SQL", # SQL
143:"EMAIL", # IMAP
156:"SQL", # SQL
161:"SNMP", # SNMP
220:"EMAIL", # IMAP v3
389:"AUTH", # LDAP
443:"HTTPS", # HTTPS
445:"SMB", # SMB
636:"AUTH", # LDAP of SSL/TLS
1433:"SQL", # MySQL Server
1434:"SQL", # MySQL Monitor
3306:"SQL", # MySQL
3389:"RDP", # RDP
5900:"RDP", # VNC:0
5901:"RDP", # VNC:1
5902:"RDP", # VNC:2
5903:"RDP", # VNC:3
8080:"HTTP", # HTTP Alternative
}
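# Illustrative lookup sketch (the "OTHER" fallback label is an assumption,
# not defined by this module):
#
#     service = PORTMAP.get(dst_port, "OTHER")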
| ovcrash/geoip-attack-map | DataServer/const.py | Python | gpl-3.0 | 1,888 | 0.039725 |
#!/usr/bin/python
import os
# With the addition of Keystone, to use an openstack cloud you should
# authenticate against keystone, which returns a **Token** and **Service
# Catalog**. The catalog contains the endpoint for all services the
# user/tenant has access to - including nova, glance, keystone, swift.
#
# *NOTE*: Using the 2.0 *auth api* does not mean that compute api is 2.0. We
# will use the 1.1 *compute api*
os.environ['OS_AUTH_URL'] = "https://keystone.rc.nectar.org.au:5000/v2.0/"
# With the addition of Keystone we have standardized on the term **tenant**
# as the entity that owns the resources.
os.environ['OS_TENANT_ID'] = "123456789012345678901234567890"
os.environ['OS_TENANT_NAME'] = "tenant_name"
# In addition to the owning entity (tenant), openstack stores the entity
# performing the action as the **user**.
os.environ['OS_USERNAME'] = "joe.bloggs@uni.edu.au"
# With Keystone you pass the keystone password.
os.environ['OS_PASSWORD'] = "????????????????????"
| wettenhj/mytardis-swift-uploader | openrc.py | Python | bsd-3-clause | 994 | 0.001006 |
# ~*~ coding: utf-8 ~*~
from collections import namedtuple
from rest_framework import status
from rest_framework.serializers import ValidationError
from rest_framework.response import Response
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import get_object_or_404, Http404
from common.utils import get_logger, get_object_or_none
from common.tree import TreeNodeSerializer
from orgs.mixins.api import OrgModelViewSet
from orgs.mixins import generics
from ..hands import IsOrgAdmin
from ..models import Node
from ..tasks import (
update_node_assets_hardware_info_manual,
test_node_assets_connectivity_manual,
)
from .. import serializers
logger = get_logger(__file__)
__all__ = [
'NodeViewSet', 'NodeChildrenApi', 'NodeAssetsApi',
'NodeAddAssetsApi', 'NodeRemoveAssetsApi', 'NodeReplaceAssetsApi',
'NodeAddChildrenApi', 'NodeListAsTreeApi',
'NodeChildrenAsTreeApi',
'NodeTaskCreateApi',
]
class NodeViewSet(OrgModelViewSet):
model = Node
filter_fields = ('value', 'key', 'id')
search_fields = ('value', )
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeSerializer
    # Only creating nodes directly under the root is supported here; nodes
    # below a child node must be created through the children API.
def perform_create(self, serializer):
child_key = Node.org_root().get_next_child_key()
serializer.validated_data["key"] = child_key
serializer.save()
def perform_update(self, serializer):
node = self.get_object()
if node.is_org_root() and node.value != serializer.validated_data['value']:
msg = _("You can't update the root node name")
raise ValidationError({"error": msg})
return super().perform_update(serializer)
def destroy(self, request, *args, **kwargs):
node = self.get_object()
if node.has_children_or_has_assets():
error = _("Deletion failed and the node contains children or assets")
return Response(data={'error': error}, status=status.HTTP_403_FORBIDDEN)
return super().destroy(request, *args, **kwargs)
class NodeListAsTreeApi(generics.ListAPIView):
"""
    Get the node list as a tree
[
{
"id": "",
"name": "",
"pId": "",
"meta": ""
}
]
"""
model = Node
permission_classes = (IsOrgAdmin,)
serializer_class = TreeNodeSerializer
@staticmethod
def to_tree_queryset(queryset):
queryset = [node.as_tree_node() for node in queryset]
return queryset
def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
queryset = self.to_tree_queryset(queryset)
return queryset
class NodeChildrenApi(generics.ListCreateAPIView):
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeSerializer
instance = None
is_initial = False
def initial(self, request, *args, **kwargs):
self.instance = self.get_object()
return super().initial(request, *args, **kwargs)
def perform_create(self, serializer):
data = serializer.validated_data
_id = data.get("id")
value = data.get("value")
if not value:
value = self.instance.get_next_child_preset_name()
node = self.instance.create_child(value=value, _id=_id)
        # Avoid an extra query for the full value
node._full_value = node.value
serializer.instance = node
def get_object(self):
pk = self.kwargs.get('pk') or self.request.query_params.get('id')
key = self.request.query_params.get("key")
if not pk and not key:
node = Node.org_root()
self.is_initial = True
return node
if pk:
node = get_object_or_404(Node, pk=pk)
else:
node = get_object_or_404(Node, key=key)
return node
def get_queryset(self):
query_all = self.request.query_params.get("all", "0") == "all"
if not self.instance:
return Node.objects.none()
if self.is_initial:
with_self = True
else:
with_self = False
if query_all:
queryset = self.instance.get_all_children(with_self=with_self)
else:
queryset = self.instance.get_children(with_self=with_self)
return queryset
class NodeChildrenAsTreeApi(NodeChildrenApi):
"""
    Return the node's children as a tree:
[
{
"id": "",
"name": "",
"pId": "",
"meta": ""
}
]
"""
model = Node
serializer_class = TreeNodeSerializer
http_method_names = ['get']
def get_queryset(self):
queryset = super().get_queryset()
queryset = [node.as_tree_node() for node in queryset]
queryset = self.add_assets_if_need(queryset)
queryset = sorted(queryset)
return queryset
def add_assets_if_need(self, queryset):
include_assets = self.request.query_params.get('assets', '0') == '1'
if not include_assets:
return queryset
assets = self.instance.get_assets().only(
"id", "hostname", "ip", "os",
"org_id", "protocols",
)
for asset in assets:
queryset.append(asset.as_tree_node(self.instance))
return queryset
def check_need_refresh_nodes(self):
if self.request.query_params.get('refresh', '0') == '1':
Node.refresh_nodes()
class NodeAssetsApi(generics.ListAPIView):
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.AssetSerializer
def get_queryset(self):
node_id = self.kwargs.get('pk')
query_all = self.request.query_params.get('all')
instance = get_object_or_404(Node, pk=node_id)
if query_all:
return instance.get_all_assets()
else:
return instance.get_assets()
class NodeAddChildrenApi(generics.UpdateAPIView):
model = Node
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeAddChildrenSerializer
instance = None
def put(self, request, *args, **kwargs):
instance = self.get_object()
nodes_id = request.data.get("nodes")
children = [get_object_or_none(Node, id=pk) for pk in nodes_id]
for node in children:
if not node:
continue
node.parent = instance
return Response("OK")
class NodeAddAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
instance.assets.add(*tuple(assets))
class NodeRemoveAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
if instance != Node.org_root():
instance.assets.remove(*tuple(assets))
else:
assets = [asset for asset in assets if asset.nodes.count() > 1]
instance.assets.remove(*tuple(assets))
class NodeReplaceAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
for asset in assets:
asset.nodes.set([instance])
class NodeTaskCreateApi(generics.CreateAPIView):
model = Node
serializer_class = serializers.NodeTaskSerializer
permission_classes = (IsOrgAdmin,)
def get_object(self):
node_id = self.kwargs.get('pk')
node = get_object_or_none(self.model, id=node_id)
return node
@staticmethod
def set_serializer_data(s, task):
data = getattr(s, '_data', {})
data["task"] = task.id
setattr(s, '_data', data)
@staticmethod
def refresh_nodes_cache():
Node.refresh_nodes()
Task = namedtuple('Task', ['id'])
task = Task(id="0")
return task
def perform_create(self, serializer):
action = serializer.validated_data["action"]
node = self.get_object()
if action == "refresh_cache" and node is None:
task = self.refresh_nodes_cache()
self.set_serializer_data(serializer, task)
return
if node is None:
raise Http404()
if action == "refresh":
task = update_node_assets_hardware_info_manual.delay(node)
else:
task = test_node_assets_connectivity_manual.delay(node)
self.set_serializer_data(serializer, task)
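# Hedged request sketch for NodeTaskCreateApi (the URL path is an assumption;
# the action values follow perform_create above):
#
#     POST /api/assets/nodes/<pk>/tasks/  {"action": "refresh"}        # hardware info update
#     POST /api/assets/nodes/<pk>/tasks/  {"action": "test"}           # connectivity test
#     POST /api/assets/nodes/tasks/       {"action": "refresh_cache"}  # node cache refresh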
| zsjohny/jumpserver | apps/assets/api/node.py | Python | gpl-2.0 | 8,992 | 0.00045 |
import parser
import unittest
import sys
class TestVideoParser(unittest.TestCase):
def test_parse_video(self):
if sys.platform.startswith('win'):
path = '\\server\\Movies\\Brave (2007)\\Brave (2006).mkv'
else:
path = '/server/Movies/Brave (2007)/Brave (2006).mkv'
video_info = parser.parse_video(path)
self.assertEqual(video_info['name'], 'Brave')
self.assertEqual(video_info['container'], 'mkv')
self.assertEqual(video_info['year'], 2006)
class TestVideoStackParser(unittest.TestCase):
def test_parse_simple_stack(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
)
stack = parser.parse_video_stack(files)
print(stack)
self.assertEqual(len(stack), 1)
def test_parse_dual_stacks(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
'300 (2006) part1.mkv',
'300 (2006) part2.mkv',
'300 (2006) part3.mkv',
'300 (2006)-trailer.mkv'
)
stacks = parser.parse_video_stack(files)
for s in stacks:
print(s)
self.assertEqual(len(stacks), 2)
def test_dirty_names(self):
files = (
"Bad Boys (2006).part1.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006).part2.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006).part3.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006).part4.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006)-trailer.mkv"
)
stack = parser.parse_video_stack(files)
print(stack)
self.assertEqual(len(stack), 1)
#TestStackInfo(result.Stacks[0], "Bad Boys (2006).stv.unrated.multi.1080p.bluray.x264-rough", 4);
def test_parse_mixed_expressions(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
'300 (2006) parta.mkv',
'300 (2006) partb.mkv',
'300 (2006) partc.mkv',
'300 (2006) partd.mkv',
'300 (2006)-trailer.mkv',
'300a.mkv',
'300b.mkv',
'300c.mkv',
'300-trailer.mkv'
)
stacks = parser.parse_video_stack(files)
for s in stacks:
print(s)
self.assertEqual(len(stacks), 3)
if __name__ == '__main__':
unittest.main()
| tickbg/skaer | naming/test_parser.py | Python | gpl-3.0 | 2,913 | 0.00309 |
from django.core import checks
from feincms import extensions
class Extension(extensions.Extension):
def handle_model(self):
cls = self.model
def render_json(self, request):
"""Render the feincms regions into a dictionary."""
def region_data(region):
content_list = getattr(self.content, region.key)
return [content.json(request=request) for content in content_list]
regions = self.template.regions
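            # Result shape is e.g. {'main': [...], 'sidebar': [...]}: one key per region,
            # each holding whatever the content types' own json() methods produce.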
return {region.key: region_data(region) for region in regions}
cls.add_to_class('render_json', render_json)
@classmethod
def check(cls, **kwargs):
errors = super(self.model, cls).check(**kwargs)
errors.extend(cls._check_json_method())
return errors
@classmethod
def _check_json_method(cls, **kwargs):
"""Check all registered content types have a `.json` method."""
message = (
'Feincms content has no `json` method, but the ' +
'`render_json` extension is active for model `{}`.'
).format(cls)
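            # Emit one check error per registered content type that lacks a json() renderer.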
for content_type in cls._feincms_content_types:
if not hasattr(content_type, 'json'):
yield checks.Error(
message,
obj=content_type,
id='feincms_extensions.E001',
)
cls.add_to_class('check', check)
cls.add_to_class('_check_json_method', _check_json_method)
| incuna/feincms-extensions | feincms_extensions/render_json.py | Python | bsd-2-clause | 1,559 | 0.000641 |
"""Support for getting statistical data from a Pi-hole system."""
import logging
from homeassistant.helpers.entity import Entity
from .const import (
ATTR_BLOCKED_DOMAINS,
DOMAIN as PIHOLE_DOMAIN,
SENSOR_DICT,
SENSOR_LIST,
)
LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the pi-hole sensor."""
if discovery_info is None:
return
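    # Build one sensor entity per configured Pi-hole instance and per sensor type.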
    sensors = [
        PiHoleSensor(pi_hole, sensor_name)
        for pi_hole in hass.data[PIHOLE_DOMAIN].values()
        for sensor_name in SENSOR_LIST
    ]
async_add_entities(sensors, True)
class PiHoleSensor(Entity):
"""Representation of a Pi-hole sensor."""
def __init__(self, pi_hole, sensor_name):
"""Initialize a Pi-hole sensor."""
self.pi_hole = pi_hole
self._name = pi_hole.name
self._condition = sensor_name
variable_info = SENSOR_DICT[sensor_name]
self._condition_name = variable_info[0]
self._unit_of_measurement = variable_info[1]
self._icon = variable_info[2]
self.data = {}
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._condition_name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
try:
return round(self.data[self._condition], 2)
except TypeError:
return self.data[self._condition]
@property
def device_state_attributes(self):
"""Return the state attributes of the Pi-Hole."""
return {ATTR_BLOCKED_DOMAINS: self.data["domains_being_blocked"]}
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.pi_hole.available
async def async_update(self):
"""Get the latest data from the Pi-hole API."""
await self.pi_hole.async_update()
self.data = self.pi_hole.api.data
| leppa/home-assistant | homeassistant/components/pi_hole/sensor.py | Python | apache-2.0 | 2,321 | 0.000431 |
# -*- coding: utf-8 -*-
"""
Test cases to cover Accounts-related behaviors of the User API application
"""
import datetime
import hashlib
import json
from copy import deepcopy
import unittest
import ddt
import mock
import pytz
import six
from django.conf import settings
from django.test.testcases import TransactionTestCase
from django.test.utils import override_settings
from django.urls import reverse
from rest_framework.test import APIClient, APITestCase
from six.moves import range
from openedx.core.djangoapps.oauth_dispatch.jwt import create_jwt_for_user
from openedx.core.djangoapps.user_api.accounts import ACCOUNT_VISIBILITY_PREF_KEY
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from student.models import PendingEmailChange, UserProfile
from student.tests.factories import TEST_PASSWORD, UserFactory
from .. import ALL_USERS_VISIBILITY, CUSTOM_VISIBILITY, PRIVATE_VISIBILITY
TEST_PROFILE_IMAGE_UPLOADED_AT = datetime.datetime(2002, 1, 9, 15, 43, 1, tzinfo=pytz.UTC)
# this is used in one test to check the behavior of profile image url
# generation with a relative url in the config.
TEST_PROFILE_IMAGE_BACKEND = deepcopy(settings.PROFILE_IMAGE_BACKEND)
TEST_PROFILE_IMAGE_BACKEND['options']['base_url'] = '/profile-images/'
TEST_BIO_VALUE = u"Tired mother of twins"
TEST_LANGUAGE_PROFICIENCY_CODE = u"hi"
class UserAPITestCase(APITestCase):
"""
The base class for all tests of the User API
"""
def setUp(self):
super(UserAPITestCase, self).setUp()
self.anonymous_client = APIClient()
self.different_user = UserFactory.create(password=TEST_PASSWORD)
self.different_client = APIClient()
self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
self.staff_client = APIClient()
self.user = UserFactory.create(password=TEST_PASSWORD) # will be assigned to self.client by default
def login_client(self, api_client, user):
"""Helper method for getting the client and user and logging in. Returns client. """
client = getattr(self, api_client)
user = getattr(self, user)
client.login(username=user.username, password=TEST_PASSWORD)
return client
def send_patch(self, client, json_data, content_type="application/merge-patch+json", expected_status=200):
"""
Helper method for sending a patch to the server, defaulting to application/merge-patch+json content_type.
Verifies the expected status and returns the response.
"""
# pylint: disable=no-member
response = client.patch(self.url, data=json.dumps(json_data), content_type=content_type)
self.assertEqual(expected_status, response.status_code)
return response
def send_get(self, client, query_parameters=None, expected_status=200):
"""
Helper method for sending a GET to the server. Verifies the expected status and returns the response.
"""
url = self.url + '?' + query_parameters if query_parameters else self.url # pylint: disable=no-member
response = client.get(url)
self.assertEqual(expected_status, response.status_code)
return response
# pylint: disable=no-member
def send_put(self, client, json_data, content_type="application/json", expected_status=204):
"""
Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
"""
response = client.put(self.url, data=json.dumps(json_data), content_type=content_type)
self.assertEqual(expected_status, response.status_code)
return response
# pylint: disable=no-member
def send_delete(self, client, expected_status=204):
"""
Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
"""
response = client.delete(self.url)
self.assertEqual(expected_status, response.status_code)
return response
def create_mock_profile(self, user):
"""
Helper method that creates a mock profile for the specified user
:return:
"""
legacy_profile = UserProfile.objects.get(id=user.id)
legacy_profile.country = "US"
legacy_profile.state = "MA"
legacy_profile.level_of_education = "m"
legacy_profile.year_of_birth = 2000
legacy_profile.goals = "world peace"
legacy_profile.mailing_address = "Park Ave"
legacy_profile.gender = "f"
legacy_profile.bio = TEST_BIO_VALUE
legacy_profile.profile_image_uploaded_at = TEST_PROFILE_IMAGE_UPLOADED_AT
legacy_profile.language_proficiencies.create(code=TEST_LANGUAGE_PROFICIENCY_CODE)
legacy_profile.phone_number = "+18005555555"
legacy_profile.save()
def _verify_profile_image_data(self, data, has_profile_image):
"""
Verify the profile image data in a GET response for self.user
corresponds to whether the user has or hasn't set a profile
image.
"""
template = '{root}/{filename}_{{size}}.{extension}'
if has_profile_image:
url_root = 'http://example-storage.com/profile-images'
filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest()
file_extension = 'jpg'
template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s"))
else:
url_root = 'http://testserver/static'
filename = 'default'
file_extension = 'png'
template = template.format(root=url_root, filename=filename, extension=file_extension)
self.assertEqual(
data['profile_image'],
{
'has_image': has_profile_image,
'image_url_full': template.format(size=50),
'image_url_small': template.format(size=10),
}
)
@ddt.ddt
@skip_unless_lms
class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(TestOwnUsernameAPI, self).setUp()
self.url = reverse("own_username_api")
def _verify_get_own_username(self, queries, expected_status=200):
"""
Internal helper to perform the actual assertion
"""
if settings.TAHOE_ALWAYS_SKIP_TEST: # Skip query checks
response = self.send_get(self.client, expected_status=expected_status)
else:
with self.assertNumQueries(queries):
response = self.send_get(self.client, expected_status=expected_status)
if expected_status == 200:
data = response.data
self.assertEqual(1, len(data))
self.assertEqual(self.user.username, data["username"])
def test_get_username(self):
"""
Test that a client (logged in) can get her own username.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self._verify_get_own_username(16)
def test_get_username_inactive(self):
"""
Test that a logged-in client can get their
username, even if inactive.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
self._verify_get_own_username(16)
def test_get_username_not_logged_in(self):
"""
Test that a client (not logged in) gets a 401
when trying to retrieve their username.
"""
# verify that the endpoint is inaccessible when not logged in
self._verify_get_own_username(13, expected_status=401)
@ddt.ddt
@skip_unless_lms
@mock.patch('openedx.core.djangoapps.user_api.accounts.image_helpers._PROFILE_IMAGE_SIZES', [50, 10])
@mock.patch.dict(
'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
{'full': 50, 'small': 10},
clear=True
)
class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(TestAccountsAPI, self).setUp()
self.url = reverse("accounts_api", kwargs={'username': self.user.username})
def _set_user_age_to_10_years(self, user):
"""
Sets the given user's age to 10.
Returns the calculated year of birth.
"""
legacy_profile = UserProfile.objects.get(id=user.id)
current_year = datetime.datetime.now().year
year_of_birth = current_year - 10
legacy_profile.year_of_birth = year_of_birth
legacy_profile.save()
return year_of_birth
def _verify_full_shareable_account_response(self, response, account_privacy=None, badges_enabled=False):
"""
Verify that the shareable fields from the account are returned
"""
data = response.data
self.assertEqual(12, len(data))
# public fields (3)
self.assertEqual(account_privacy, data["account_privacy"])
self._verify_profile_image_data(data, True)
self.assertEqual(self.user.username, data["username"])
# additional shareable fields (8)
self.assertEqual(TEST_BIO_VALUE, data["bio"])
self.assertEqual("US", data["country"])
self.assertIsNotNone(data["date_joined"])
self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"])
self.assertEqual("m", data["level_of_education"])
self.assertIsNotNone(data["social_links"])
self.assertIsNone(data["time_zone"])
self.assertEqual(badges_enabled, data['accomplishments_shared'])
def _verify_private_account_response(self, response, requires_parental_consent=False):
"""
Verify that only the public fields are returned if a user does not want to share account fields
"""
data = response.data
self.assertEqual(3, len(data))
self.assertEqual(PRIVATE_VISIBILITY, data["account_privacy"])
self._verify_profile_image_data(data, not requires_parental_consent)
self.assertEqual(self.user.username, data["username"])
def _verify_full_account_response(self, response, requires_parental_consent=False, year_of_birth=2000):
"""
Verify that all account fields are returned (even those that are not shareable).
"""
data = response.data
self.assertEqual(25, len(data))
# public fields (3)
expected_account_privacy = (
PRIVATE_VISIBILITY if requires_parental_consent else
UserPreference.get_value(self.user, 'account_privacy')
)
self.assertEqual(expected_account_privacy, data["account_privacy"])
self._verify_profile_image_data(data, not requires_parental_consent)
self.assertEqual(self.user.username, data["username"])
# additional shareable fields (8)
self.assertEqual(TEST_BIO_VALUE, data["bio"])
self.assertEqual("US", data["country"])
self.assertIsNotNone(data["date_joined"])
self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"])
self.assertEqual("m", data["level_of_education"])
self.assertIsNotNone(data["social_links"])
self.assertEqual(UserPreference.get_value(self.user, 'time_zone'), data["time_zone"])
self.assertIsNotNone(data["accomplishments_shared"])
self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"])
# additional admin fields (10)
self.assertEqual(self.user.email, data["email"])
self.assertIsNotNone(data["extended_profile"])
self.assertEqual("MA", data["state"])
self.assertEqual("f", data["gender"])
self.assertEqual("world peace", data["goals"])
self.assertTrue(data["is_active"])
self.assertEqual("Park Ave", data['mailing_address'])
self.assertEqual(requires_parental_consent, data["requires_parental_consent"])
self.assertIsNone(data["secondary_email"])
self.assertIsNone(data["secondary_email_enabled"])
self.assertEqual(year_of_birth, data["year_of_birth"])
def test_anonymous_access(self):
"""
Test that an anonymous client (not logged in) cannot call GET or PATCH.
"""
self.send_get(self.anonymous_client, expected_status=401)
self.send_patch(self.anonymous_client, {}, expected_status=401)
def test_unsupported_methods(self):
"""
Test that DELETE, POST, and PUT are not supported.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.assertEqual(405, self.client.put(self.url).status_code)
self.assertEqual(405, self.client.post(self.url).status_code)
self.assertEqual(405, self.client.delete(self.url).status_code)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_get_account_unknown_user(self, api_client, user):
"""
Test that requesting a user who does not exist returns a 404.
"""
client = self.login_client(api_client, user)
response = client.get(reverse("accounts_api", kwargs={'username': "does_not_exist"}))
self.assertEqual(404, response.status_code)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_get_account_by_email(self, api_client, user):
"""
        Test that requesting a user account by email works.
"""
client = self.login_client(api_client, user)
self.create_mock_profile(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
response = self.send_get(client, query_parameters='email={}'.format(self.user.email))
self._verify_full_account_response(response)
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "all_users"})
@unittest.skipIf(settings.TAHOE_ALWAYS_SKIP_TEST, 'skip query count test')
def test_get_account_different_user_visible(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "all_users".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(23):
response = self.send_get(self.different_client)
self._verify_full_shareable_account_response(response, account_privacy=ALL_USERS_VISIBILITY)
# Note: using getattr so that the patching works even if there is no configuration.
# This is needed when testing CMS as the patching is still executed even though the
# suite is skipped.
@mock.patch.dict(getattr(settings, "ACCOUNT_VISIBILITY_CONFIGURATION", {}), {"default_visibility": "private"})
@unittest.skipIf(settings.TAHOE_ALWAYS_SKIP_TEST, 'skip query count test')
def test_get_account_different_user_private(self):
"""
Test that a client (logged in) can only get the shareable fields for a different user.
This is the case when default_visibility is set to "private".
"""
self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
self.create_mock_profile(self.user)
with self.assertNumQueries(23):
response = self.send_get(self.different_client)
self._verify_private_account_response(response)
@mock.patch.dict(settings.FEATURES, {'ENABLE_OPENBADGES': True})
@ddt.data(
("client", "user", PRIVATE_VISIBILITY),
("different_client", "different_user", PRIVATE_VISIBILITY),
("staff_client", "staff_user", PRIVATE_VISIBILITY),
("client", "user", ALL_USERS_VISIBILITY),
("different_client", "different_user", ALL_USERS_VISIBILITY),
("staff_client", "staff_user", ALL_USERS_VISIBILITY),
)
@ddt.unpack
def test_get_account_private_visibility(self, api_client, requesting_username, preference_visibility):
"""
Test the return from GET based on user visibility setting.
"""
def verify_fields_visible_to_all_users(response):
"""
Confirms that private fields are private, and public/shareable fields are public/shareable
"""
if preference_visibility == PRIVATE_VISIBILITY:
self._verify_private_account_response(response)
else:
self._verify_full_shareable_account_response(response, ALL_USERS_VISIBILITY, badges_enabled=True)
client = self.login_client(api_client, requesting_username)
# Update user account visibility setting.
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, preference_visibility)
self.create_mock_profile(self.user)
response = self.send_get(client)
if requesting_username == "different_user":
verify_fields_visible_to_all_users(response)
else:
self._verify_full_account_response(response)
# Verify how the view parameter changes the fields that are returned.
response = self.send_get(client, query_parameters='view=shared')
verify_fields_visible_to_all_users(response)
response = self.send_get(client, query_parameters='view=shared&email={}'.format(self.user.email))
verify_fields_visible_to_all_users(response)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
("different_client", "different_user"),
)
@ddt.unpack
def test_custom_visibility_over_age(self, api_client, requesting_username):
self.create_mock_profile(self.user)
# set user's custom visibility preferences
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
shared_fields = ("bio", "language_proficiencies", "name")
for field_name in shared_fields:
set_user_preference(self.user, "visibility.{}".format(field_name), ALL_USERS_VISIBILITY)
# make API request
client = self.login_client(api_client, requesting_username)
response = self.send_get(client)
# verify response
if requesting_username == "different_user":
data = response.data
self.assertEqual(6, len(data))
# public fields
self.assertEqual(self.user.username, data["username"])
self.assertEqual(UserPreference.get_value(self.user, 'account_privacy'), data["account_privacy"])
self._verify_profile_image_data(data, has_profile_image=True)
# custom shared fields
self.assertEqual(TEST_BIO_VALUE, data["bio"])
self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"])
self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"])
else:
self._verify_full_account_response(response)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
("different_client", "different_user"),
)
@ddt.unpack
def test_custom_visibility_under_age(self, api_client, requesting_username):
self.create_mock_profile(self.user)
year_of_birth = self._set_user_age_to_10_years(self.user)
# set user's custom visibility preferences
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY)
shared_fields = ("bio", "language_proficiencies")
for field_name in shared_fields:
set_user_preference(self.user, "visibility.{}".format(field_name), ALL_USERS_VISIBILITY)
# make API request
client = self.login_client(api_client, requesting_username)
response = self.send_get(client)
# verify response
if requesting_username == "different_user":
self._verify_private_account_response(response, requires_parental_consent=True)
else:
self._verify_full_account_response(
response,
requires_parental_consent=True,
year_of_birth=year_of_birth,
)
@unittest.skipIf(settings.TAHOE_ALWAYS_SKIP_TEST, 'query count fails for unknown reasons')
def test_get_account_default(self):
"""
Test that a client (logged in) can get her own account information (using default legacy profile information,
as created by the test UserFactory).
"""
def verify_get_own_information(queries):
"""
Internal helper to perform the actual assertions
"""
with self.assertNumQueries(queries):
response = self.send_get(self.client)
data = response.data
self.assertEqual(25, len(data))
self.assertEqual(self.user.username, data["username"])
self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"])
for empty_field in ("year_of_birth", "level_of_education", "mailing_address", "bio"):
self.assertIsNone(data[empty_field])
self.assertIsNone(data["country"])
self.assertIsNone(data["state"])
self.assertEqual("m", data["gender"])
self.assertEqual("Learn a lot", data["goals"])
self.assertEqual(self.user.email, data["email"])
self.assertIsNotNone(data["date_joined"])
self.assertEqual(self.user.is_active, data["is_active"])
self._verify_profile_image_data(data, False)
self.assertTrue(data["requires_parental_consent"])
self.assertEqual([], data["language_proficiencies"])
self.assertEqual(PRIVATE_VISIBILITY, data["account_privacy"])
self.assertIsNone(data["time_zone"])
# Badges aren't on by default, so should not be present.
self.assertEqual(False, data["accomplishments_shared"])
self.client.login(username=self.user.username, password=TEST_PASSWORD)
verify_get_own_information(21)
# Now make sure that the user can get the same information, even if not active
self.user.is_active = False
self.user.save()
verify_get_own_information(13)
@unittest.skipIf(settings.TAHOE_ALWAYS_SKIP_TEST, 'skip query count test')
def test_get_account_empty_string(self):
"""
Test the conversion of empty strings to None for certain fields.
"""
legacy_profile = UserProfile.objects.get(id=self.user.id)
legacy_profile.country = ""
legacy_profile.state = ""
legacy_profile.level_of_education = ""
legacy_profile.gender = ""
legacy_profile.bio = ""
legacy_profile.save()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
with self.assertNumQueries(21):
response = self.send_get(self.client)
for empty_field in ("level_of_education", "gender", "country", "state", "bio",):
self.assertIsNone(response.data[empty_field])
@ddt.data(
("different_client", "different_user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_disallowed_user(self, api_client, user):
"""
Test that a client cannot call PATCH on a different client's user account (even with
is_staff access).
"""
client = self.login_client(api_client, user)
self.send_patch(client, {}, expected_status=403)
@ddt.data(
("client", "user"),
("staff_client", "staff_user"),
)
@ddt.unpack
def test_patch_account_unknown_user(self, api_client, user):
"""
Test that trying to update a user who does not exist returns a 403.
"""
client = self.login_client(api_client, user)
response = client.patch(
reverse("accounts_api", kwargs={'username': "does_not_exist"}),
data=json.dumps({}), content_type="application/merge-patch+json"
)
self.assertEqual(403, response.status_code)
@ddt.data(
("gender", "f", "not a gender", u'"not a gender" is not a valid choice.'),
("level_of_education", "none", u"ȻħȺɍłɇs", u'"ȻħȺɍłɇs" is not a valid choice.'),
("country", "GB", "XY", u'"XY" is not a valid choice.'),
("state", "MA", "PY", u'"PY" is not a valid choice.'),
("year_of_birth", 2009, "not_an_int", u"A valid integer is required."),
("name", "bob", "z" * 256, u"Ensure this field has no more than 255 characters."),
("name", u"ȻħȺɍłɇs", " ", u"The name field must be at least 1 character long."),
("goals", "Smell the roses"),
("mailing_address", "Sesame Street"),
# Note that we store the raw data, so it is up to client to escape the HTML.
(
"bio", u"<html>Lacrosse-playing superhero 壓是進界推日不復女</html>",
"z" * 301, u"The about me field must be at most 300 characters long."
),
("account_privacy", ALL_USERS_VISIBILITY),
("account_privacy", PRIVATE_VISIBILITY),
# Note that email is tested below, as it is not immediately updated.
# Note that language_proficiencies is tested below as there are multiple error and success conditions.
)
@ddt.unpack
def test_patch_account(self, field, value, fails_validation_value=None, developer_validation_message=None):
"""
Test the behavior of patch, when using the correct content_type.
"""
client = self.login_client("client", "user")
if field == 'account_privacy':
# Ensure the user has birth year set, and is over 13, so
# account_privacy behaves normally
legacy_profile = UserProfile.objects.get(id=self.user.id)
legacy_profile.year_of_birth = 2000
legacy_profile.save()
response = self.send_patch(client, {field: value})
self.assertEqual(value, response.data[field])
if fails_validation_value:
error_response = self.send_patch(client, {field: fails_validation_value}, expected_status=400)
self.assertEqual(
u'This value is invalid.',
error_response.data["field_errors"][field]["user_message"]
)
self.assertEqual(
u"Value '{value}' is not valid for field '{field}': {messages}".format(
value=fails_validation_value, field=field, messages=[developer_validation_message]
),
error_response.data["field_errors"][field]["developer_message"]
)
elif field != "account_privacy":
# If there are no values that would fail validation, then empty string should be supported;
# except for account_privacy, which cannot be an empty string.
response = self.send_patch(client, {field: ""})
self.assertEqual("", response.data[field])
def test_patch_inactive_user(self):
""" Verify that a user can patch her own account, even if inactive. """
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
response = self.send_patch(self.client, {"goals": "to not activate account"})
self.assertEqual("to not activate account", response.data["goals"])
def test_patch_account_noneditable(self):
"""
Tests the behavior of patch when a read-only field is attempted to be edited.
"""
client = self.login_client("client", "user")
def verify_error_response(field_name, data):
"""
Internal helper to check the error messages returned
"""
self.assertEqual(
"This field is not editable via this API", data["field_errors"][field_name]["developer_message"]
)
self.assertEqual(
u"The '{0}' field cannot be edited.".format(field_name),
data["field_errors"][field_name]["user_message"]
)
for field_name in ["username", "date_joined", "is_active", "profile_image", "requires_parental_consent"]:
response = self.send_patch(client, {field_name: "will_error", "gender": "o"}, expected_status=400)
verify_error_response(field_name, response.data)
# Make sure that gender did not change.
response = self.send_get(client)
self.assertEqual("m", response.data["gender"])
# Test error message with multiple read-only items
response = self.send_patch(client, {"username": "will_error", "date_joined": "xx"}, expected_status=400)
self.assertEqual(2, len(response.data["field_errors"]))
verify_error_response("username", response.data)
verify_error_response("date_joined", response.data)
def test_patch_bad_content_type(self):
"""
Test the behavior of patch when an incorrect content_type is specified.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.send_patch(self.client, {}, content_type="application/json", expected_status=415)
self.send_patch(self.client, {}, content_type="application/xml", expected_status=415)
def test_patch_account_empty_string(self):
"""
Tests the behavior of patch when attempting to set fields with a select list of options to the empty string.
Also verifies the behaviour when setting to None.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
for field_name in ["gender", "level_of_education", "country", "state"]:
response = self.send_patch(self.client, {field_name: ""})
# Although throwing a 400 might be reasonable, the default DRF behavior with ModelSerializer
# is to convert to None, which also seems acceptable (and is difficult to override).
self.assertIsNone(response.data[field_name])
# Verify that the behavior is the same for sending None.
            response = self.send_patch(self.client, {field_name: None})
self.assertIsNone(response.data[field_name])
def test_patch_name_metadata(self):
"""
Test the metadata stored when changing the name field.
"""
def get_name_change_info(expected_entries):
"""
Internal method to encapsulate the retrieval of old names used
"""
legacy_profile = UserProfile.objects.get(id=self.user.id)
name_change_info = legacy_profile.get_meta()["old_names"]
self.assertEqual(expected_entries, len(name_change_info))
return name_change_info
def verify_change_info(change_info, old_name, requester, new_name):
"""
Internal method to validate name changes
"""
self.assertEqual(3, len(change_info))
self.assertEqual(old_name, change_info[0])
self.assertEqual(u"Name change requested through account API by {}".format(requester), change_info[1])
self.assertIsNotNone(change_info[2])
# Verify the new name was also stored.
get_response = self.send_get(self.client)
self.assertEqual(new_name, get_response.data["name"])
self.client.login(username=self.user.username, password=TEST_PASSWORD)
legacy_profile = UserProfile.objects.get(id=self.user.id)
self.assertEqual({}, legacy_profile.get_meta())
old_name = legacy_profile.name
# First change the name as the user and verify meta information.
self.send_patch(self.client, {"name": "Mickey Mouse"})
name_change_info = get_name_change_info(1)
verify_change_info(name_change_info[0], old_name, self.user.username, "Mickey Mouse")
# Now change the name again and verify meta information.
self.send_patch(self.client, {"name": "Donald Duck"})
name_change_info = get_name_change_info(2)
verify_change_info(name_change_info[0], old_name, self.user.username, "Donald Duck",)
verify_change_info(name_change_info[1], "Mickey Mouse", self.user.username, "Donald Duck")
@mock.patch.dict(
'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
{'full': 50, 'medium': 30, 'small': 10},
clear=True
)
def test_patch_email(self):
"""
Test that the user can request an email change through the accounts API.
Full testing of the helper method used (do_email_change_request) exists in the package with the code.
Here just do minimal smoke testing.
"""
client = self.login_client("client", "user")
old_email = self.user.email
new_email = "newemail@example.com"
response = self.send_patch(client, {"email": new_email, "goals": "change my email"})
        # Since the request is multi-step, the email won't change on GET immediately (though goals will update).
self.assertEqual(old_email, response.data["email"])
self.assertEqual("change my email", response.data["goals"])
        # Now call the method that will be invoked when the user clicks the activation key in the received email.
# First we must get the activation key that was sent.
pending_change = PendingEmailChange.objects.filter(user=self.user)
self.assertEqual(1, len(pending_change))
activation_key = pending_change[0].activation_key
confirm_change_url = reverse(
"confirm_email_change", kwargs={'key': activation_key}
)
response = self.client.post(confirm_change_url)
self.assertEqual(200, response.status_code)
get_response = self.send_get(client)
self.assertEqual(new_email, get_response.data["email"])
@ddt.data(
("not_an_email",),
("",),
(None,),
)
@ddt.unpack
def test_patch_invalid_email(self, bad_email):
"""
Test a few error cases for email validation (full test coverage lives with do_email_change_request).
"""
client = self.login_client("client", "user")
# Try changing to an invalid email to make sure error messages are appropriately returned.
error_response = self.send_patch(client, {"email": bad_email}, expected_status=400)
field_errors = error_response.data["field_errors"]
self.assertEqual(
"Error thrown from validate_new_email: 'Valid e-mail address required.'",
field_errors["email"]["developer_message"]
)
self.assertEqual("Valid e-mail address required.", field_errors["email"]["user_message"])
@mock.patch('student.views.management.do_email_change_request')
def test_patch_duplicate_email(self, do_email_change_request):
"""
        Test that the same success response is sent to the user even if the given email is already in use.
"""
existing_email = "same@example.com"
UserFactory.create(email=existing_email)
client = self.login_client("client", "user")
# Try changing to an existing email to make sure no error messages returned.
response = self.send_patch(client, {"email": existing_email})
self.assertEqual(200, response.status_code)
# Verify that no actual request made for email change
self.assertFalse(do_email_change_request.called)
def test_patch_language_proficiencies(self):
"""
Verify that patching the language_proficiencies field of the user
profile completely overwrites the previous value.
"""
client = self.login_client("client", "user")
        # Patching language_proficiencies exercises the
        # `LanguageProficiencySerializer.get_identity` method, which identifies
        # language proficiencies by their language code rather than by their
        # django model id.
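        # e.g. patching [{"code": "en"}] twice should update the existing "en"
        # entry rather than create a duplicate.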
for proficiencies in ([{"code": "en"}, {"code": "fr"}, {"code": "es"}], [{"code": "fr"}], [{"code": "aa"}], []):
response = self.send_patch(client, {"language_proficiencies": proficiencies})
six.assertCountEqual(self, response.data["language_proficiencies"], proficiencies)
@ddt.data(
(
u"not_a_list",
{u'non_field_errors': [u'Expected a list of items but got type "unicode".']}
),
(
[u"not_a_JSON_object"],
[{u'non_field_errors': [u'Invalid data. Expected a dictionary, but got unicode.']}]
),
(
[{}],
[{'code': [u'This field is required.']}]
),
(
[{u"code": u"invalid_language_code"}],
[{'code': [u'"invalid_language_code" is not a valid choice.']}]
),
(
[{u"code": u"kw"}, {u"code": u"el"}, {u"code": u"kw"}],
[u'The language_proficiencies field must consist of unique languages.']
),
)
@ddt.unpack
def test_patch_invalid_language_proficiencies(self, patch_value, expected_error_message):
"""
Verify we handle error cases when patching the language_proficiencies
field.
"""
if six.PY3:
expected_error_message = six.text_type(expected_error_message).replace('unicode', 'str')
client = self.login_client("client", "user")
response = self.send_patch(client, {"language_proficiencies": patch_value}, expected_status=400)
self.assertEqual(
response.data["field_errors"]["language_proficiencies"]["developer_message"],
u"Value '{patch_value}' is not valid for field 'language_proficiencies': {error_message}".format(
patch_value=patch_value,
error_message=expected_error_message
)
)
@mock.patch('openedx.core.djangoapps.user_api.accounts.serializers.AccountUserSerializer.save')
def test_patch_serializer_save_fails(self, serializer_save):
"""
Test that AccountUpdateErrors are passed through to the response.
"""
serializer_save.side_effect = [Exception("bummer"), None]
self.client.login(username=self.user.username, password=TEST_PASSWORD)
error_response = self.send_patch(self.client, {"goals": "save an account field"}, expected_status=400)
self.assertEqual(
"Error thrown when saving account updates: 'bummer'",
error_response.data["developer_message"]
)
self.assertIsNone(error_response.data["user_message"])
@override_settings(PROFILE_IMAGE_BACKEND=TEST_PROFILE_IMAGE_BACKEND)
def test_convert_relative_profile_url(self):
"""
Test that when TEST_PROFILE_IMAGE_BACKEND['base_url'] begins
with a '/', the API generates the full URL to profile images based on
the URL of the request.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
response = self.send_get(self.client)
self.assertEqual(
response.data["profile_image"],
{
"has_image": False,
"image_url_full": "http://testserver/static/default_50.png",
"image_url_small": "http://testserver/static/default_10.png"
}
)
@ddt.data(
("client", "user", True),
("different_client", "different_user", False),
("staff_client", "staff_user", True),
)
@ddt.unpack
def test_parental_consent(self, api_client, requesting_username, has_full_access):
"""
        Verifies that a public profile is never returned for users under thirteen.
"""
client = self.login_client(api_client, requesting_username)
year_of_birth = self._set_user_age_to_10_years(self.user)
set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, ALL_USERS_VISIBILITY)
# Verify that the default view is still private (except for clients with full access)
response = self.send_get(client)
if has_full_access:
data = response.data
self.assertEqual(25, len(data))
self.assertEqual(self.user.username, data["username"])
self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"])
self.assertEqual(self.user.email, data["email"])
self.assertEqual(year_of_birth, data["year_of_birth"])
for empty_field in ("country", "level_of_education", "mailing_address", "bio", "state",):
self.assertIsNone(data[empty_field])
self.assertEqual("m", data["gender"])
self.assertEqual("Learn a lot", data["goals"])
self.assertTrue(data["is_active"])
self.assertIsNotNone(data["date_joined"])
self._verify_profile_image_data(data, False)
self.assertTrue(data["requires_parental_consent"])
self.assertEqual(PRIVATE_VISIBILITY, data["account_privacy"])
else:
self._verify_private_account_response(response, requires_parental_consent=True)
# Verify that the shared view is still private
response = self.send_get(client, query_parameters='view=shared')
self._verify_private_account_response(response, requires_parental_consent=True)
@skip_unless_lms
class TestAccountAPITransactions(TransactionTestCase):
"""
Tests the transactional behavior of the account API
"""
def setUp(self):
super(TestAccountAPITransactions, self).setUp()
self.client = APIClient()
self.user = UserFactory.create(password=TEST_PASSWORD)
self.url = reverse("accounts_api", kwargs={'username': self.user.username})
@mock.patch('student.views.do_email_change_request')
def test_update_account_settings_rollback(self, mock_email_change):
"""
Verify that updating account settings is transactional when a failure happens.
"""
# Send a PATCH request with updates to both profile information and email.
# Throw an error from the method that is used to process the email change request
# (this is the last thing done in the api method). Verify that the profile did not change.
mock_email_change.side_effect = [ValueError, "mock value error thrown"]
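        # side_effect: the first call raises ValueError; the trailing string is just a placeholder second entry.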
self.client.login(username=self.user.username, password=TEST_PASSWORD)
old_email = self.user.email
json_data = {"email": "foo@bar.com", "gender": "o"}
response = self.client.patch(self.url, data=json.dumps(json_data), content_type="application/merge-patch+json")
self.assertEqual(400, response.status_code)
# Verify that GET returns the original preferences
response = self.client.get(self.url)
data = response.data
self.assertEqual(old_email, data["email"])
self.assertEqual(u"m", data["gender"])
@ddt.ddt
@mock.patch('django.conf.settings.USERNAME_REPLACEMENT_WORKER', 'test_replace_username_service_worker')
class UsernameReplacementViewTests(APITestCase):
""" Tests UsernameReplacementView """
SERVICE_USERNAME = 'test_replace_username_service_worker'
def setUp(self):
super(UsernameReplacementViewTests, self).setUp()
self.service_user = UserFactory(username=self.SERVICE_USERNAME)
self.url = reverse("username_replacement")
def build_jwt_headers(self, user):
"""
Helper function for creating headers for the JWT authentication.
"""
token = create_jwt_for_user(user)
headers = {'HTTP_AUTHORIZATION': u'JWT {}'.format(token)}
return headers
def call_api(self, user, data):
""" Helper function to call API with data """
data = json.dumps(data)
headers = self.build_jwt_headers(user)
return self.client.post(self.url, data, content_type='application/json', **headers)
def test_auth(self):
""" Verify the endpoint only works with the service worker """
data = {
"username_mappings": [
{"test_username_1": "test_new_username_1"},
{"test_username_2": "test_new_username_2"}
]
}
# Test unauthenticated
response = self.client.post(self.url)
self.assertEqual(response.status_code, 401)
# Test non-service worker
random_user = UserFactory()
response = self.call_api(random_user, data)
self.assertEqual(response.status_code, 403)
# Test service worker
response = self.call_api(self.service_user, data)
self.assertEqual(response.status_code, 200)
@ddt.data(
[{}, {}],
{},
[{"test_key": "test_value", "test_key_2": "test_value_2"}]
)
def test_bad_schema(self, mapping_data):
""" Verify the endpoint rejects bad data schema """
data = {
"username_mappings": mapping_data
}
response = self.call_api(self.service_user, data)
self.assertEqual(response.status_code, 400)
def test_existing_and_non_existing_users(self):
""" Tests a mix of existing and non existing users """
random_users = [UserFactory() for _ in range(5)]
fake_usernames = ["myname_" + str(x) for x in range(5)]
existing_users = [{user.username: user.username + '_new'} for user in random_users]
non_existing_users = [{username: username + '_new'} for username in fake_usernames]
data = {
"username_mappings": existing_users + non_existing_users
}
expected_response = {
'failed_replacements': [],
'successful_replacements': existing_users + non_existing_users
}
response = self.call_api(self.service_user, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, expected_response)
| appsembler/edx-platform | openedx/core/djangoapps/user_api/accounts/tests/test_views.py | Python | agpl-3.0 | 46,567 | 0.002966 |
import distutils.dir_util
import os
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
### Rest API setup
import rest_framework.routers
import rest_framework.viewsets
import rest_framework.filters
import voxel_globe.ingest.serializers
from .tools import METADATA_TYPES, PAYLOAD_TYPES
from voxel_globe.ingest import models
router = rest_framework.routers.DefaultRouter()
class IngestViewSet(rest_framework.viewsets.ModelViewSet):
  filter_backends = (rest_framework.filters.DjangoFilterBackend,)
  filter_fields = ['id', 'name']  # , 'directory', 'file'
  def perform_create(self, serializer):
    serializer.save(owner=self.request.user)
    super(IngestViewSet, self).perform_create(serializer)
  def get_queryset(self):
    return super(IngestViewSet, self).get_queryset().filter(owner=self.request.user)
def ViewSetFactory(model, serializer):
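  # Dynamically build a per-model ViewSet subclass bound to the given queryset and serializer.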
return type('ViewSet_%s' % model._meta.model_name, (IngestViewSet,), {'queryset':model.objects.all(), 'serializer_class':serializer})
router.register(models.File._meta.model_name, ViewSetFactory(models.File, voxel_globe.ingest.serializers.FileSerializer))
#router.register(models.Directory._meta.model_name, ViewSetFactory(models.Directory, voxel_globe.ingest.serializers.DirectorySerializer))
#router.register(models.Directory._meta.model_name+'_nest', ViewSetFactory(models.Directory, voxel_globe.ingest.serializers.NestFactory(voxel_globe.ingest.serializers.DirectorySerializer)))
router.register(models.UploadSession._meta.model_name, ViewSetFactory(models.UploadSession, voxel_globe.ingest.serializers.UploadSessionSerializer))
#router.register(models.UploadSession._meta.model_name+'_nest', ViewSetFactory(models.UploadSession, voxel_globe.ingest.serializers.NestFactory(voxel_globe.ingest.serializers.UploadSessionSerializer)));
#TODO: Pass upload types, then all the types for each upload type
#Need a new "New session" panel to handle adding all sorts of upload types
def chooseSession(request):
return render_to_response('ingest/html/chooseSession.html',
{'payload_types': PAYLOAD_TYPES,
'metadata_types': METADATA_TYPES},
context_instance=RequestContext(request))
def addFiles(request):
upload_session_id = int(request.GET['upload'])
uploadSession = models.UploadSession.objects.get(id=upload_session_id)
  testFile = models.File(name='Newfile', session=uploadSession, owner=request.user)
  testFile.save()
return render_to_response('ingest/html/addFiles.html',
{'uploadSession':uploadSession,
'testFile':testFile},
context_instance=RequestContext(request))
def upload(request):
  try:
    uploadSession_id = request.POST['uploadSession']
  except KeyError:
    uploadSession = models.UploadSession(name='failsafe', owner=request.user)
    uploadSession.save()
    uploadSession.name = str(uploadSession.id)
    uploadSession.save()
    uploadSession_id = uploadSession.id
  try:
    testFile_id = request.POST['testFile']
  except KeyError:
    testFile_id = 'failsafe'
s = 'ok<br>'
saveDir = os.path.join(os.environ['VIP_TEMP_DIR'], 'ingest', str(uploadSession_id))
distutils.dir_util.mkpath(saveDir)
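  # Stream each uploaded file to disk in chunks so large uploads are not buffered entirely in memory.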
for f in request.FILES:
s += request.FILES[f].name
with open(os.path.join(saveDir, request.FILES[f].name), 'wb') as fid:
for c in request.FILES[f].chunks():
fid.write(c)
  return HttpResponse(s)
def ingestFolder(request):
from celery.canvas import chain
from vsi.tools.dir_util import mkdtemp
import voxel_globe.ingest.tasks
uploadSession_id = request.POST['uploadSession']
#directories = models.Directory.objects.filter(uploadSession_id = uploadSession_id)
#Code not quite done, using failsafe for now.
  uploadSession = models.UploadSession.objects.get(id=uploadSession_id)
sessionDir = os.path.join(os.environ['VIP_TEMP_DIR'], 'ingest', str(uploadSession.id))
#imageDir = os.path.join(os.environ['VIP_IMAGE_SERVER_ROOT'], str(uploadSession.id))
#if os.path.exists(imageDir):
  imageDir = mkdtemp(dir=os.environ['VIP_IMAGE_SERVER_ROOT'], prefix='img')
task0 = voxel_globe.ingest.tasks.move_data.si(sessionDir, imageDir)
task1 = PAYLOAD_TYPES[uploadSession.payload_type].ingest.si(uploadSession_id, imageDir)
task2 = METADATA_TYPES[uploadSession.metadata_type].ingest.s(uploadSession_id, imageDir)
task3 = voxel_globe.ingest.tasks.cleanup.si(uploadSession_id)
tasks = task0 | task1 | task2 | task3 #create chain
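  # The | operator composes the signatures into a celery chain: each task runs only after the previous one completes.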
result = tasks.apply_async()
return render(request, 'ingest/html/ingest_started.html',
{'task_id':result.task_id})
| andyneff/voxel-globe | voxel_globe/ingest/views.py | Python | mit | 4,787 | 0.022143 |
import asyncio
import re
from operator import attrgetter
from stratus.loader import hook
plugin_info = {
"plugin_category": "core",
"command_category_name": "Informational"
}
@asyncio.coroutine
@hook.command("help", autohelp=False)
def help_command(text, conn, bot, notice, has_permission):
"""[command] - gives help for [command], or lists all available commands if no command is specified
:type text: str
:type conn: stratus.connection.Connection
:type bot: stratus.engine.Stratus
"""
if text:
searching_for = text.lower().strip()
if not re.match(r'^\w+$', searching_for):
notice("Invalid command name '{}'".format(text))
return
else:
searching_for = None
if searching_for:
if searching_for in bot.loader.commands:
doc = bot.loader.commands[searching_for].doc
if doc:
message = "{}{} {}".format(conn.config["command_prefix"], searching_for, doc)
notice(message)
else:
notice("Command {} has no additional documentation.".format(searching_for))
else:
notice("Unknown command '{}'".format(searching_for))
else:
# list of lines to send to the user
lines = []
# current line, containing words to join with " "
current_line = []
# current line length, to count how long the current line will be when joined with " "
current_line_length = 0
for plugin in sorted(set(bot.loader.commands.values()), key=attrgetter("name")):
# use set to remove duplicate commands (from multiple aliases), and sorted to sort by name
if plugin.permissions:
# check permissions
allowed = False
for perm in plugin.permissions:
if has_permission(perm, notice=False):
allowed = True
break
if not allowed:
# skip adding this command
continue
# add the command to lines sent
command = plugin.name
added_length = len(command) + 2 # + 2 to account for space and comma
if current_line_length + added_length > 450:
# if line limit is reached, add line to lines, and reset
lines.append(", ".join(current_line) + ",")
current_line = []
current_line_length = 0
current_line.append(command)
current_line_length += added_length
if current_line:
# make sure to include the last line
lines.append(", ".join(current_line))
notice("Available commands:")
for line in lines:
notice(line)
notice("For detailed help, use {}help <command>".format(conn.config["command_prefix"]))
| lukeroge/CloudbotX | plugins/help.py | Python | gpl-3.0 | 2,905 | 0.002754 |
import unittest
import os
import numpy as np
from pyresample import plot, geometry, utils, kd_tree
try:
import matplotlib
matplotlib.use('Agg')
except ImportError:
pass # Postpone fail to individual tests
def tmp(f):
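    # Presumably tags a function as 'tmp' so tests can be selected/excluded by attribute.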
f.tmp = True
return f
class Test(unittest.TestCase):
filename = os.path.abspath(os.path.join(os.path.dirname(__file__),
'test_files', 'ssmis_swath.npz'))
data = np.load(filename)['data']
lons = data[:, 0].astype(np.float64)
lats = data[:, 1].astype(np.float64)
tb37v = data[:, 2].astype(np.float64)
# screen out the fill values
fvalue = -10000000000.0
valid_fov = (lons != fvalue) * (lats != fvalue) * (tb37v != fvalue)
lons = lons[valid_fov]
lats = lats[valid_fov]
tb37v = tb37v[valid_fov]
def test_ellps2axis(self):
a, b = plot.ellps2axis('WGS84')
self.assertAlmostEqual(a, 6378137.0,
msg='Failed to get semi-major axis of ellipsis')
self.assertAlmostEqual(b, 6356752.3142451793,
msg='Failed to get semi-minor axis of ellipsis')
def test_area_def2basemap(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'ease_sh')[0]
bmap = plot.area_def2basemap(area_def)
self.assertTrue(bmap.rmajor == bmap.rminor and
bmap.rmajor == 6371228.0,
'Failed to create Basemap object')
def test_plate_carreeplot(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'pc_world')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result, num_meridians=0,
num_parallels=0)
def test_easeplot(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'ease_sh')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result)
def test_orthoplot(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'ortho')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result)
def suite():
"""The test suite.
"""
loader = unittest.TestLoader()
mysuite = unittest.TestSuite()
mysuite.addTest(loader.loadTestsFromTestCase(Test))
return mysuite
| jhamman/pyresample | pyresample/test/test_plot.py | Python | lgpl-3.0 | 3,440 | 0.002907 |
import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from re import Scanner
import sys, os, traceback
from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
def test_weakref(self):
s = 'QabbbcR'
x = re.compile('ab+c')
y = proxy(x)
self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_bug_1140(self):
# re.sub(x, y, u'') should return u'', not '', and
# re.sub(x, y, '') should return '', not u''.
# Also:
# re.sub(x, y, unicode(x)) should return unicode(y), and
# re.sub(x, y, str(x)) should return
# str(y) if isinstance(y, str) else unicode(y).
for x in 'x', u'x':
for y in 'y', u'y':
z = re.sub(x, y, u'')
self.assertEqual(z, u'')
self.assertEqual(type(z), unicode)
#
z = re.sub(x, y, '')
self.assertEqual(z, '')
self.assertEqual(type(z), str)
#
z = re.sub(x, y, unicode(x))
self.assertEqual(z, y)
self.assertEqual(type(z), unicode)
#
z = re.sub(x, y, str(x))
self.assertEqual(z, y)
self.assertEqual(type(z), type(y))
def test_bug_1661(self):
# Verify that flags do not get silently ignored with compiled patterns
pattern = re.compile('.')
self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
self.assertRaises(ValueError, re.compile, pattern, re.I)
def test_bug_3629(self):
# A regex that triggered a bug in the sre-code validator
re.compile("(?P<quote>)(?(quote))")
def test_sub_template_numeric_escape(self):
# bug 776311 and friends
self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
self.assertEqual(re.sub('x', r'\117', 'x'), '\117')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')
self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')
self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
self.assertEqual(re.sub('x', r'\777', 'x'), '\377')
self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'
        # in python2.3 (etc), these loop endlessly in sre_parse.py
self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
'xz8')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
'xza')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<-1>', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
# Tests for bug #1177831: exercise groups other than the first group
p = re.compile('(?P<g1>a)(?P<g2>b)?((?(g2)c|d))')
self.assertEqual(p.match('abc').groups(),
('a', 'b', 'c'))
self.assertEqual(p.match('ad').groups(),
('a', None, 'd'))
self.assertEqual(p.match('abd'), None)
self.assertEqual(p.match('ac'), None)
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertEqual(re.match("^x{}$", "xxx"), None)
self.assertNotEqual(re.match("^x{}$", "x{}"), None)
def test_getattr(self):
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
u"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
u"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
def test_bigcharset(self):
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222").group(1), u"\u2222")
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222", re.UNICODE).group(1), u"\u2222")
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def test_re_escape(self):
p=""
for i in range(0, 256):
p = p + chr(i)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None,
True)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1))
pat=re.compile(re.escape(p))
self.assertEqual(pat.match(p) is not None, True)
self.assertEqual(pat.match(p).span(), (0,256))
def test_pickling(self):
import pickle
self.pickle_test(pickle)
import cPickle
self.pickle_test(cPickle)
# old pickles expect the _compile() reconstructor in sre module
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "The sre module is deprecated",
DeprecationWarning)
from sre import _compile
def pickle_test(self, pickle):
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
self.assertRaises(re.error, re.match, "\911", "")
def test_sre_character_class_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"[\%03o]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\%03o0]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\%03o8]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02x]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02x0]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02xz]" % i, chr(i)), None)
self.assertRaises(re.error, re.match, "[\911]", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
# bug described in patches 527371/672491
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
# bug 545855 -- This pattern failed to cause a compile error as it
# should, instead provoking a TypeError.
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
        # bugs 418626 et al. -- Testing Greg Chapman's addition of op code
# SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
# pattern '*?' on a long string.
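        # Each of these matches used to recurse once per repeated character,
        # so a 20000-character subject would blow the C stack; the new
        # opcode performs the scan iteratively.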
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still used to hit the recursion limit, before the
# non-recursive scheme was implemented.
self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001)
def test_bug_612074(self):
pat=u"["+re.escape(u"\u2039")+u"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty cases that used to overflow the straightforward recursive
# implementation of repeated groups.
self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x')
self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x')
self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x')
def test_scanner(self):
def s_ident(scanner, token): return token
def s_operator(scanner, token): return "op%s" % token
def s_float(scanner, token): return float(token)
def s_int(scanner, token): return int(token)
scanner = Scanner([
(r"[a-zA-Z_]\w*", s_ident),
(r"\d+\.\d*", s_float),
(r"\d+", s_int),
(r"=|\+|-|\*|/", s_operator),
(r"\s+", None),
])
self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
(['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
try:
unicode
except NameError:
return # no problem if we have no unicode
class my_unicode(unicode): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
def test_bug_926075(self):
try:
unicode
except NameError:
return # no problem if we have no unicode
self.assert_(re.compile('bug_926075') is not
re.compile(eval("u'bug_926075'")))
def test_bug_931848(self):
try:
unicode
except NameError:
            return  # no problem if we have no unicode
pattern = eval('u"[\u002E\u3002\uFF0E\uFF61]"')
self.assertEqual(re.compile(pattern).split("a.b.c"),
['a','b','c'])
def test_bug_581080(self):
iter = re.finditer(r"\s", "a b")
self.assertEqual(iter.next().span(), (1,2))
self.assertRaises(StopIteration, iter.next)
scanner = re.compile(r"\s").scanner("a b")
self.assertEqual(scanner.search().span(), (1, 2))
self.assertEqual(scanner.search(), None)
def test_bug_817234(self):
iter = re.finditer(r".*", "asdf")
self.assertEqual(iter.next().span(), (0, 4))
self.assertEqual(iter.next().span(), (4, 4))
self.assertRaises(StopIteration, iter.next)
def test_empty_array(self):
        # SF bug 1647541
import array
for typecode in 'cbBuhHiIlLfd':
a = array.array(typecode)
self.assertEqual(re.compile("bla").match(a), None)
self.assertEqual(re.compile("").match(a).groups(), ())
def test_inline_flags(self):
# Bug #1700
        upper_char = unichr(0x1ea0) # Latin Capital Letter A with Dot Below
        lower_char = unichr(0x1ea1) # Latin Small Letter A with Dot Below
p = re.compile(upper_char, re.I | re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile(lower_char, re.I | re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + upper_char, re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + lower_char, re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + upper_char)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + lower_char)
q = p.match(upper_char)
self.assertNotEqual(q, None)
def test_dollar_matches_twice(self):
"$ matches the end of string, and just before the terminating \n"
pattern = re.compile('$')
self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#')
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
pattern = re.compile('$', re.MULTILINE)
self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#' )
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
def test_dealloc(self):
# issue 3299: check for segfault in debug build
import _sre
# the overflow limit is different on wide and narrow builds and it
# depends on the definition of SRE_CODE (see sre.h).
# 2**128 should be big enough to overflow on both. For smaller values
# a RuntimeError is raised instead of OverflowError.
long_overflow = 2**128
self.assertRaises(TypeError, re.finditer, "a", {})
self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
def run_re_tests():
from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR
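    # Each entry in tests is either (pattern, string, outcome) or
    # (pattern, string, outcome, repl, expected); see the unpacking below.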
if verbose:
print 'Running re_tests test suite'
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError, ('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print '=== Syntax error:', t
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print '*** Unexpected error ***', t
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error, msg:
print '=== Unexpected exception', t, repr(msg)
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print '=== Succeeded incorrectly', t
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
                            # Special hack: otherwise the string concatenation fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print '=== grouping error', t,
print repr(repl) + ' should be ' + repr(expected)
else:
print '=== Failed incorrectly', t
# Try the match on a unicode string, and check that it
# still succeeds.
try:
result = obj.search(unicode(s, "latin-1"))
if result is None:
print '=== Fails on unicode match', t
except NameError:
continue # 1.5.2
except TypeError:
continue # unicode test case
# Try the match on a unicode pattern, and check that it
# still succeeds.
obj=re.compile(unicode(pattern, "latin-1"))
result = obj.search(s)
if result is None:
print '=== Fails on unicode pattern match', t
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print '=== Failed on range-limited match', t
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print '=== Fails on case-insensitive match', t
# Try the match with LOCALE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print '=== Fails on locale-sensitive match', t
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print '=== Fails on unicode-sensitive match', t
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| mancoast/CPythonPyc_test | crash/265_test_re.py | Python | gpl-3.0 | 37,806 | 0.003095 |
# pylint: skip-file
from __future__ import print_function
import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
import numpy as np
import logging
from common import get_data
from mxnet import autograd
logging.basicConfig(level=logging.DEBUG)
# define network
def get_net():
net = nn.Sequential()
net.add(nn.Dense(128, activation='relu', prefix='fc1_'))
net.add(nn.Dense(64, activation='relu', prefix='fc2_'))
net.add(nn.Dense(10, prefix='fc3_'))
return net
get_data.GetMNIST_ubyte()
batch_size = 100
train_data = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
label="data/train-labels-idx1-ubyte",
data_shape=(784,),
label_name='sm_label',
batch_size=batch_size, shuffle=True, flat=True, silent=False, seed=10)
val_data = mx.io.MNISTIter(
image="data/t10k-images-idx3-ubyte",
label="data/t10k-labels-idx1-ubyte",
data_shape=(784,),
label_name='sm_label',
batch_size=batch_size, shuffle=True, flat=True, silent=False)
def score(net, ctx_list):
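    # Data-parallel evaluation: split each validation batch across the
    # given contexts and accumulate accuracy over all of them.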
metric = mx.metric.Accuracy()
val_data.reset()
for batch in val_data:
datas = gluon.utils.split_and_load(batch.data[0], ctx_list, batch_axis=0)
labels = gluon.utils.split_and_load(batch.label[0], ctx_list, batch_axis=0)
outputs = []
for x in datas:
outputs.append(net(x))
metric.update(labels, outputs)
return metric.get()[1]
def train(net, epoch, ctx_list):
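    # Plain data-parallel Gluon loop: initialize parameters on every
    # context, split each batch across the contexts, record the forward
    # passes under autograd, backpropagate each slice, then step the
    # trainer with the full batch size so the update is scaled correctly.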
net.collect_params().initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx_list)
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})
metric = mx.metric.Accuracy()
loss = gluon.loss.SoftmaxCrossEntropyLoss()
for i in range(epoch):
train_data.reset()
for batch in train_data:
datas = gluon.utils.split_and_load(batch.data[0], ctx_list, batch_axis=0)
labels = gluon.utils.split_and_load(batch.label[0], ctx_list, batch_axis=0)
outputs = []
with autograd.record():
for x, y in zip(datas, labels):
z = net(x)
L = loss(z, y)
L.backward()
outputs.append(z)
trainer.step(batch.data[0].shape[0])
metric.update(labels, outputs)
name, acc = metric.get()
metric.reset()
print('training acc at epoch %d: %s=%f'%(i, name, acc))
def test_autograd():
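    # Train on two CPU contexts, then check that single- and dual-context
    # scoring agree and that parameters survive a save/load round trip.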
net1 = get_net()
train(net1, 5, [mx.cpu(0), mx.cpu(1)])
acc1 = score(net1, [mx.cpu(0)])
acc2 = score(net1, [mx.cpu(0), mx.cpu(1)])
assert acc1 > 0.95
assert abs(acc1 - acc2) < 0.01
net1.collect_params().save('mnist.params')
net2 = get_net()
net2.collect_params().load('mnist.params', ctx=[mx.cpu(0)])
acc3 = score(net2, [mx.cpu(0)])
assert abs(acc3 - acc1) < 0.0001
if __name__ == '__main__':
test_autograd()
| hotpxl/mxnet | tests/python/train/test_autograd.py | Python | apache-2.0 | 2,949 | 0.00373 |
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from collections import namedtuple
import mock
import pytest
from kazoo.exceptions import NoNodeError
from kafka_utils.util.config import ClusterConfig
from kafka_utils.util.serialization import dump_json
from kafka_utils.util.zookeeper import ZK
MockGetTopics = namedtuple('MockGetTopics', ['ctime'])
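# Minimal stand-in for a kazoo ZnodeStat: the code under test only reads
# its ctime field (milliseconds since the epoch).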
@mock.patch(
'kafka_utils.util.zookeeper.KazooClient',
autospec=True
)
class TestZK(object):
cluster_config = ClusterConfig(
type='mytype',
name='some_cluster',
broker_list='some_list',
zookeeper='some_ip'
)
def test_create(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.create(
'/kafka/consumers/some_group/offsets'
)
zk.create(
'/kafka/consumers/some_group/offsets',
value='some_val',
acl=None,
ephemeral=True,
sequence=True,
makepath=True
)
mock_obj = mock.Mock()
zk.create(
'/kafka/consumers/some_group/offsets',
value='some_val',
acl=mock_obj,
)
call_list = [
mock.call(
'/kafka/consumers/some_group/offsets',
'', None, False, False, False
),
mock.call(
'/kafka/consumers/some_group/offsets',
'some_val', None, True, True, True
),
mock.call(
'/kafka/consumers/some_group/offsets',
'some_val', mock_obj, False, False, False
),
]
assert mock_client.return_value.create.call_args_list == call_list
def test_set(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.set(
'config/topics/some_topic',
'some_val'
)
zk.set(
'brokers/topics/some_topic',
'{"name": "some_topic", "more": "properties"}'
)
call_list = [
mock.call(
'config/topics/some_topic',
'some_val'
),
mock.call(
'brokers/topics/some_topic',
'{"name": "some_topic", "more": "properties"}'
)
]
assert mock_client.return_value.set.call_args_list == call_list
def test_delete(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.delete(
'/kafka/consumers/some_group/offsets',
)
zk.delete(
'/kafka/consumers/some_group/offsets',
recursive=True
)
call_list = [
mock.call(
'/kafka/consumers/some_group/offsets',
recursive=False
),
mock.call(
'/kafka/consumers/some_group/offsets',
recursive=True
),
]
assert mock_client.return_value.delete.call_args_list == call_list
def test_delete_topic(self, _):
with mock.patch.object(
ZK,
'delete',
autospec=True
) as mock_delete:
with ZK(self.cluster_config) as zk:
zk.delete_topic(
'some_group',
'some_topic',
)
mock_delete.assert_called_once_with(
zk,
'/consumers/some_group/offsets/some_topic',
True,
)
def test_get_my_subscribed_partitions(self, _):
with mock.patch.object(
ZK,
'get_children',
autospec=True,
) as mock_children:
with ZK(self.cluster_config) as zk:
zk.get_my_subscribed_partitions(
'some_group',
'some_topic',
)
mock_children.assert_called_once_with(
zk,
'/consumers/some_group/offsets/some_topic',
)
def test_get_topic_config(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(
return_value=(
b'{"version": 1, "config": {"cleanup.policy": "compact"}}',
"Random node info that doesn't matter"
)
)
actual = zk.get_topic_config("some_topic")
expected = {"version": 1, "config": {"cleanup.policy": "compact"}}
assert actual == expected
def test_get_topic_config_8(self, mock_client):
"""
        Test getting configuration for topics created in Kafka prior to 0.9.0.
"""
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(side_effect=NoNodeError())
zk.get_topics = mock.Mock(return_value={"some_topic": {}})
actual = zk.get_topic_config("some_topic")
expected = {"config": {}}
assert actual == expected
def test_get_nonexistent_topic_config(self, mock_client):
"""
Test getting configuration for topics that don't exist.
"""
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(side_effect=NoNodeError())
zk.get_topics = mock.Mock(return_value={})
with pytest.raises(NoNodeError):
zk.get_topic_config("some_topic")
def test_set_topic_config_kafka_10(self, mock_client):
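        # set_topic_config is expected to write the serialized config and
        # then announce it via a sequential /config/changes/config_change_
        # znode; this test pins the version-2 notification payload
        # ({"entity_path": ..., "version": 2}).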
with mock.patch.object(
ZK,
'set',
autospec=True
) as mock_set:
with ZK(self.cluster_config) as zk:
config = {"version": 1, "config": {"cleanup.policy": "compact"}}
config_change = {"entity_path": "topics/some_topic", "version": 2}
zk.set_topic_config(
"some_topic",
config,
)
serialized_config = dump_json(config)
serialized_config_change = dump_json(config_change)
mock_set.assert_called_once_with(
zk,
'/config/topics/some_topic',
serialized_config,
)
expected_create_call = mock.call(
'/config/changes/config_change_',
serialized_config_change,
None,
False,
True,
False
)
assert mock_client.return_value.create.call_args_list == [expected_create_call]
def test_set_topic_config_kafka_9(self, mock_client):
with mock.patch.object(
ZK,
'set',
autospec=True
) as mock_set:
with ZK(self.cluster_config) as zk:
config = {"version": 1, "config": {"cleanup.policy": "compact"}}
config_change = {"version": 1, "entity_type": "topics", "entity_name": "some_topic"}
zk.set_topic_config(
"some_topic",
config,
(0, 9, 2)
)
serialized_config = dump_json(config)
serialized_config_change = dump_json(config_change)
mock_set.assert_called_once_with(
zk,
'/config/topics/some_topic',
serialized_config,
)
expected_create_call = mock.call(
'/config/changes/config_change_',
serialized_config_change,
None,
False,
True,
False
)
assert mock_client.return_value.create.call_args_list == [expected_create_call]
def test_get_broker_config(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(
return_value=(
b'{"version": 1, "config": {"leader.replication.throttled.rate": "42"}}',
"Random node info that doesn't matter"
)
)
actual = zk.get_broker_config(0)
expected = {"version": 1, "config": {"leader.replication.throttled.rate": "42"}}
assert actual == expected
def test_set_broker_config_kafka_10(self, mock_client):
with mock.patch.object(
ZK,
'set',
autospec=True
) as mock_set:
with ZK(self.cluster_config) as zk:
config = {"version": 1, "config": {"leader.replication.throttled.rate": "42"}}
config_change = {"entity_path": "brokers/0", "version": 2}
zk.set_broker_config(0, config)
serialized_config = dump_json(config)
serialized_config_change = dump_json(config_change)
mock_set.assert_called_once_with(
zk,
'/config/brokers/0',
serialized_config,
)
expected_create_call = mock.call(
'/config/changes/config_change_',
serialized_config_change,
None,
False,
True,
False
)
assert mock_client.return_value.create.call_args_list == [expected_create_call]
def test_get_topics(self, mock_client):
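        # ZooKeeper reports ctime in milliseconds; get_topics should
        # surface it in seconds (31000 -> 31.0 below).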
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(
return_value=(
(
b'{"version": "1", "partitions": {"0": [1, 0]}}',
MockGetTopics(31000),
)
)
)
zk._fetch_partition_state = mock.Mock(
return_value=(
(
b'{"version": "2"}',
MockGetTopics(32000),
)
)
)
actual_with_fetch_state = zk.get_topics("some_topic")
expected_with_fetch_state = {
'some_topic': {
'ctime': 31.0,
'partitions': {
'0': {
'replicas': [1, 0],
'ctime': 32.0,
'version': '2',
},
},
'version': '1',
},
}
assert actual_with_fetch_state == expected_with_fetch_state
zk._fetch_partition_info = mock.Mock(
return_value=MockGetTopics(33000)
)
actual_without_fetch_state = zk.get_topics("some_topic", fetch_partition_state=False)
expected_without_fetch_state = {
'some_topic': {
'ctime': 31.0,
'partitions': {
'0': {
'replicas': [1, 0],
'ctime': 33.0,
},
},
'version': '1',
},
}
assert actual_without_fetch_state == expected_without_fetch_state
def test_get_topics_empty_cluster(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(side_effect=NoNodeError())
actual_with_no_node_error = zk.get_topics()
expected_with_no_node_error = {}
zk.get_children.assert_called_with("/brokers/topics")
assert actual_with_no_node_error == expected_with_no_node_error
def test_get_brokers_names_only(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(
return_value=[1, 2, 3],
)
expected = {1: None, 2: None, 3: None}
actual = zk.get_brokers(names_only=True)
zk.get_children.assert_called_with("/brokers/ids")
assert actual == expected
def test_get_brokers_with_metadata(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(
return_value=[1, 2, 3],
)
zk.get_broker_metadata = mock.Mock(
return_value='broker',
)
expected = {1: 'broker', 2: 'broker', 3: 'broker'}
actual = zk.get_brokers()
zk.get_children.assert_called_with("/brokers/ids")
calls = zk.get_broker_metadata.mock_calls
zk.get_broker_metadata.assert_has_calls(calls)
assert actual == expected
def test_get_brokers_empty_cluster(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(side_effect=NoNodeError())
actual_with_no_node_error = zk.get_brokers()
expected_with_no_node_error = {}
zk.get_children.assert_called_with("/brokers/ids")
assert actual_with_no_node_error == expected_with_no_node_error
def test_get_brokers_with_metadata_for_ssl(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(
return_value=[1],
)
zk.get = mock.Mock(
return_value=(b'{"endpoints":["SSL://broker:9093"],"host":null}', None)
)
expected = {1: {'host': 'broker'}}
actual = zk.get_brokers()
assert actual[1]['host'] == expected[1]['host']
zk.get = mock.Mock(
return_value=(b'{"endpoints":["INTERNAL://broker:9093","EXTERNAL://broker:9093"],"host":null}', None)
)
expected = {1: {'host': 'broker'}}
actual = zk.get_brokers()
assert actual[1]['host'] == expected[1]['host']
def test_get_brokers_with_metadata_for_sasl(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(
return_value=[1],
)
zk.get = mock.Mock(
return_value=(b'{"endpoints":["PLAINTEXTSASL://broker:9093"],"host":null}', None)
)
expected = {1: {'host': 'broker'}}
actual = zk.get_brokers()
assert actual[1]['host'] == expected[1]['host']
def test_get_brokers_with_metadata_for_plaintext(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.get_children = mock.Mock(
return_value=[1],
)
zk.get = mock.Mock(
return_value=(b'{"endpoints":[],"host":"broker"}', None)
)
expected = {1: {'host': 'broker'}}
actual = zk.get_brokers()
assert actual[1]['host'] == expected[1]['host']
| Yelp/kafka-utils | tests/util/zookeeper_test.py | Python | apache-2.0 | 15,746 | 0.000889 |
import copy
import unittest
import warnings
import mock
import numpy
import chainer
from chainer.backends import cuda
from chainer.backends import intel64
from chainer import initializers
from chainer import testing
from chainer.testing import attr
class TestLink(unittest.TestCase):
def setUp(self):
x_shape_0 = 2
x_shape_1 = numpy.int64(3)
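        # The deprecated kwargs form registers parameters directly: 'x'
        # becomes a concrete (2, 3) float64 array, while the None shape
        # leaves 'u' uninitialized until initialize() is called.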
with testing.assert_warns(DeprecationWarning):
self.link = chainer.Link(x=((x_shape_0, x_shape_1), 'd'),
u=(None, 'd'))
with self.link.init_scope():
self.link.y = chainer.Parameter(shape=(2,))
self.link.v = chainer.Parameter()
self.p = numpy.array([1, 2, 3], dtype='f')
self.link.add_persistent('p', self.p)
self.link.name = 'a'
self.link.x.update_rule = chainer.UpdateRule()
self.link.x.update_rule.enabled = False
self.link.u.update_rule = chainer.UpdateRule()
if cuda.available:
self.current_device_id = cuda.cupy.cuda.get_device_id()
def tearDown(self):
if cuda.available \
and cuda.cupy.cuda.get_device_id() != self.current_device_id:
cuda.Device(self.current_device_id).use()
def check_param_init(self, name, shape, dtype, data_value=numpy.nan):
self.assertTrue(hasattr(self.link, name))
var = getattr(self.link, name)
self.assertEqual(var.name, name)
self.assertIsInstance(var, chainer.Parameter)
self.assertEqual(var.data.shape, shape)
self.assertEqual(var.data.dtype, dtype)
numpy.testing.assert_array_equal(var.data, data_value)
self.assertEqual(var.grad.shape, shape)
self.assertEqual(var.grad.dtype, dtype)
numpy.testing.assert_array_equal(var.grad, numpy.nan)
def check_param_uninit(self, name, initializer=None):
self.assertTrue(hasattr(self.link, name))
var = getattr(self.link, name)
self.assertIsInstance(var, chainer.Parameter)
self.assertEqual(var.name, name)
self.assertIsNone(var.data)
if initializer is not None:
self.assertIs(var.initializer, initializer)
def test_init(self):
self.check_param_init('x', (2, 3), 'd')
self.check_param_init('y', (2,), 'f')
self.check_param_uninit('u')
self.link.u.initialize((2, 3))
self.check_param_init('u', (2, 3), 'd')
self.check_param_uninit('v')
self.link.v.initialize((2, 3))
self.check_param_init('v', (2, 3), 'f')
def test_assign_param_outside_of_init_scope(self):
p = chainer.Parameter()
self.link.p = p
self.assertTrue(all(p is not param for param in self.link.params()))
def test_assign_var_in_init_scope(self):
p = chainer.Variable()
with self.link.init_scope():
self.link.p = p
self.assertTrue(all(p is not param for param in self.link.params()))
def test_add_param(self):
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', (2, 3))
self.check_param_init('z', (2, 3), 'f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('w', (2, 3), dtype='d')
self.check_param_init('w', (2, 3), 'd')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('r')
self.check_param_uninit('r')
self.link.r.initialize((2, 3))
self.check_param_init('r', (2, 3), 'f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('s', dtype='d')
self.check_param_uninit('s')
self.link.s.initialize((2, 3))
self.check_param_init('s', (2, 3), 'd')
initializer = initializers.Zero('d')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('t', initializer=initializer)
self.check_param_uninit('t', initializer)
self.link.t.initialize((2, 3))
self.check_param_init('t', (2, 3), 'd', 0)
def test_add_param_direct_initialization(self):
z = numpy.random.rand(2, 3).astype('f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', initializer=z)
self.assertIsInstance(self.link.z.data, numpy.ndarray)
numpy.testing.assert_array_equal(self.link.z.data, z)
def test_add_param_duplicated_with_persistent(self):
self.link.add_persistent('z', 'abc')
with self.assertRaises(AttributeError):
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', (2, 3))
def test_add_persistent(self):
self.assertTrue(hasattr(self.link, 'p'))
self.assertIs(self.link.p, self.p)
self.link.add_persistent('q', 'abc')
self.assertTrue(hasattr(self.link, 'q'))
self.assertEqual(self.link.q, 'abc')
def test_delete(self):
del self.link.x
self.assertFalse(hasattr(self.link, 'x'))
self.assertNotIn('x', self.link._params)
self.assertNotIn('x', self.link._persistent)
del self.link.p
self.assertFalse(hasattr(self.link, 'p'))
self.assertNotIn('p', self.link._params)
self.assertNotIn('p', self.link._persistent)
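    # Link.copy supports three modes: 'share' creates new Variables that
    # alias the original arrays, 'copy' also copies the arrays, and
    # 'init' re-initializes parameters from their initializers.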
def test_copy_with_share_mode(self):
link = self.link.copy(mode='share')
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIs(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIs(link.y.array, self.link.y.array)
self.assertIsNone(link.u.array)
self.assertIs(link.p, self.link.p)
self.assertIs(link.name, None)
def test_copy_with_copy_mode(self):
link = self.link.copy(mode='copy')
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIsNot(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIsNot(link.y.array, self.link.y.array)
self.assertIsNone(link.u.array)
self.assertIsNot(link.p, self.link.p)
self.assertIsNot(link.name, None)
def test_copy_with_init_mode(self):
self.link.u.initializer = initializers.Normal(
dtype=self.link.u.initializer.dtype)
self.link.u.initialize((2, 3))
link = self.link.copy(mode='init')
self.assertFalse(numpy.array_equal(self.link.u.array, link.u.array))
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIsNot(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIsNot(link.y.array, self.link.y.array)
self.assertIsNot(link.p, self.link.p)
self.assertIsNot(link.name, None)
@attr.gpu
def test_copy_and_to_gpu_init(self):
cupy = cuda.cupy
l0 = self.link
l1 = l0.copy()
self.assertIs(l0.x.data, l1.x.data)
l1.to_gpu()
self.assertIsNot(l0.x.data, l1.x.data)
self.assertIsInstance(l0.x.data, numpy.ndarray)
self.assertIsInstance(l1.x.data, cupy.ndarray)
@attr.gpu
def test_copy_and_to_gpu_uninit(self):
cupy = cuda.cupy
l0 = self.link
l1 = l0.copy()
self.assertIsNone(l0.u.data)
self.assertIsNone(l1.u.data)
l1.to_gpu()
l1.u.initialize((2, 3))
self.assertIsNone(l0.u.data)
self.assertIsInstance(l1.u.data, cupy.ndarray)
@attr.multi_gpu(2)
def test_copy_and_to_gpu_uninit_multi_gpu(self):
cupy = cuda.cupy
l0 = self.link
l1 = l0.copy()
l2 = l0.copy()
self.assertIsNone(l0.u.data)
self.assertIsNone(l1.u.data)
self.assertIsNone(l2.u.data)
l1.to_gpu()
l1.u.initialize((2, 3))
l2.to_gpu()
l2.u.initialize((2, 3))
self.assertIsNone(l0.u.data)
self.assertIsInstance(l1.u.data, cupy.ndarray)
self.assertIsInstance(l2.u.data, cupy.ndarray)
self.assertNotEqual(l1.u.data.data, l2.u.data.data)
def _check_deepcopy(self, link):
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIsNot(link.x.data, self.link.x.data)
numpy.testing.assert_array_equal(cuda.to_cpu(link.x.data),
cuda.to_cpu(self.link.x.data))
self.assertIsNot(link.y, self.link.y)
self.assertIsNot(link.y.data, self.link.y.data)
numpy.testing.assert_array_equal(cuda.to_cpu(link.y.data),
cuda.to_cpu(self.link.y.data))
self.assertIsNone(link.u.data)
self.assertIsNot(link.p, self.link.p)
self.assertEqual(link.name, self.link.name)
def test_deepcopy(self):
link = copy.deepcopy(self.link)
self._check_deepcopy(link)
self.assertIsNone(link._device_id)
@attr.multi_gpu(2)
def test_deepcopy_multi_device(self):
device_id = 1
self.link.to_gpu(device_id)
link = copy.deepcopy(self.link)
self._check_deepcopy(link)
self.assertEqual(link._device_id, device_id)
self.assertEqual(link.x.data.device.id, device_id)
self.assertEqual(link.y.data.device.id, device_id)
def test_to_cpu_on_cpu(self):
x = self.link.x.data
gx = self.link.x.grad
y = self.link.y.data
gy = self.link.y.grad
p = self.link.p
self.link.to_cpu()
self.assertIs(self.link.x.data, x)
self.assertIs(self.link.x.grad, gx)
self.assertIs(self.link.y.data, y)
self.assertIs(self.link.y.grad, gy)
self.assertIsNone(self.link.u.data)
self.assertIsNone(self.link.u.grad)
self.assertIs(self.link.p, p)
@attr.gpu
def test_to_cpu(self):
self.link.to_gpu()
self.link.to_cpu()
self.link.v.initialize((2, 3))
self.assertIs(self.link.xp, numpy)
self.assertIsInstance(self.link.x.data, numpy.ndarray)
self.assertIsInstance(self.link.x.grad, numpy.ndarray)
self.assertIsInstance(self.link.y.data, numpy.ndarray)
self.assertIsInstance(self.link.y.grad, numpy.ndarray)
self.assertIsNone(self.link.u.data)
self.assertIsNone(self.link.u.grad)
self.assertIsInstance(self.link.v.data, numpy.ndarray)
self.assertIsInstance(self.link.v.grad, numpy.ndarray)
self.assertIsInstance(self.link.p, numpy.ndarray)
@attr.gpu
def test_to_gpu(self):
cupy = cuda.cupy
self.link.to_gpu()
self.link.v.initialize((2, 3))
self.assertIs(self.link.xp, cupy)
self.assertIsInstance(self.link.x.data, cupy.ndarray)
self.assertIsInstance(self.link.x.grad, cupy.ndarray)
self.assertIsInstance(self.link.y.data, cupy.ndarray)
self.assertIsInstance(self.link.y.grad, cupy.ndarray)
self.assertIsNone(self.link.u.data)
self.assertIsNone(self.link.u.grad)
self.assertIsInstance(self.link.v.data, cupy.ndarray)
self.assertIsInstance(self.link.v.grad, cupy.ndarray)
self.assertIsInstance(self.link.p, cupy.ndarray)
@attr.multi_gpu(2)
def test_to_gpu_different_device(self):
cuda.Device(1).use()
self.link.to_gpu(0)
self.assertEqual(self.link._device_id, 0)
@attr.multi_gpu(2)
def test_to_gpu_current_device(self):
cuda.Device(1).use()
self.link.to_gpu()
self.assertEqual(self.link._device_id, 1)
def test_params(self):
params = list(self.link.params())
self.assertEqual({id(p) for p in params},
{id(self.link.x), id(self.link.y),
id(self.link.u), id(self.link.v)})
def test_params_skip_uninit(self):
params = list(self.link.params(include_uninit=False))
self.assertEqual({id(p) for p in params},
{id(self.link.x), id(self.link.y)})
def test_namedparams(self):
namedparams = list(self.link.namedparams())
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/x', id(self.link.x)), ('/y', id(self.link.y)),
('/u', id(self.link.u)), ('/v', id(self.link.v))})
def test_namedparams_skip_uninit(self):
namedparams = list(self.link.namedparams(include_uninit=False))
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/x', id(self.link.x)), ('/y', id(self.link.y))})
def test_links(self):
links = list(self.link.links())
self.assertIs(links[0], self.link)
def test_links_skipself(self):
links = list(self.link.links(skipself=True))
self.assertFalse(links) # empty
def test_namedlinks(self):
pl = list(self.link.namedlinks())
self.assertEqual(len(pl), 1)
self.assertEqual(pl[0][0], '/')
self.assertIs(pl[0][1], self.link)
def _setup_test_copyparams(self):
self.link.x.grad.fill(0)
self.link.y.grad.fill(1)
self.link.u.initialize((2, 3))
self.link.u.data.fill(0)
self.link.u.grad.fill(1)
self.link.v.cleargrad()
gx = self.link.x.grad.copy()
gy = self.link.y.grad.copy()
gu = self.link.u.grad.copy()
l = chainer.Link()
with l.init_scope():
l.x = chainer.Parameter(shape=(2, 3))
l.y = chainer.Parameter(shape=2)
l.u = chainer.Parameter(shape=(2, 3))
l.v = chainer.Parameter(shape=(3, 2))
l.x.data.fill(2)
l.x.grad.fill(3)
l.y.data.fill(4)
l.y.grad.fill(5)
l.u.data.fill(6)
l.u.grad.fill(7)
l.v.data.fill(8)
l.v.grad.fill(9)
l.add_persistent('p', numpy.full_like(self.link.p, 10))
return l, (gx, gy, gu)
def _check_copyparams(self, l, gs):
gx, gy, gu = gs
numpy.testing.assert_array_equal(self.link.x.data, l.x.data)
numpy.testing.assert_array_equal(self.link.x.grad, gx)
numpy.testing.assert_array_equal(self.link.y.data, l.y.data)
numpy.testing.assert_array_equal(self.link.y.grad, gy)
numpy.testing.assert_array_equal(self.link.u.data, l.u.data)
numpy.testing.assert_array_equal(self.link.u.grad, gu)
numpy.testing.assert_array_equal(self.link.v.data, l.v.data)
numpy.testing.assert_array_equal(self.link.v.grad, None)
def test_copyparams(self):
l, gs = self._setup_test_copyparams()
self.link.copyparams(l)
self._check_copyparams(l, gs)
numpy.testing.assert_array_equal(self.link.p, l.p)
def test_copyparams_no_copy_persistent(self):
orig_p = self.link.p.copy()
l, gs = self._setup_test_copyparams()
numpy.testing.assert_array_equal(False, orig_p == l.p)
self.link.copyparams(l, copy_persistent=False)
self._check_copyparams(l, gs)
numpy.testing.assert_array_equal(self.link.p, orig_p)
def test_cleargrads(self):
self.link.cleargrads()
self.assertIsNone(self.link.x.grad)
self.assertIsNone(self.link.y.grad)
self.link.u.initialize((2, 3))
self.link.v.initialize((2, 3))
self.assertIsNone(self.link.u.grad)
self.assertIsNone(self.link.v.grad)
def test_zerograds(self):
gx_expect = numpy.zeros_like(self.link.x.data)
gy_expect = numpy.zeros_like(self.link.y.data)
with testing.assert_warns(DeprecationWarning):
self.link.zerograds()
numpy.testing.assert_array_equal(self.link.x.grad, gx_expect)
numpy.testing.assert_array_equal(self.link.y.grad, gy_expect)
self.link.u.initialize((2, 3))
self.link.v.initialize((2, 3))
gu_expect = numpy.zeros_like(self.link.u.data)
gv_expect = numpy.zeros_like(self.link.v.data)
numpy.testing.assert_array_equal(self.link.u.grad, gu_expect)
numpy.testing.assert_array_equal(self.link.v.grad, gv_expect)
def test_addgrads(self):
l = chainer.Link()
with l.init_scope():
l.x = chainer.Parameter(shape=(2, 3),
initializer=initializers.NaN('d'))
l.y = chainer.Parameter(shape=2)
l.u = chainer.Parameter(shape=(2, 3))
l.v = chainer.Parameter()
l.x.grad.fill(1)
l.y.grad.fill(2)
l.u.grad.fill(3)
self.link.x.grad.fill(-1)
self.link.y.grad.fill(-2)
self.link.u.cleargrad()
self.link.addgrads(l)
gx_expect = numpy.zeros_like(l.x.grad)
gy_expect = numpy.zeros_like(l.y.grad)
gu_expect = l.u.grad
numpy.testing.assert_array_equal(self.link.x.grad, gx_expect)
numpy.testing.assert_array_equal(self.link.y.grad, gy_expect)
numpy.testing.assert_array_equal(self.link.u.grad, gu_expect)
self.assertIsNone(self.link.v.grad, None)
def test_serialize(self):
serializer = mock.MagicMock(return_value=3)
l = chainer.Link()
with l.init_scope():
l.x = chainer.Parameter(shape=(2, 3))
l.y = chainer.Parameter(shape=2)
l.add_persistent('z', 1)
l.serialize(serializer)
self.assertEqual(serializer.call_count, 3)
serializer.assert_any_call('x', l.x.data)
serializer.assert_any_call('y', l.y.data)
serializer.assert_any_call('z', 1)
self.assertEqual(l.z, 3)
def test_serialize_param_shape_placeholder(self):
serializer = mock.MagicMock(return_value=3)
l = chainer.Link()
with l.init_scope():
l.y = chainer.Parameter(shape=2)
l.x = chainer.Parameter()
l.x.initialize((2, 3))
l.add_persistent('z', 1)
l.serialize(serializer)
self.assertEqual(serializer.call_count, 3)
serializer.assert_any_call('x', l.x.data)
serializer.assert_any_call('y', l.y.data)
serializer.assert_any_call('z', 1)
self.assertEqual(l.z, 3)
def test_serialize_deserialize_to_uninitialized_param(self):
ret = numpy.random.rand(2, 3).astype('f')
serializer = mock.MagicMock(return_value=ret)
l = chainer.Link()
with l.init_scope():
l.x = chainer.Parameter()
l.serialize(serializer)
self.assertEqual(serializer.call_count, 1)
serializer.assert_any_call('x', None)
self.assertIsInstance(l.x.data, numpy.ndarray)
numpy.testing.assert_array_equal(l.x.data, ret)
def test_enable_update(self):
self.link.enable_update()
self.assertTrue(self.link.x.update_rule.enabled)
self.assertTrue(self.link.u.update_rule.enabled)
def test_disable_update(self):
self.link.disable_update()
self.assertFalse(self.link.x.update_rule.enabled)
self.assertFalse(self.link.u.update_rule.enabled)
def test_update_enabled(self):
self.assertTrue(self.link.update_enabled)
self.link.disable_update()
self.assertFalse(self.link.update_enabled)
self.link.enable_update()
self.assertTrue(self.link.update_enabled)
def test_count_params(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert self.link.count_params() == 8
assert len(w) == 2
assert w[0].category is UserWarning
self.link.u.initialize((2, 3))
self.link.v.initialize((2, 3))
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.link.count_params()
assert not w
class TestLinkRepeat(unittest.TestCase):
def setUp(self):
class Layer(chainer.Link):
def __init__(self):
super(Layer, self).__init__()
with self.init_scope():
self.x = chainer.Parameter(
chainer.initializers.Normal(), shape=(2, 3))
def forward(self):
pass
self.link = Layer()
def test_no_repeat(self):
ret = self.link.repeat(0)
self.assertEqual(len(ret), 0)
def test_repeat_with_init(self):
ret = self.link.repeat(2, mode='init')
self.assertEqual(len(ret), 2)
# Both should be different objects from the original link
self.assertIsNot(ret[0], self.link)
self.assertIsNot(ret[1], self.link)
# Object IDs of elements should be different
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].x, ret[1].x)
        # But shape and type of parameters should be the same
self.assertEqual(ret[0].x.shape, self.link.x.shape)
self.assertEqual(ret[0].x.dtype, self.link.x.dtype)
self.assertEqual(ret[0].x.shape, ret[1].x.shape)
self.assertEqual(ret[0].x.dtype, ret[1].x.dtype)
# Parameters are re-initialized, so the values should be different
self.assertFalse(numpy.all(ret[0].x.array == ret[1].x.array))
def test_repeat_with_copy(self):
ret = self.link.repeat(2, mode='copy')
self.assertEqual(len(ret), 2)
# Both should be different objects from the original link
self.assertIsNot(ret[0], self.link)
self.assertIsNot(ret[1], self.link)
# Object IDs of elements should be different
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].x, ret[1].x)
        # But shape, type, and value of parameters should be the same
self.assertEqual(ret[0].x.shape, self.link.x.shape)
self.assertEqual(ret[0].x.dtype, self.link.x.dtype)
self.assertEqual(ret[0].x.shape, ret[1].x.shape)
self.assertEqual(ret[0].x.dtype, ret[1].x.dtype)
numpy.testing.assert_array_equal(ret[0].x.array, ret[1].x.array)
def test_repeat_with_share(self):
ret = self.link.repeat(2, mode='share')
self.assertEqual(len(ret), 2)
# Both should be different objects from the original link
self.assertIsNot(ret[0], self.link)
self.assertIsNot(ret[1], self.link)
# Object IDs of elements should be different
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].x, ret[1].x)
# But the array objects should be the same
self.assertIs(ret[0].x.array, ret[1].x.array)
        # But the shape, dtype, and values of the parameters should be the same
self.assertEqual(ret[0].x.shape, self.link.x.shape)
self.assertEqual(ret[0].x.dtype, self.link.x.dtype)
self.assertEqual(ret[0].x.shape, ret[1].x.shape)
self.assertEqual(ret[0].x.dtype, ret[1].x.dtype)
numpy.testing.assert_array_equal(ret[0].x.array, ret[1].x.array)
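# CountParameter is a drop-in stand-in for chainer.Parameter that counts calls
# to to_cpu(), to_gpu() and zerograd(), so the tests below can assert that each
# parameter is visited exactly once per device transfer or gradient reset.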
class CountParameter(chainer.Parameter):
def __init__(self, v):
super(CountParameter, self).__init__(v.data, name=v.name)
self.data = v.data
self.grad = v.grad
self.count_to_cpu = 0
self.count_to_gpu = 0
self.count_zerograd = 0
def to_cpu(self):
self.count_to_cpu += 1
super(CountParameter, self).to_cpu()
def to_gpu(self, device=None):
self.count_to_gpu += 1
super(CountParameter, self).to_gpu(device)
def zerograd(self):
self.count_zerograd += 1
super(CountParameter, self).zerograd()
class TestChain(unittest.TestCase):
def setUp(self):
self.l1 = chainer.Link()
with self.l1.init_scope():
self.l1.x = chainer.Parameter(shape=(2, 3))
self.l2 = chainer.Link()
with self.l2.init_scope():
self.l2.x = chainer.Parameter(shape=2)
self.l3 = chainer.Link()
with self.l3.init_scope():
self.l3.x = chainer.Parameter()
self.c1 = chainer.Chain()
with self.c1.init_scope():
self.c1.l1 = self.l1
with testing.assert_warns(DeprecationWarning):
self.c1.add_link('l2', self.l2)
self.c2 = chainer.Chain()
with self.c2.init_scope():
self.c2.c1 = self.c1
self.c2.l3 = self.l3
def test_init(self):
self.assertIs(self.c1.l1, self.l1)
self.assertIs(self.c1['l1'], self.l1)
self.assertEqual(self.l1.name, 'l1')
self.assertIs(self.c2.c1, self.c1)
self.assertIs(self.c2['c1'], self.c1)
self.assertEqual(self.c1.name, 'c1')
self.assertIs(self.c2.l3, self.l3)
self.assertIs(self.c2['l3'], self.l3)
self.assertEqual(self.l3.name, 'l3')
def test_add_link(self):
self.assertIs(self.c1.l2, self.l2)
self.assertEqual(self.l2.name, 'l2')
def test_add_link_to_existing_attribute(self):
self.l1.z = 0
with self.assertRaises(AttributeError):
self.l1.add_link('z', chainer.Link())
def test_assign_link_outside_of_init_scope(self):
l = chainer.Link()
self.l1.l = l
self.assertTrue(all(l is not link for link in self.l1.links()))
def test_delete_link(self):
del self.c1.l1
self.assertFalse(hasattr(self.c1, 'l1'))
self.assertNotIn('l1', self.c1._children)
def test_copy_with_share_mode(self):
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
c2 = self.c2.copy(mode='share')
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, set)
self.assertTrue(hasattr(c2, 'c1'))
self.assertEqual(c2.c1.name, 'c1')
self.assertIsInstance(c2.c1._children, set)
self.assertIsNot(c2.c1, self.c1)
self.assertEqual(c2.c1.l1.name, 'l1')
self.assertIsNot(c2.c1.l1, self.l1)
self.assertIsNot(c2.c1.l1.x, self.l1.x)
self.assertIs(c2.c1.l1.x.data, self.l1.x.data)
self.assertIs(c2.c1.l1.x.grad, None)
self.assertTrue(hasattr(c2.c1, 'l2'))
self.assertEqual(c2.c1.l2.name, 'l2')
self.assertIsNot(c2.c1.l2, self.l2)
self.assertIsNot(c2.c1.l2.x, self.l2.x)
self.assertIs(c2.c1.l2.x.data, self.l2.x.data)
self.assertIs(c2.c1.l2.x.grad, None)
self.assertTrue(hasattr(c2, 'l3'))
self.assertEqual(c2.l3.name, 'l3')
self.assertIsNot(c2.l3, self.l3)
self.assertIsNot(c2.l3.x, self.l3.x)
self.assertIs(c2.l3.x.data, self.l3.x.data)
self.assertIs(c2.l3.x.grad, None)
def test_copy_with_copy_mode(self):
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
c2 = self.c2.copy(mode='copy')
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, set)
self.assertTrue(hasattr(c2, 'c1'))
self.assertEqual(c2.c1.name, 'c1')
self.assertIsInstance(c2.c1._children, set)
self.assertIsNot(c2.c1, self.c1)
self.assertEqual(c2.c1.l1.name, 'l1')
self.assertIsNot(c2.c1.l1, self.l1)
self.assertIsNot(c2.c1.l1.x, self.l1.x)
self.assertIsNot(c2.c1.l1.x.data, self.l1.x.data)
self.assertTrue(numpy.array_equal(c2.c1.l1.x.data, self.l1.x.data))
self.assertIs(c2.c1.l1.x.grad, None)
self.assertTrue(hasattr(c2.c1, 'l2'))
self.assertEqual(c2.c1.l2.name, 'l2')
self.assertIsNot(c2.c1.l2, self.l2)
self.assertIsNot(c2.c1.l2.x, self.l2.x)
self.assertIsNot(c2.c1.l2.x.data, self.l2.x.data)
self.assertTrue(numpy.array_equal(c2.c1.l2.x.data, self.l2.x.data))
self.assertIs(c2.c1.l2.x.grad, None)
self.assertTrue(hasattr(c2, 'l3'))
self.assertEqual(c2.l3.name, 'l3')
self.assertIsNot(c2.l3, self.l3)
self.assertIsNot(c2.l3.x, self.l3.x)
self.assertIs(c2.l3.x.data, self.l3.x.data)
self.assertIs(c2.l3.x.grad, None)
def test_copy_with_init_mode(self):
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
c2 = self.c2.copy(mode='init')
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, set)
self.assertTrue(hasattr(c2, 'c1'))
self.assertEqual(c2.c1.name, 'c1')
self.assertIsInstance(c2.c1._children, set)
self.assertIsNot(c2.c1, self.c1)
self.assertEqual(c2.c1.l1.name, 'l1')
self.assertIsNot(c2.c1.l1, self.l1)
self.assertIsNot(c2.c1.l1.x, self.l1.x)
self.assertIsNot(c2.c1.l1.x.data, self.l1.x.data)
self.assertFalse(numpy.array_equal(c2.c1.l1.x.data, self.l1.x.data))
# _grad_initializer attribute in a copied Parameter has constant.NaN
        # after calling initialize() method
self.assertTrue(numpy.isnan(c2.c1.l1.x.grad).all())
self.assertTrue(hasattr(c2.c1, 'l2'))
self.assertEqual(c2.c1.l2.name, 'l2')
self.assertIsNot(c2.c1.l2, self.l2)
self.assertIsNot(c2.c1.l2.x, self.l2.x)
self.assertIsNot(c2.c1.l2.x.data, self.l2.x.data)
self.assertFalse(numpy.array_equal(c2.c1.l2.x.data, self.l2.x.data))
# _grad_initializer attribute in a copied Parameter has constant.NaN
        # after calling initialize() method
self.assertTrue(numpy.isnan(c2.c1.l2.x.grad).all())
self.assertTrue(hasattr(c2, 'l3'))
self.assertEqual(c2.l3.name, 'l3')
self.assertIsNot(c2.l3, self.l3)
self.assertIsNot(c2.l3.x, self.l3.x)
self.assertIs(c2.l3.x.data, self.l3.x.data)
        # A Parameter constructed without a shape argument is uninitialized,
        # so its grad is None
self.assertIs(c2.l3.x.grad, None)
def test_to_cpu_on_cpu(self):
x1 = self.l1.x.data
gx1 = self.l1.x.grad
x2 = self.l2.x.data
gx2 = self.l2.x.grad
x3 = self.l3.x.data
gx3 = self.l3.x.grad
self.c2.to_cpu()
self.assertIs(self.l1.x.data, x1)
self.assertIs(self.l1.x.grad, gx1)
self.assertIs(self.l2.x.data, x2)
self.assertIs(self.l2.x.grad, gx2)
self.assertIs(self.l3.x.data, x3)
self.assertIs(self.l3.x.grad, gx3)
def set_count_parameters(self):
self.l1.x = CountParameter(self.l1.x)
self.l2.x = CountParameter(self.l2.x)
self.l3.x = CountParameter(self.l3.x)
@attr.gpu
def test_to_cpu(self):
self.set_count_parameters()
self.c2.to_gpu()
self.c2.to_cpu()
self.assertIs(self.c2.xp, numpy)
self.assertIs(self.c1.xp, numpy)
self.assertIs(self.l1.xp, numpy)
self.assertIs(self.l2.xp, numpy)
self.assertIs(self.l3.xp, numpy)
self.assertIsInstance(self.l1.x.data, numpy.ndarray)
self.assertIsInstance(self.l1.x.grad, numpy.ndarray)
self.assertIsInstance(self.l2.x.data, numpy.ndarray)
self.assertIsInstance(self.l2.x.grad, numpy.ndarray)
self.assertIsNone(self.l3.x.data)
self.assertIsNone(self.l3.x.grad)
self.assertEqual(self.l1.x.count_to_cpu, 1)
self.assertEqual(self.l1.x.count_to_gpu, 1)
self.assertEqual(self.l2.x.count_to_cpu, 1)
self.assertEqual(self.l2.x.count_to_gpu, 1)
self.assertEqual(self.l3.x.count_to_cpu, 1)
self.assertEqual(self.l3.x.count_to_gpu, 1)
self.l3.x.initialize(3)
self.assertIsInstance(self.l3.x.data, numpy.ndarray)
self.assertIsInstance(self.l3.x.grad, numpy.ndarray)
@attr.gpu
def test_to_gpu(self):
self.set_count_parameters()
cupy = cuda.cupy
self.c2.to_gpu()
self.assertIs(self.c2.xp, cupy)
self.assertIs(self.c1.xp, cupy)
self.assertIs(self.l1.xp, cupy)
self.assertIs(self.l2.xp, cupy)
self.assertIs(self.l3.xp, cupy)
self.assertIsInstance(self.l1.x.data, cupy.ndarray)
self.assertIsInstance(self.l1.x.grad, cupy.ndarray)
self.assertIsInstance(self.l2.x.data, cupy.ndarray)
self.assertIsInstance(self.l2.x.grad, cupy.ndarray)
self.assertIsNone(self.l3.x.data)
self.assertIsNone(self.l3.x.grad)
self.assertEqual(self.l1.x.count_to_gpu, 1)
self.assertEqual(self.l2.x.count_to_gpu, 1)
self.assertEqual(self.l3.x.count_to_gpu, 1)
self.l3.x.initialize(3)
self.assertIsInstance(self.l3.x.data, cupy.ndarray)
self.assertIsInstance(self.l3.x.grad, cupy.ndarray)
def test_params(self):
params = list(self.c2.params())
self.assertEqual({id(p) for p in params},
{id(self.l1.x), id(self.l2.x), id(self.l3.x)})
def test_params_skip_uninit(self):
params = list(self.c2.params(include_uninit=False))
self.assertEqual({id(p) for p in params},
{id(self.l1.x), id(self.l2.x)})
def test_namedparams(self):
namedparams = list(self.c2.namedparams())
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/c1/l1/x', id(self.l1.x)),
('/c1/l2/x', id(self.l2.x)),
('/l3/x', id(self.l3.x))})
def test_namedparams_skip_uninit(self):
namedparams = list(self.c2.namedparams(include_uninit=False))
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/c1/l1/x', id(self.l1.x)),
('/c1/l2/x', id(self.l2.x))})
def test_links(self):
links = list(self.c2.links())
self.assertEqual({id(l) for l in links},
{id(l) for l in [self.l1, self.l2, self.l3,
self.c1, self.c2]})
def test_links_skipself(self):
links = list(self.c2.links(skipself=True))
self.assertEqual({id(l) for l in links},
{id(l) for l in [self.l1, self.l2, self.l3, self.c1]})
def test_namedlinks(self):
namedlinks = list(self.c2.namedlinks())
self.assertEqual({(name, id(l)) for name, l in namedlinks},
{('/', id(self.c2)),
('/c1', id(self.c1)),
('/c1/l1', id(self.l1)),
('/c1/l2', id(self.l2)),
('/l3', id(self.l3))})
def test_namedlinks_skipself(self):
namedlinks = list(self.c2.namedlinks(skipself=True))
self.assertEqual({(name, id(l)) for name, l in namedlinks},
{('/c1', id(self.c1)),
('/c1/l1', id(self.l1)),
('/c1/l2', id(self.l2)),
('/l3', id(self.l3))})
def test_children(self):
children = list(self.c2.children())
self.assertEqual({id(c) for c in children}, {id(self.c1), id(self.l3)})
def test_copyparams(self):
l1 = chainer.Link()
with l1.init_scope():
l1.x = chainer.Parameter(shape=(2, 3))
l2 = chainer.Link()
with l2.init_scope():
l2.x = chainer.Parameter(shape=2)
l3 = chainer.Link()
with l3.init_scope():
l3.x = chainer.Parameter(shape=3)
c1 = chainer.Chain()
with c1.init_scope():
c1.l1 = l1
c1.l2 = l2
c2 = chainer.Chain()
with c2.init_scope():
c2.c1 = c1
c2.l3 = l3
l1.x.data.fill(0)
l2.x.data.fill(1)
l3.x.data.fill(2)
self.c2.copyparams(c2)
numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
def test_zerograds(self):
self.set_count_parameters()
with testing.assert_warns(DeprecationWarning):
self.c2.zerograds()
numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
self.assertEqual(self.l1.x.count_zerograd, 1)
self.assertEqual(self.l2.x.count_zerograd, 1)
self.assertEqual(self.l3.x.count_zerograd, 1)
self.l3.x.initialize(3)
numpy.testing.assert_array_equal(self.l3.x.grad, numpy.zeros(3))
def test_addgrads(self):
l1 = chainer.Link()
with l1.init_scope():
l1.x = chainer.Parameter(shape=(2, 3))
l2 = chainer.Link()
with l2.init_scope():
l2.x = chainer.Parameter(shape=2)
l3 = chainer.Link()
with l3.init_scope():
l3.x = chainer.Parameter(shape=3)
c1 = chainer.Chain()
with c1.init_scope():
c1.l1 = l1
c1.l2 = l2
c2 = chainer.Chain()
with c2.init_scope():
c2.c1 = c1
c2.l3 = l3
l1.x.grad.fill(1)
l2.x.grad.fill(2)
l3.x.grad.fill(3)
self.l1.x.grad.fill(-1)
self.l2.x.grad.fill(-2)
self.l3.cleargrads()
self.c2.addgrads(c2)
numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
numpy.testing.assert_array_equal(self.l3.x.grad, numpy.full(3, 3.))
def test_serialize(self):
mocks = {'l1': mock.MagicMock(), 'l2': mock.MagicMock()}
serializer = mock.MagicMock()
serializer.__getitem__.side_effect = lambda k: mocks[k]
self.c1.serialize(serializer)
self.assertEqual(serializer.call_count, 0)
self.assertEqual(serializer.__getitem__.call_count, 2)
serializer.__getitem__.assert_any_call('l1')
serializer.__getitem__.assert_any_call('l2')
mocks['l1'].assert_called_with('x', self.l1.x.data)
mocks['l2'].assert_called_with('x', self.l2.x.data)
def test_count_params(self):
assert self.c1.count_params() == 8
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.c2.count_params()
assert len(w) == 1
assert w[0].category is UserWarning
self.c2.l3.x.initialize((3,))
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.c2.count_params()
assert not w
class TestChainRepeat(unittest.TestCase):
def setUp(self):
class ChainForTest(chainer.Chain):
def __init__(self):
super(ChainForTest, self).__init__()
with self.init_scope():
self.link = chainer.Link()
def forward(self):
pass
self.chain = ChainForTest()
self.link = self.chain.link
with self.link.init_scope():
self.link.x = chainer.Parameter(
chainer.initializers.Normal(), shape=(2, 3))
def test_no_repeat(self):
ret = self.chain.repeat(0)
self.assertEqual(len(ret), 0)
def test_repeat_with_share_mode(self):
ret = self.chain.repeat(2, mode='share')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chain)
self.assertIsNot(ret[1], self.chain)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].link, self.chain.link)
self.assertIsNot(ret[1].link, self.chain.link)
self.assertIsNot(ret[0].link, ret[1].link)
self.assertIsNot(ret[0].link.x, self.chain.link.x)
self.assertIsNot(ret[1].link.x, self.chain.link.x)
self.assertIsNot(ret[0].link.x, ret[1].link.x)
self.assertIs(ret[0].link.x.data, self.chain.link.x.data)
self.assertIs(ret[0].link.x.data, ret[1].link.x.data)
self.assertEqual(ret[0].link.x.shape, self.chain.link.x.shape)
self.assertEqual(ret[0].link.x.shape, ret[1].link.x.shape)
self.assertEqual(ret[0].link.x.dtype, self.chain.link.x.dtype)
self.assertEqual(ret[0].link.x.dtype, ret[1].link.x.dtype)
def test_repeat_with_copy_mode(self):
ret = self.chain.repeat(2, mode='copy')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chain)
self.assertIsNot(ret[1], self.chain)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].link, self.chain.link)
self.assertIsNot(ret[1].link, self.chain.link)
self.assertIsNot(ret[0].link, ret[1].link)
self.assertIsNot(ret[0].link.x, self.link.x)
self.assertIsNot(ret[1].link.x, self.link.x)
self.assertIsNot(ret[0].link.x, ret[1].link.x)
self.assertIsNot(ret[0].link.x.data, self.chain.link.x.data)
self.assertIsNot(ret[1].link.x.data, self.chain.link.x.data)
self.assertIsNot(ret[0].link.x.data, ret[1].link.x.data)
self.assertTrue(numpy.array_equal(
ret[0].link.x.data, self.chain.link.x.data))
self.assertTrue(numpy.array_equal(
ret[0].link.x.data, ret[1].link.x.data))
self.assertEqual(ret[0].link.x.shape, self.chain.link.x.shape)
self.assertEqual(ret[0].link.x.shape, ret[1].link.x.shape)
self.assertEqual(ret[0].link.x.dtype, self.chain.link.x.dtype)
self.assertEqual(ret[0].link.x.dtype, ret[1].link.x.dtype)
def test_repeat_with_init_mode(self):
ret = self.chain.repeat(2, mode='init')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chain)
self.assertIsNot(ret[1], self.chain)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0].link, self.chain.link)
self.assertIsNot(ret[1].link, self.chain.link)
self.assertIsNot(ret[0].link.x, ret[1].link.x)
self.assertIsNot(ret[0].link.x.data, self.chain.link.x.data)
self.assertIsNot(ret[1].link.x.data, self.chain.link.x.data)
self.assertIsNot(ret[0].link.x.data, ret[1].link.x.data)
self.assertFalse(numpy.array_equal(
ret[0].link.x.data, self.chain.link.x.data))
self.assertFalse(numpy.array_equal(
ret[1].link.x.data, self.chain.link.x.data))
self.assertFalse(numpy.array_equal(
ret[0].link.x.data, ret[1].link.x.data))
self.assertEqual(ret[0].link.x.shape, self.chain.link.x.shape)
self.assertEqual(ret[0].link.x.shape, ret[1].link.x.shape)
self.assertEqual(ret[0].link.x.dtype, self.chain.link.x.dtype)
self.assertEqual(ret[0].link.x.dtype, ret[1].link.x.dtype)
class TestChainList(unittest.TestCase):
def setUp(self):
self.l1 = chainer.Link()
with self.l1.init_scope():
self.l1.x = chainer.Parameter(shape=(2, 3))
self.l1.y = chainer.Parameter()
self.l2 = chainer.Link()
with self.l2.init_scope():
self.l2.x = chainer.Parameter(shape=2)
self.l3 = chainer.Link()
with self.l3.init_scope():
self.l3.x = chainer.Parameter(shape=3)
self.l4 = chainer.Link()
self.l5 = chainer.Link()
self.l6 = chainer.Link()
self.c1 = chainer.ChainList(self.l1)
self.c1.add_link(self.l2)
self.c2 = chainer.ChainList(self.c1)
self.c2.append(self.l3)
self.c3 = chainer.ChainList(self.l4)
def test_init(self):
self.assertIs(self.c1[0], self.l1)
self.assertEqual(self.l1.name, '0')
self.assertIs(self.c2[0], self.c1)
self.assertEqual(self.c1.name, '0')
def test_add_link(self):
self.assertIs(self.c1[1], self.l2)
self.assertEqual(self.l2.name, '1')
def test_append(self):
self.assertIs(self.c2[1], self.l3)
self.assertEqual(self.l3.name, '1')
def test_setitem(self):
self.c1[1] = self.l3
self.assertEqual(self.l3.name, '1')
def test_setitem_slice(self):
self.c1.append(self.l3) # l1 l2 l3
self.c1[3:0:-1] = [self.l4, self.l5] # l1 l5 l4
self.assertEqual(len(self.c1), 3)
self.assertEqual(self.l1.name, '0')
self.assertEqual(self.l4.name, '2')
self.assertEqual(self.l5.name, '1')
def test_setitem_slice_short(self):
self.c1.append(self.l3) # l1 l2 l3
self.c1[1:3] = [self.l4] # l1 l4
self.assertEqual(len(self.c1), 2)
self.assertEqual(self.l1.name, '0')
self.assertEqual(self.l4.name, '1')
def test_setitem_slice_long(self):
self.c1.append(self.l3) # l1 l2 l3
self.c1[1:3] = [self.l4, self.l5, self.l6] # l1 l4 l5 l6
self.assertEqual(len(self.c1), 4)
self.assertEqual(self.l1.name, '0')
self.assertEqual(self.l4.name, '1')
self.assertEqual(self.l5.name, '2')
self.assertEqual(self.l6.name, '3')
def test_iadd(self):
self.c2 += self.c3
        self.assertEqual(len(self.c2), 3)
self.assertEqual(self.l4.name, '2')
def test_delete_item(self):
del self.c2[0]
self.assertEqual(len(self.c2), 1)
self.assertEqual(self.l3.name, '0')
def test_assign_param_in_init_scope(self):
p = chainer.Parameter()
with self.c1.init_scope():
self.c1.p = p
self.assertIn(p, self.c1.params())
def test_assign_link_in_init_scope(self):
l = chainer.Link()
with self.c1.init_scope():
with self.assertRaises(TypeError):
self.c1.l = l
def test_iter(self):
links = list(self.c2)
self.assertEqual(2, len(links))
self.assertIs(links[0], self.c1)
self.assertIs(links[1], self.l3)
def test_len(self):
self.assertEqual(len(self.c1), 2)
self.assertEqual(len(self.c2), 2)
def test_copy_with_share_mode(self):
c2 = self.c2.copy(mode='share')
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, list)
self.assertIsNot(c2[0], self.c1)
self.assertEqual(c2[0].name, '0')
self.assertIsInstance(c2[0]._children, list)
self.assertIsNot(c2[0][0], self.l1)
self.assertEqual(c2[0][0].name, '0')
self.assertIsNot(c2[0][0].x, self.l1.x)
self.assertIs(c2[0][0].x.data, self.l1.x.data)
self.assertIs(c2[0][0].x.grad, None)
self.assertIsNot(c2[0][1], self.l2)
self.assertEqual(c2[0][1].name, '1')
self.assertIsNot(c2[0][1].x, self.l2.x)
self.assertIs(c2[0][1].x.data, self.l2.x.data)
self.assertIs(c2[0][1].x.grad, None)
self.assertIsNot(c2[1], self.l3)
self.assertEqual(c2[1].name, '1')
self.assertIsNot(c2[1].x, self.l3.x)
self.assertIs(c2[1].x.data, self.l3.x.data)
self.assertIs(c2[1].x.grad, None)
def test_copy_with_copy_mode(self):
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
c2 = self.c2.copy(mode='copy')
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, list)
self.assertEqual(c2[0].name, '0')
self.assertIsInstance(c2[0]._children, list)
self.assertIsNot(c2[0][0], self.l1)
self.assertEqual(c2[0][0].name, '0')
self.assertIsNot(c2[0][0].x, self.l1.x)
self.assertIsNot(c2[0][0].x.data, self.l1.x.data)
self.assertTrue(numpy.array_equal(c2[0][0].x.data, self.l1.x.data))
self.assertIs(c2[0][0].x.grad, None)
self.assertIsNot(c2[0][1], self.l2)
self.assertEqual(c2[0][1].name, '1')
self.assertIsNot(c2[0][1].x, self.l2.x)
self.assertIsNot(c2[0][1].x.data, self.l2.x.data)
self.assertTrue(numpy.array_equal(c2[0][1].x.data, self.l2.x.data))
self.assertIs(c2[0][1].x.grad, None)
self.assertIsNot(c2[1], self.l3)
self.assertEqual(c2[1].name, '1')
self.assertIsNot(c2[1].x, self.l3.x)
self.assertIsNot(c2[1].x.data, self.l3.x.data)
# l3 is constructed with shape argument but not initialized
self.assertTrue(numpy.isnan(c2[1].x.grad).all())
def test_copy_with_init_mode(self):
self.l1.x.initializer = initializers.Normal(
dtype=self.l1.x.initializer.dtype)
self.l1.x.initialize(self.l1.x.shape)
self.l2.x.initializer = initializers.Normal(
dtype=self.l2.x.initializer.dtype)
self.l2.x.initialize(self.l2.x.shape)
c2 = self.c2.copy(mode='init')
self.assertIs(c2.name, None)
self.assertIsInstance(c2._children, list)
self.assertEqual(c2[0].name, '0')
self.assertIsInstance(c2[0]._children, list)
self.assertIsNot(c2[0][0], self.l1)
self.assertEqual(c2[0][0].name, '0')
self.assertIsNot(c2[0][0].x, self.l1.x)
self.assertIsNot(c2[0][0].x.data, self.l1.x.data)
self.assertFalse(numpy.array_equal(c2[0][0].x.data, self.l1.x.data))
# _grad_initializer attribute in a copied Parameter has constant.NaN
        # after calling initialize() method
self.assertTrue(numpy.isnan(c2[0][0].x.grad).all())
self.assertIsNot(c2[0][1], self.l2)
self.assertEqual(c2[0][1].name, '1')
self.assertIsNot(c2[0][1].x, self.l2.x)
self.assertIsNot(c2[0][1].x.data, self.l2.x.data)
self.assertFalse(numpy.array_equal(c2[0][1].x.data, self.l2.x.data))
# _grad_initializer attribute in a copied Parameter has constant.NaN
        # after calling initialize() method
self.assertTrue(numpy.isnan(c2[0][1].x.grad).all())
self.assertIsNot(c2[1], self.l3)
self.assertEqual(c2[1].name, '1')
self.assertIsNot(c2[1].x, self.l3.x)
self.assertTrue(numpy.isnan(c2[1].x.data).all())
self.assertTrue(numpy.isnan(c2[1].x.grad).all())
@attr.gpu
def test_copy_and_send_to_gpu(self):
c2 = self.c2.copy()
self.c2.to_gpu()
self.assertIsInstance(self.c2[0][0].x.data, cuda.cupy.ndarray)
self.assertIsInstance(self.c2[0][1].x.data, cuda.cupy.ndarray)
self.assertIsInstance(c2[0][0].x.data, numpy.ndarray)
self.assertIsInstance(c2[0][1].x.data, numpy.ndarray)
@attr.gpu
def test_copy_and_send_to_gpu_2(self):
c2 = self.c2.copy()
c2.to_gpu()
self.assertIsInstance(self.c2[0][0].x.data, numpy.ndarray)
self.assertIsInstance(self.c2[0][1].x.data, numpy.ndarray)
self.assertIsInstance(c2[0][0].x.data, cuda.cupy.ndarray)
self.assertIsInstance(c2[0][1].x.data, cuda.cupy.ndarray)
@attr.multi_gpu(2)
def test_copy_and_send_to_gpu_multi(self):
c2 = self.c2.copy()
self.c2.to_gpu(0)
c2.to_gpu(1)
self.assertEqual(self.c2[0][0].x.data.device.id, 0)
self.assertEqual(self.c2[0][1].x.data.device.id, 0)
self.assertEqual(c2[0][0].x.data.device.id, 1)
self.assertEqual(c2[0][1].x.data.device.id, 1)
def test_to_cpu_on_cpu(self):
x1 = self.l1.x.data
gx1 = self.l1.x.grad
x2 = self.l2.x.data
gx2 = self.l2.x.grad
x3 = self.l3.x.data
gx3 = self.l3.x.grad
self.c2.to_cpu()
self.assertIs(self.l1.x.data, x1)
self.assertIs(self.l1.x.grad, gx1)
self.assertIs(self.l2.x.data, x2)
self.assertIs(self.l2.x.grad, gx2)
self.assertIs(self.l3.x.data, x3)
self.assertIs(self.l3.x.grad, gx3)
@attr.gpu
def test_to_cpu(self):
self.c2.to_gpu()
self.c2.to_cpu()
self.assertIs(self.c2.xp, numpy)
self.assertIs(self.c1.xp, numpy)
self.assertIs(self.l1.xp, numpy)
self.assertIs(self.l2.xp, numpy)
self.assertIs(self.l3.xp, numpy)
self.assertIsInstance(self.l1.x.data, numpy.ndarray)
self.assertIsInstance(self.l1.x.grad, numpy.ndarray)
self.assertIsInstance(self.l2.x.data, numpy.ndarray)
self.assertIsInstance(self.l2.x.grad, numpy.ndarray)
self.assertIsInstance(self.l3.x.data, numpy.ndarray)
self.assertIsInstance(self.l3.x.grad, numpy.ndarray)
@attr.gpu
def test_to_gpu(self):
cupy = cuda.cupy
self.c2.to_gpu()
self.assertIs(self.c2.xp, cupy)
self.assertIs(self.c1.xp, cupy)
self.assertIs(self.l1.xp, cupy)
self.assertIs(self.l2.xp, cupy)
self.assertIs(self.l3.xp, cupy)
self.assertIsInstance(self.l1.x.data, cupy.ndarray)
self.assertIsInstance(self.l1.x.grad, cupy.ndarray)
self.assertIsInstance(self.l2.x.data, cupy.ndarray)
self.assertIsInstance(self.l2.x.grad, cupy.ndarray)
self.assertIsInstance(self.l3.x.data, cupy.ndarray)
self.assertIsInstance(self.l3.x.grad, cupy.ndarray)
def test_params(self):
params = list(self.c2.params())
self.assertEqual({id(p) for p in params},
{id(self.l1.x), id(self.l1.y),
id(self.l2.x), id(self.l3.x)})
def test_params_skip_uninit(self):
params = list(self.c2.params(include_uninit=False))
self.assertEqual({id(p) for p in params},
{id(self.l1.x), id(self.l2.x), id(self.l3.x)})
def test_namedparams(self):
namedparams = list(self.c2.namedparams())
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/0/0/x', id(self.l1.x)),
('/0/0/y', id(self.l1.y)),
('/0/1/x', id(self.l2.x)),
('/1/x', id(self.l3.x))})
def test_namedparams_skip_uninit(self):
namedparams = list(self.c2.namedparams(include_uninit=False))
self.assertEqual({(name, id(p)) for name, p in namedparams},
{('/0/0/x', id(self.l1.x)),
('/0/1/x', id(self.l2.x)),
('/1/x', id(self.l3.x))})
def test_links(self):
links = list(self.c2.links())
self.assertEqual({id(l) for l in links},
{id(l) for l in [self.l1, self.l2, self.l3,
self.c1, self.c2]})
def test_links_skipself(self):
links = list(self.c2.links(skipself=True))
self.assertEqual({id(l) for l in links},
{id(l) for l in [self.l1, self.l2, self.l3, self.c1]})
def test_namedlinks(self):
namedlinks = list(self.c2.namedlinks())
self.assertEqual({(name, id(l)) for name, l in namedlinks},
{('/', id(self.c2)),
('/0', id(self.c1)),
('/0/0', id(self.l1)),
('/0/1', id(self.l2)),
('/1', id(self.l3))})
def test_namedlinks_skipself(self):
namedlinks = list(self.c2.namedlinks(skipself=True))
self.assertEqual({(name, id(l)) for name, l in namedlinks},
{('/0', id(self.c1)),
('/0/0', id(self.l1)),
('/0/1', id(self.l2)),
('/1', id(self.l3))})
def test_children(self):
self.assertEqual(tuple(id(c) for c in self.c2.children()),
(id(self.c1), id(self.l3)))
self.assertEqual(tuple(id(c) for c in self.c1.children()),
(id(self.l1), id(self.l2)))
def test_copyparams(self):
l1 = chainer.Link()
with l1.init_scope():
l1.x = chainer.Parameter(shape=(2, 3))
l1.y = chainer.Parameter()
l2 = chainer.Link()
with l2.init_scope():
l2.x = chainer.Parameter(shape=2)
l3 = chainer.Link()
with l3.init_scope():
l3.x = chainer.Parameter(shape=3)
c1 = chainer.ChainList(l1, l2)
c2 = chainer.ChainList(c1, l3)
l1.x.data.fill(0)
l2.x.data.fill(1)
l3.x.data.fill(2)
self.c2.copyparams(c2)
numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
def test_zerograds(self):
with testing.assert_warns(DeprecationWarning):
self.c2.zerograds()
numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
numpy.testing.assert_array_equal(self.l3.x.grad, numpy.zeros(3))
self.l1.y.initialize((2, 3))
numpy.testing.assert_array_equal(self.l1.y.grad, numpy.zeros((2, 3)))
def test_cleargrads(self):
self.c2.cleargrads()
self.assertIsNone(self.l1.x.grad)
self.assertIsNone(self.l2.x.grad)
self.assertIsNone(self.l3.x.grad)
self.l1.y.initialize((2, 3))
self.assertIsNone(self.l1.y.grad)
def test_addgrads(self):
l1 = chainer.Link()
with l1.init_scope():
l1.x = chainer.Parameter(shape=(2, 3))
l1.y = chainer.Parameter(shape=(2, 3))
l2 = chainer.Link()
with l2.init_scope():
l2.x = chainer.Parameter(shape=2)
l3 = chainer.Link()
with l3.init_scope():
l3.x = chainer.Parameter(shape=3)
c1 = chainer.ChainList(l1, l2)
c2 = chainer.ChainList(c1, l3)
l1.x.grad.fill(1)
l2.x.grad.fill(2)
l3.x.grad.fill(3)
l1.y.grad.fill(4)
self.l1.x.grad.fill(-1)
self.l1.y.cleargrad()
self.l2.x.grad.fill(-2)
self.l3.x.grad.fill(-3)
self.c2.addgrads(c2)
numpy.testing.assert_array_equal(self.l1.x.grad, numpy.zeros((2, 3)))
numpy.testing.assert_array_equal(self.l1.y.grad, l1.y.grad)
numpy.testing.assert_array_equal(self.l2.x.grad, numpy.zeros(2))
numpy.testing.assert_array_equal(self.l3.x.grad, numpy.zeros(3))
def test_serialize(self):
l1 = chainer.Link()
with l1.init_scope():
l1.y = chainer.Parameter(shape=(1, 1))
l2 = chainer.Link()
with l2.init_scope():
l2.x = chainer.Parameter(0, 2)
c1 = chainer.ChainList(l1, l2)
mocks = {'0': mock.MagicMock(), '1': mock.MagicMock()}
serializer = mock.MagicMock()
serializer.__getitem__.side_effect = lambda k: mocks[k]
serializer.return_value = None
mocks['0'].return_value = None
mocks['1'].return_value = None
c1.serialize(serializer)
self.assertEqual(serializer.call_count, 0)
self.assertEqual(serializer.__getitem__.call_count, 2)
serializer.__getitem__.assert_any_call('0')
serializer.__getitem__.assert_any_call('1')
mocks['0'].assert_called_with('y', l1.y.data)
mocks['1'].assert_called_with('x', l2.x.data)
def test_count_params(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert self.c1.count_params() == 8
assert len(w) == 1
assert w[0].category is UserWarning
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.c2.count_params()
assert len(w) == 1
assert w[0].category is UserWarning
self.c2[0][0].y.initialize((2, 3))
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.c2.count_params()
assert not w
class TestChainListRepeat(unittest.TestCase):
def setUp(self):
class ChainListForTest(chainer.ChainList):
def __init__(self):
super(ChainListForTest, self).__init__(chainer.Link())
def forward(self):
pass
self.chainlist = ChainListForTest()
self.link = self.chainlist[0]
with self.link.init_scope():
self.link.x = chainer.Parameter(
chainer.initializers.Normal(), shape=(2, 3))
def test_no_repeat(self):
ret = self.chainlist.repeat(0)
self.assertEqual(len(ret), 0)
def test_repeat_with_share_mode(self):
ret = self.chainlist.repeat(2, mode='share')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chainlist)
self.assertIsNot(ret[1], self.chainlist)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0][0], self.chainlist[0])
self.assertIsNot(ret[1][0], self.chainlist[0])
self.assertIsNot(ret[0][0], ret[1][0])
self.assertIsNot(ret[0][0].x, self.chainlist[0].x)
self.assertIsNot(ret[1][0].x, self.chainlist[0].x)
self.assertIsNot(ret[0][0].x, ret[1][0].x)
self.assertIs(ret[0][0].x.data, self.chainlist[0].x.data)
self.assertIs(ret[0][0].x.data, ret[1][0].x.data)
self.assertEqual(ret[0][0].x.shape, self.chainlist[0].x.shape)
self.assertEqual(ret[0][0].x.shape, ret[1][0].x.shape)
self.assertEqual(ret[0][0].x.dtype, self.chainlist[0].x.dtype)
self.assertEqual(ret[0][0].x.dtype, ret[1][0].x.dtype)
def test_repeat_with_copy_mode(self):
ret = self.chainlist.repeat(2, mode='copy')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chainlist)
self.assertIsNot(ret[1], self.chainlist)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0][0], self.chainlist[0])
self.assertIsNot(ret[1][0], self.chainlist[0])
self.assertIsNot(ret[0][0], ret[1][0])
self.assertIsNot(ret[0][0].x, self.chainlist[0].x)
self.assertIsNot(ret[1][0].x, self.chainlist[0].x)
self.assertIsNot(ret[0][0].x, ret[1][0].x)
self.assertIsNot(ret[0][0].x.data, self.chainlist[0].x.data)
self.assertIsNot(ret[1][0].x.data, self.chainlist[0].x.data)
self.assertIsNot(ret[0][0].x.data, ret[1][0].x.data)
self.assertTrue(numpy.array_equal(
ret[0][0].x.data, self.chainlist[0].x.data))
self.assertTrue(numpy.array_equal(
ret[0][0].x.data, ret[1][0].x.data))
self.assertEqual(ret[0][0].x.shape, self.chainlist[0].x.shape)
self.assertEqual(ret[0][0].x.shape, ret[1][0].x.shape)
self.assertEqual(ret[0][0].x.dtype, self.chainlist[0].x.dtype)
self.assertEqual(ret[0][0].x.dtype, ret[1][0].x.dtype)
def test_repeat_with_init_mode(self):
ret = self.chainlist.repeat(2, mode='init')
self.assertEqual(len(ret), 2)
self.assertIsNot(ret[0], self.chainlist)
self.assertIsNot(ret[1], self.chainlist)
self.assertIsNot(ret[0], ret[1])
self.assertIsNot(ret[0][0], self.chainlist[0])
self.assertIsNot(ret[1][0], self.chainlist[0])
self.assertIsNot(ret[0][0], ret[1][0])
self.assertIsNot(ret[0][0].x, self.chainlist[0].x)
self.assertIsNot(ret[1][0].x, self.chainlist[0].x)
self.assertIsNot(ret[0][0].x, ret[1][0].x)
self.assertIsNot(ret[0][0].x.data, self.chainlist[0].x.data)
self.assertIsNot(ret[1][0].x.data, self.chainlist[0].x.data)
self.assertIsNot(ret[0][0].x.data, ret[1][0].x.data)
self.assertFalse(numpy.array_equal(
ret[0][0].x.data, self.chainlist[0].x.data))
self.assertFalse(numpy.array_equal(
ret[1][0].x.data, self.chainlist[0].x.data))
self.assertFalse(numpy.array_equal(
ret[0][0].x.data, ret[1][0].x.data))
self.assertEqual(ret[0][0].x.shape, self.chainlist[0].x.shape)
self.assertEqual(ret[0][0].x.shape, ret[1][0].x.shape)
self.assertEqual(ret[0][0].x.dtype, self.chainlist[0].x.dtype)
self.assertEqual(ret[0][0].x.dtype, ret[1][0].x.dtype)
@attr.ideep
class TestIntel64(unittest.TestCase):
def setUp(self):
self.link = chainer.Link()
shape = (2, 2)
dtype = numpy.float32
y_array = numpy.random.rand(*shape).astype(dtype)
pa_array = numpy.random.rand(*shape).astype(dtype)
ps_scalar = 2.4
with self.link.init_scope():
# Initialized parameter
self.link.y = chainer.Parameter(y_array)
# Uninitialized parameter
self.link.v = chainer.Parameter()
# Persistent ndarray
self.link.add_persistent('pa', pa_array)
# Persistent scalar
self.link.add_persistent('ps', ps_scalar)
self.y_array = y_array
self.pa_array = pa_array
self.ps_scalar = ps_scalar
def _assert_variable_array_equal(self, var, expected_array):
assert var.shape == expected_array.shape
assert var.dtype == expected_array.dtype
self._assert_arrays_equal(var.data, expected_array)
def _assert_arrays_equal(self, array, expected_array):
if isinstance(array, cuda.ndarray):
array = array.get()
assert array.shape == expected_array.shape
assert array.dtype == expected_array.dtype
assert (array == expected_array).all()
def test_cpu_to_intel64(self):
link = self.link
link.to_intel64()
assert link._device_id is None
# Arrays should be converted to ideep.mdarray
# Initialized parameter
assert isinstance(link.y.data, intel64.ideep.mdarray)
self._assert_variable_array_equal(link.y, self.y_array)
# Uninitialized parameter
assert link.v.data is None
# Persistent ndarray
assert isinstance(link.pa, intel64.ideep.mdarray)
self._assert_arrays_equal(link.pa, self.pa_array)
# Persistent scalar
assert link.ps == self.ps_scalar
def test_intel64_to_intel64(self):
link = self.link
link.to_intel64()
prev_y = link.y
prev_v = link.v
prev_pa = link.pa
prev_ps = link.ps
link.to_intel64()
assert link._device_id is None
# Everything should be left untouched
# Initialized parameter
assert link.y is prev_y
# Uninitialized parameter
assert link.v is prev_v
# Persistent ndarray
assert link.pa is prev_pa
# Persistent scalar
assert link.ps is prev_ps
@attr.gpu
def test_gpu_to_intel64(self):
link = self.link
link.to_gpu()
assert link._device_id == 0
link.to_intel64()
assert link._device_id is None
# Arrays should be converted to ideep.mdarray
# Initialized parameter
assert isinstance(link.y.data, intel64.ideep.mdarray)
self._assert_variable_array_equal(link.y, self.y_array)
# Uninitialized parameter
assert link.v.data is None
# Persistent ndarray
assert isinstance(link.pa, intel64.ideep.mdarray)
self._assert_arrays_equal(link.pa, self.pa_array)
# Persistent scalar
assert link.ps == self.ps_scalar
@attr.gpu
def test_intel64_to_gpu(self):
link = self.link
link.to_intel64()
assert link._device_id is None
link.to_gpu()
assert link._device_id == 0
# Arrays should be converted to cupy.ndarray
# Initialized parameter
assert isinstance(link.y.data, cuda.cupy.ndarray)
self._assert_variable_array_equal(link.y, self.y_array)
# Uninitialized parameter
assert link.v.data is None
# Persistent ndarray
assert isinstance(link.pa, cuda.ndarray)
self._assert_arrays_equal(link.pa, self.pa_array)
# Persistent scalar
assert link.ps == self.ps_scalar
def test_intel64_to_cpu(self):
link = self.link
link.to_intel64()
assert link._device_id is None
link.to_cpu()
assert link._device_id is None
# Arrays should be converted to numpy.ndarray
# Initialized parameter
assert isinstance(link.y.data, numpy.ndarray)
self._assert_variable_array_equal(link.y, self.y_array)
# Uninitialized parameter
assert link.v.data is None
# Persistent ndarray
assert isinstance(link.pa, numpy.ndarray)
self._assert_arrays_equal(link.pa, self.pa_array)
# Persistent scalar
assert link.ps == self.ps_scalar
def test_cpu_to_intel64_unsupported(self):
# Test for persistents that cannot be transferred to iDeep.
with self.link.init_scope():
self.link.no_ideep = numpy.ones((2, 2, 2), numpy.float32)
self.link.register_persistent('no_ideep')
self.link.to_intel64()
assert isinstance(self.link.no_ideep, numpy.ndarray)
@attr.gpu
def test_gpu_to_intel64_unsupported(self):
# Test for persistents that cannot be transferred to iDeep.
with self.link.init_scope():
self.link.no_ideep = cuda.cupy.ones((2, 2, 2), numpy.float32)
self.link.register_persistent('no_ideep')
self.link.to_intel64()
assert isinstance(self.link.no_ideep, numpy.ndarray)
class TestCallMethod(unittest.TestCase):
def setUp(self):
class Model(chainer.Chain):
def __init__(self):
super(Model, self).__init__()
self.model = Model()
def test_has_forward_no_call(self):
self.model.forward = mock.MagicMock()
self.model(0) # model.forward is called
self.model.forward.assert_called_once()
def test_has_call_and_forward(self):
self.model.__call__ = mock.MagicMock()
self.model.forward = mock.MagicMock()
self.model(0) # Link.__call__ is called
self.model.forward.assert_called_with(0)
self.model.__call__.assert_not_called()
def test_has_call_no_forward(self):
class Model(chainer.Chain):
def __init__(self):
super(Model, self).__init__()
self.mock = mock.MagicMock()
def __call__(self, x):
self.mock(x)
model = Model()
model(0) # model.__call__ is called
model.mock.assert_called_with(0)
def test_no_call_no_forward(self):
with self.assertRaises(AttributeError):
self.model(0)
testing.run_module(__name__, __file__)
| ronekko/chainer | tests/chainer_tests/test_link.py | Python | mit | 71,956 | 0.00025 |
from setuptools import setup, find_packages
setup(
name='hk_glazer',
version='0.0.8',
description='Convert compatible JSON configs to DeBAM/DeTIM config.dat files',
url='https://github.com/fmuzf/python_hk_glazer',
author='Lyman Gillispie',
author_email='lyman.gillispie@gmail.com',
packages=find_packages(),
scripts=['bin/hk_glazer'],
license='MIT',
long_description=open('README.md').read(),
    install_requires=['argparse'],
test_suite='nose.collector',
tests_require=['nose'],
    include_package_data=True
)
| fmuzf/python_hk_glazer | setup.py | Python | mit | 566 | 0.008834 |
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlFontTtf(PerlPackage):
"""Perl module for TrueType Font hacking"""
homepage = "http://search.cpan.org/~bhallissy/Font-TTF-1.06/lib/Font/TTF.pm"
url = "http://search.cpan.org/CPAN/authors/id/B/BH/BHALLISSY/Font-TTF-1.06.tar.gz"
version('1.06', '241b59310ad4450e6e050d5e790f1b21')
| TheTimmy/spack | var/spack/repos/builtin/packages/perl-font-ttf/package.py | Python | lgpl-2.1 | 1,567 | 0.001914 |
# vim: set fileencoding=utf-8 :
#
# (C) 2014 Guido Günther <agx@sigxcpu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
"""Format a message"""
from gbp.errors import GbpError
def format_str(msg, args):
"""
Format a string with the given dict. Be a bit more verbose than
default python about the error cause.
>>> format_str("%(foo)", {})
Traceback (most recent call last):
...
gbp.errors.GbpError: Failed to format %(foo): Missing value 'foo' in {}
>>> format_str("%(foo)", {'foo': 'bar'})
Traceback (most recent call last):
...
gbp.errors.GbpError: Failed to format %(foo) with {'foo': 'bar'}: incomplete format
>>> format_str("A %(foo)s is a %(bar)s", {'foo': 'dog', 'bar': 'mamal'})
'A dog is a mamal'
"""
try:
return msg % args
except ValueError as e:
raise GbpError("Failed to format %s with %s: %s" % (msg, args, e))
except KeyError as e:
raise GbpError("Failed to format %s: Missing value %s in %s" %
(msg, e, args))
def format_b(fmtstr, *args):
"""String-like interpolation for bytes objects.
NOTE: This is a compatibility wrapper for older versions (<3.5) of Python 3
which do not support the percent operator ('%') for bytes objects. This
function should be removed (and replaced by simple '%') when Python 3.5
has gained wide enough adoption.
>>> format_b(b'%s %d', b'foo', 123)
b'foo 123'
>>> format_b(b'foo 123')
b'foo 123'
>>> format_b('%s %d', b'foo', 123)
Traceback (most recent call last):
...
AttributeError: 'str' object has no attribute 'decode'
"""
fmtstr = fmtstr.decode()
strargs = tuple([(a.decode() if isinstance(a, bytes) else a) for a in args])
return (fmtstr % strargs).encode()
| agx/git-buildpackage | gbp/format.py | Python | gpl-2.0 | 2,429 | 0.000824 |
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from datetime import timedelta
### <summary>
### This algorithm is a regression test for issue #2018 and PR #2038.
### </summary>
class OptionDataNullReferenceRegressionAlgorithm(QCAlgorithm):
def Initialize(self):
self.SetStartDate(2016, 12, 1)
self.SetEndDate(2017, 1, 1)
self.SetCash(500000)
self.AddEquity("DUST")
option = self.AddOption("DUST")
option.SetFilter(self.UniverseFunc)
def UniverseFunc(self, universe):
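        # A sketch of what this filter selects, per the LEAN option-filter API:
        # weekly contracts within one strike of the at-the-money price that
        # expire between 25 and 100 days from now.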
return universe.IncludeWeeklys().Strikes(-1, +1).Expiration(timedelta(25), timedelta(100))
| AnshulYADAV007/Lean | Algorithm.Python/OptionDataNullReferenceRegressionAlgorithm.py | Python | apache-2.0 | 1,485 | 0.007417 |
#!/usr/bin/python3.2
# -*- coding: utf-8 -*-
"""Fichier contenant la classe generale d'objet algébriques contenant + * et eventuellement /"""
class arith(object):
"""Classe generique contenant les methodes redondantes"""
def __ne__(self,autre):
"""Definition de !="""
return not(self == autre)
def __radd__(self,autre):
"""Addition dans l'autre sens"""
return self + autre
def __iadd__(self,autre):
"""Methode de +="""
return self + autre
def __rmul__(self,autre):
"""Multiplication dans l'autre sens"""
return self * autre
def __imul__(self,autre):
"""Methode de *="""
return self * autre
def __sub__(self,autre):
"""Methode de soustraction"""
return self + (-1 * autre)
def __rsub__(self,autre):
"""Methode de soustraction dans l'autre sens"""
return autre +(-1 * self)
def __neg__(self):
"""Methode de passage a l'opposé"""
return -1 * self | kalaspa/mc-eliece | src/arith.py | Python | gpl-3.0 | 891 | 0.042745 |
# coding: utf-8
from common import base
class Plugin(base.BASE):
__name__ = 'csdn'
__title__ = 'CSDN'
__url__ = 'http://www.csdn.net/'
def register(self, target):
self.information = {
'email': {
'url': 'http://passport.csdn.net/account/register',
'method': 'get',
'settings': {
'params': {
'action': 'validateEmail',
'email': target
}
},
'result': {
'type': 'str',
'value': 'false'
}
}
}
| tonybreak/Registered | plugins/csdn.py | Python | gpl-3.0 | 675 | 0.001481 |
from awards.forms import AwardForm
from awards.models import JudgeAllowance
from awards.models import Award
from challenges.decorators import judge_required
from challenges.models import Submission
from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.views.decorators.http import require_POST
from tower import ugettext as _
@judge_required
@require_POST
def award(request, submission_id, project=None, slug=None):
"""Awards an ammount to a gren-lit ``Submission`` by a Judge"""
try:
submission = (Submission.objects
.select_related('phase')
.get(id=submission_id, phase__challenge__slug=slug,
phase__challenge__project__slug=project,
is_winner=True, is_draft=False))
except Submission.DoesNotExist:
raise Http404
judge_data = {
'judge': request.user.get_profile(),
'award__phase': submission.phase,
'award__status': Award.RELEASED,
}
if submission.phase_round:
judge_data.update({'award__phase_round': submission.phase_round})
try:
judge_allowance = JudgeAllowance.objects.get(**judge_data)
except JudgeAllowance.DoesNotExist:
raise Http404
form = AwardForm(request.POST)
if form.is_valid():
is_allocated = judge_allowance.allocate(form.cleaned_data['amount'],
submission)
if form.cleaned_data['amount'] == 0:
submission_award = (judge_allowance.submissionaward_set
.filter(submission=submission))
if submission_award:
submission_award.delete()
message = _("You have successfuly removed the award from this"
" submission")
messages.success(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
if is_allocated:
message = _("You have successfuly awarded this Entry")
messages.success(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
if form.errors:
message = _("Please enter a valid amount for the award")
else:
message = _("You don't have enough funding for award this submission")
messages.error(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
| mozilla/mozilla-ignite | apps/awards/views.py | Python | bsd-3-clause | 2,471 | 0 |
#!/usr/bin/python
# Copyright (c) 2016, Pierre Jodouin <pjodouin@virtualcomputing.solutions>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: lambda_policy
short_description: Creates, updates or deletes AWS Lambda policy statements.
description:
- This module allows the management of AWS Lambda policy statements.
It is idempotent and supports "Check" mode. Use module M(lambda) to manage the lambda
function itself, M(lambda_alias) to manage function aliases, M(lambda_event) to manage event source mappings
such as Kinesis streams, M(lambda_invoke) to execute a lambda function and M(lambda_facts) to gather facts
relating to one or more lambda functions.
version_added: "2.4"
author:
- Pierre Jodouin (@pjodouin)
- Michael De La Rue (@mikedlr)
options:
function_name:
description:
- "Name of the Lambda function whose resource policy you are updating by adding a new permission."
- "You can specify a function name (for example, Thumbnail ) or you can specify Amazon Resource Name (ARN) of the"
- "function (for example, arn:aws:lambda:us-west-2:account-id:function:ThumbNail ). AWS Lambda also allows you to"
- "specify partial ARN (for example, account-id:Thumbnail ). Note that the length constraint applies only to the"
- "ARN. If you specify only the function name, it is limited to 64 character in length."
required: true
aliases: ['lambda_function_arn', 'function_arn']
state:
description:
- Describes the desired state.
required: true
default: "present"
choices: ["present", "absent"]
alias:
description:
- Name of the function alias. Mutually exclusive with C(version).
version:
description:
- Version of the Lambda function. Mutually exclusive with C(alias).
statement_id:
description:
- A unique statement identifier.
required: true
aliases: ['sid']
action:
description:
- "The AWS Lambda action you want to allow in this statement. Each Lambda action is a string starting with
lambda: followed by the API name (see Operations ). For example, lambda:CreateFunction . You can use wildcard
(lambda:* ) to grant permission for all AWS Lambda actions."
required: true
principal:
description:
- "The principal who is getting this permission. It can be Amazon S3 service Principal (s3.amazonaws.com ) if
you want Amazon S3 to invoke the function, an AWS account ID if you are granting cross-account permission, or
any valid AWS service principal such as sns.amazonaws.com . For example, you might want to allow a custom
application in another AWS account to push events to AWS Lambda by invoking your function."
required: true
source_arn:
description:
- This is optional; however, when granting Amazon S3 permission to invoke your function, you should specify this
field with the bucket Amazon Resource Name (ARN) as its value. This ensures that only events generated from
the specified bucket can invoke the function.
source_account:
description:
- The AWS account ID (without a hyphen) of the source owner. For example, if the SourceArn identifies a bucket,
then this is the bucket owner's account ID. You can use this additional condition to ensure the bucket you
specify is owned by a specific account (it is possible the bucket owner deleted the bucket and some other AWS
account created the bucket). You can also use this condition to specify all sources (that is, you don't
specify the SourceArn ) owned by a specific account.
event_source_token:
description:
- Token string representing source ARN or account. Mutually exclusive with C(source_arn) or C(source_account).
requirements:
- boto3
extends_documentation_fragment:
- aws
'''
EXAMPLES = '''
---
- hosts: localhost
gather_facts: no
vars:
state: present
tasks:
- name: Lambda S3 event notification
lambda_policy:
state: "{{ state | default('present') }}"
function_name: functionName
alias: Dev
statement_id: lambda-s3-myBucket-create-data-log
action: lambda:InvokeFunction
principal: s3.amazonaws.com
source_arn: arn:aws:s3:eu-central-1:123456789012:bucketName
source_account: 123456789012
- name: show results
debug: var=lambda_policy_action
'''
RETURN = '''
---
lambda_policy_action:
description: describes what action was taken
returned: success
type: string
'''
import json
import re
from ansible.module_utils._text import to_native
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn
try:
from botocore.exceptions import ClientError
except ImportError:
    pass  # will be protected by AnsibleAWSModule
def pc(key):
"""
Changes python key into Pascal case equivalent. For example, 'this_function_name' becomes 'ThisFunctionName'.
:param key:
:return:
"""
return "".join([token.capitalize() for token in key.split('_')])
def policy_equal(module, current_statement):
for param in ('action', 'principal', 'source_arn', 'source_account', 'event_source_token'):
if module.params.get(param) != current_statement.get(param):
return False
return True
def set_api_params(module, module_params):
"""
Sets module parameters to those expected by the boto3 API.
:param module:
:param module_params:
:return:
"""
api_params = dict()
for param in module_params:
module_param = module.params.get(param)
if module_param is not None:
api_params[pc(param)] = module_param
return api_params
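# Illustration (hypothetical values): with module.params containing
# {'function_name': 'myFunc', 'action': 'lambda:InvokeFunction',
#  'source_arn': None},
#
#   set_api_params(module, ('function_name', 'action', 'source_arn'))
#   => {'FunctionName': 'myFunc', 'Action': 'lambda:InvokeFunction'}
#
# None-valued parameters are dropped, so only options the user actually
# supplied are forwarded to the AWS API.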
def validate_params(module):
"""
Performs parameter validation beyond the module framework's validation.
:param module:
:return:
"""
function_name = module.params['function_name']
# validate function name
    if not function_name.startswith('arn:'):
        if not re.search(r'^[\w\-]+$', function_name):
            module.fail_json(
                msg='Function name {0} is invalid. Names must contain only alphanumeric characters and hyphens.'.format(
                    function_name)
            )
        if len(function_name) > 64:
            module.fail_json(
                msg='Function name "{0}" exceeds 64 character limit'.format(function_name))
    else:
        if not re.search(r'^[\w\-:]+$', function_name):
            module.fail_json(
                msg='ARN {0} is invalid. ARNs must contain only alphanumeric characters, hyphens and colons.'.format(function_name)
            )
        if len(function_name) > 140:
            module.fail_json(msg='ARN name "{0}" exceeds 140 character limit'.format(function_name))
def get_qualifier(module):
"""
Returns the function qualifier as a version or alias or None.
:param module:
:return:
"""
if module.params.get('version') is not None:
return to_native(module.params['version'])
elif module.params['alias']:
return to_native(module.params['alias'])
return None
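# For example (values hypothetical): with params {'version': 3, 'alias': None}
# this returns '3'; with {'version': None, 'alias': 'Dev'} it returns 'Dev';
# with neither set it returns None and callers omit the Qualifier API argument.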
def extract_statement(policy, sid):
"""return flattened single policy statement from a policy
    If a policy statement is present in the policy, extract it and
return it in a flattened form. Otherwise return an empty
dictionary.
"""
if 'Statement' not in policy:
return {}
policy_statement = {}
    # Now that we have the policy, check if the required permission statement is
    # present and flatten it to a simple dictionary if found.
for statement in policy['Statement']:
if statement['Sid'] == sid:
policy_statement['action'] = statement['Action']
policy_statement['principal'] = statement['Principal']['Service']
try:
policy_statement['source_arn'] = statement['Condition']['ArnLike']['AWS:SourceArn']
except KeyError:
pass
try:
policy_statement['source_account'] = statement['Condition']['StringEquals']['AWS:SourceAccount']
except KeyError:
pass
try:
policy_statement['event_source_token'] = statement['Condition']['StringEquals']['lambda:EventSourceToken']
except KeyError:
pass
break
return policy_statement
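# Illustration (abbreviated, hypothetical policy of the shape get_policy()
# returns after JSON decoding):
#
#   policy = {'Statement': [{'Sid': 'sid1',
#                            'Action': 'lambda:InvokeFunction',
#                            'Principal': {'Service': 's3.amazonaws.com'},
#                            'Condition': {'ArnLike': {
#                                'AWS:SourceArn': 'arn:aws:s3:::my-bucket'}}}]}
#   extract_statement(policy, 'sid1')
#   => {'action': 'lambda:InvokeFunction', 'principal': 's3.amazonaws.com',
#       'source_arn': 'arn:aws:s3:::my-bucket'}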
def get_policy_statement(module, client):
"""Checks that policy exists and if so, that statement ID is present or absent.
:param module:
:param client:
:return:
"""
policy = dict()
sid = module.params['statement_id']
# set API parameters
api_params = set_api_params(module, ('function_name', ))
qualifier = get_qualifier(module)
if qualifier:
api_params.update(Qualifier=qualifier)
policy_results = None
# check if function policy exists
try:
policy_results = client.get_policy(**api_params)
except ClientError as e:
try:
if e.response['Error']['Code'] == 'ResourceNotFoundException':
return {}
except AttributeError: # catches ClientErrors without response, e.g. fail before connect
pass
module.fail_json_aws(e, msg="retrieving function policy")
except Exception as e:
module.fail_json_aws(e, msg="retrieving function policy")
# get_policy returns a JSON string so must convert to dict before reassigning to its key
policy = json.loads(policy_results.get('Policy', '{}'))
return extract_statement(policy, sid)
def add_policy_permission(module, client):
"""
Adds a permission statement to the policy.
:param module:
    :param client:
:return:
"""
changed = False
# set API parameters
params = (
'function_name',
'statement_id',
'action',
'principal',
'source_arn',
'source_account',
'event_source_token')
api_params = set_api_params(module, params)
qualifier = get_qualifier(module)
if qualifier:
api_params.update(Qualifier=qualifier)
if not module.check_mode:
try:
client.add_permission(**api_params)
except Exception as e:
module.fail_json_aws(e, msg="adding permission to policy")
changed = True
return changed
def remove_policy_permission(module, client):
"""
    Removes a permission statement from the policy.
    :param module:
    :param client:
:return:
"""
changed = False
# set API parameters
api_params = set_api_params(module, ('function_name', 'statement_id'))
qualifier = get_qualifier(module)
if qualifier:
api_params.update(Qualifier=qualifier)
try:
if not module.check_mode:
client.remove_permission(**api_params)
changed = True
except Exception as e:
module.fail_json_aws(e, msg="removing permission from policy")
return changed
def manage_state(module, lambda_client):
changed = False
current_state = 'absent'
state = module.params['state']
action_taken = 'none'
# check if the policy exists
current_policy_statement = get_policy_statement(module, lambda_client)
if current_policy_statement:
current_state = 'present'
if state == 'present':
if current_state == 'present' and not policy_equal(module, current_policy_statement):
remove_policy_permission(module, lambda_client)
changed = add_policy_permission(module, lambda_client)
action_taken = 'updated'
if not current_state == 'present':
changed = add_policy_permission(module, lambda_client)
action_taken = 'added'
elif current_state == 'present':
# remove the policy statement
changed = remove_policy_permission(module, lambda_client)
action_taken = 'deleted'
return dict(changed=changed, ansible_facts=dict(lambda_policy_action=action_taken))
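# Illustrative return value (not executed): when a missing statement is added,
# manage_state() returns
#   {'changed': True, 'ansible_facts': {'lambda_policy_action': 'added'}}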
def setup_client(module):
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if region:
connection = boto3_conn(module, conn_type='client', resource='lambda', region=region, endpoint=ec2_url, **aws_connect_params)
else:
module.fail_json(msg="region must be specified")
return connection
def setup_module_object():
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent']),
function_name=dict(required=True, aliases=['lambda_function_arn', 'function_arn']),
statement_id=dict(required=True, aliases=['sid']),
alias=dict(),
version=dict(type='int'),
action=dict(required=True, ),
principal=dict(required=True, ),
source_arn=dict(),
source_account=dict(),
event_source_token=dict(),
)
return AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[['alias', 'version'],
['event_source_token', 'source_arn'],
['event_source_token', 'source_account']],
)
def main():
"""
Main entry point.
:return dict: ansible facts
"""
module = setup_module_object()
client = setup_client(module)
validate_params(module)
results = manage_state(module, client)
module.exit_json(**results)
if __name__ == '__main__':
main()
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/lambda_policy.py | Python | bsd-3-clause | 13,776 | 0.00363 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
SpatialDecision
A QGIS plugin
This is a SDSS template for the GEO1005 course
-------------------
begin : 2015-11-02
git sha : $Format:%H$
copyright : (C) 2015 by Jorge Gil, TU Delft
email : j.a.lopesgil@tudelft.nl
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4 import QtGui, QtCore
from qgis.core import *
from qgis.networkanalysis import *
from pyspatialite import dbapi2 as sqlite
import psycopg2 as pgsql
import numpy as np
import math
import os.path
try:
import networkx as nx
has_networkx = True
except ImportError:
has_networkx = False
#
# Layer functions
#
def getLegendLayers(iface, geom='all', provider='all'):
"""
Return list of layer objects in the legend, with specific geometry type and/or data provider
:param iface: QgsInterface
    :param geom: 'all' or an iterable of QGis geometry type codes (0=point, 1=line, 2=polygon)
    :param provider: 'all' or an iterable of data provider names (e.g. 'ogr', 'postgres')
:return: list QgsVectorLayer
"""
layers_list = []
for layer in iface.legendInterface().layers():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def getCanvasLayers(iface, geom='all', provider='all'):
"""Return list of valid QgsVectorLayer in QgsMapCanvas, with specific geometry type and/or data provider"""
layers_list = []
for layer in iface.mapCanvas().layers():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def getRegistryLayers(geom='all', provider='all'):
"""Return list of valid QgsVectorLayer in QgsMapLayerRegistry, with specific geometry type and/or data provider"""
layers_list = []
for layer in QgsMapLayerRegistry.instance().mapLayers().values():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def isLayerProjected(layer):
projected = False
if layer:
projected = not layer.crs().geographicFlag()
return projected
def getLegendLayerByName(iface, name):
layer = None
for i in iface.legendInterface().layers():
if i.name() == name:
layer = i
return layer
def getCanvasLayerByName(iface, name):
layer = None
for i in iface.mapCanvas().layers():
if i.name() == name:
layer = i
return layer
def getLayersListNames(layerslist):
layer_names = [layer.name() for layer in layerslist]
return layer_names
def getLayerPath(layer):
path = ''
provider = layer.dataProvider()
provider_type = provider.name()
if provider_type == 'spatialite':
uri = QgsDataSourceURI(provider.dataSourceUri())
path = uri.database()
elif provider_type == 'ogr':
uri = provider.dataSourceUri()
path = os.path.dirname(uri)
return path
def reloadLayer(layer):
layer_name = layer.name()
layer_provider = layer.dataProvider().name()
new_layer = None
if layer_provider in ('spatialite','postgres'):
uri = QgsDataSourceURI(layer.dataProvider().dataSourceUri())
new_layer = QgsVectorLayer(uri.uri(), layer_name, layer_provider)
elif layer_provider == 'ogr':
uri = layer.dataProvider().dataSourceUri()
new_layer = QgsVectorLayer(uri.split("|")[0], layer_name, layer_provider)
QgsMapLayerRegistry.instance().removeMapLayer(layer.id())
if new_layer:
QgsMapLayerRegistry.instance().addMapLayer(new_layer)
return new_layer
#
# Field functions
#
def fieldExists(layer, name):
    return name in getFieldNames(layer)
def getFieldNames(layer):
field_names = []
if layer and layer.dataProvider():
field_names = [field.name() for field in layer.dataProvider().fields()]
return field_names
def getNumericFields(layer, type='all'):
fields = []
if type == 'all':
types = (QtCore.QVariant.Int, QtCore.QVariant.LongLong, QtCore.QVariant.Double,
QtCore.QVariant.UInt, QtCore.QVariant.ULongLong)
else:
        types = (type, )
if layer and layer.dataProvider():
for field in layer.dataProvider().fields():
if field.type() in types:
fields.append(field)
return fields
def getNumericFieldNames(layer, type='all'):
field_names = []
if type == 'all':
types = (QtCore.QVariant.Int, QtCore.QVariant.LongLong, QtCore.QVariant.Double,
QtCore.QVariant.UInt, QtCore.QVariant.ULongLong)
else:
        types = (type, )
if layer and layer.dataProvider():
for field in layer.dataProvider().fields():
if field.type() in types:
field_names.append(field.name())
return field_names
def getFieldIndex(layer, name):
idx = layer.dataProvider().fields().indexFromName(name)
return idx
def fieldHasValues(layer, name):
    if layer and fieldExists(layer, name):
        # find fields that only have NULL values
        idx = getFieldIndex(layer, name)
        maxval = layer.maximumValue(idx)
        minval = layer.minimumValue(idx)
        if maxval == NULL and minval == NULL:
            return False
        else:
            return True
    return False
def fieldHasNullValues(layer, name):
if layer and fieldExists(layer, name):
idx = getFieldIndex(layer, name)
vals = layer.uniqueValues(idx,1)
# depending on the provider list is empty or has NULL value in first position
if not vals or (len(vals) == 1 and vals[0] == NULL):
return True
else:
return False
def getFieldValues(layer, fieldname, null=True, selection=False):
attributes = []
ids = []
if fieldExists(layer, fieldname):
if selection:
features = layer.selectedFeatures()
else:
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, fieldname)])
features = layer.getFeatures(request)
if null:
for feature in features:
attributes.append(feature.attribute(fieldname))
ids.append(feature.id())
else:
for feature in features:
val = feature.attribute(fieldname)
if val != NULL:
attributes.append(val)
ids.append(feature.id())
return attributes, ids
def addFields(layer, names, types):
# types can be QVariant.Int, QVariant.Double, QVariant.String
res = False
if layer:
provider = layer.dataProvider()
caps = provider.capabilities()
if caps & QgsVectorDataProvider.AddAttributes:
fields = provider.fields()
for i, name in enumerate(names):
#add new field if it doesn't exist
if fields.indexFromName(name) == -1:
res = provider.addAttributes([QgsField(name, types[i])])
#apply changes if any made
if res:
layer.updateFields()
return res
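# Illustrative example (not executed; field names are hypothetical):
#   addFields(layer, ['label', 'score'],
#             [QtCore.QVariant.String, QtCore.QVariant.Double])
# appends the two fields to the layer if its provider supports AddAttributes.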
def updateField(layer, name, expression):
res = False
if layer:
provider = layer.dataProvider()
caps = provider.capabilities()
        if caps & QgsVectorDataProvider.ChangeAttributeValues:
#field = layer.fieldNameIndex(name)
calc = QgsExpression(expression)
layer.startEditing()
for feature in layer.getFeatures():
value = calc.evaluate(feature)
feature[name] = value
layer.updateFeature(feature)
#layer.changeAttributeValue(feature.id(), field, value)
layer.commitChanges()
res = True
return res
#
# Feature functions
#
def getFeaturesByListValues(layer, name, values=list):
features = {}
if layer:
if fieldExists(layer, name):
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, name)])
iterator = layer.getFeatures(request)
for feature in iterator:
att = feature.attribute(name)
if att in values:
features[feature.id()] = att
return features
def selectFeaturesByListValues(layer, name, values=list):
features = []
if layer:
if fieldExists(layer, name):
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, name)])
iterator = layer.getFeatures(request)
for feature in iterator:
att = feature.attribute(name)
if att in values:
features.append(feature.id())
layer.select(features)
def getFeaturesByRangeValues(layer, name, min, max):
features = {}
if layer:
if fieldExists(layer, name):
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, name)])
iterator = layer.getFeatures(request)
for feature in iterator:
att = feature.attribute(name)
if min <= att <= max:
features[feature.id()] = att
return features
def selectFeaturesByRangeValues(layer, name, min, max):
features = []
if layer:
if fieldExists(layer, name):
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, name)])
iterator = layer.getFeatures(request)
for feature in iterator:
att = feature.attribute(name)
if min <= att <= max:
features.append(feature.id())
layer.select(features)
def getFeaturesByExpression(layer, expression):
features = {}
if layer:
request = QgsFeatureRequest().setFilterExpression(expression)
iterator = layer.getFeatures(request)
for feature in iterator:
features[feature.id()] = feature.attributes()
return features
def selectFeaturesByExpression(layer, expression):
features = []
if layer:
request = QgsFeatureRequest().setFilterExpression(expression)
iterator = layer.getFeatures(request)
for feature in iterator:
features.append(feature.id())
layer.select(features)
def filterFeaturesByExpression(layer, expression):
success = False
if layer:
try:
success = layer.setSubsetString(expression)
except:
success = False
return success
def getAllFeatures(layer):
allfeatures = {}
if layer:
features = layer.getFeatures()
allfeatures = {feature.id(): feature.attributes() for feature in features}
return allfeatures
def getAllFeatureIds(layer):
ids = []
if layer:
features = layer.getFeatures()
ids = [feature.id() for feature in features]
return ids
def getAllFeatureValues(layer, name):
values = []
if layer:
features = layer.getFeatures()
values = [feature.attribute(name) for feature in features]
return values
def getAllFeatureSymbols(layer):
    symbols = {}
    if layer:
        renderer = layer.rendererV2()
        features = layer.getFeatures()
        for feature in features:
            symb = renderer.symbolsForFeature(feature)
            if len(symb) > 0:
                symbols[feature.id()] = symb[0].color()
            else:
                symbols[feature.id()] = QtGui.QColor(200, 200, 200, 255)
    return symbols
def getAllFeatureData(layer):
    data = {}
    symbols = {}
    if layer:
        renderer = layer.rendererV2()
        features = layer.getFeatures()
        for feature in features:
            data[feature.id()] = feature.attributes()
            symb = renderer.symbolsForFeature(feature)
            if len(symb) > 0:
                symbols[feature.id()] = symb[0].color()
            else:
                symbols[feature.id()] = QtGui.QColor(200, 200, 200, 255)
    return data, symbols
def getFeaturesByIntersection(base_layer, intersect_layer, crosses):
features = []
# retrieve objects to be intersected (list comprehension, more pythonic)
intersect_geom = [QgsGeometry(feat.geometry()) for feat in intersect_layer.getFeatures()]
# retrieve base layer objects
base = base_layer.getFeatures()
# should improve with spatial index for large data sets
#index = createIndex(base_layer)
# loop through base features and intersecting elements
# appends if intersecting, when crosses = True
# does the opposite if crosses = False
for feat in base:
append = not crosses
base_geom = feat.geometry()
for intersect in intersect_geom:
if base_geom.intersects(intersect):
append = crosses
break
if append:
features.append(feat)
return features
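# Illustrative example (not executed; layer names are hypothetical): features
# of a buildings layer that touch any flood-zone polygon:
#   hits = getFeaturesByIntersection(buildings_layer, flood_layer, crosses=True)
# passing crosses=False returns the non-intersecting features instead.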
def selectFeaturesByIntersection(base_layer, intersect_layer, crosses):
features = []
# retrieve objects to be intersected (list comprehension, more pythonic)
obstacles_geom = [QgsGeometry(feat.geometry()) for feat in intersect_layer.getFeatures()]
# retrieve base layer objects
base = base_layer.getFeatures()
# loop through base features and intersecting elements
for feat in base:
append = not crosses
base_geom = QgsGeometry(feat.geometry())
for obst in obstacles_geom:
if base_geom.intersects(obst):
append = crosses
break
if append:
features.append(feat.id())
base_layer.select(features)
def getFeaturesIntersections(base_layer, intersect_layer):
intersections = []
# retrieve objects to be intersected (list comprehension, more pythonic)
obstacles_geom = [QgsGeometry(feat.geometry()) for feat in intersect_layer.getFeatures()]
# retrieve base layer objects
base = base_layer.getFeatures()
# loop through base features and intersecting elements
for feat in base:
base_geom = QgsGeometry(feat.geometry())
for obst in obstacles_geom:
if base_geom.intersects(obst):
intersections.append(base_geom.intersection(obst))
return intersections
#
# Canvas functions
#
# Display a message in the QGIS canvas
def showMessage(iface, msg, type='Info', lev=1, dur=2):
iface.messageBar().pushMessage(type,msg,level=lev,duration=dur)
def updateRenderer(layer, attribute, settings):
"""
Creates a renderer for the layer based on this, and applies it
The renderer uses GradientColourRamp to calculate the symbol colours
@param layer: the selected QgsVectorLayer object
"""
geometry = layer.geometryType()
# create a colour ramp based on colour range type, inverting symbols if required
ramp = settings['ramp']
line_width = float(settings['line_width'])
# calculate ranges: EqualInterval = 0; Quantile = 1; Jenks = 2; StdDev = 3; Pretty = 4; Custom = 5
intervals = int(settings['intervals'])
interval_type = int(settings['interval_type'])
renderer = None
# set symbol type and line width
symbol = QgsSymbolV2.defaultSymbol(geometry)
if symbol:
if symbol.type() == 1: # line
symbol.setWidth(line_width)
        elif symbol.type() == 2:  # fill (polygon)
symbol = QgsFillSymbolV2.createSimple({'style': 'solid', 'color': 'black', 'width_border': '%s' % line_width})
elif symbol.type() == 0: # point
symbol.setSize(line_width)
renderer = QgsGraduatedSymbolRendererV2.createRenderer(layer, attribute, intervals, interval_type, symbol, ramp)
renderer.setMode(interval_type)
renderer.setSourceColorRamp(ramp)
return renderer
#
# Network functions
#
def makeUndirectedGraph(network_layer, points=list):
graph = None
tied_points = []
if network_layer:
director = QgsLineVectorLayerDirector(network_layer, -1, '', '', '', 3)
properter = QgsDistanceArcProperter()
director.addProperter(properter)
builder = QgsGraphBuilder(network_layer.crs())
pstart=points[0]
pend=points[1]
tied_points = director.makeGraph(builder, [pstart,pend])
graph = builder.graph()
return graph, tied_points
def makeDirectedGraph(network_layer, points=list, direction_field=-1, one_way='', reverse_way='', two_way='', default_direction=3):
graph = None
tied_points = []
if network_layer:
director = QgsLineVectorLayerDirector(network_layer, direction_field, one_way, reverse_way, two_way, default_direction)
properter = QgsDistanceArcProperter()
director.addProperter(properter)
builder = QgsGraphBuilder(network_layer.crs())
tied_points = director.makeGraph(builder, points)
graph = builder.graph()
return graph, tied_points
def calculateRouteTree(graph, tied_points, origin, destination, impedance=0):
points = []
if tied_points:
try:
from_point = tied_points[origin]
to_point = tied_points[destination]
except:
return points
# analyse graph
if graph:
form_id = graph.findVertex(from_point)
tree = QgsGraphAnalyzer.shortestTree(graph, form_id, impedance)
form_id = tree.findVertex(from_point)
to_id = tree.findVertex(to_point)
# iterate to get all points in route
if to_id == -1:
pass
else:
while form_id != to_id:
l = tree.vertex(to_id).inArc()
if not l:
break
e = tree.arc(l[0])
points.insert(0, tree.vertex(e.inVertex()).point())
to_id = e.outVertex()
points.insert(0, from_point)
return points
def calculateRouteDijkstra(graph, tied_points, origin, destination, impedance=0):
points = []
if tied_points:
try:
from_point = tied_points[origin]
to_point = tied_points[destination]
except:
return points
# analyse graph
if graph:
from_id = graph.findVertex(from_point)
to_id = graph.findVertex(to_point)
(tree, cost) = QgsGraphAnalyzer.dijkstra(graph, from_id, impedance)
if tree[to_id] == -1:
pass
else:
curPos = to_id
while curPos != from_id:
points.append(graph.vertex(graph.arc(tree[curPos]).inVertex()).point())
curPos = graph.arc(tree[curPos]).outVertex()
points.append(from_point)
points.reverse()
            # draw the computed route as a red rubber band on the map canvas
            from qgis.utils import iface
            rb = QgsRubberBand(iface.mapCanvas())
            rb.setColor(QtCore.Qt.red)
            for pnt in points:
                rb.addPoint(pnt)
return points
def calculateServiceArea(graph, tied_points, origin, cutoff, impedance=0):
points = {}
if tied_points:
try:
from_point = tied_points[origin]
except:
return points
# analyse graph
if graph:
from_id = graph.findVertex(from_point)
(tree, cost) = QgsGraphAnalyzer.dijkstra(graph, from_id, impedance)
i = 0
while i < len(cost):
if cost[i] <= cutoff and tree[i] != -1:
                    points[str(i)] = (graph.vertex(i).point(), cost[i])
i += 1
return points
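# Illustrative usage sketch (not executed; the layer and points are
# hypothetical) tying the network helpers together:
#   graph, tied = makeUndirectedGraph(road_layer, [start_point, end_point])
#   route = calculateRouteDijkstra(graph, tied, origin=0, destination=1)
#   reachable = calculateServiceArea(graph, tied, origin=0, cutoff=500.0)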
#
# General functions
#
def getLastDir(tool_name=''):
    settings = QtCore.QSettings(tool_name, "")
    path = str(settings.value("lastUsedDir", ""))
    return path
def setLastDir(filename, tool_name=''):
path = QtCore.QFileInfo(filename).absolutePath()
settings = QtCore.QSettings(tool_name,"")
settings.setValue("lastUsedDir", str(unicode(path)))
# check if a text string is of numeric type
def isNumeric(txt):
try:
int(txt)
return True
except ValueError:
try:
long(txt)
return True
except ValueError:
try:
float(txt)
return True
except ValueError:
return False
# convert a text string to a numeric value, if possible
def convertNumeric(txt):
try:
value = int(txt)
except ValueError:
try:
value = long(txt)
except ValueError:
try:
value = float(txt)
except ValueError:
value = ''
return value
def truncateNumber(num,digits=9):
if isNumeric(num):
truncated = str(num)
if '.' in truncated:
truncated = truncated[:digits]
truncated = truncated.rstrip('0').rstrip('.')
return convertNumeric(truncated)
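# Illustrative example (not executed): truncateNumber(3.14159265, digits=5)
# keeps the first five characters of '3.14159265' and returns 3.141.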
# Function to create a spatial index for QgsVectorDataProvider
def createIndex(layer):
provider = layer.dataProvider()
caps = provider.capabilities()
if caps & QgsVectorDataProvider.CreateSpatialIndex:
feat = QgsFeature()
index = QgsSpatialIndex()
fit = provider.getFeatures()
while fit.nextFeature(feat):
index.insertFeature(feat)
return index
else:
return None
#------------------------------
# Layer creation functions
#------------------------------
def createTempLayer(name, geometry, srid, attributes, types):
#geometry can be 'POINT', 'LINESTRING' or 'POLYGON' or the 'MULTI' version of the previous
vlayer = QgsVectorLayer('%s?crs=EPSG:%s'% (geometry, srid), name, "memory")
provider = vlayer.dataProvider()
#create the required fields
if attributes:
vlayer.startEditing()
fields = []
for i, att in enumerate(attributes):
fields.append(QgsField(att, types[i]))
# add the fields to the layer
try:
provider.addAttributes(fields)
except:
return None
vlayer.commitChanges()
return vlayer
def loadTempLayer(layer):
QgsMapLayerRegistry.instance().addMapLayer(layer)
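# Illustrative example (not executed; names are hypothetical):
#   vlayer = createTempLayer('results', 'POINT', '4326',
#                            ['label'], [QtCore.QVariant.String])
#   loadTempLayer(vlayer)  # adds the in-memory layer to the map registry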
def insertTempFeatures(layer, geometry, attributes):
provider = layer.dataProvider()
geometry_type = provider.geometryType()
for i, geom in enumerate(geometry):
fet = QgsFeature()
if geometry_type in (1, 4):
fet.setGeometry(QgsGeometry.fromPoint(geom))
elif geometry_type in (2, 5):
fet.setGeometry(QgsGeometry.fromPolyline(geom))
elif geometry_type in (3, 6):
fet.setGeometry(QgsGeometry.fromPolygon(geom))
if attributes:
fet.setAttributes(attributes[i])
provider.addFeatures([fet])
provider.updateExtents()
def insertTempFeaturesGeom(layer, geometry, attributes):
provider = layer.dataProvider()
for i, geom in enumerate(geometry):
fet = QgsFeature()
fet.setGeometry(geom)
if attributes:
fet.setAttributes(attributes[i])
provider.addFeatures([fet])
provider.updateExtents()
def createTempLayerFull(name, srid, attributes, types, values, coords):
# create an instance of a memory vector layer
type = ''
if len(coords) == 2: type = 'Point'
elif len(coords) == 4: type = 'LineString'
vlayer = QgsVectorLayer('%s?crs=EPSG:%s'% (type, srid), name, "memory")
provider = vlayer.dataProvider()
#create the required fields
fields = []
for i, name in enumerate(attributes):
fields.append(QgsField(name, types[i]))
# add the fields to the layer
vlayer.startEditing()
try:
provider.addAttributes(fields)
except:
return None
# add features by iterating the values
features = []
for i, val in enumerate(values):
feat = QgsFeature()
# add geometry
try:
if type == 'Point':
                feat.setGeometry(QgsGeometry.fromPoint(QgsPoint(float(val[coords[0]]), float(val[coords[1]]))))
elif type == 'LineString':
feat.setGeometry(QgsGeometry.fromPolyline([QgsPoint(float(val[coords[0]]),float(val[coords[1]])),
QgsPoint(float(val[coords[2]]),float(val[coords[3]]))]))
except:
pass
# add attribute values
feat.setAttributes(list(val))
features.append(feat)
# add the features to the layer
try:
provider.addFeatures(features)
except:
return None
vlayer.commitChanges()
vlayer.updateExtents()
if not vlayer.isValid():
print "Layer failed to create!"
return None
return vlayer
#---------------------------------------------
# Shape file specific functions
#---------------------------------------------
def testShapeFileExists(path, name):
filename = path+"/"+name+".shp"
exists = os.path.isfile(filename)
return exists
def copyLayerToShapeFile(layer, path, name):
#Get layer provider
provider = layer.dataProvider()
filename = path+"/"+name+".shp"
fields = provider.fields()
if layer.hasGeometryType():
geometry = layer.wkbType()
else:
geometry = None
srid = layer.crs()
# create an instance of vector file writer, which will create the vector file.
writer = QgsVectorFileWriter(filename, "CP1250", fields, geometry, srid, "ESRI Shapefile")
if writer.hasError() != QgsVectorFileWriter.NoError:
print "Error when creating shapefile: ", writer.hasError()
return None
# add features by iterating the values
for feat in layer.getFeatures():
writer.addFeature(feat)
# delete the writer to flush features to disk
del writer
# open the newly created file
vlayer = QgsVectorLayer(filename, name, "ogr")
if not vlayer.isValid():
print "Layer failed to load!"
return None
return vlayer
def createShapeFileLayer(path, name, srid, attributes, types, geometrytype):
# create new empty layer with given attributes
# todo: created table has no attributes. not used
# use createShapeFileFullLayer instead
filename = path+"/"+name+".shp"
#create the required fields
fields = QgsFields()
for i, attr in enumerate(attributes):
fields.append(QgsField(attr, types[i]))
# create an instance of vector file writer, which will create the vector file.
writer = None
if 'point' in geometrytype.lower():
writer = QgsVectorFileWriter(filename, "CP1250", fields, QGis.WKBPoint, srid, "ESRI Shapefile")
elif 'line' in geometrytype.lower():
writer = QgsVectorFileWriter(filename, "CP1250", fields, QGis.WKBLineString, srid, "ESRI Shapefile")
elif 'polygon' in geometrytype.lower():
writer = QgsVectorFileWriter(filename, "CP1250", fields, QGis.WKBPolygon, srid, "ESRI Shapefile")
if writer.hasError() != QgsVectorFileWriter.NoError:
print "Error when creating shapefile: ", writer.hasError()
return None
# delete the writer to flush features to disk (optional)
del writer
# open the newly created file
vlayer = QgsVectorLayer(filename, name, "ogr")
if not vlayer.isValid():
print "Layer failed to open!"
return None
return vlayer
def createShapeFileFullLayer(path, name, srid, attributes, types, values, coords):
# create new layer with given attributes and data, including geometry (point and lines only)
filename = path+"/"+name+".shp"
#create the required fields
fields = QgsFields()
for i, attr in enumerate(attributes):
fields.append(QgsField(attr, types[i]))
# create an instance of vector file writer, which will create the vector file.
writer = None
if len(coords) == 2:
type = 'point'
writer = QgsVectorFileWriter(filename, "CP1250", fields, QGis.WKBPoint, srid, "ESRI Shapefile")
elif len(coords) == 4:
type = 'line'
writer = QgsVectorFileWriter(filename, "CP1250", fields, QGis.WKBLineString, srid, "ESRI Shapefile")
if writer.hasError() != QgsVectorFileWriter.NoError:
print "Error when creating shapefile: ", writer.hasError()
return None
# add features by iterating the values
feat = QgsFeature()
for i, val in enumerate(values):
# add geometry
try:
if type == 'point':
                feat.setGeometry(QgsGeometry.fromPoint(QgsPoint(float(val[coords[0]]), float(val[coords[1]]))))
elif type == 'line':
feat.setGeometry(QgsGeometry.fromPolyline([QgsPoint(float(val[coords[0]]),float(val[coords[1]])),
QgsPoint(float(val[coords[2]]),float(val[coords[3]]))]))
except: pass
# add attributes
attrs = []
for j, attr in enumerate(attributes):
attrs.append(val[j])
feat.setAttributes(attrs)
writer.addFeature(feat)
# delete the writer to flush features to disk (optional)
del writer
# open the newly created file
vlayer = QgsVectorLayer(filename, name, "ogr")
if not vlayer.isValid():
print "Layer failed to load!"
return None
return vlayer
def addShapeFileAttributes(layer, attributes, types, values):
# add attributes to an existing layer
attributes_pos = dict()
res = False
if layer:
provider = layer.dataProvider()
caps = provider.capabilities()
res = False
if caps & QgsVectorDataProvider.AddAttributes:
fields = provider.fields()
count = fields.count()
for i, name in enumerate(attributes):
#add new field if it doesn't exist
if fields.indexFromName(name) == -1:
res = provider.addAttributes([QgsField(name, types[i])])
# keep position of attributes that are added, since name can change
attributes_pos[i] = count
count += 1
#apply changes if any made
if res:
layer.updateFields()
# update attribute values by iterating the layer's features
res = False
if caps & QgsVectorDataProvider.ChangeAttributeValues:
#fields = provider.fields() #the fields must be retrieved again after the updateFields() method
iter = layer.getFeatures()
for i, feature in enumerate(iter):
fid = feature.id()
#to update the features the attribute/value pairs must be converted to a dictionary for each feature
attrs = {}
for j in attributes_pos.iterkeys():
field_id = attributes_pos[j]
val = values[i][j]
attrs.update({field_id: val})
#update the layer with the corresponding dictionary
res = provider.changeAttributeValues({fid: attrs})
#apply changes if any made
if res:
layer.updateFields()
return res
| VagosAplas/GEO1005-Fire | SpatialDecision/utility_functions.py | Python | gpl-2.0 | 32,661 | 0.003827 |
"""Demo platform that has a couple of fake sensors."""
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from . import DOMAIN
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo sensors."""
async_add_entities(
[
DemoSensor(
"sensor_1",
"Outside Temperature",
15.6,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
12,
),
DemoSensor(
"sensor_2",
"Outside Humidity",
54,
DEVICE_CLASS_HUMIDITY,
PERCENTAGE,
None,
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSensor(Entity):
"""Representation of a Demo sensor."""
def __init__(
self, unique_id, name, state, device_class, unit_of_measurement, battery
):
"""Initialize the sensor."""
self._unique_id = unique_id
self._name = name
self._state = state
self._device_class = device_class
self._unit_of_measurement = unit_of_measurement
self._battery = battery
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def should_poll(self):
"""No polling needed for a demo sensor."""
return False
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._battery:
return {ATTR_BATTERY_LEVEL: self._battery}
| tchellomello/home-assistant | homeassistant/components/demo/sensor.py | Python | apache-2.0 | 2,682 | 0.001119 |
#
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pretty print logging."""
import logging
import pprint
from typing import Any
def log(level: int, x: Any) -> None:
if logging.getLogger(None).isEnabledFor(level):
for line in pprint.pformat(x).split('\n'):
logging.log(level, line)
def info(x: Any) -> None:
log(logging.INFO, x)
def debug(x: Any) -> None:
log(logging.DEBUG, x)
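# Illustrative usage (not executed):
#   import logging
#   logging.basicConfig(level=logging.DEBUG)
#   debug({'sections': ['.text', '.data']})  # logs one line per pformat() line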
| project-chip/connectedhomeip | scripts/tools/memory/memdf/util/pretty.py | Python | apache-2.0 | 957 | 0 |
from flask import render_template
from pyflipdot.plugins import get_pluginmanager
from pyflipdot.web.view import MenuFlaskView
__author__ = 'teddydestodes'
class AdminView(MenuFlaskView):
route_base = "admin"
menu_name = "Admin"
def index(self):
return render_template('base.html')
class PluginView(MenuFlaskView):
route_base = "plugins"
menu_name = "Plugins"
def index(self):
pm = get_pluginmanager()
return render_template('plugins.html', plugins=pm.get_plugin_index())
AdminView.plugins = PluginView
| TeddyDesTodes/pyflipdot | pyflipdot/web/admin/__init__.py | Python | bsd-3-clause | 559 | 0.003578 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import shutil
import sys
def main():
if len(sys.argv) < 3:
print("Syntax: PackETWSymbols ETWFilename.etl destdirname [-verbose]")
print("This script looks for symbols needed to decode the specified trace, and")
print("copies them to the specified directory. This allows moving traces to")
print("other machines for analysis and sharing.")
sys.exit(0)
ETLName = sys.argv[1]
DestDirName = sys.argv[2]
if not os.path.exists(DestDirName):
os.mkdir(DestDirName)
verbose = False
if len(sys.argv) > 3 and sys.argv[3].lower() == "-verbose":
verbose = True
print("Extracting symbols from ETL file '%s'." % ETLName)
# This command is slow but thorough -- it tries to build the symbol cache.
#command = "xperf.exe -i \"%s\" -tle -symbols -a symcache -quiet -build -imageid -dbgid" % ETLName
# This command is faster. It relies on symbols being loaded already for the modules of interest.
command = "xperf.exe -i \"%s\" -tle -a symcache -quiet -imageid -dbgid" % ETLName
print("Executing command '%s'" % command)
lines = os.popen(command).readlines()
if len(lines) < 30:
print("Error:")
for line in lines:
print(line, end='')
sys.exit(0)
# Typical output lines (including one heading) look like this:
#TimeDateStamp, ImageSize, OrigFileName, CodeView Record
# 0x4da89d03, 0x00bcb000, "client.dll", "[RSDS] PdbSig: {7b2a9028-87cd-448d-8500-1a18cdcf6166}; Age: 753; Pdb: u:\buildbot\dota_staging_win32\build\src\game\client\Release_dota\client.pdb"
scan = re.compile(r' 0x(.*), 0x(.*), "(.*)", "\[RSDS\].*; Pdb: (.*)"')
matchCount = 0
matchExists = 0
ourModuleCount = 0
# Get the users build directory
vgame = os.getenv("vgame")
    if vgame is None:
print("Environment variable 'vgame' not found!")
sys.exit(-1)
vgame = vgame[:-5].lower()
prefixes = ["u:\\", "e:\\build_slave", vgame]
print("Looking for symbols built to:")
for prefix in prefixes:
print(" %s" % prefix)
# Default to looking for the SymCache on the C drive
prefix = "c"
# Look for a drive letter in the ETL Name and use that if present
if len(ETLName) > 1 and ETLName[1] == ':':
prefix = ETLName[0]
else:
# If there's no drive letter in the ETL name then look for one
# in the current working directory.
curwd = os.getcwd()
if len(curwd) > 1 and curwd[1] == ':':
prefix = curwd[0]
    symCachePathBase = os.getenv("_NT_SYMCACHE_PATH")
    if not symCachePathBase:
        symCachePathBase = "%s:\\symcache\\" % prefix
    elif symCachePathBase[-1] != '\\':
        symCachePathBase += '\\'
for line in lines:
result = scan.match(line)
if result is not None:
#print result.groups()
matchCount += 1
TimeDateStamp = result.groups()[0]
ImageSize = result.groups()[1]
OrigFileName = result.groups()[2]
PDBPath = result.groups()[3].lower()
# Find out which PDBs are 'interesting'. There is no obvious heuristic
# for this, but having a list of prefixes seems like a good start.
            ours = False
            for prefix in prefixes:
                if PDBPath.startswith(prefix):
                    ours = True
                    break
            if ours:
                ourModuleCount += 1
symFilePath = OrigFileName + "-" + TimeDateStamp + ImageSize + "v1.symcache"
symCachePath = symCachePathBase + symFilePath
if os.path.isfile(symCachePath):
matchExists += 1
print("Copying %s" % symCachePath)
shutil.copyfile(symCachePath, DestDirName + "\\" + symFilePath)
else:
print("Symbols for '%s' are not in %s" % (OrigFileName, symCachePathBase))
else:
#This is normally too verbose
if verbose:
print("Skipping %s" % PDBPath)
print("%d symbol files found in the trace, %d appear to be ours, and %d of those exist in symcache." % (matchCount, ourModuleCount, matchExists))
if __name__ == "__main__":
main()
| u-engine/UIforETW | bin/ETWPackSymbols.py | Python | apache-2.0 | 4,627 | 0.016425 |
import json
from datetime import datetime
import re
from .utils import date_to_str, str_to_date
def _is_datestring(s):
return isinstance(s, str) and \
re.match(r"(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})", s)
def _encode_datetime(obj):
if isinstance(obj, datetime):
return date_to_str(obj, obj.tzinfo is not None)
def _decode_datetime(d):
for k in d:
if _is_datestring(d[k]):
d[k] = str_to_date(d[k])
if isinstance(d[k], list):
for i, v in enumerate(d[k]):
if _is_datestring(v):
d[k][i] = str_to_date(v)
return d
def dumpd(obj):
"""
Convert object to dict
Parameters
----------
obj : object
Object to convert
Returns
-------
d : dict
Object as dict
"""
# return input directly if it is already dict
if isinstance(obj, dict):
return obj
# return list of dict
elif isinstance(obj, (list, tuple, set)):
return [dumpd(o) for o in obj]
# convert to dict
data = {}
for key in obj.__dict__.keys():
if not key.startswith("_"):
# convert each items in list-like object
if isinstance(getattr(obj, key, None), (list, tuple, set)):
data[key] = []
for v in getattr(obj, key, None):
if hasattr(v, "to_dict"):
data[key].append(v.to_dict())
elif hasattr(v, "__dict__"):
data[key].append(dumpd(v))
else:
data[key].append(v)
# convert each items in dict
elif isinstance(getattr(obj, key, None), dict):
data[key] = {}
for k, v in getattr(obj, key, None).items():
if hasattr(v, "to_dict"):
data[key][k] = v.to_dict()
elif hasattr(v, "__dict__"):
data[key][k] = dumpd(v)
else:
data[key][k] = v
# convert object with `to_dict`
elif hasattr(getattr(obj, key, None), "to_dict"):
data[key] = getattr(obj, key).to_dict()
# convert plain object
elif hasattr(getattr(obj, key, None), "__dict__"):
data[key] = dumpd(getattr(obj, key))
else:
data[key] = getattr(obj, key, None)
return data
def loadd(d, obj_cls):
"""
Convert dict to object
Parameters
----------
d : dict
Dictionary to convert
obj_cls : type
Class of object to convert
Returns
-------
obj : object
Instance of obj_cls
"""
# return None when input is None
if d is None:
return None
# return the list of objects when input is list
if isinstance(d, list):
return [loadd(di, obj_cls) for di in d]
# use `create_object` instead of its constructor
if hasattr(obj_cls, "create_object"):
obj = obj_cls.create_object(d)
else:
obj = obj_cls()
# get member's type info
types = obj_cls._types() if getattr(obj_cls, "_types", None) else {}
# set values to object
for k, v in d.items():
if k in types:
if hasattr(types[k], "from_dict"):
setattr(obj, k, types[k].from_dict(v))
else:
setattr(obj, k, loadd(v, types[k]))
else:
setattr(obj, k, v)
return obj
def dumps(obj, **kwargs):
"""
Encode object/dict to JSON
Parameters
----------
obj : object
Object to encode
Returns
-------
s : str
JSON string
"""
if obj is None:
return ""
d = dumpd(obj)
return json.dumps(d, default=_encode_datetime, **kwargs)
def loads(s, obj_cls=None, **kwargs):
"""
Decode JSON to dict/object
Parameters
----------
s : str
JSON string to decode
obj_cls : type, default None
Class of object to convert. If None, convert to dict
Returns
-------
obj : object
Instance of obj_cls
"""
if s is None or s == "":
return None
d = json.loads(s, object_hook=_decode_datetime, **kwargs)
if obj_cls is None:
return d
else:
return loadd(d, obj_cls)
class Serializable:
"""
Base class for serializable object
"""
@classmethod
def _types(cls):
"""
Override this method to create instance of specific class for members.
Configure like below then instance of `Foo` will be set to `self.foo`
and `Bar` to `self.bar`
```
return {
"foo": Foo,
"bar": Bar
}
```
"""
return {}
def __repr__(self):
return "<{} at {}>\n{}".format(
self.__class__.__name__,
hex(id(self)),
self.to_json(indent=2, ensure_ascii=False))
@classmethod
def create_object(obj_cls, d):
return obj_cls()
def to_dict(self):
"""
Convert this object to dict
Returns
-------
d : dict
Object as dict
"""
return dumpd(self)
def to_json(self, **kwargs):
"""
Convert this object to JSON
Returns
-------
s : str
Object as JSON string
"""
return dumps(self, **kwargs)
@classmethod
def from_dict(cls, d):
"""
Create object from dict
Parameters
----------
d : dict
Dictionary of this object
Returns
-------
obj : Serializable
Instance of this class
"""
return loadd(d, cls)
@classmethod
def from_dict_dict(cls, dict_dict):
"""
Create dictionary of this objects from dictionaries of dictionaries
Parameters
----------
dict_dict : dict
Dictionary of dictionaries
Returns
-------
dict_of_this_obj : dict
Dictionary of this objects
"""
return {k: cls.from_dict(v) for k, v in dict_dict.items()}
@classmethod
def from_json(cls, s, **kwargs):
"""
Create this object from JSON string
Parameters
----------
s : str
JSON string of this object
Returns
-------
obj : Serializable
Instance of this class
"""
return loads(s, cls, **kwargs)
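# Illustrative usage (not executed; the class and fields are hypothetical):
#   class User(Serializable):
#       def __init__(self, name=None, created_at=None):
#           self.name = name
#           self.created_at = created_at
#   u = User(name='alice', created_at=datetime.now())
#   s = u.to_json()          # datetimes encoded via date_to_str()
#   u2 = User.from_json(s)   # and decoded back via str_to_date()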
| uezo/minette-python | minette/serializer.py | Python | apache-2.0 | 6,581 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from ..lib.decorators import json
from . import api
@api.route("/test", methods=["GET"])
@json
def test():
return {}
@api.route("/auth/register", methods=["GET"])
@json
def auth_register():
    return {}
@api.route("/auth/exist", methods=["GET"])
@json
def auth_exist():
    # minimal placeholder handler, mirroring the other stub endpoints
    return {}
| ee-book/api | api/v1/users.py | Python | apache-2.0 | 416 | 0.004808 |
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import shutil
import unittest
try:
import simplejson as json
except ImportError:
import json
from nupic.data.file_record_stream import FileRecordStream
from htmresearch.frameworks.classification.classification_network import (
configureNetwork,
runNetwork)
from htmresearch.frameworks.classification.utils.sensor_data import (
generateSensorData)
from htmresearch.frameworks.classification.utils.network_config import (
generateSampleNetworkConfig,
generateNetworkPartitions)
# Parameters to generate the artificial sensor data
OUTFILE_NAME = "white_noise"
SEQUENCE_LENGTH = 200
NUM_CATEGORIES = 3
NUM_RECORDS = 2400
WHITE_NOISE_AMPLITUDES = [0.0, 1.0]
SIGNAL_AMPLITUDES = [1.0]
SIGNAL_MEANS = [1.0]
SIGNAL_PERIODS = [20.0]
# Additional parameters to run the classification experiments
RESULTS_DIR = "results"
MODEL_PARAMS_DIR = 'model_params'
DATA_DIR = "data"
# Classifier types
CLA_CLASSIFIER_TYPE = "py.CLAClassifierRegion"
KNN_CLASSIFIER_TYPE = "py.KNNClassifierRegion"
class TestSensorDataClassification(unittest.TestCase):
"""Test classification results for sensor data."""
def setUp(self):
with open("sensor_data_network_config.json", "rb") as jsonFile:
self.templateNetworkConfig = json.load(jsonFile)
def testClassificationAccuracy(self):
"""Test classification accuracy for sensor data."""
networkConfigurations = generateSampleNetworkConfig(
self.templateNetworkConfig, NUM_CATEGORIES)
for networkConfig in networkConfigurations:
for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
for signalMean in SIGNAL_MEANS:
for signalAmplitude in SIGNAL_AMPLITUDES:
for signalPeriod in SIGNAL_PERIODS:
sensorType = networkConfig["sensorRegionConfig"].get(
"regionType")
spEnabled = networkConfig["sensorRegionConfig"].get(
"regionEnabled")
tmEnabled = networkConfig["tmRegionConfig"].get(
"regionEnabled")
upEnabled = networkConfig["upRegionConfig"].get(
"regionEnabled")
classifierType = networkConfig["classifierRegionConfig"].get(
"regionType")
expParams = ("RUNNING EXPERIMENT WITH PARAMS:\n"
" * numRecords=%s\n"
" * signalAmplitude=%s\n"
" * signalMean=%s\n"
" * signalPeriod=%s\n"
" * noiseAmplitude=%s\n"
" * sensorType=%s\n"
" * spEnabled=%s\n"
" * tmEnabled=%s\n"
" * upEnabled=%s\n"
" * classifierType=%s\n"
) % (NUM_RECORDS,
signalAmplitude,
signalMean,
signalPeriod,
noiseAmplitude,
sensorType.split(".")[1],
spEnabled,
tmEnabled,
upEnabled,
classifierType.split(".")[1])
            print(expParams)
inputFile = generateSensorData(DATA_DIR,
OUTFILE_NAME,
signalMean,
signalPeriod,
SEQUENCE_LENGTH,
NUM_RECORDS,
signalAmplitude,
NUM_CATEGORIES,
noiseAmplitude)
dataSource = FileRecordStream(streamID=inputFile)
network = configureNetwork(dataSource,
networkConfig)
partitions = generateNetworkPartitions(networkConfig,
NUM_RECORDS)
(numCorrect,
numTestRecords,
predictionAccuracy) = runNetwork(network, networkConfig,
partitions, NUM_RECORDS)
if (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == KNN_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and not tmEnabled
and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 1.0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
# using AlmostEqual until the random bug issue is fixed
self.assertAlmostEqual(predictionAccuracy, 80, delta=1)
elif (noiseAmplitude == 1.0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and not tmEnabled
and not upEnabled):
# using AlmostEqual until the random bug issue is fixed
self.assertAlmostEqual(predictionAccuracy, 81, delta=1)
def tearDown(self):
shutil.rmtree(DATA_DIR)
if __name__ == "__main__":
unittest.main()
| chanceraine/nupic.research | tests/classification/test_sensor_data_classification.py | Python | agpl-3.0 | 7,592 | 0.004478 |
import os
# We'll render HTML templates and access data sent by POST
# using the request object from flask. Redirect and url_for
# will be used to redirect the user once the upload is done
# and send_from_directory will help us to send/show on the
# browser the file that the user just uploaded
from flask import Flask, render_template, request, redirect, url_for, send_from_directory, jsonify
from werkzeug import secure_filename
import detect, face_recognizer
# Initialize the Flask application
app = Flask(__name__)
# This is the path to the upload directory
app.config['UPLOAD_FOLDER'] = 'uploads/'
# These are the extension that we are accepting to be uploaded
app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])
# For a given file, return whether it's an allowed type or not
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
# This route will show a form to perform an AJAX request
# jQuery is loaded to execute the request and update the
# value of the operation
@app.route('/')
def index():
return render_template('index.html')
# Route that will process the file upload
@app.route('/upload', methods=['GET','POST'])
def upload():
# Get the name of the uploaded file
file = request.files['file']
# Check if the file is one of the allowed types/extensions
if file and allowed_file(file.filename):
# Make the filename safe, remove unsupported chars
filename = secure_filename(file.filename)
# Move the file form the temporal folder to
# the upload folder we setup
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
detect.doDetect(file)
present_student_list = face_recognizer.doFaceRecognition()
return jsonify (
students_present=present_student_list)
return "something went wrong"
# Route that serves an uploaded file back to the browser; upload() could
# finish with: return redirect(url_for('uploaded_file', filename=filename))
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
if __name__ == '__main__':
app.run()
| duaraghav8/Corque | demo.py | Python | mit | 2,535 | 0.013807 |
from django.contrib.auth import get_user_model
from rest_framework.fields import CharField
from rest_framework.serializers import ModelSerializer
from grandchallenge.challenges.models import Challenge
from grandchallenge.components.serializers import (
ComponentInterfaceValueSerializer,
)
from grandchallenge.evaluation.models import (
Evaluation,
Phase,
Submission,
)
class UserSerializer(ModelSerializer):
class Meta:
model = get_user_model()
fields = ("username",)
class ChallengeSerializer(ModelSerializer):
class Meta:
model = Challenge
fields = (
"title",
"short_name",
)
class PhaseSerializer(ModelSerializer):
challenge = ChallengeSerializer()
class Meta:
model = Phase
fields = (
"challenge",
"title",
"slug",
)
class SubmissionSerializer(ModelSerializer):
phase = PhaseSerializer()
creator = UserSerializer()
class Meta:
model = Submission
fields = (
"pk",
"phase",
"created",
"creator",
"comment",
"predictions_file",
"supplementary_file",
"supplementary_url",
)
class EvaluationSerializer(ModelSerializer):
submission = SubmissionSerializer()
outputs = ComponentInterfaceValueSerializer(many=True)
status = CharField(source="get_status_display", read_only=True)
title = CharField(read_only=True)
class Meta:
model = Evaluation
fields = (
"pk",
"method",
"submission",
"created",
"published",
"outputs",
"rank",
"rank_score",
"rank_per_metric",
"status",
"title",
)
| comic/comic-django | app/grandchallenge/evaluation/serializers.py | Python | apache-2.0 | 1,856 | 0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Delivery Costs',
'version': '1.0',
'category': 'Stock',
'description': """
Allows you to add delivery methods in sale orders and picking.
==============================================================
You can define your own carrier for prices. When creating
invoices from picking, the system is able to add and compute the shipping line.
""",
'depends': ['sale_stock'],
'data': [
'security/ir.model.access.csv',
'views/delivery_view.xml',
'views/partner_view.xml',
'data/delivery_data.xml',
'views/report_shipping.xml',
'views/report_deliveryslip.xml'
],
'demo': ['data/delivery_demo.xml'],
'test': [
'../account/test/account_minimal_test.xml',
],
'installable': True,
}
| vileopratama/vitech | src/addons/delivery/__openerp__.py | Python | mit | 886 | 0 |
import unittest
from PyFoam.Basics.TemplateFile import TemplateFile,TemplateFileOldFormat,PyratempPreprocessor
from PyFoam.Error import FatalErrorPyFoamException
from tempfile import mktemp
from PyFoam.ThirdParty.six import PY3
import sys
theSuite=unittest.TestSuite()
template1="""$$ y = 3+x
This should be $x+y$"""
template2="""
$$ xxx=13
$$ xx=34+xxx
$2*x+xx-xxx$
"""
templateFor="""$$ y = 2*x
<!--(for i in range(y))--> @!i!@ <!--(end)-->#!
"""
templateMath="sqrt(x) = $sqrt(x)$"
templateList="""<!--(for e in theList)-->#!
<!--(if e.lower()=="joe")-->#!
Big @!e!@
<!--(else)-->#!
Little @!e!@
<!--(end)-->#!
<!--(end)-->#!
"""
templateMacro="""<!--(macro tabsquare)-->
@!x!@ \t = @!x*x!@
<!--(end)-->
<!--(for i in vals)-->@!tabsquare(x=i)!@<!--(end)-->#!
"""
templateBuiltIn="""
<!--(if True)-->TRUE<!--(end)-->
<!--(if not False)-->FALSE<!--(end)-->
@!min(2,3)!@ @!max(2,3)!@
@!chr(42)!@ @!ord(' ')!@
"""
templateVariablesIn3="""
$$ duesenAus=[0,2,3]
$$ duesenNamen=["B30"]+["B%d_%d" % (29-i,j) for i in range(7) for j in [2,1]]
$$ removeDuesen=[duesenNamen[i] for i in duesenAus]
<!--(for d in removeDuesen)-->
|-d-|
<!--(end)-->
"""
class TemplateFileTest(unittest.TestCase):
def testTemplateFileString(self):
t=TemplateFile(content=template1,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
fName=mktemp()
t.writeToFile(fName,{"x":1+2.})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFile(name=fName,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
def testTemplateFileLongVars(self):
t=TemplateFile(content=template2,expressionDelimiter="$")
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileForLoop(self):
t=TemplateFile(content=templateFor)
self.assertEqual(t.getString({"x":2})," 0 1 2 3 ")
def testTemplateFileMacro(self):
t=TemplateFile(content=templateMacro)
if PY3 and sys.version_info.minor>1:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.889999999999999\n-1 \t = 1\n")
else:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.89\n-1 \t = 1\n")
def testTemplateFileListLoop(self):
t=TemplateFile(content=templateList)
self.assertEqual(t.getString({"theList":["Henry","Joe","joe","Tom"]}),"Little Henry\nBig Joe\nBig joe\nLittle Tom\n")
def testTemplateFileLongMath(self):
t=TemplateFile(content=templateMath,expressionDelimiter="$")
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFileMathRealDelim(self):
t=TemplateFile(content=templateMath.replace("$","|"))
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFilePercentDelimiter(self):
t=TemplateFile(content="x=$!x!$")
self.assertEqual(t.getString({"x":4}),"x=4")
def testTemplateFileBuiltinStuff(self):
t=TemplateFile(content=templateBuiltIn)
self.assertEqual(t.getString({}),"\nTRUE\nFALSE\n2 3\n* 32\n")
theSuite.addTest(unittest.makeSuite(TemplateFileTest,"test"))
class TemplateFileAllowExecutionTest(unittest.TestCase):
def testAssignmentNotWorkingInPython3(self):
t=TemplateFile(content=templateVariablesIn3,
expressionDelimiter="|-",
allowExec=True)
self.assertEqual(t.getString({}),"\nB30\nB29_1\nB28_2\n")
class TemplateFileOldFormatTest(unittest.TestCase):
def testTemplateFileString(self):
t=TemplateFileOldFormat(content=template1)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
fName=mktemp()
t.writeToFile(fName,{"x":"1+sqrt(4)"})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0\n")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFileOldFormat(name=fName)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
def testTemplateFileLongVars(self):
t=TemplateFileOldFormat(content=template2)
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileLongMath(self):
t=TemplateFileOldFormat(content=templateMath)
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0\n")
theSuite.addTest(unittest.makeSuite(TemplateFileOldFormatTest,"test"))
class PyratempPreprocessorTest(unittest.TestCase):
def testFullPreprocessing(self):
p=PyratempPreprocessor()
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoVarLinePreprocessing(self):
p=PyratempPreprocessor(dovarline=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$$ a=2 ')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertEqual(p("$$ a "),"$$ a ")
self.assertEqual(p("$$ a=2\n"),'$$ a=2\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$$ a=2\n$$ b=3')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoExprPreprocessing(self):
p=PyratempPreprocessor(doexpr=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $foo$ $bar$ ")
self.assertEqual(p("$foo$ $bar$"),"$foo$ $bar$")
self.assertEqual(p("$foo$ $bar$\n"),"$foo$ $bar$\n")
theSuite.addTest(unittest.makeSuite(PyratempPreprocessorTest,"test"))
| Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder-other-scripting-PyFoam | unittests/Basics/test_TemplateFile.py | Python | gpl-2.0 | 7,289 | 0.019207 |
"""JVPN netstats libraries
"""
__author__ = 'e@dscp.org (Ebben Aries)'
import socket
import struct
def GetNetstats(device):
device = device + ':'
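  # /proc/net/dev lists, after the interface name, eight receive counters
  # (bytes, packets, errs, drop, fifo, frame, compressed, multicast)
  # followed by the same eight transmit counters, so the fields returned
  # below are rx_bytes, rx_packets, tx_bytes and tx_packets.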
for line in open('/proc/net/dev', 'r'):
data = filter(None, line.split(' '))
if data[0] == device:
return (data[1], data[2], data[9], data[10])
def GetRoutes(device):
routes = []
for line in open('/proc/net/route', 'r'):
if line.startswith(device):
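      # /proc/net/route columns: Iface, Destination, Gateway, Flags,
      # RefCnt, Use, Metric, Mask, MTU, Window, IRTT; the addresses are
      # little-endian hex, hence the struct.pack('<L', ...) round-trip.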
prefix = socket.inet_ntoa(struct.pack('<L', int(line.split()[1], 16)))
metric = int(line.split()[6])
netmask = socket.inet_ntoa(struct.pack('<L', int(line.split()[7], 16)))
route_detail = '%s/%s:%d' % (prefix, netmask, metric)
routes.append(route_detail)
return routes
def GetIp(device):
ip = ''
for line in open('/proc/net/route', 'r'):
if line.startswith(device):
ip = socket.inet_ntoa(struct.pack('<L', int(line.split()[2], 16)))
break
return ip
def GetDefInterface(interface='eth0', gateway='0.0.0.0'):
for line in open('/proc/net/route', 'r'):
if line.split()[1] == '00000000' and line.split()[7] == '00000000':
interface = line.split()[0]
gateway = socket.inet_ntoa(struct.pack('<L', int(line.split()[2], 16)))
return gateway, interface
| earies/jvpn | jvpn/netstats.py | Python | apache-2.0 | 1,247 | 0.020048 |
# coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is een SQL-expressie zoals "field1=\'newvalue\'". U kunt de resultaten van een JOIN niet updaten of wissen',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '%s kaarten gewist',
'%s rows updated': '*** %s kaarten veranderd',
'About': 'Info',
'Act': 'Act',
'Add': 'Voegtoe',
'Admin Panel': 'Admin Paneel',
'Are you sure to delete this category?': 'Weet u zeker dat u deze categorie wilt wissen?',
'Are you sure you want to delete this category?': 'Weet u zeker dat u deze categorie wilt wissen?',
'Article added': 'Artikel toegevoegd',
'Articles in Archive ': 'Artikel in Archief ',
'Articles with ': 'Artikel met ',
'Articles with category': 'Artikel met categorie',
'Articles with tag': 'Artikelen with kenmerk',
'Available databases and tables': 'Beschikbare gegevensbestanden en tabellen',
'Avatar uploaded': 'Avatar opgestuurd',
'Avatars are disable.': 'Avatars zijn uitgeschakeld.',
'Avatars are disabled.': 'Avatars zijn uitgeschakeld.',
'Back to the index page': 'Terug naar de startpagina',
'Cannot be empty': 'Mag niet leeg zijn',
'Cats': 'Catn',
'Change Avatar': 'Verander Avatar',
'Change about': 'Verander info',
'Change author': 'Verander auteur',
'Change content': 'Verander inhoud',
'Change css': 'Verander css',
'Change description': 'Verander omschrijving',
'Change email': 'Verander e-mail',
'Change extract': 'Verander samenvatting',
'Change first name': 'Verander voornaam',
'Change footer': 'Verander voettekst',
'Change front page': 'Verander voorpagina',
'Change keywords (sep. by ,)': 'Verander trefwoorden (gesch. dr. ,)',
'Change last name': 'Verander achternaam',
'Change logo url': 'Verander logo url',
'Change name': 'Verander naam',
'Change password': 'Verander wachtwoord',
'Change site information': 'Verander site informatie',
'Change subtitle': 'Verander de subtitel',
'Change title': 'Verander de titel',
'Change url': 'Verander de url',
'Check to delete': 'Vink aan om te wissen',
'Check to delete:': 'Vink aan om te wissen:',
'Click if you want to make this article a link to a site, to list in panels need to be a page also': 'Klik als u van dit artikel een link naar een site wilt maken. (Om getoond te worden in de panelen moet het ook een Pagina zijn)',
'Click if you want to make this article a page': 'Klik als u van dit artikel een Pagina (met menuknop) wilt maken',
'Click to change about content': 'Klik om Info aan te passen',
'Click to change categories of this article': 'Klik om categorieën van artikel aan te passen',
'Click to change footer content': 'Klik om voettekst aan te passen',
'Click to change keywords of the site': 'Klik om trefwoorden aan te passen',
'Click to change keywords of this article': 'Klik om trefwoorden aan te passen',
'Click to change name of this article': 'Klik om de naam van dit artikel aan te passen',
'Click to change the content of the article, content is all the body of the article': 'Klik om inhoud van artikel aan te passen, inhoud de z.g. body van het artikel',
'Click to change the description of the site': 'Klik om omschrijving van de site aan te passen',
'Click to change the extract of the article, extract is a slice of the content you want to show in search': 'Klik om de samenvatting van het artikel aan te passen; de samenvatting is het deel van de inhoud dat bij zoekresultaten getoond wordt',
'Click to change the frontpage of the site': 'Klik om voorpagina aan te passen',
'Click to change the logo': 'Klik om het logo aan te passen',
'Click to change the subtitle of the site': 'Klik om subtitel van de site aan te passen',
'Click to change the title of the article': 'Klik om titel van artikel aan te passen',
'Click to change the title of the site': 'Klik om titel van de site aan te passen',
'Click to delete this article': 'Klik om dit artikel te wissen',
'Click to preview the article (publish or not)': 'Klik om dit artikel te bekijken (publiek of niet)',
'Click to publish this article': 'Klik omdit artikel te publiceren',
'Client IP': 'Client IP',
'Close this window': 'Sluit dit venster',
'Comment edit': 'Comment aanpassing',
'Content': 'Inhoud',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Create new article': 'Maak nieuw artikel',
'Current request': 'Huidige request',
'Current response': 'Huidige response',
'Current session': 'Huidige session',
'DB Model': 'DB Model',
'Database': 'Database',
'Delete:': 'Wis:',
'Description': 'Omschrijving',
'E-mail': 'E-mail',
'Edit': 'Verander',
'Edit This App': 'Pas deze App aan',
'Edit current record': 'Pas huidige kaart aan',
'Error 400!': 'Fout 400!',
'Error 404!': 'Fout 404!',
'Extract': 'Extract',
'First name': 'Voornaam',
'Footer': 'Voettekst',
'Front Page': 'Voorpagina',
'Go back to main page': 'Ga terug naar start pagina',
'Group %(group_id)s created': 'Groep %(group_id)s aangemaakt',
'Group ID': 'Groep ID',
'Group uniquely assigned to user %(id)s': 'Groep exclusief toegekend aan gebruiker %(id)s',
'Hello World': 'Hallo Wereld',
'Home': 'Start',
'Image': 'Image',
'Import/Export': 'Import/Export',
'Index': 'Index',
'Internal State': 'Internal State',
'Invalid Query': 'Ongeldige query',
'Invalid email': 'Invalid email',
'Invalid login': 'Invalid login',
'Invalid password': 'Invalid password',
'Keywords': 'Keywords',
'Language': 'Language',
'Last name': 'Last name',
'Layout': 'Layout',
'Leave a Reply': 'Leave a Reply',
'List articles': 'List articles',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'Login': 'Login',
'Logo': 'Logo',
'Logout': 'Logout',
'Lost password': 'Lost password',
'Main Menu': 'Main Menu',
'Make sure all words are spelled correctly': 'Make sure all words are spelled correctly',
'Menu Model': 'Menu Model',
'My Profile': 'Mijn profiel',
'NO': 'NO',
'Name': 'Naam',
'New Record': 'Nieuw Record',
'New password': 'Nieuw wachtwoord',
'No Title': 'Geen Titel',
'No articles': 'Geen artikelen',
'No comments loaded yet!. If persist enable javascript or update your browser.': 'Commentaren nog niet geladen! Als dit zo blijft zet javascript-ondersteuning aan of ververs browser.',
'No databases in this application': 'Geen databases in deze applicatie',
'No description': 'Geen omschrijving',
'No message receive from server': 'Geen mededeling ontvangen van server',
'Old password': 'Oud wachtwoord',
'Origin': 'Afkomstig',
'Page': 'Pagina',
'PageUrl': 'PaginaUrl',
'Pages': 'Pagina\'s',
'Password': 'Wachtwoord',
'Password changed': 'Wachtwoord aangepast',
"Password fields don't match": "Wachtwoordvelden komen niet overeen",
'Powered by': 'Aangedreven door',
'Powered by Instant Press': 'Aangedreven door Instant Press',
'Powered by Web2py Enterprise Framework': 'Aangedreven door Web2py Enterprise Framework',
'Powered by python': 'Aangedreven door python',
'Problem with avatars': 'Probleem met avatars',
'Problem with categorie id value!': 'Probleem met categorie id waarde!',
'Problem with id value': 'Probleem met id waarde',
'Problem with some submitted values': 'Probleem met enkele opgestuurde waardes',
'Problem with the values submitted': 'Probleem met opgestuurde waardes',
'Profile': 'Profiel',
'Public': 'Publiek',
'Query:': 'Query:',
'Record %(id)s updated': 'Kaart %(id)s aangepast',
'Record ID': 'Kaart ID',
'Record Updated': 'Kaart Aangepast',
'Refresh': 'Ververs',
'Register': 'Registeer',
'Registration key': 'Registratie sleutel',
'Registration successful': 'Registratie successful',
'Reload the list': 'Ververs de lijst',
'Remember me (for 30 days)': 'Onthoud me (30 dagen)',
'Request reset password': 'Verzoek om wachtwoord terug te zetten',
'Reset Password key': 'Zet Wachtwoord terug',
'Role': 'Rol',
'Rows in table': 'Rijen in tabel',
'Rows selected': 'Rijen geselecteerd',
'Rss': 'Rss',
'Rss last comments': 'Rss laatste commentaren',
'Rss last posts': 'Rss laatste plaatsingen',
'Save the content': 'Sla de inhoud op',
'Search': 'Zoek',
'Search in title': 'Zoek in de titel',
'Show articles': 'Toon artikelen',
'Show categories': 'Toon categorieën',
'Show comments': 'Toon commentaren',
'Show images': 'Toon beeldmateriaal',
'Show links': 'Toon verwijzingen',
'Show or hide the admin panel': 'Toon of verberg beheerpaneel',
'Show styles': 'Toon sjablonen',
'Show the list of articles': 'Toon de artikellijst',
'Show the list of categories': 'Toon de categorielijst',
'Show the list of comments': 'Toon de commentaarijst',
'Show the list of images': 'Toon de lijst met beeldmateriaal',
'Show the list of links': 'Toon de verwijzingenlijst',
'Show the list of styles': 'Toon de sjablonenlijst',
'Show the list of users': 'Toon de gebruikerslijst',
'Show users': 'Toon gebruiker',
'Showing': 'Toont',
'Sign in': 'Log in',
'Sign in with your google account': 'Log in met uw google registratie',
"Sorry, but this article doesn't exist!": 'Sorry, dit artikel bestaat niet!',
'Stylesheet': 'Sjabloon',
'Submit': 'Stuurop',
'Subtitle': 'Subtitel',
'Sure you want to delete this article?': 'Weet u zeker dat u dit artikel wilt wissen?',
'Sure you want to delete this comment?': 'Weet u zeker dat u dit commentaaar wilt wissen?',
'Sure you want to delete this image?': 'Weet u zeker dat u dit plaatje wilt wissen?',
'Sure you want to delete this link?': 'Weet u zeker dat deze verwijzing wilt wissen?',
'Sure you want to delete this object?': 'Weet u zeker dat u dit object wilt wissen?',
'Sure you want to delete this style?': 'Weet u zeker dat dit sjabloon wilt wissen?',
'Sure you want to delete this user?': 'Weet u zeker dat u deze gebruiker wilt wissen?',
'Table name': 'Tabel naam',
'Tags': 'Labels',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'De "query" is a conditie zoals "db.table1.field1==\'value\'". Iets als "db.table1.field1==db.table2.field2" resulteert in een SQL JOIN.',
"The article id doesn't exist": 'Het artikel id bestaat niet',
"The article id or page number doesn't exist": 'Het artikel id or paginanummer bestaat niet',
"The article id, page number, or reply doesn't exist": 'Het artikel id, paginanummer, or commentaar bestaat niet',
"The comment id doesn't exist": 'Het comment id bestaat niet',
'The output of the file is a dictionary that was rendered by the view': 'De uitvoer van het bestand is een dictionary die omgezet wordt door de view',
'The output of the file is a dictionary that was rendered by this view': 'De uitvoer van het bestand is een dictionary die omgezet wordt door deze view',
'The search for': 'Het zoeken naar',
'There was a problem with values of Year - Month': 'Er was een probleem met de waarden van Jaar - Maand',
'This article was updated on': 'Dit artikel is aangepast op',
"This function doesn't exist": 'Deze functie bestaat niet',
"This function doesn't exist!": 'Deze functie bestaat niet!',
'This is a copy of the scaffolding application': 'Dit is een kopie van de skelet applicatie',
'This is a copy of the scaffolding application.': 'Dit is een kopie van de skelet applicatie.',
'Timestamp': 'Tijdstempel',
'Title': 'Titel',
'Update:': 'Aangepast:',
'Updated': 'Aangepast',
'Upload from url': 'Haal gegeven van een url op',
'Upload your image': 'Haal uw plaatje op',
'Url to an image': 'Url naar een plaatje',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Gebruik (...)&(...) voor EN, (...)|(...) voor OF, en ~(...) voor NIET om complexere zoekopdrachten te maken.',
'Use more general keywords': 'Gebruik meer algemene trefwoorden',
'User %(id)s Logged-in': 'Gebruiker %(id)s Logde in',
'User %(id)s Logged-out': 'Gebruiker %(id)s Logde uit',
'User %(id)s Password changed': 'Gebruiker %(id)s Wachtwoord is veranderd',
'User %(id)s Password reset': 'Gebruiker %(id)s Wachtwoord teruggezet',
'User %(id)s Profile updated': 'Gebruiker %(id)s Profiel aangepast',
'User %(id)s Registered': 'Gebruiker %(id)s geregistreerd',
'User ID': 'Gebruiker ID',
'Verify Password': 'Controle wachtwoord',
'View': 'View',
'Warning: This will replace your css with the default value from current style. Are you sure you want to continue?': 'Waarschuwing: dit zal uw css vervangen door de standaardwaarde van het gekozen sjabloon. Wilt u echt doorgaan?',
'Welcome %s': 'Welkom %s',
'Welcome to web2py': 'Welkom bij web2py',
'Which called the function': 'Die de volgende functie aanriep:',
'You are not logged in': 'U bent niet ingelogd',
'You are successfully running web2py': 'U heeft web2py succesvol aan de praat',
'You are successfully running web2py.': 'U heeft web2py succesvol aan de praat.',
'You can modify this application and adapt it to your needs': 'U kunt deze applicatie aanpassen',
'You can modify this application and adapt it to your needs.': 'U kunt deze applicatie aanpassen.',
'You have to sign in to your account before comment': 'Als u een commenteer wilt geven, dient u eerst in te loggen',
'You need to sign in as an admin': 'U dient in te loggen als beheerder',
'You need to submit your search text.': 'U dient een zoektekst op te geven.',
'You visited the url': 'U bezocht de url',
'appadmin is disabled because insecure channel': 'appadmin is uitgeschakeld, onveilig kanaal',
'back': 'terug',
'cache': 'cache',
'change password': 'verander wachtwoord',
'click here for online examples': 'klik hier om voorbeelden te zien',
'click here for the administrative interface': 'klik hier voor beheer',
'customize me!': 'pas me aan!',
'data uploaded': 'gegevens opgestuurd',
'database': 'database',
'database %s select': 'database %s geselecteerd',
'db': 'db',
'design': 'ontwerp',
'documentation': 'documentatie',
'done!': 'gedaan!',
'edit profile': 'verander profiel',
'export as csv file': 'export als csv bestand',
'in categories': 'in categorieën',
'insert new': 'voeg nieuw(e)',
'insert new %s': 'voeg nieuw(e) %s toe',
'invalid request': 'ongeldig verzoek!',
'located in the file': 'aanwezig in bestand',
'login': 'log in',
'logout': 'log uit',
'lost password': 'vergeten wachtwoord',
'lost password?': 'vergeten wachtwoord?',
'new record inserted': 'nieuwe kaart toegevoegd',
'next 100 rows': 'volgende 100 rijen',
'not yield any results': 'gaf geen resultaat',
'or import from csv file': 'of importeer uit csv bestand',
'previous 100 rows': 'vorige 100 rijen',
'record': 'kaart',
'record does not exist': 'kaart bestaat niet',
'record id': 'kaart id',
'register': 'registreer',
'results': 'resultaten',
'selected': 'geselecteerde',
'state': 'status',
'table': 'tabel',
'unable to parse csv file': 'kon csv bestand niet verwerken',
}
| danisuke0781/instant-press | languages/nl.py | Python | gpl-2.0 | 14,840 | 0.021972 |
# Aspect ratio
# create mesh
from SMESH_mechanic import *
# get faces with aspect ratio > 1.5
filter = smesh.GetFilter(SMESH.FACE, SMESH.FT_AspectRatio, SMESH.FT_MoreThan, 1.5)
ids = mesh.GetIdsFromFilter(filter)
print "Number of faces with aspect ratio > 1.5:", len(ids)
# copy the faces with aspect ratio > 1.5 to another mesh;
# this demonstrates that a filter can be used where usually a group or submesh is acceptable
filter.SetMesh( mesh.GetMesh() )
mesh2 = smesh.CopyMesh( filter, "AR > 1.5" )
print "Number of copied faces with aspect ratio > 1.5:", mesh2.NbFaces()
# create a Group of faces with Aspect Ratio < 1.5
group = mesh.MakeGroup("AR < 1.5", SMESH.FACE, SMESH.FT_AspectRatio, '<', 1.5)
print "Number of faces with aspect ratio < 1.5:", group.Size()
# combine several criteria to Create a Group of only Triangular faces with Aspect Ratio < 1.5;
# note that contents of a GroupOnFilter is dynamically updated as the mesh changes
crit = [ smesh.GetCriterion( SMESH.FACE, SMESH.FT_AspectRatio, '<', 1.5, BinaryOp=SMESH.FT_LogicalAND ),
smesh.GetCriterion( SMESH.FACE, SMESH.FT_ElemGeomType,'=', SMESH.Geom_TRIANGLE ) ]
filter = smesh.GetFilterFromCriteria( crit )
triaGroup = mesh.GroupOnFilter( SMESH.FACE, "Tria AR < 1.5", filter )
print "Number of triangles with aspect ratio < 1.5:", triaGroup.Size()
| FedoraScientific/salome-smesh | doc/salome/examples/filters_ex01.py | Python | lgpl-2.1 | 1,332 | 0.016517 |
# -*- coding: utf-8 -*-
'''
Created on Oct 2, 2010
@author: dimitry (zavlab1)
'''
from gi.repository import Gtk
from gi.repository import GdkPixbuf
from foobnix.gui.service.path_service import get_foobnix_resourse_path_by_name
from foobnix.util.const import ICON_FOOBNIX
from foobnix.version import FOOBNIX_VERSION
class AboutWindow(Gtk.AboutDialog):
def __init__(self):
Gtk.AboutDialog.__init__(self)
self.set_program_name("Foobnix")
self.set_version(FOOBNIX_VERSION)
self.set_copyright("(c) Ivan Ivanenko <ivan.ivanenko@gmail.com>")
self.set_comments(_("Simple and Powerful player"))
self.set_website("http://www.foobnix.com")
self.set_authors(["Dmitry Kozhura (zavlab1) <zavlab1@gmail.com>", "Pietro Campagnano <fain182@gmailcom>", "Viktor Suprun <popsul1993@gmail.com>"])
self.set_translator_credits("""Bernardo Miguel Savone
Sérgio Marques
XsLiDian
KamilSPL
north
Alex Serada
Ivan Ivanenko
Dmitry-Kogura
Fitoschido
zeugma
Schaffino
Oleg «Eleidan» Kulik
Sergey Zigachev
Martino Barbon
Florian Heissenberger
Aldo Mann""")
        self.set_logo(GdkPixbuf.Pixbuf.new_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX)))
def show(self):
self.run()
self.destroy()
| kagel/foobnix | foobnix/gui/about/about.py | Python | gpl-3.0 | 1,313 | 0.008397 |
#! /usr/bin/env python
###############################################################################
#
# simulavr - A simulator for the Atmel AVR family of microcontrollers.
# Copyright (C) 2001, 2002 Theodore A. Roth
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
#
# $Id: test_SUB.py,v 1.1 2004/07/31 00:59:11 rivetwa Exp $
#
"""Test the SUB opcode.
"""
import base_test
from registers import Reg, SREG
class SUB_TestFail(base_test.TestFail): pass
class base_SUB(base_test.opcode_test):
"""Generic test case for testing SUB opcode.
SUB - Subtract without Carry. [Rd <- Rd - Rr]
opcode is '0001 10rd dddd rrrr' where r and d are registers (d is destination).
Only registers PC, Rd and SREG should be changed.
"""
def setup(self):
# Set SREG to zero
self.setup_regs[Reg.SREG] = 0
# Set the register values
self.setup_regs[self.Rd] = self.Vd
self.setup_regs[self.Rr] = self.Vr
# Return the raw opcode
return 0x1800 | (self.Rd << 4) | ((self.Rr & 0x10) << 5) | (self.Rr & 0xf)
def analyze_results(self):
self.reg_changed.extend( [self.Rd, Reg.SREG] )
# check that result is correct
res = (self.Vd - self.Vr)
expect = res & 0xff
got = self.anal_regs[self.Rd]
if expect != got:
self.fail('SUB r%02d, r%02d: 0x%02x - 0x%02x = (expect=%02x, got=%02x)' % (
self.Rd, self.Rr, self.Vd, self.Vr, expect, got))
expect_sreg = 0
# calculate what we expect sreg to be (I and T should be zero)
carry = ((~self.Vd & self.Vr) | (self.Vr & res) | (res & ~self.Vd))
H = (carry >> 3) & 1
C = (carry >> 7) & 1
V = (((self.Vd & ~self.Vr & ~res) | (~self.Vd & self.Vr & res)) >> 7) & 1
N = ((expect & 0x80) != 0)
expect_sreg += H << SREG.H
expect_sreg += V << SREG.V
expect_sreg += N << SREG.N
expect_sreg += (N ^ V) << SREG.S
expect_sreg += C << SREG.C
expect_sreg += (expect == 0) << SREG.Z
got_sreg = self.anal_regs[Reg.SREG]
if expect_sreg != got_sreg:
self.fail('SUB r%02d, r%02d: 0x%02x - 0x%02x -> SREG (expect=%02x, got=%02x)' % (
self.Rd, self.Rr, self.Vd, self.Vr, expect_sreg, got_sreg))
#
# Template code for test case.
# The fail method will raise a test specific exception.
#
template = """
class SUB_rd%02d_vd%02x_rr%02d_vr%02x_TestFail(SUB_TestFail): pass
class test_SUB_rd%02d_vd%02x_rr%02d_vr%02x(base_SUB):
Rd = %d
Vd = 0x%x
Rr = %d
Vr = 0x%x
def fail(self,s):
raise SUB_rd%02d_vd%02x_rr%02d_vr%02x_TestFail, s
"""
#
# Define a list of test values such that we all the cases of SREG bits being set.
#
vals = (
( 0x00, 0x00 ),
( 0xff, 0x00 ),
( 0xfe, 0x01 ),
( 0x0f, 0x00 ),
( 0x0f, 0xf0 ),
( 0x01, 0x02 ),
( 0x80, 0x01 )
)
#
# automagically generate the test_SUB_rdNN_vdXX_rrNN_vrXX class definitions.
# For these, we don't want Rd=Rr as that is a special case handled below.
#
code = ''
for d in range(0,32,4):
for r in range(1,32,4):
for vd,vr in vals:
args = (d,vd,r,vr)*4
code += template % args
# make sure things work if Rd == Rr
for d in range(2,32,4):
for vd,vr in vals:
args = (d,vd,d,vd)*4
code += template % args
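# Each expansion of the template defines a test case such as
# test_SUB_rd02_vd00_rr02_vr00 together with its matching TestFail
# subclass; exec'ing the accumulated source injects them all into this
# module so the test runner can discover them.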
exec code
| simark/simulavr | regress/test_opcodes/test_SUB.py | Python | gpl-2.0 | 3,851 | 0.027266 |
# The Craftr build system
# Copyright (C) 2016 Niklas Rosenstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`craftr.defaults`
======================
This module provides the default global namespace for Craftr modules. Names
starting with an underscore will be ignored.
"""
from craftr.core import build as _build
from craftr.core.logging import logger
from craftr.core.manifest import Namespace
from craftr.core.session import session, ModuleNotFound
from craftr.utils import path, shell
from craftr.targetbuilder import gtn, TargetBuilder, Framework
from craftr import platform
import builtins as _builtins
import itertools as _itertools
import os as _os
import require
import sys as _sys
require = require.Require(write_bytecode=False)
class ToolDetectionError(Exception):
pass
class ModuleError(Exception):
pass
def include_defs(filename, globals=None):
"""
Uses :mod:`require` to load a Python file and then copies all symbols
that do not start with an underscore into the *globals* dictionary. If
*globals* is not specified, it will fall back to the globals of the frame
that calls the function.
"""
module = require(filename, _stackdepth=1)
if globals is None:
globals = _sys._getframe(1).f_globals
for key, value in vars(module).items():
if not key.startswith('_'):
globals[key] = value
def glob(patterns, parent=None, exclude=(), include_dotfiles=False):
"""
Wrapper for :func:`path.glob` that automatically uses the current modules
project directory for the *parent* argument if it has not been specifically
set.
"""
if parent is None and session and session.module:
parent = session.module.project_dir
return path.glob(patterns, parent, exclude, include_dotfiles)
def local(rel_path):
"""
Given a relative path, returns the absolute path relative to the current
module's project directory.
"""
parent = session.module.project_dir
return path.norm(rel_path, parent)
def buildlocal(rel_path):
"""
Given a relative path, returns the path (still relative) to the build
directory for the current module. This is basically a shorthand for
prepending the module name and version to *path*.
"""
if path.isabs(rel_path):
return rel_path
return path.canonical(path.join(session.module.ident, rel_path))
def relocate_files(files, outdir, suffix, replace_suffix=True, parent=None):
"""
Converts a list of filenames, relocating them to *outdir* and replacing
their existing suffix. If *suffix* is a callable, it will be passed the
new filename and expected to return the same filename, eventually with
a different suffix.
"""
if parent is None:
parent = session.module.project_dir
result = []
for filename in files:
filename = path.join(outdir, path.rel(filename, parent))
filename = path.addsuffix(filename, suffix, replace=replace_suffix)
result.append(filename)
return result
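# Sketch: with *parent* left as the project directory,
#   relocate_files([local('src/main.c')], 'obj', '.o')
# yields ['obj/src/main.o'] -- each file is re-rooted under *outdir* and
# its suffix replaced.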
def filter(predicate, iterable):
"""
Alternative for the built-in ``filter()`` function that returns a list
instead of an iterable (which is the behaviour since Python 3).
"""
result = []
for item in iterable:
if predicate(item):
result.append(item)
return result
def map(procedure, iterable):
"""
Alternative for the built-in ``map()`` function that returns a list instead
of an iterable (which is the behaviour since Python 3).
"""
result = []
for item in iterable:
result.append(procedure(item))
return result
def zip(*iterables, fill=NotImplemented):
"""
Alternative to the Python built-in ``zip()`` function. This function returns
a list rather than an iterable and also supports swapping to the
  :func:`itertools.zip_longest` version if the *fill* parameter is specified.
"""
if fill is NotImplemented:
return list(_builtins.zip(*iterables))
else:
return list(_itertools.zip_longest(*iterables, fillvalue=fill))
def load_module(name, into=None, get_namespace=True, _stackframe=1):
"""
Load a Craftr module by name and return it. If *into* is specified, it must
be a dictionary that will be filled with all the members of the module. Note
that this function returns the namespace object of the module rather than
the actual :class:`craftr.core.session.Module` object that wraps the module
information unless *get_namespace* is False.
The version criteria is read from the current module's manifest.
:param name: The name of the module to load. If this name is suffixed
with the two characters ``.*`` and the *into* parameter is :const:`None`,
the contents of the module will be exported into the globals of the
calling frame.
:param into: If specified, must be a dictionary.
:param get_namespace:
:return: The module namespace object (of type :class:`types.ModuleType`)
or the actual :class:`craftr.core.session.Module` if *get_namespace*
is False.
:raise ModuleNotFound: If the module could not be found.
:raise RuntimeError: If the module that is attempted to be loaded is not
declared in the current module's manifest.
Examples:
.. code:: python
cxx = load_module('lang.cxx')
load_module('lang.cxx.*')
assert cxx.c_compile is c_compile
"""
if name.endswith('.*') and into is None:
name = name[:-2]
into = _sys._getframe(_stackframe).f_globals
if not session:
raise RuntimeError('no session context')
module = session.module
if not module:
raise RuntimeError('no current module')
if name not in module.manifest.dependencies:
raise RuntimeError('"{}" can not load "{}", make sure that it is listed '
'in the dependencies'.format(module.ident, name))
loaded_module = session.find_module(name, module.manifest.dependencies[name])
if not loaded_module.executed:
loaded_module.run()
if into is not None:
module_builtins = frozenset('loader project_dir options'.split())
all_vars = getattr(loaded_module.namespace, '__all__', None)
for key, value in vars(loaded_module.namespace).items():
if all_vars is not None:
if key in all_vars:
into[key] = value
else:
if not key.startswith('_') and key not in module_builtins and key not in globals():
into[key] = value
if get_namespace:
return loaded_module.namespace
return loaded_module
def load_file(filename):
"""
Loads a Python file into a new module-like object and returns it. The
*filename* is assumed relative to the currently executed module's
directory (NOT the project directory which can be different).
"""
if not path.isabs(filename):
filename = path.join(session.module.directory, filename)
with open(filename, 'r') as fp:
code = compile(fp.read(), filename, 'exec')
scope = Namespace()
vars(scope).update(globals())
exec(code, vars(scope))
return scope
def gentool(commands, preamble=None, environ=None, name=None):
"""
Create a :class:`~_build.Tool` object. The name of the tool will be derived
from the variable name it is assigned to unless *name* is specified.
"""
tool = _build.Tool(gtn(name), commands, preamble, environ)
session.graph.add_tool(tool)
return tool
def gentarget(commands, inputs=(), outputs=(), *args, **kwargs):
"""
Create a :class:`~_build.Target` object. The name of the target will be
derived from the variable name it is assigned to unless *name* is specified.
"""
target = _build.Target(gtn(kwargs.pop('name', None)), commands, inputs,
outputs, *args, **kwargs)
session.graph.add_target(target)
return target
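# For instance, assigning the result binds the target name via gtn():
#   archive = gentarget([['tar', 'czf', 'docs.tgz', 'docs/']])
# creates a target called "archive"; pass name='...' to override. (The
# tar invocation is purely illustrative.)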
def runtarget(target, *args, inputs=(), outputs=(), **kwargs):
"""
Simplification of :func:`gentarget` to make it more obvious that a
generate target is actually executed.
"""
name = gtn(kwargs.pop('name', None))
kwargs.setdefault('explicit', True)
return gentarget([[target] + list(args)], inputs, outputs, name=name, **kwargs)
def write_response_file(arguments, builder=None, name=None, force_file=False):
"""
Creates a response-file with the specified *name* in the in the
``buildfiles/`` directory and writes the *arguments* list quoted into
the file. If *builder* is specified, it must be a :class:`TargetBuilder`
and the response file will be added to the implicit dependencies.
If *force_file* is set to True, a file will always be written. Otherwise,
  the function will take into account possible limitations of the platform and decide
whether to write a response file or to return the *arguments* as is.
Returns a tuple of ``(filename, arguments)``. If a response file is written,
the returned *arguments* will be a list with a single string that is the
filename prepended with ``@``. The *filename* part can be None if no
response file needed to be exported.
"""
if not name:
if not builder:
      raise ValueError('builder must be specified if name is not given')
name = builder.name + '.response.txt'
if platform.name != 'win':
return None, arguments
# We'll just assume that there won't be more than 2048 characters for
# other flags. The windows max buffer size is 8192.
content = shell.join(arguments)
if len(content) < 6144:
return None, arguments
filename = buildlocal(path.join('buildfiles', name))
if builder:
builder.implicit_deps.append(filename)
if session.builddir:
path.makedirs(path.dirname(filename))
with open(filename, 'w') as fp:
fp.write(content)
return filename, ['@' + filename]
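# Typical call site (sketch; `objects`, `builder` and `linker` are
# hypothetical names):
#   _, argv = write_response_file(objects, builder=builder)
#   command = [linker] + argv
# Near the Windows command-length limit argv becomes ['@<file>'];
# otherwise the arguments are returned unchanged.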
def error(*message):
"""
Raises a :class:`ModuleError`.
"""
raise ModuleError(' '.join(map(str, message)))
def append_PATH(*paths):
"""
This is a helper function that is used to generate a ``PATH`` environment
variable from the value that already exists and add the specified *paths*
to it. It is typically used for example like this:
.. code:: python
run = gentarget(
commands = [[main, local('example.ini')]],
explicit=True,
environ = {'PATH': append_PATH(qt5.bin_dir if qt5 else None)}
)
"""
result = _os.getenv('PATH')
paths = _os.path.pathsep.join(filter(bool, paths))
if paths:
result += _os.path.pathsep + paths
return result
| winksaville/craftr | craftr/defaults.py | Python | gpl-3.0 | 10,759 | 0.008458 |
# -*- coding: UTF-8 -*-
#
"""css3make tester"""
class GnrCustomWebPage(object):
py_requires = "gnrcomponents/testhandler:TestHandlerBase"
dojo_theme = 'tundra'
def test_1_rounded(self, pane):
sl = pane.slotBar('k,*,test,*')
sl.k.verticalSlider(value='^.k',minimum=0,maximum='30',intermediateChanges=True,height='100px')
test = sl.test.div(width='400px')
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded='15')
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded='12',rounded_left_top=0,rounded_bottom_right=0)
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded_left_top='12',rounded_bottom_right='^.k')
def test_2_shadow(self, pane):
sl = pane.slotBar('x,y,blur,inset,*,test1,test2,*',lbl_font_size='8px',
lbl_position='L',lbl_transform_rotate='-90',cell_border='1px dotted gray',
lbl_width='10px'
)
sl.x.verticalSlider(value='^.x',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='X')
sl.y.verticalSlider(value='^.y',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='Y')
sl.blur.verticalSlider(value='^.blur',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='Blurrone')
sl.inset.checkbox(value='^.inset',label='Inset')
sl.test1.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
shadow='3px 3px 5px gray inset')
sl.test2.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
shadow='3px 3px 5px gray inset',
shadow_x='^.x',shadow_y='^.y',
shadow_blur='^.blur',shadow_inset='^.inset')
def test_3_gradient_fixed(self, pane):
sl = pane.slotBar('deg,fld,tan,*,test,*,test1,*',lbl_position='B',lbl_font_size='8px')
sl.deg.verticalSlider(value='^.deg',minimum=0,maximum=360,intermediateChanges=True,height='100px',lbl='Deg')
fb = sl.fld.formbuilder(cols=6, border_spacing='2px')
fb.numbertextbox(value='^.deg',lbl='deg')
sl.test.div(margin='5px', display='inline-block',
border='1px solid gray', width='100px', height='80px',
gradient_from='white',gradient_to='navy',gradient_deg='^.deg')
sl.test1.div(margin='5px', display='inline-block',
border='1px solid gray', width='100px', height='80px',
gradient_color_0='pink,15',gradient_color_1='yellow,50' ,gradient_color_2='red,100',gradient_deg='^.deg')
def test_4_transform(self, pane):
sl = pane.slotBar('rotate,translatex,translatey,scalex,scaley,skewx,skewy,*,test')
sl.rotate.verticalSlider(value='^.rotate',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.translatex.verticalSlider(value='^.translate_x',minimum=-100,maximum=100,intermediateChanges=True,height='100px',default_value=0)
sl.translatey.verticalSlider(value='^.translate_y',minimum=-100,maximum=100,intermediateChanges=True,height='100px',default_value=0)
sl.scalex.verticalSlider(value='^.scale_x',minimum=0,maximum=1,intermediateChanges=True,height='100px',default_value=1)
sl.scaley.verticalSlider(value='^.scale_y',minimum=0,maximum=1,intermediateChanges=True,height='100px',default_value=1)
sl.skewx.verticalSlider(value='^.skew_x',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.skewy.verticalSlider(value='^.skew_y',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.test.div(margin='5px', display='inline-block', border='1px solid gray', width='50px', height='70px'
,transform_rotate='^.rotate'
,transform_translate_x='^.translate_x',transform_translate_y='^.translate_y'
,transform_scale_x='^.scale_x',transform_scale_y='^.scale_y'
,transform_skew_x='^.skew_x',transform_skew_y='^.skew_y'
)
def test_5_transition(self, pane):
sl = pane.slotBar('w,color,mode,duration,*,test',lbl_position='T')
sl.w.textbox(value='^.w',lbl='width',default_value='3px')
sl.color.textbox(value='^.color',lbl='color',default_value='red')
sl.mode.comboBox(value='^.function',default_value='linear',values='linear,ease,ease-in,ease-out,ease-in-out')
sl.duration.verticalSlider(value='^.duration',minimum=0,maximum=10,intermediateChanges=True,height='100px',default_value=1)
sl.test.div(width='^.w',background='^.color',height='50px',transition='all 3s',border='1px solid gray',transition_function='.^function',
transition_duration='.^duration')
| poppogbr/genropy | packages/test15/webpages/tools/css3make.py | Python | lgpl-2.1 | 5,203 | 0.032866 |
# -*- coding: utf-8 -*-
# TEST_UNICODE_LITERALS
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
from ...extern import six
from ..data_info import dtype_info_name
STRING_TYPE_NAMES = {(False, 'S'): 'str', # PY2
(False, 'U'): 'unicode',
(True, 'S'): 'bytes', # not PY2
(True, 'U'): 'str'}
DTYPE_TESTS = ((np.array(b'abcd').dtype, STRING_TYPE_NAMES[(not six.PY2, 'S')] + '4'),
(np.array(u'abcd').dtype, STRING_TYPE_NAMES[(not six.PY2, 'U')] + '4'),
('S4', STRING_TYPE_NAMES[(not six.PY2, 'S')] + '4'),
('U4', STRING_TYPE_NAMES[(not six.PY2, 'U')] + '4'),
(np.void, 'void'),
(np.int32, 'int32'),
(np.bool, 'bool'),
(bool, 'bool'),
(float, 'float64'),
('<f4', 'float32'),
('u8', 'uint64'),
('c16', 'complex128'),
('object', 'object'))
@pytest.mark.parametrize('input,output', DTYPE_TESTS)
def test_dtype_info_name(input, output):
"""
Test that dtype_info_name is giving the expected output
Here the available types::
'b' boolean
'i' (signed) integer
'u' unsigned integer
'f' floating-point
'c' complex-floating point
'O' (Python) objects
'S', 'a' (byte-)string
'U' Unicode
'V' raw data (void)
"""
assert dtype_info_name(input) == output
| AustereCuriosity/astropy | astropy/utils/tests/test_data_info.py | Python | bsd-3-clause | 1,575 | 0.00127 |
print("Greetings Earth! We come in peace.")
| morepj/numerical-mooc | working/HelloWorld.py | Python | mit | 44 | 0 |
from __future__ import print_function
from __future__ import absolute_import
import re
from pyingest.config.utils import u2asc
from .default import DefaultParser
from .author_names import AuthorNames
from .entity_convert import EntityConverter
head_dict = {'TITLE:': 'journal', 'NUMBER:': 'volume', 'SUBJECT:': 'title',
'DATE:': 'pubdate', 'FROM:': 'email'
}
class GCNCParser(DefaultParser):
def __init__(self, data):
# econv = EntityConverter()
# econv.input_text = data
# econv.convert()
# self.raw = econv.output_text
self.raw = data
self.data_dict = dict()
def make_pubdate(self):
input_date = self.data_dict['pubdate']
yymmdd = input_date.split('/')
if int(yymmdd[0]) > 50:
year = '19' + yymmdd[0]
else:
year = '20' + yymmdd[0]
pubdate = year + '/' + yymmdd[1]
self.data_dict['pubdate'] = pubdate
def make_bibcode(self):
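        # ADS bibcodes are fixed-width 19-character identifiers built from
        # year, journal, volume, page and first-author initial; the volume
        # and page fields are padded with dots to keep the width constant.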
year = self.data_dict['pubdate'][0:4]
bibcode = 'GCN.'
self.data_dict['volume'] = self.data_dict['volume'].ljust(5, '.')
volume = self.data_dict['volume'].ljust(9, '.') + '1'
try:
init = u2asc(self.data_dict['authors'][0][0])
except Exception as err:
print ("Problem generating author initial")
init = '.'
self.data_dict['bibcode'] = year + bibcode + volume + init
def make_publication(self):
base_string = 'GRB Coordinates Network, Circular Service, No. '
self.data_dict['publication'] = base_string + self.data_dict['volume']
self.data_dict['page'] = '1'
def split_authors_abstract(self):
# This could be used to extract affils and apply them to authors,
# but the process of doing so is unwieldy. I'm leaving code that
# was my initial try but commented out.
body = self.data_dict['abstract']
while body[0] == '':
body.pop(0)
auths = []
affils = []
while body[0] != '' and ':' not in body[0]:
auths.append(body.pop(0).strip())
auths.append(body.pop(0).strip())
auth_delimiter = u'| '
auth_string = ' '.join(auths)
auth_string = re.sub(r'\s+\((.*?)\)', ',', auth_string)
auth_string = re.sub(r'[ ,]and\s', ',', auth_string)
auth_string = re.sub(r'on behalf of', ',', auth_string)
auth_string = re.sub(r'reports?', ',', auth_string)
auth_string = re.sub(r'\s?:', '', auth_string)
auth_string = re.sub(r',?\s+,', ',', auth_string)
auth_array = [s.strip() for s in auth_string.split(',')]
auth_array = list([a for a in auth_array if len(a) > 3])
# auth_string = u'; '.join(auth_array)
auth_string = auth_delimiter.join(auth_array)
auth_mod = AuthorNames()
# self.data_dict['authors'] = auth_mod.parse(auth_string)
self.data_dict['authors'] = auth_mod.parse(auth_string, delimiter=auth_delimiter)
self.data_dict['authors'] = re.sub(r'\| ', u';', self.data_dict['authors'])
def parse(self):
self.data_dict = {}
# Start by looking at the Circular line by line...
try:
gdata = self.raw.split('\n')
# Header is fixed format and five lines long...
head = gdata[0:5]
for l in head:
lfix = l.replace(' ', '\t', 1)
lparts = lfix.split('\t')
self.data_dict[head_dict[lparts[0]]] = lparts[1].strip()
# Now you need to split the authors from the abstract.
# This should work in *most* cases, maybe not all,
# especially from older (pre-2016) Circulars
self.data_dict['abstract'] = gdata[5:]
self.split_authors_abstract()
# Authors and abstract content should now be defined
# If you want to try and keep fixed formatting
# (e.g. for tables), use '\n' for the join character
abstract_new = ' '.join(self.data_dict['abstract'])
self.data_dict['abstract'] = abstract_new.strip()
# Extract pubdate from the header date
self.make_pubdate()
# Create the bibcode from circular info
self.make_bibcode()
# Make the publication string
self.make_publication()
# Pass the necessary fields through EntityConverter
ec_fields = ['authors', 'abstract', 'title']
econv = EntityConverter()
for ecf in ec_fields:
econv.input_text = self.data_dict[ecf]
econv.convert()
self.data_dict[ecf] = econv.output_text
except Exception as err:
self.data_dict['raw'] = self.raw
self.data_dict['error'] = err
return self.data_dict
| adsabs/adsabs-pyingest | pyingest/parsers/gcncirc.py | Python | mit | 4,888 | 0.000818 |
#!/usr/bin/env python
# Author: Richard Bradshaw, R.T.Bradshaw@soton.ac.uk
# Module to fit various curves to provided x/y data
# Current available curves: Linear, Gaussian, Lorentzian, Voigt
# Requirements: lmfit, numpy, matplotlib (as dependencies of lmfit)
from lmfit.models import LinearModel,GaussianModel,LorentzianModel,VoigtModel
class FitError(Exception):
"""Exception in lmfit wrapper"""
class Fit():
"""Class to contain methods for curve fitting from
lmfit package."""
def __init__(self, data):
"""Usage: Fit(data). Initialises data as xs and ys attributes.
Data should be a 2d numpy array of x and y values."""
if len(data) != 2:
raise FitError("""Your data is formatted incorrectly -
it should be a 2D array of all x-,
then all y-values""")
self.xs = data[0]
self.ys = data[1]
def __str__(self):
"""Prints lmfit fit report for the current object"""
try:
return self.fit.fit_report()
except AttributeError:
return "No fit yet performed for this object."
def linear(self, **kwargs):
"""Linear fit of data. Usage: self.linear([**kwargs])
kwargs are passed to a lmfit LinearModel."""
self._mod = LinearModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def gaussian(self, **kwargs):
"""Gaussian fit of data. Usage: self.gaussian([**kwargs])
kwargs are passed to a lmfit GaussianModel."""
self._mod = GaussianModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def lorentzian(self, **kwargs):
"""Lorentzian fit of datia. Usage: self.lorentzian([**kwargs])
kwargs are passed to a lmfit LorentzianModel."""
self._mod = LorentzianModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def voigt(self, **kwargs):
"""Voigt fit of data. Usage: self.voigt([**kwargs])
kwargs are passed to a lmfit VoigtModel."""
self._mod = VoigtModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def plots(self, **kwargs):
"""Returns matplotlib axes with original data, fitted
function & initial model.
Usage: self.plots([**kwargs])
kwargs are passed to lmfit.ModelResult.plot_fit"""
try:
return self.fit.plot_fit(**kwargs)
except AttributeError:
raise FitError("No fit yet performed to plot!")
class Multifit():
"""Composite model from two or more underlying models,
passed as Fit objects defined in lmcurvefit. Models in
Fit objects should have been defined with unique prefixes
otherwise output in the composite model may be confusing/incorrect."""
def __init__(self, *args):
"""Usage: Multifit(model1, [model2, model3, ...])
Reads in individual models to perform a composite fit.
Models should be read in as Fit objects with their
own defined individual models already assigned"""
self._mods = args
try:
self._pars = self._mods[0]._mod.make_params()
except AttributeError:
raise FitError("""Your starting models should be read in as Fit objects
each with a single model already defined.""")
for fit in self._mods[1:]:
self._pars.update(fit._mod.make_params())
def __str__(self):
"""Prints lmfit fit report for the current object"""
try:
return self.total_fit.fit_report()
except AttributeError:
return "No composite fit yet performed for this object."
def init_params(self, prefix='', center=0, sigma=10, amplitude=10):
"""Usage: self.init_params([prefix='', center=0, sigma=10, amplitude=10])
Sets initial guess parameters for the model defined with 'prefix'."""
self._pars[prefix+'center'].set(center)
self._pars[prefix+'sigma'].set(sigma)
self._pars[prefix+'amplitude'].set(amplitude)
def make_mod(self):
"""Usage: self.make_mod()
Makes composite model from all models read in."""
self._compmod = self._mods[0]._mod
for fit in self._mods[1:]:
self._compmod += fit._mod
def do_multifit(self,data):
"""Usage: self.do_multifit(data)
Performs fitting of data to composite model.
Data should be a 2D numpy array of x and y values"""
if len(data) != 2:
raise FitError("""Your data is formatted incorrectly -
it should be a 2D array of all x-,
then all y-values""")
self.xs = data[0]
self.ys = data[1]
try:
self.total_fit = self._compmod.fit(self.ys, self._pars, x=self.xs)
self.composite_fits = self.total_fit.eval_components(x=self.xs)
except AttributeError:
raise FitError("""You don't seem to have a composite model - run
make_mod() first!""")
def plots(self, **kwargs):
"""Returns matplotlib axes with original data, fitted
function & initial model.
Usage: self.plots([**kwargs])
kwargs are passed to lmfit.ModelResult.plot_fit"""
try:
return self.total_fit.plot_fit(**kwargs)
except AttributeError:
raise FitError("No fit yet performed to plot!")
| rtb1c13/scripts | IR_lineshapes/lmcurvefit.py | Python | gpl-2.0 | 5,749 | 0.003131 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This script will migrate the database of an openvswitch or linuxbridge
plugin so that it can be used with the ml2 plugin.
Known Limitations:
- THIS SCRIPT IS DESTRUCTIVE! Make sure to backup your
Neutron database before running this script, in case anything goes
wrong.
- It will be necessary to upgrade the database to the target release
via neutron-db-manage before attempting to migrate to ml2.
Initially, only the icehouse release is supported.
- This script does not automate configuration migration.
Example usage:
python -m neutron.db.migration.migrate_to_ml2 openvswitch \
mysql://login:pass@127.0.0.1/neutron
Note that migration of tunneling state will only be attempted if the
--tunnel-type parameter is provided.
To manually test migration from ovs to ml2 with devstack:
- stack with Q_PLUGIN=openvswitch
- boot an instance and validate connectivity
- stop the neutron service and all agents
- run the neutron-migrate-to-ml2 script
- update /etc/neutron/neutron.conf as follows:
core_plugin = neutron.plugins.ml2.plugin.Ml2Plugin
- Create /etc/neutron/plugins/ml2/ml2_conf.ini and ensure that:
- ml2.mechanism_drivers includes 'openvswitch'
- ovs.local_ip is set correctly
- database.connection is set correctly
- Start the neutron service with the ml2 config file created in
the previous step in place of the openvswitch config file
- Start all the agents
- verify that the booted instance still has connectivity
- boot a second instance and validate connectivity
"""
import argparse
from oslo_db.sqlalchemy import session
import sqlalchemy as sa
from neutron.extensions import portbindings
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers import type_vxlan
# Migration targets
LINUXBRIDGE = 'linuxbridge'
OPENVSWITCH = 'openvswitch'
# Releases
ICEHOUSE = 'icehouse'
JUNO = 'juno'
SUPPORTED_SCHEMA_VERSIONS = [ICEHOUSE, JUNO]
def check_db_schema_version(engine, metadata):
"""Check that current version of the db schema is supported."""
version_table = sa.Table(
'alembic_version', metadata, autoload=True, autoload_with=engine)
versions = [v[0] for v in engine.execute(version_table.select())]
if not versions:
raise ValueError(_("Missing version in alembic_versions table"))
elif len(versions) > 1:
raise ValueError(_("Multiple versions in alembic_versions table: %s")
% versions)
current_version = versions[0]
if current_version not in SUPPORTED_SCHEMA_VERSIONS:
raise SystemError(_("Unsupported database schema %(current)s. "
"Please migrate your database to one of following "
"versions: %(supported)s")
% {'current': current_version,
'supported': ', '.join(SUPPORTED_SCHEMA_VERSIONS)}
)
# Duplicated from neutron.plugins.linuxbridge.common.constants to
# avoid having any dependency on the linuxbridge plugin being
# installed.
def interpret_vlan_id(vlan_id):
"""Return (network_type, segmentation_id) tuple for encoded vlan_id."""
FLAT_VLAN_ID = -1
LOCAL_VLAN_ID = -2
if vlan_id == LOCAL_VLAN_ID:
return (p_const.TYPE_LOCAL, None)
elif vlan_id == FLAT_VLAN_ID:
return (p_const.TYPE_FLAT, None)
else:
return (p_const.TYPE_VLAN, vlan_id)
class BaseMigrateToMl2(object):
def __init__(self, vif_type, driver_type, segment_table_name,
vlan_allocation_table_name, old_tables):
self.vif_type = vif_type
self.driver_type = driver_type
self.segment_table_name = segment_table_name
self.vlan_allocation_table_name = vlan_allocation_table_name
self.old_tables = old_tables
def __call__(self, connection_url, save_tables=False, tunnel_type=None,
vxlan_udp_port=None):
engine = session.create_engine(connection_url)
metadata = sa.MetaData()
check_db_schema_version(engine, metadata)
if hasattr(self, 'define_ml2_tables'):
self.define_ml2_tables(metadata)
# Autoload the ports table to ensure that foreign keys to it and
# the network table can be created for the new tables.
sa.Table('ports', metadata, autoload=True, autoload_with=engine)
metadata.create_all(engine)
self.migrate_network_segments(engine, metadata)
if tunnel_type:
self.migrate_tunnels(engine, tunnel_type, vxlan_udp_port)
self.migrate_vlan_allocations(engine)
self.migrate_port_bindings(engine, metadata)
if hasattr(self, 'drop_old_tables'):
self.drop_old_tables(engine, save_tables)
def migrate_segment_dict(self, binding):
binding['id'] = uuidutils.generate_uuid()
def migrate_network_segments(self, engine, metadata):
# Migrating network segments requires loading the data to python
# so that a uuid can be generated for each segment.
source_table = sa.Table(self.segment_table_name, metadata,
autoload=True, autoload_with=engine)
source_segments = engine.execute(source_table.select())
ml2_segments = [dict(x) for x in source_segments]
for segment in ml2_segments:
self.migrate_segment_dict(segment)
if ml2_segments:
ml2_network_segments = metadata.tables['ml2_network_segments']
engine.execute(ml2_network_segments.insert(), ml2_segments)
def migrate_tunnels(self, engine, tunnel_type, vxlan_udp_port=None):
"""Override this method to perform plugin-specific tunnel migration."""
pass
def migrate_vlan_allocations(self, engine):
engine.execute(("""
INSERT INTO ml2_vlan_allocations
SELECT physical_network, vlan_id, allocated
FROM %(source_table)s
WHERE allocated = TRUE
""") % {'source_table': self.vlan_allocation_table_name})
def get_port_segment_map(self, engine):
"""Retrieve a mapping of port id to segment id.
The monolithic plugins only support a single segment per
network, so the segment id can be uniquely identified by
the network associated with a given port.
"""
port_segments = engine.execute("""
SELECT ports_network.port_id, ml2_network_segments.id AS segment_id
FROM ml2_network_segments, (
SELECT portbindingports.port_id, ports.network_id
FROM portbindingports, ports
WHERE portbindingports.port_id = ports.id
) AS ports_network
WHERE ml2_network_segments.network_id = ports_network.network_id
""")
return dict(x for x in port_segments)
def migrate_port_bindings(self, engine, metadata):
port_segment_map = self.get_port_segment_map(engine)
port_binding_ports = sa.Table('portbindingports', metadata,
autoload=True, autoload_with=engine)
source_bindings = engine.execute(port_binding_ports.select())
ml2_bindings = [dict(x) for x in source_bindings]
for binding in ml2_bindings:
binding['vif_type'] = self.vif_type
binding['driver'] = self.driver_type
segment = port_segment_map.get(binding['port_id'])
if segment:
binding['segment'] = segment
if ml2_bindings:
ml2_port_bindings = metadata.tables['ml2_port_bindings']
engine.execute(ml2_port_bindings.insert(), ml2_bindings)
class BaseMigrateToMl2_IcehouseMixin(object):
"""A mixin to ensure ml2 database schema state for Icehouse.
    This class creates the missing tables for Icehouse schema revisions. In
    Juno, the schema state has been healed, so we do not need to run these.
"""
def drop_old_tables(self, engine, save_tables=False):
if save_tables:
return
old_tables = self.old_tables + [self.vlan_allocation_table_name,
self.segment_table_name]
for table_name in old_tables:
engine.execute('DROP TABLE %s' % table_name)
def define_ml2_tables(self, metadata):
sa.Table(
'arista_provisioned_nets', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_id', sa.String(length=36), nullable=True),
sa.Column('segmentation_id', sa.Integer(),
autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'arista_provisioned_vms', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('vm_id', sa.String(length=255), nullable=True),
sa.Column('host_id', sa.String(length=255), nullable=True),
sa.Column('port_id', sa.String(length=36), nullable=True),
sa.Column('network_id', sa.String(length=36), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'arista_provisioned_tenants', metadata,
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'cisco_ml2_nexusport_bindings', metadata,
sa.Column('binding_id', sa.Integer(), nullable=False),
sa.Column('port_id', sa.String(length=255), nullable=True),
sa.Column('vlan_id', sa.Integer(), autoincrement=False,
nullable=False),
sa.Column('switch_ip', sa.String(length=255), nullable=True),
sa.Column('instance_id', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('binding_id'),
)
sa.Table(
'cisco_ml2_credentials', metadata,
sa.Column('credential_id', sa.String(length=255), nullable=True),
sa.Column('tenant_id', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255),
nullable=False),
sa.Column('user_name', sa.String(length=255), nullable=True),
sa.Column('password', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('tenant_id', 'credential_name'),
)
sa.Table(
'ml2_flat_allocations', metadata,
sa.Column('physical_network', sa.String(length=64),
nullable=False),
sa.PrimaryKeyConstraint('physical_network'),
)
sa.Table(
'ml2_gre_allocations', metadata,
sa.Column('gre_id', sa.Integer, nullable=False,
autoincrement=False),
sa.Column('allocated', sa.Boolean, nullable=False),
sa.PrimaryKeyConstraint('gre_id'),
)
sa.Table(
'ml2_gre_endpoints', metadata,
sa.Column('ip_address', sa.String(length=64)),
sa.PrimaryKeyConstraint('ip_address'),
)
sa.Table(
'ml2_network_segments', metadata,
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_id', sa.String(length=36), nullable=False),
sa.Column('network_type', sa.String(length=32), nullable=False),
sa.Column('physical_network', sa.String(length=64), nullable=True),
sa.Column('segmentation_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
sa.Table(
'ml2_port_bindings', metadata,
sa.Column('port_id', sa.String(length=36), nullable=False),
sa.Column('host', sa.String(length=255), nullable=False),
sa.Column('vif_type', sa.String(length=64), nullable=False),
sa.Column('driver', sa.String(length=64), nullable=True),
sa.Column('segment', sa.String(length=36), nullable=True),
sa.Column('vnic_type', sa.String(length=64), nullable=False,
server_default='normal'),
sa.Column('vif_details', sa.String(4095), nullable=False,
server_default=''),
sa.Column('profile', sa.String(4095), nullable=False,
server_default=''),
sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['segment'], ['ml2_network_segments.id'],
ondelete='SET NULL'),
sa.PrimaryKeyConstraint('port_id'),
)
sa.Table(
'ml2_vlan_allocations', metadata,
sa.Column('physical_network', sa.String(length=64),
nullable=False),
sa.Column('vlan_id', sa.Integer(), autoincrement=False,
nullable=False),
sa.Column('allocated', sa.Boolean(), autoincrement=False,
nullable=False),
sa.PrimaryKeyConstraint('physical_network', 'vlan_id'),
)
sa.Table(
'ml2_vxlan_allocations', metadata,
sa.Column('vxlan_vni', sa.Integer, nullable=False,
autoincrement=False),
sa.Column('allocated', sa.Boolean, nullable=False),
sa.PrimaryKeyConstraint('vxlan_vni'),
)
sa.Table(
'ml2_vxlan_endpoints', metadata,
sa.Column('ip_address', sa.String(length=64)),
sa.Column('udp_port', sa.Integer(), nullable=False,
autoincrement=False),
sa.PrimaryKeyConstraint('ip_address', 'udp_port'),
)
class MigrateLinuxBridgeToMl2_Juno(BaseMigrateToMl2):
def __init__(self):
super(MigrateLinuxBridgeToMl2_Juno, self).__init__(
vif_type=portbindings.VIF_TYPE_BRIDGE,
driver_type=LINUXBRIDGE,
segment_table_name='network_bindings',
vlan_allocation_table_name='network_states',
old_tables=['portbindingports'])
def migrate_segment_dict(self, binding):
super(MigrateLinuxBridgeToMl2_Juno, self).migrate_segment_dict(
binding)
vlan_id = binding.pop('vlan_id')
network_type, segmentation_id = interpret_vlan_id(vlan_id)
binding['network_type'] = network_type
binding['segmentation_id'] = segmentation_id
class MigrateOpenvswitchToMl2_Juno(BaseMigrateToMl2):
def __init__(self):
super(MigrateOpenvswitchToMl2_Juno, self).__init__(
vif_type=portbindings.VIF_TYPE_OVS,
driver_type=OPENVSWITCH,
segment_table_name='ovs_network_bindings',
vlan_allocation_table_name='ovs_vlan_allocations',
old_tables=[
'ovs_tunnel_allocations',
'ovs_tunnel_endpoints',
'portbindingports',
])
def migrate_tunnels(self, engine, tunnel_type, vxlan_udp_port=None):
if tunnel_type == p_const.TYPE_GRE:
engine.execute("""
INSERT INTO ml2_gre_allocations
SELECT tunnel_id as gre_id, allocated
FROM ovs_tunnel_allocations
WHERE allocated = TRUE
""")
engine.execute("""
INSERT INTO ml2_gre_endpoints
SELECT ip_address
FROM ovs_tunnel_endpoints
""")
elif tunnel_type == p_const.TYPE_VXLAN:
if not vxlan_udp_port:
vxlan_udp_port = type_vxlan.VXLAN_UDP_PORT
engine.execute("""
INSERT INTO ml2_vxlan_allocations
SELECT tunnel_id as vxlan_vni, allocated
FROM ovs_tunnel_allocations
WHERE allocated = TRUE
""")
engine.execute(sa.text("""
INSERT INTO ml2_vxlan_endpoints
SELECT ip_address, :udp_port as udp_port
FROM ovs_tunnel_endpoints
"""), udp_port=vxlan_udp_port)
else:
raise ValueError(_('Unknown tunnel type: %s') % tunnel_type)
class MigrateLinuxBridgeToMl2_Icehouse(MigrateLinuxBridgeToMl2_Juno,
BaseMigrateToMl2_IcehouseMixin):
pass
class MigrateOpenvswitchToMl2_Icehouse(MigrateOpenvswitchToMl2_Juno,
BaseMigrateToMl2_IcehouseMixin):
pass
migrate_map = {
ICEHOUSE: {
OPENVSWITCH: MigrateOpenvswitchToMl2_Icehouse,
LINUXBRIDGE: MigrateLinuxBridgeToMl2_Icehouse,
},
JUNO: {
OPENVSWITCH: MigrateOpenvswitchToMl2_Juno,
LINUXBRIDGE: MigrateLinuxBridgeToMl2_Juno,
},
}
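# Example invocation, matching the argparse options defined in main() below
# (hypothetical connection URL):
#   python migrate_to_ml2.py openvswitch mysql://user:pass@db-host/neutron \
#       --tunnel-type gre --release icehouse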
def main():
parser = argparse.ArgumentParser()
parser.add_argument('plugin', choices=[OPENVSWITCH, LINUXBRIDGE],
help=_('The plugin type whose database will be '
'migrated'))
parser.add_argument('connection',
help=_('The connection url for the target db'))
parser.add_argument('--tunnel-type', choices=[p_const.TYPE_GRE,
p_const.TYPE_VXLAN],
help=_('The %s tunnel type to migrate from') %
OPENVSWITCH)
parser.add_argument('--vxlan-udp-port', default=None, type=int,
help=_('The UDP port to use for VXLAN tunnels.'))
parser.add_argument('--release', default=JUNO, choices=[ICEHOUSE, JUNO])
parser.add_argument('--save-tables', default=False, action='store_true',
help=_("Retain the old plugin's tables"))
#TODO(marun) Provide a verbose option
args = parser.parse_args()
if args.plugin == LINUXBRIDGE and (args.tunnel_type or
args.vxlan_udp_port):
msg = _('Tunnel args (tunnel-type and vxlan-udp-port) are not valid '
'for the %s plugin')
parser.error(msg % LINUXBRIDGE)
try:
migrate_func = migrate_map[args.release][args.plugin]()
except KeyError:
msg = _('Support for migrating %(plugin)s for release '
'%(release)s is not yet implemented')
parser.error(msg % {'plugin': args.plugin, 'release': args.release})
else:
migrate_func(args.connection, args.save_tables, args.tunnel_type,
args.vxlan_udp_port)
if __name__ == '__main__':
main()
| cloudbase/neutron-virtualbox | neutron/db/migration/migrate_to_ml2.py | Python | apache-2.0 | 19,607 | 0.000102 |
import nltk.data
from nltk.tokenize import word_tokenize, sent_tokenize
from util import errors, cleaning
def tokenize(**kwargs):
"""Tokenize text using nltk's tokenizer."""
if 'text' in kwargs.keys():
return word_tokenize(kwargs['text'])
raise errors.CustomAPIError('No text argument found.', status_code=400, payload={'arguments':kwargs.keys()})
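# Usage sketch (assumes NLTK's tokenizer data is installed):
#   tokenize(text='Hello world')  ->  ['Hello', 'world']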
def sentence_split(**kwargs):
"""Split sentences using nltk."""
tokenizer = nltk.data.load('tokenizers/punkt/dutch.pickle')
if 'text' in kwargs.keys():
cleaner = cleaning.Clean()
cleaner.feed(kwargs['text'])
cleanedText = cleaner.get_data()
return tokenizer.tokenize(cleanedText)
raise errors.CustomAPIError('No text argument found.', status_code=400, payload={'arguments':kwargs.keys()}) | 904labs/ctTrakr | nlp/simple.py | Python | mit | 764 | 0.024869 |
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests of the branch interface."""
__metaclass__ = type
from bzrlib.branch import format_registry as branch_format_registry
from bzrlib.bzrdir import BzrProber
from bzrlib.repository import format_registry as repo_format_registry
from lp.code.bzr import (
BranchFormat,
ControlFormat,
RepositoryFormat,
)
import lp.codehosting # For plugins.
from lp.testing import TestCase
class TestFormatSupport(TestCase):
"""Ensure the launchpad format list is up-to-date.
While ideally we would ensure that the lists of markers were the same,
early branch and repo formats did not use markers. (The branch/repo
was implied by the control dir format.)
"""
def test_control_format_complement(self):
self.bzrlib_is_subset(BzrProber.formats.keys(), ControlFormat)
def test_branch_format_complement(self):
self.bzrlib_is_subset(branch_format_registry.keys(), BranchFormat)
def test_repository_format_complement(self):
self.bzrlib_is_subset(repo_format_registry.keys(), RepositoryFormat)
def bzrlib_is_subset(self, bzrlib_formats, launchpad_enum):
"""Ensure the bzr format marker list is a subset of launchpad."""
bzrlib_format_strings = set(bzrlib_formats)
launchpad_format_strings = set(format.title for format
in launchpad_enum.items)
self.assertEqual(
set(), bzrlib_format_strings.difference(launchpad_format_strings))
def test_repositoryDescriptions(self):
self.checkDescriptions(RepositoryFormat)
def test_branchDescriptions(self):
self.checkDescriptions(BranchFormat)
def test_controlDescriptions(self):
self.checkDescriptions(ControlFormat)
def checkDescriptions(self, format_enums):
for item in format_enums.items:
description = item.description
if description.endswith('\n'):
description = description[:-1]
self.assertTrue(len(description.split('\n')) == 1,
item.description)
| abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/code/interfaces/tests/test_branch.py | Python | agpl-3.0 | 2,218 | 0 |
# This script gives an example of how to scrape a webpage
import requests
import datetime
from BeautifulSoup import BeautifulSoup
url = "http://chicagofoodtruckfinder.com/weekly-schedule"
truck_data_list = []
# 'date' is referenced below but was never defined in the original script;
# assume the scrape date as a hypothetical fix.
date = datetime.date.today().isoformat()
soup = BeautifulSoup(requests.post(url).text)
table = soup.find("table").findAll("tr")
days = [d.text for d in table[0].findAll("th")[1:]]
print days
# skip the first row because it is a header
for location in table[1:]:
cols = location.findAll("td")
loc_name = cols[0].text
for i, c in enumerate(cols[1:]):
print i
print c
trucks = c.findAll("img")
        if trucks:  # findAll returns a (possibly empty) list, never None
for t in trucks:
time_name = t["title"]
am_spt = time_name.split("AM")
pm_spt = time_name.split("PM")
if len(pm_spt) > 1 and len(am_spt) > 1:
name = pm_spt[1]
if len(pm_spt) > 1 and len(am_spt) == 1:
name = pm_spt[2]
if len(pm_spt) == 1 and len(am_spt) > 1:
name = am_spt[2]
time = time_name.replace(name, "")
truck_data_list.append({"name": name,
"time": time,
"week_day": days[i],
"date": date,
"location": loc_name})
| lbybee/Python-for-Econ | Chap_4/scraper_example.py | Python | gpl-2.0 | 1,393 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-06-20 18:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posgradmin', '0034_auto_20190620_1333'),
]
operations = [
migrations.RemoveField(
model_name='asignatura',
name='clave',
),
migrations.AddField(
model_name='curso',
name='clave',
field=models.CharField(blank=True, max_length=20, null=True),
),
migrations.AddField(
model_name='curso',
name='entidad',
field=models.CharField(blank=True, choices=[(3, 3), (700, 700), (800, 800)], max_length=20, null=True),
),
migrations.AlterField(
model_name='curso',
name='sede',
field=models.CharField(blank=True, choices=[(b'CDMX', b'CDMX'), (b'Morelia', b'Morelia'), ('Le\xf3n', 'Le\xf3n')], max_length=80, null=True),
),
]
| sostenibilidad-unam/posgrado | posgradmin/posgradmin/migrations/0035_auto_20190620_1343.py | Python | gpl-3.0 | 1,049 | 0.001907 |
__author__ = 'Vitalii K'
from itertools import groupby
SEQ_LENGTH = 4
def is_in_matrix(m):
len_list = [[len(list(group)) for key, group in groupby(j)] for j in m]
if any(map(lambda x: [i for i in x if i >= SEQ_LENGTH], len_list)):
return True
return False
def get_diagonals(m):
d = []
for o in range(-len(m) + SEQ_LENGTH, len(m) - SEQ_LENGTH + 1):
d.append([r[i + o] for i, r in enumerate(m) if 0 <= i + o < len(r)])
return d
def has_sequence(matrix):
if is_in_matrix(matrix):
return True
if is_in_matrix(map(lambda *row: list(row), *matrix)):
return True
if is_in_matrix(get_diagonals(matrix)):
return True
if is_in_matrix(get_diagonals(list(reversed(matrix)))):
return True
return False
if __name__ == '__main__':
# These "asserts" using only for self-checking and not necessary for auto-testing
assert has_sequence([
[1, 2, 1, 1],
[1, 1, 4, 1],
[1, 3, 1, 6],
[1, 7, 2, 5]
]), "Vertical"
assert not has_sequence([
[7, 1, 4, 1],
[1, 2, 5, 2],
[3, 4, 1, 3],
[1, 1, 8, 1]
]), "Nothing here"
assert has_sequence([
[2, 1, 1, 6, 1],
[1, 3, 2, 1, 1],
[4, 1, 1, 3, 1],
[5, 5, 5, 5, 5],
[1, 1, 3, 1, 1]
]), "Long Horizontal"
assert has_sequence([
[7, 1, 1, 8, 1, 1],
[1, 1, 7, 3, 1, 5],
[2, 3, 1, 2, 5, 1],
[1, 1, 1, 5, 1, 4],
[4, 6, 5, 1, 3, 1],
[1, 1, 9, 1, 2, 1]
]), "Diagonal"
print("All set? Click 'Check' to review your code and earn rewards!")
| tivaliy/empire-of-code | find_sequence.py | Python | gpl-2.0 | 1,551 | 0.000645 |
from django.contrib import admin
from models import *
admin.site.register(Section) | igudym/twango | twango/template/default/src/apps/twango_dashboard/admin.py | Python | bsd-3-clause | 83 | 0.012048 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for parser and parser plugin presets."""
from __future__ import unicode_literals
import unittest
from plaso.containers import artifacts
from plaso.parsers import presets
from tests import test_lib as shared_test_lib
class ParserPresetTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin preset."""
def testInitialize(self):
"""Tests the __init__ function."""
test_definition = presets.ParserPreset('test', ['parser1', 'parser2'])
self.assertIsNotNone(test_definition)
class ParserPresetsManagerTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin presets manager."""
_LINUX_PARSERS = [
'bash_history',
'bencode',
'czip/oxml',
'dockerjson',
'dpkg',
'filestat',
'gdrive_synclog',
'olecf',
'pls_recall',
'popularity_contest',
'selinux',
'sqlite/google_drive',
'sqlite/skype',
'sqlite/zeitgeist',
'syslog',
'systemd_journal',
'utmp',
'vsftpd',
'webhist',
'xchatlog',
'xchatscrollback',
'zsh_extended_history']
_MACOS_PARSERS = [
'asl_log',
'bash_history',
'bencode',
'bsm_log',
'cups_ipp',
'czip/oxml',
'filestat',
'fseventsd',
'gdrive_synclog',
'mac_appfirewall_log',
'mac_keychain',
'mac_securityd',
'macwifi',
'olecf',
'plist',
'sqlite/appusage',
'sqlite/google_drive',
'sqlite/imessage',
'sqlite/ls_quarantine',
'sqlite/mac_document_versions',
'sqlite/mackeeper_cache',
'sqlite/skype',
'syslog',
'utmpx',
'webhist',
'zsh_extended_history']
# TODO add tests for _ReadPresetDefinitionValues
# TODO add tests for _ReadPresetsFromFileObject
def testGetNames(self):
"""Tests the GetNames function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_names = list(test_manager.GetNames())
self.assertEqual(len(test_names), 7)
expected_names = sorted([
'android', 'linux', 'macos', 'webhist', 'win7', 'win_gen', 'winxp'])
self.assertEqual(test_names, expected_names)
def testGetParsersByPreset(self):
"""Tests the GetParsersByPreset function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_names = test_manager.GetParsersByPreset('linux')
self.assertEqual(parser_names, self._LINUX_PARSERS)
with self.assertRaises(KeyError):
test_manager.GetParsersByPreset('bogus')
def testGetPresetByName(self):
"""Tests the GetPresetByName function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_preset = test_manager.GetPresetByName('linux')
self.assertIsNotNone(test_preset)
self.assertEqual(test_preset.name, 'linux')
self.assertEqual(test_preset.parsers, self._LINUX_PARSERS)
test_preset = test_manager.GetPresetByName('bogus')
self.assertIsNone(test_preset)
def testGetPresetsByOperatingSystem(self):
"""Tests the GetPresetsByOperatingSystem function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
operating_system = artifacts.OperatingSystemArtifact(family='MacOS')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 1)
self.assertEqual(test_presets[0].name, 'macos')
self.assertEqual(test_presets[0].parsers, self._MACOS_PARSERS)
operating_system = artifacts.OperatingSystemArtifact(family='bogus')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 0)
def testGetPresetsInformation(self):
"""Tests the GetPresetsInformation function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_presets_information = test_manager.GetPresetsInformation()
self.assertGreaterEqual(len(parser_presets_information), 1)
available_parser_names = [name for name, _ in parser_presets_information]
self.assertIn('linux', available_parser_names)
# TODO add tests for ReadFromFile
if __name__ == '__main__':
unittest.main()
| rgayon/plaso | tests/parsers/presets.py | Python | apache-2.0 | 4,926 | 0.003045 |
# -*- coding: utf-8 -*- pylint: disable-msg=R0801
#
# Copyright (c) 2013 Rodolphe Quiédeville <rodolphe@quiedeville.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
API definition
"""
from tastypie import fields
from tastypie.resources import ModelResource
from tastypie.throttle import BaseThrottle
from cotetra.survey.models import Journey, Connection
from cotetra.network.api import StationResource
class JourneyResource(ModelResource):
"""
The journeys
"""
station_from = fields.ForeignKey(StationResource, 'station_from')
station_to = fields.ForeignKey(StationResource, 'station_to')
class Meta:
queryset = Journey.objects.all()
resource_name = 'journey'
throttle = BaseThrottle(throttle_at=100, timeframe=60)
class ConnectionResource(ModelResource):
"""
The connections
"""
station_from = fields.ForeignKey(StationResource, 'station_from')
station_to = fields.ForeignKey(StationResource, 'station_to')
class Meta:
queryset = Connection.objects.all()
resource_name = 'connection'
throttle = BaseThrottle(throttle_at=100, timeframe=60)
| rodo/cotetra | cotetra/survey/api.py | Python | agpl-3.0 | 1,782 | 0 |
lines = int(input("How many lines of text? "))
lineText = input("What is the line of text? ")
for i in range(lines):
print(lineText) | ZachOhara/OCSTA-Programming-Contest-2015 | python/TestPrintLines.py | Python | gpl-3.0 | 134 | 0.014925 |
"""
Copyright (c) 2012 Philip Schliehauf (uniphil@gmail.com) and the
Queen's University Applied Sustainability Centre
This project is hosted on github; for up-to-date code and contacts:
https://github.com/Queens-Applied-Sustainability/PyRTM
This file is part of PyRTM.
PyRTM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PyRTM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PyRTM. If not, see <http://www.gnu.org/licenses/>.
"""
# import unittest
# import shutil
# import time
# from datetime import datetime
# from .. import cache
# class TestVarsToFile(unittest.TestCase):
# def assertClean(self, inp, res):
# clean = cache.vars_to_file(inp)
# self.assertEqual(clean, res)
# def testOneChar(self):
# self.assertClean(['a'], 'a')
# def testOneString(self):
# self.assertClean(['hello'], 'hello')
# def testOtherType(self):
# self.assertClean([1], '1')
# def testStringJoin(self):
# self.assertClean(['a', 'b'], 'a-b')
# def testCharReplace(self):
# some_illegals = ' !@#$%^&*()+=<>?;"\'[]{}~`'
# for illegal in some_illegals:
# dirty = illegal.join(['a', 'b'])
# self.assertClean([dirty], 'a.b')
# def testGeneratorIn(self):
# self.assertClean((str(i) for i in xrange(2)), '0-1')
# class TestGet(unittest.TestCase):
# def setUp(self):
# self.expensive_fn = lambda c: 1
# self.config = {
# 'description': 'test',
# 'longitude': -75.3,
# 'latitude': 44.22,
# 'time': datetime(2012, 1, 1, 0, 0, 0)
# }
# self.cachedconfig = {
# 'description': 'cachedtest',
# 'longitude': -75.3,
# 'latitude': 44.22,
# 'time': datetime(2012, 1, 1, 0, 0, 0)
# }
# cache.get(self.expensive_fn, self.cachedconfig)
# def testFunc(self):
# result = cache.get(self.expensive_fn, self.config)
# self.assertEqual(result, (1, False))
# def testCached(self):
# result = cache.get(self.expensive_fn, self.cachedconfig)
# self.assertEqual(result, (1, True))
# def tearDown(self):
# shutil.rmtree(cache.CACHE_DIR)
# if __name__ == '__main__':
# unittest.main()
| Queens-Applied-Sustainability/PyRTM | rtm/test/test_cache.py | Python | gpl-3.0 | 2,536 | 0.001577 |
# http://stackoverflow.com/questions/1477294/generate-random-utf-8-string-in-python
import random
def get_random_unicode(length):
try:
get_char = unichr
except NameError:
get_char = chr
# Update this to include code point ranges to be sampled
include_ranges = [
(0x0021, 0x0021),
(0x0023, 0x0026),
(0x0028, 0x007E),
(0x00A1, 0x00AC),
(0x00AE, 0x00FF),
(0x0100, 0x017F),
(0x0180, 0x024F),
(0x2C60, 0x2C7F),
(0x16A0, 0x16F0),
(0x0370, 0x0377),
(0x037A, 0x037E),
(0x0384, 0x038A),
(0x038C, 0x038C),
]
alphabet = [
get_char(code_point) for current_range in include_ranges
for code_point in range(current_range[0], current_range[1] + 1)
]
return ''.join(random.choice(alphabet) for i in range(length))
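# Usage sketch: an 8-character random string drawn from the ranges above,
# e.g. for fuzzing post titles in tests:
#   title = get_random_unicode(8)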
| gouthambs/Flask-Blogging | test/utils.py | Python | mit | 948 | 0 |
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from nose.tools import raises, eq_
from minio import Minio
from minio.api import _DEFAULT_USER_AGENT
from minio.error import InvalidEndpointError, InvalidBucketError
from minio.helpers import get_target_url, is_valid_bucket_name
class ValidBucketName(TestCase):
@raises(InvalidBucketError)
def test_bucket_name(self):
is_valid_bucket_name('bucketName')
@raises(InvalidBucketError)
def test_bucket_name_invalid_characters(self):
is_valid_bucket_name('$$$bcuket')
@raises(InvalidBucketError)
def test_bucket_name_length(self):
is_valid_bucket_name('dd')
@raises(InvalidBucketError)
def test_bucket_name_periods(self):
is_valid_bucket_name('dd..mybucket')
@raises(InvalidBucketError)
def test_bucket_name_begins_period(self):
is_valid_bucket_name('.ddmybucket')
class GetURLTests(TestCase):
def test_get_target_url_works(self):
url = 'http://localhost:9000'
eq_(get_target_url(url, 'bucket-name'),
'http://localhost:9000/bucket-name/')
eq_(get_target_url(url, 'bucket-name', 'objectName'),
'http://localhost:9000/bucket-name/objectName')
eq_(get_target_url(url, 'bucket-name', 'objectName', None),
'http://localhost:9000/bucket-name/objectName')
eq_(get_target_url(url, 'bucket-name', 'objectName', 'us-east-1',
{'foo': 'bar'}),
'http://localhost:9000/bucket-name/objectName?foo=bar')
eq_(get_target_url(url, 'bucket-name', 'objectName', 'us-east-1',
{'foo': 'bar',
'b': 'c',
'a': 'b'}),
'http://localhost:9000/bucket-name/objectName?a=b&b=c&foo=bar')
# S3 urls.
s3_url = 'https://s3.amazonaws.com'
eq_(get_target_url(s3_url), 'https://s3.amazonaws.com/')
eq_(get_target_url(s3_url, 'my.bucket.name'),
'https://s3.amazonaws.com/my.bucket.name/')
eq_(get_target_url(s3_url,
'bucket-name',
'objectName',
'us-west-2', None),
'https://bucket-name.s3-us-west-2.amazonaws.com/objectName')
@raises(TypeError)
def test_minio_requires_string(self):
Minio(10)
@raises(InvalidEndpointError)
def test_minio_requires_hostname(self):
Minio('http://')
class UserAgentTests(TestCase):
def test_default_user_agent(self):
client = Minio('localhost')
eq_(client._user_agent, _DEFAULT_USER_AGENT)
def test_set_app_info(self):
client = Minio('localhost')
expected_user_agent = _DEFAULT_USER_AGENT + ' hello/2.0.6'
client.set_app_info('hello', '2.0.6')
eq_(client._user_agent, expected_user_agent)
@raises(ValueError)
def test_set_app_info_requires_non_empty_name(self):
client = Minio('localhost:9000')
client.set_app_info('', '2.0.6')
@raises(ValueError)
def test_set_app_info_requires_non_empty_version(self):
client = Minio('localhost:9000')
client.set_app_info('hello', '')
| donatello/minio-py | tests/unit/minio_test.py | Python | apache-2.0 | 3,851 | 0.000519 |
from .StateBase import StateBase
from neo.Core.Fixed8 import Fixed8
from neo.Core.IO.BinaryReader import BinaryReader
from neo.IO.MemoryStream import StreamManager
from neo.Core.AssetType import AssetType
from neo.Core.UInt160 import UInt160
from neo.Core.Cryptography.Crypto import Crypto
from neo.Core.Cryptography.ECCurve import EllipticCurve, ECDSA
from neo.Core.Size import Size as s
from neo.Core.Size import GetVarSize
class AssetState(StateBase):
def Size(self):
return super(AssetState, self).Size() + s.uint256 + s.uint8 + GetVarSize(
self.Name) + self.Amount.Size() + self.Available.Size() + s.uint8 + s.uint8 + self.Fee.Size() + s.uint160 + self.Owner.Size() + s.uint160 + s.uint160 + s.uint32 + s.uint8
def __init__(self, asset_id=None, asset_type=None, name=None, amount=None, available=None,
precision=0, fee_mode=0, fee=None, fee_addr=None, owner=None,
admin=None, issuer=None, expiration=None, is_frozen=False):
"""
Create an instance.
Args:
asset_id (UInt256):
asset_type (neo.Core.AssetType):
name (str): the asset name.
amount (Fixed8):
available (Fixed8):
precision (int): number of decimals the asset has.
fee_mode (int):
fee (Fixed8):
fee_addr (UInt160): where the fee will be send to.
owner (EllipticCurve.ECPoint):
admin (UInt160): the administrator of the asset.
issuer (UInt160): the issuer of the asset.
expiration (UInt32): the block number on which the asset expires.
is_frozen (bool):
"""
self.AssetId = asset_id
self.AssetType = asset_type
self.Name = name
self.Amount = Fixed8(0) if amount is None else amount
self.Available = Fixed8(0) if available is None else available
self.Precision = precision
self.FeeMode = fee_mode
self.Fee = Fixed8(0) if fee is None else fee
self.FeeAddress = UInt160(data=bytearray(20)) if fee_addr is None else fee_addr
if owner is not None and type(owner) is not EllipticCurve.ECPoint:
raise Exception("Owner must be ECPoint Instance")
self.Owner = owner
self.Admin = admin
self.Issuer = issuer
self.Expiration = expiration
self.IsFrozen = is_frozen
# def Size(self):
# return super(AssetState, self).Size()
@staticmethod
def DeserializeFromDB(buffer):
"""
Deserialize full object.
Args:
buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.
Returns:
AssetState:
"""
m = StreamManager.GetStream(buffer)
reader = BinaryReader(m)
account = AssetState()
account.Deserialize(reader)
StreamManager.ReleaseStream(m)
return account
def Deserialize(self, reader):
"""
Deserialize full object.
Args:
reader (neo.Core.IO.BinaryReader):
"""
super(AssetState, self).Deserialize(reader)
self.AssetId = reader.ReadUInt256()
self.AssetType = ord(reader.ReadByte())
self.Name = reader.ReadVarString()
position = reader.stream.tell()
try:
self.Amount = reader.ReadFixed8()
except Exception:
reader.stream.seek(position)
self.Amount = reader.ReadFixed8()
self.Available = reader.ReadFixed8()
self.Precision = ord(reader.ReadByte())
# fee mode
reader.ReadByte()
self.Fee = reader.ReadFixed8()
self.FeeAddress = reader.ReadUInt160()
self.Owner = ECDSA.Deserialize_Secp256r1(reader)
self.Admin = reader.ReadUInt160()
self.Issuer = reader.ReadUInt160()
self.Expiration = reader.ReadUInt32()
self.IsFrozen = reader.ReadBool()
def Serialize(self, writer):
"""
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
"""
super(AssetState, self).Serialize(writer)
writer.WriteUInt256(self.AssetId)
writer.WriteByte(self.AssetType)
writer.WriteVarString(self.Name)
if self.Amount.value > -1:
writer.WriteFixed8(self.Amount, unsigned=True)
else:
writer.WriteFixed8(self.Amount)
if type(self.Available) is not Fixed8:
raise Exception("AVAILABLE IS NOT FIXED 8!")
writer.WriteFixed8(self.Available, unsigned=True)
writer.WriteByte(self.Precision)
writer.WriteByte(b'\x00')
writer.WriteFixed8(self.Fee)
writer.WriteUInt160(self.FeeAddress)
self.Owner.Serialize(writer)
writer.WriteUInt160(self.Admin)
writer.WriteUInt160(self.Issuer)
writer.WriteUInt32(self.Expiration)
writer.WriteBool(self.IsFrozen)
def GetName(self):
"""
Get the asset name based on its type.
Returns:
str: 'NEO' or 'NEOGas'
"""
if self.AssetType == AssetType.GoverningToken:
return "NEO"
elif self.AssetType == AssetType.UtilityToken:
return "NEOGas"
if type(self.Name) is bytes:
return self.Name.decode('utf-8')
return self.Name
def ToJson(self):
"""
Convert object members to a dictionary that can be parsed as JSON.
Returns:
dict:
"""
return {
'assetId': self.AssetId.To0xString(),
'assetType': self.AssetType,
'name': self.GetName(),
'amount': self.Amount.value,
'available': self.Available.value,
'precision': self.Precision,
'fee': self.Fee.value,
'address': self.FeeAddress.ToString(),
'owner': self.Owner.ToString(),
'admin': Crypto.ToAddress(self.Admin),
'issuer': Crypto.ToAddress(self.Issuer),
'expiration': self.Expiration,
'is_frozen': self.IsFrozen
}
def Clone(self):
return AssetState(asset_id=self.AssetId, asset_type=self.AssetType, name=self.Name, amount=self.Amount, available=self.Available, precision=self.Precision, fee=self.Fee, fee_addr=self.FeeAddress, owner=self.Owner, admin=self.Admin, issuer=self.Issuer, expiration=self.Expiration, is_frozen=self.IsFrozen)
| hal0x2328/neo-python | neo/Core/State/AssetState.py | Python | mit | 6,477 | 0.000926 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Code to generate Pauli strings for measurement of local operators"""
from itertools import chain, zip_longest
import numpy
from openfermion.ops.operators import QubitOperator
def binary_partition_iterator(qubit_list, num_iterations=None):
"""Generator for a list of 2-partitions of N qubits
such that all pairs of qubits are split in at least one partition,
This follows a variation on ArXiv:1908.0562 - instead of
explicitly partitioning the list based on the binary indices of
the qubits, we repeatedly divide the list in two and then
zip it back together.
Args:
qubit_list(list): list of qubits to be partitioned
num_iterations(int or None): number of iterations to perform.
If None, will be set to ceil(log2(len(qubit_list)))
Returns:
partition(iterator of tuples of lists): the required partitioning
"""
# Some edge cases
if num_iterations is not None and num_iterations == 0:
return
num_qubits = len(qubit_list)
if num_qubits < 2:
raise ValueError('Need at least 2 qubits to partition')
if num_qubits == 2:
yield ([qubit_list[0]], [qubit_list[1]])
return
if num_iterations is None:
num_iterations = int(numpy.ceil(numpy.log2(num_qubits)))
# Calculate the point where we need to split the list each time.
half_point = int(numpy.ceil(num_qubits / 2))
# Repeat the division and zip steps as many times
# as required.
for _ in range(num_iterations):
# Divide the qubit list in two and return it
partition = (qubit_list[:half_point], qubit_list[half_point:])
yield partition
# Zip the partition together to remake the qubit list.
qubit_list = list(chain(*zip_longest(partition[0], partition[1])))
# If len(qubit_list) is odd, the end of the list will be 'None'
# which we delete.
if qubit_list[-1] is None:
del qubit_list[-1]
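# Usage sketch (hypothetical input): for four qubits the iterator yields
# ([0, 1], [2, 3]) and then ([0, 2], [1, 3]), so every pair of qubits is
# separated by at least one of the two partitions:
#   list(binary_partition_iterator([0, 1, 2, 3]))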
def partition_iterator(qubit_list, partition_size, num_iterations=None):
"""Generator for a list of k-partitions of N qubits such that
all sets of k qubits are perfectly split in at least one
partition, following ArXiv:1908.05628
Args:
qubit_list(list): list of qubits to be partitioned
partition_size(int): the number of sets in the partition.
num_iterations(int or None): the number of iterations in the
outer iterator. If None, set to ceil(log2(len(qubit_list)))
Returns:
partition(iterator of tuples of lists): the required partitioning
"""
# Some edge cases
if num_iterations == 0:
return
if partition_size == 1:
yield (qubit_list,)
return
elif partition_size == 2:
for p in binary_partition_iterator(qubit_list, num_iterations):
yield p
return
num_qubits = len(qubit_list)
if partition_size == num_qubits:
yield tuple([q] for q in qubit_list)
return
elif partition_size > num_qubits:
raise ValueError('I cant k-partition less than k qubits')
if num_iterations is None:
num_iterations = int(numpy.ceil(numpy.log2(num_qubits)))
# First iterate over the outer binary partition
outer_iterator = binary_partition_iterator(qubit_list,
num_iterations=num_iterations)
for set1, set2 in outer_iterator:
# Each new partition needs to be subdivided fewer times
# to prevent an additional k! factor in the scaling.
num_iterations -= 1
# Iterate over all possibilities of partitioning the first
# set into l parts and the second set into k - l parts.
for inner_partition_size in range(1, partition_size):
if inner_partition_size > len(set1) or\
partition_size - inner_partition_size > len(set2):
continue
# subdivide the first partition
inner_iterator1 = partition_iterator(set1, inner_partition_size,
num_iterations)
for inner_partition1 in inner_iterator1:
# subdivide the second partition
inner_iterator2 = partition_iterator(
set2, partition_size - inner_partition_size, num_iterations)
for inner_partition2 in inner_iterator2:
yield inner_partition1 + inner_partition2
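# Usage sketch (hypothetical input): partition_iterator([0, 1, 2, 3], 3)
# yields 3-tuples of lists in which every triple of qubits ends up fully
# separated in at least one yielded partition.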
def pauli_string_iterator(num_qubits, max_word_size=2):
"""Generates a set of Pauli strings such that each word
of k Pauli operators lies in at least one string.
Args:
num_qubits(int): number of qubits in string
max_word_size(int): maximum required word
Returns:
pauli_string(iterator of strings): iterator
over Pauli strings
"""
if max_word_size > num_qubits:
raise ValueError('Number of qubits is too few')
if max_word_size <= 0:
raise ValueError('Word size too small')
qubit_list = list(range(num_qubits))
partitions = partition_iterator(qubit_list, max_word_size)
pauli_string = ['I' for temp in range(num_qubits)]
pauli_letters = ['X', 'Y', 'Z']
for partition in partitions:
for lettering in range(3**max_word_size):
for p in partition:
letter = pauli_letters[lettering % 3]
for qubit in p:
pauli_string[qubit] = letter
lettering = lettering // 3
yield tuple(pauli_string)
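# Usage sketch: pauli_string_iterator(3, max_word_size=2) yields length-3
# tuples over {'X', 'Y', 'Z'} (the first is ('X', 'X', 'X')) such that every
# two-qubit Pauli word is realized by at least one of the yielded strings.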
def _find_compatible_basis(term, bases):
for basis in bases:
basis_qubits = {op[0] for op in basis}
conflicts = ((i, P)
for (i, P) in term
if i in basis_qubits and (i, P) not in basis)
if any(conflicts):
continue
return basis
return None
def group_into_tensor_product_basis_sets(operator, seed=None):
"""
Split an operator (instance of QubitOperator) into `sub-operator`
QubitOperators, where each sub-operator has terms that are diagonal
in the same tensor product basis.
Each `sub-operator` can be measured using the same qubit post-rotations
in expectation estimation. Grouping into these tensor product basis
sets has been found to improve the efficiency of expectation estimation
significantly for some Hamiltonians in the context of
VQE (see section V(A) in the supplementary material of
https://arxiv.org/pdf/1704.05018v2.pdf). The more general problem
of grouping operators into commutitative groups is discussed in
section IV (B2) of https://arxiv.org/pdf/1509.04279v1.pdf. The
original input operator is the union of all output sub-operators,
and all sub-operators are disjoint (do not share any terms).
Args:
operator (QubitOperator): the operator that will be split into
sub-operators (tensor product basis sets).
seed (int): default None. Random seed used to initialize the
numpy.RandomState pseudo-random number generator.
Returns:
sub_operators (dict): a dictionary where each key defines a
tensor product basis, and each corresponding value is a
QubitOperator with terms that are all diagonal in
that basis.
**key** (tuple of tuples): Each key is a term, which defines
a tensor product basis. A term is a product of individual
factors; each factor is represented by a tuple of the form
(`index`, `action`), and these tuples are collected into a
larger tuple which represents the term as the product of
its factors. `action` is from the set {'X', 'Y', 'Z'} and
`index` is a non-negative integer corresponding to the
index of a qubit.
**value** (QubitOperator): A QubitOperator with terms that are
diagonal in the basis defined by the key it is stored in.
Raises:
TypeError: Operator of invalid type.
"""
if not isinstance(operator, QubitOperator):
raise TypeError('Can only split QubitOperator into tensor product'
' basis sets. {} is not supported.'.format(
type(operator).__name__))
sub_operators = {}
r = numpy.random.RandomState(seed)
for term, coefficient in operator.terms.items():
bases = list(sub_operators.keys())
r.shuffle(bases)
basis = _find_compatible_basis(term, bases)
if basis is None:
sub_operators[term] = QubitOperator(term, coefficient)
else:
sub_operator = sub_operators.pop(basis)
sub_operator += QubitOperator(term, coefficient)
additions = tuple(op for op in term if op not in basis)
basis = tuple(
sorted(basis + additions, key=lambda factor: factor[0]))
sub_operators[basis] = sub_operator
return sub_operators
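# Usage sketch (illustrative operator, not from the original tests; exact
# grouping depends on the shuffled basis order):
#   op = (QubitOperator('X0 Z1', 1.0) + QubitOperator('X0', 0.5)
#         + QubitOperator('Y0', 2.0))
#   groups = group_into_tensor_product_basis_sets(op, seed=0)
#   'X0 Z1' and 'X0' can share the basis ((0, 'X'), (1, 'Z')); 'Y0'
#   conflicts on qubit 0 and lands in its own set.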
| quantumlib/OpenFermion | src/openfermion/measurements/qubit_partitioning.py | Python | apache-2.0 | 9,586 | 0.000104 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Listing nearby places using Foursquare.
http://developer.foursquare.com/docs/venues/explore
"""
import copy
import html
import itertools
import poor
import urllib.parse
CONF_DEFAULTS = {"sort_by_distance": False}
CLIENT_ID = "BP3KCWJXGQDXWVMYSVLWWRITMVZTG5XANJ43D2ZD0D5JMKCX"
URL = ("https://api.foursquare.com/v2/venues/explore"
"?client_id={CLIENT_ID}"
"&client_secret=JTINTTCK4S5V4RTZ40IJB0GIKDX1XT0LJVNRH2EZXNVLNZ2T"
"&v=20140912"
"&m=foursquare"
"&query={query}"
"&ll={y:.5f},{x:.5f}"
"&limit=50"
"&radius={radius:.0f}"
"&sortByDistance={sort_by_distance}")
cache = {}
def nearby(query, near, radius, params):
"""Return X, Y and a list of dictionaries of places matching `query`."""
query = urllib.parse.quote_plus(query)
sort_by_distance = str(int(poor.conf.guides.foursquare.sort_by_distance))
x, y = prepare_point(near)
url = URL.format(CLIENT_ID=CLIENT_ID, **locals())
with poor.util.silent(KeyError):
return copy.deepcopy(cache[url])
results = poor.http.get_json(url)
results = poor.AttrDict(results)
results = [dict(
title=item.venue.name,
description=parse_description(item),
text=parse_text(item),
link=parse_link(item),
x=float(item.venue.location.lng),
y=float(item.venue.location.lat),
) for item in itertools.chain.from_iterable(
group["items"] for group in
results.response.get("groups", [])
)]
if results and results[0]:
cache[url] = copy.deepcopy((x, y, results))
return x, y, results
def parse_description(item):
"""Parse description from search result `item`."""
description = []
with poor.util.silent(Exception):
rating = float(item.venue.rating)
description.append("{:.1f}/10".format(rating))
with poor.util.silent(Exception):
description.append(item.venue.categories[0].name)
with poor.util.silent(Exception):
description.append(item.venue.location.address)
description = ", ".join(description)
with poor.util.silent(Exception):
description += "\n“{}”".format(item.tips[0].text)
return description
def parse_link(item):
"""Parse hyperlink from search result `item`."""
return ("http://foursquare.com/v/{}?ref={}"
.format(item.venue.id, CLIENT_ID))
def parse_text(item):
"""Parse blurb text from search result `item`."""
lines = []
with poor.util.silent(Exception):
lines.append('<font color="Theme.highlightColor">'
'<big>{}</big>'
'</font>'
.format(html.escape(item.venue.name)))
subtitle = []
with poor.util.silent(Exception):
subtitle.append('<font color="Theme.highlightColor">'
'<big>{:.1f}</big>'
'</font>'
'<small> / 10</small>'
.format(float(item.venue.rating)))
with poor.util.silent(Exception):
category = html.escape(item.venue.categories[0].name)
subtitle.append("<small>{}</small>".format(category))
lines.append(" ".join(subtitle))
with poor.util.silent(Exception):
quote = html.escape(item.tips[0].text)
lines.append("<small>“{}”</small>".format(quote))
return "<br>".join(lines)
def prepare_point(point):
"""Return geocoded coordinates for `point`."""
# Foursquare does geocoding too, but not that well.
if isinstance(point, (list, tuple)):
return point[0], point[1]
geocoder = poor.Geocoder("default")
results = geocoder.geocode(point, dict(limit=1))
return results[0]["x"], results[0]["y"]
| otsaloma/poor-maps | guides/foursquare.py | Python | gpl-3.0 | 4,437 | 0.001129 |
#!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2016 IBM
# Author: Prudhvi Miryala<mprudhvi@linux.vnet.ibm.com>
#
# test multicasting
# to test we need to enable multicast option on host
# then ping from peer to multicast group
import netifaces
from avocado import Test
from avocado.utils.software_manager import SoftwareManager
from avocado.utils.ssh import Session
from avocado.utils import process
from avocado.utils import distro
from avocado.utils.network.interfaces import NetworkInterface
from avocado.utils.network.hosts import LocalHost
class ReceiveMulticastTest(Test):
'''
check multicast receive
using ping tool
'''
def setUp(self):
'''
To check and install dependencies for the test
'''
self.peer = self.params.get("peer_ip", default="")
self.user = self.params.get("user_name", default="root")
self.peer_password = self.params.get("peer_password",
'*', default="None")
interfaces = netifaces.interfaces()
self.iface = self.params.get("interface", default="")
if self.iface not in interfaces:
self.cancel("%s interface is not available" % self.iface)
self.ipaddr = self.params.get("host_ip", default="")
self.netmask = self.params.get("netmask", default="")
local = LocalHost()
self.networkinterface = NetworkInterface(self.iface, local)
try:
self.networkinterface.add_ipaddr(self.ipaddr, self.netmask)
self.networkinterface.save(self.ipaddr, self.netmask)
except Exception:
self.networkinterface.save(self.ipaddr, self.netmask)
self.networkinterface.bring_up()
self.session = Session(self.peer, user=self.user,
password=self.peer_password)
if not self.session.connect():
self.cancel("failed connecting to peer")
self.count = self.params.get("count", default="500000")
smm = SoftwareManager()
pkgs = ["net-tools"]
detected_distro = distro.detect()
if detected_distro.name == "Ubuntu":
pkgs.extend(["openssh-client", "iputils-ping"])
elif detected_distro.name == "SuSE":
pkgs.extend(["openssh", "iputils"])
else:
pkgs.extend(["openssh-clients", "iputils"])
for pkg in pkgs:
if not smm.check_installed(pkg) and not smm.install(pkg):
self.cancel("%s package is need to test" % pkg)
if self.peer == "":
self.cancel("peer ip should specify in input")
cmd = "ip addr show | grep %s" % self.peer
output = self.session.cmd(cmd)
result = ""
result = result.join(output.stdout.decode("utf-8"))
self.peerif = result.split()[-1]
if self.peerif == "":
self.cancel("unable to get peer interface")
cmd = "ip -f inet -o addr show %s | awk '{print $4}' | cut -d / -f1"\
% self.iface
self.local_ip = process.system_output(cmd, shell=True).strip()
if self.local_ip == "":
self.cancel("unable to get local ip")
def test_multicast(self):
'''
ping to peer machine
'''
cmd = "echo 0 > /proc/sys/net/ipv4/icmp_echo_ignore_broadcasts"
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.fail("unable to set value to icmp_echo_ignore_broadcasts")
cmd = "ip link set %s allmulticast on" % self.iface
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.fail("unable to set all mulicast option to test interface")
cmd = "ip route add 224.0.0.0/4 dev %s" % self.peerif
output = self.session.cmd(cmd)
if not output.exit_status == 0:
self.fail("Unable to add route for Peer interafce")
cmd = "timeout 600 ping -I %s 224.0.0.1 -c %s -f" % (self.peerif,
self.count)
output = self.session.cmd(cmd)
if not output.exit_status == 0:
self.fail("multicast test failed")
def tearDown(self):
'''
delete multicast route and turn off multicast option
'''
cmd = "ip route del 224.0.0.0/4"
output = self.session.cmd(cmd)
if not output.exit_status == 0:
self.log.info("Unable to delete multicast route added for peer")
cmd = "echo 1 > /proc/sys/net/ipv4/icmp_echo_ignore_broadcasts"
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.log.info("unable to unset all mulicast option")
cmd = "ip link set %s allmulticast off" % self.iface
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.log.info("unable to unset all mulicast option")
self.networkinterface.remove_ipaddr(self.ipaddr, self.netmask)
try:
self.networkinterface.restore_from_backup()
except Exception:
self.log.info("backup file not availbale, could not restore file.")
self.session.quit()
| narasimhan-v/avocado-misc-tests-1 | io/net/multicast.py | Python | gpl-2.0 | 5,757 | 0 |
from django.conf.urls import url
from . import views
urlpatterns = [
# ex: /polls/
url(r'^$', views.index, name='index'),
# ex: /polls/5/
url(r'^(?P<question_id>[0-9]+)/$', views.detail, name='detail'),
# ex: /polls/5/results/
url(r'^(?P<question_id>[0-9]+)/results/$', views.results, name='results'),
# ex: /polls/5/vote/
url(r'^(?P<question_id>[0-9]+)/vote/$', views.vote, name='vote'),
]
| druss16/danslist | polls/urls.py | Python | mit | 400 | 0.02 |
"""useful context managers"""
from contextlib import suppress
with suppress(ModuleNotFoundError):
from lag import *
import os
import contextlib
from functools import partial  # used by cd() below; missing from the original imports
def clog(*args, condition=True, log_func=print, **kwargs):
if condition:
return log_func(*args, **kwargs)
@contextlib.contextmanager
def cd(newdir, verbose=True):
"""Change your working directory, do stuff, and change back to the original"""
_clog = partial(clog, condition=verbose, log_func=print)
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
_clog(f'cd {newdir}')
yield
finally:
_clog(f'cd {prevdir}')
os.chdir(prevdir)
# from pathlib import Path
# _clog("Called before cd", Path().absolute())
# with cd(Path.home()):
# if verbose: print("Called under cd", Path().absolute())
# _clog("Called after cd and same as before", Path().absolute())
| thorwhalen/ut | util/context_managers.py | Python | mit | 908 | 0.001101 |
"""
Script to evaluate intermodal forward Brillouin scattering in a cylindrical SiO2 waveguide
"""
# Import the necessary packages
import time
import datetime
import numpy as np
import sys
import copy
from matplotlib.ticker import AutoMinorLocator
import math
sys.path.append("../backend/")
import materials
import objects
import mode_calcs
import integration
import plotting
from plotting import FieldDecorator
from fortran import NumBAT
# Naming conventions
# AC: acoustic
# EM: electromagnetic
# k_AC: acoustic wavenumber
start = time.time()
# Specify Geometric Parameters - all in [nm].
wl_nm = 1550 # Wavelength of EM wave in vacuum.
# Unit cell dimensions must be sufficiently large to ensure fields are zero at outermost boundary.
unitcell_x = 4.01*wl_nm  # deliberately not a whole-integer multiple of the wavelength
unitcell_y = unitcell_x
inc_a_x = 1000 # Waveguide width.
inc_a_y = inc_a_x
inc_shape = 'circular' # Shape of the waveguide.
# Specify number of electromagnetic modes, acoustic modes, and which EM indices
# are involved in the calculation for intermodal FSBS
num_modes_EM_pump = 20
num_modes_EM_Stokes = num_modes_EM_pump
num_modes_AC = 100 # Number of acoustic modes to solve for.
# The EM pump mode(s) for which to calculate interaction with AC modes.
# Can specify a mode number (zero has lowest propagation constant) or 'All'.
EM_ival_pump = 1
# The EM Stokes mode(s) for which to calculate interaction with AC modes.
EM_ival_Stokes = 0
# The AC mode(s) for which to calculate interaction with EM modes.
AC_ival = 'All'
# Output files are generated in a folder with the following prefix
prefix_str = 'ifsbs-josab-1umSiO2'
# Use all specified parameters to create a waveguide object
wguide = objects.Struct(unitcell_x,inc_a_x,unitcell_y,inc_a_y,inc_shape,
material_bkg=materials.materials_dict["Vacuum"],
material_a=materials.materials_dict["SiO2_2021_Poulton"],
lc_bkg=0.05, # mesh coarseness in background, larger lc_bkg = coarser along horizontal outer edge
lc_refine_1=20.0, # mesh refinement factor near the interface of waveguide, larger lc2 = finer along horizontal interface
lc_refine_2=30.0, # mesh refinement factor near the origin/centre of waveguide
plt_mesh=False, # creates png file of geometry and mesh in backend/fortran/msh/
check_mesh=False) # note requires x-windows configuration to work
# Initial guess for the EM effective index of the waveguide
n_eff = wguide.material_a.n-0.1
# Calculate Electromagnetic Modes
print("Starting EM pump modes")
sim_EM_pump = wguide.calc_EM_modes(num_modes_EM_pump, wl_nm, n_eff=n_eff, debug=False)
print("Starting EM Stokes modes")
sim_EM_Stokes = mode_calcs.fwd_Stokes_modes(sim_EM_pump)
# Generate images for the EM modes involved in the calculation
print("Starting EM field plotting ")
plotting.plt_mode_fields(sim_EM_pump,
ivals=[EM_ival_pump,EM_ival_Stokes],
EM_AC='EM_E', num_ticks=3,xlim_min=0.2, xlim_max=0.2, ylim_min=0.2, ylim_max=0.2,
prefix_str=prefix_str, pdf_png='png', ticks=True, quiver_steps=10,
comps=['Et','Eabs'], n_points=1000, colorbar=True)
# A computation interruption if needed
# sys.exit("We interrupt your regularly scheduled computation to bring you something completely different... for now")
# Print the wavevectors of EM modes.
print('k_z of EM modes \n', np.round(np.real(sim_EM_pump.Eig_values), 4))
# Calculate the EM effective index of the waveguide.
n_eff_sim = np.real(sim_EM_pump.Eig_values*((wl_nm*1e-9)/(2.*np.pi)))
print("n_eff = ", np.round(n_eff_sim, 4))
# Calculate and print the acoustic wave vector
k_AC = np.real(sim_EM_pump.Eig_values[EM_ival_pump] - sim_EM_Stokes.Eig_values[EM_ival_Stokes])
print('Intermode q_AC (Hz) \n', k_AC)
# Calculate Acoustic Modes
print("Starting acoustic modes")
sim_AC = wguide.calc_AC_modes(num_modes_AC, k_AC, EM_sim=sim_EM_pump, debug=False)
# Print the frequencies of AC modes.
AC_freqs_GHz = np.round(np.real(sim_AC.Eig_values)*1e-9, 4)
print('\n Freq of AC modes (GHz) \n', AC_freqs_GHz)
# Calculate total SBS gain, photoelastic and moving boundary contributions, as
# well as other important quantities
SBS_gain, SBS_gain_PE, SBS_gain_MB, linewidth_Hz, Q_factors, alpha = integration.gain_and_qs(
sim_EM_pump, sim_EM_Stokes, sim_AC, k_AC,
EM_ival_pump=EM_ival_pump, EM_ival_Stokes=EM_ival_Stokes, AC_ival=AC_ival)
# Mask negligible gain values to improve clarity of print out.
threshold = 1e-3
masked_PE = np.ma.masked_inside(SBS_gain_PE[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
masked_MB = np.ma.masked_inside(SBS_gain_MB[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
masked = np.ma.masked_inside(SBS_gain[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
# Display these in terminal
print("\n Displaying results with negligible components masked out")
print("SBS_gain [1/(Wm)] PE contribution \n", masked_PE)
print("SBS_gain [1/(Wm)] MB contribution \n", masked_MB)
print("SBS_gain [1/(Wm)] total \n", masked)
# Determine the location of the maximum gain.
maxGainloc = 6  # note: it is sometimes necessary to specify this manually, since certain mode pairings are forbidden by symmetry arguments
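# A possible programmatic alternative (a sketch, not part of the original script):
#   maxGainloc = int(np.argmax(np.abs(masked.filled(0))))
# masked.filled(0) replaces the masked (negligible) entries with zero before argmax.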
print("Plotting acoustic mode corresponding to maximum")
plotting.plt_mode_fields(sim_AC, EM_AC='AC', prefix_str=prefix_str, ivals=[maxGainloc],
num_ticks=3, quiver_steps=40, pdf_png='png',ticks=True, comps=['ut','uabs'], colorbar=True)
# Displaying results for the maximum found in the selection
print("-----------------")
print("Displaying results for maximum gain value found:")
print("Greatest SBS_gain [1/(Wm)] total \n", masked.data[maxGainloc])
print("displaying corresponding acoustic mode number (i.e., AC_field_#) for reference \n",maxGainloc )
print("EM Pump Power [Watts] \n", sim_EM_pump.EM_mode_power[EM_ival_pump] )
print("EM Stokes Power [Watts] \n", sim_EM_Stokes.EM_mode_power[EM_ival_Stokes] )
print("EM angular frequency [THz] \n", sim_EM_pump.omega_EM/1e12 )
print("AC Energy Density [J*m^{-1}] \n", sim_AC.AC_mode_energy_elastic[maxGainloc] )
print("AC loss alpha [1/s] \n", alpha[maxGainloc] )
print("AC frequency [GHz] \n", sim_AC.Omega_AC[maxGainloc]/(1e9*2*math.pi) )
print("AC linewidth [MHz] \n", linewidth_Hz[maxGainloc]/1e6)
# Since the overlap integral is not returned directly, we deduce it from the gain expression.
absQtot2 = (alpha[maxGainloc] * sim_EM_pump.EM_mode_power[EM_ival_pump]
            * sim_EM_Stokes.EM_mode_power[EM_ival_Stokes]
            * sim_AC.AC_mode_energy_elastic[maxGainloc]
            * masked.data[maxGainloc]) / (2 * sim_EM_pump.omega_EM * sim_AC.Omega_AC[maxGainloc])
absQtot = absQtot2 ** 0.5  # ** 0.5 rather than pow(x, 1/2), which would floor to x**0 under Python 2
print("Total coupling |Qtot| [W*m^{-1}*s] \n", absQtot )
end = time.time()
print("\n Simulation time (sec.)", (end - start))
| bjornsturmberg/NumBAT | JOSAB_tutorial/simo-josab-IFSBS-1umcylwg-SiO2.py | Python | gpl-3.0 | 7,020 | 0.010684 |
from copy import copy
import silk.utils.six as six
from silk.singleton import Singleton
def default_permissions(user):
if user:
return user.is_staff
return False
class SilkyConfig(six.with_metaclass(Singleton, object)):
defaults = {
'SILKY_DYNAMIC_PROFILING': [],
'SILKY_IGNORE_PATHS': [],
'SILKY_HIDE_COOKIES': True,
'SILKY_IGNORE_QUERIES': [],
'SILKY_META': False,
'SILKY_AUTHENTICATION': False,
'SILKY_AUTHORISATION': False,
'SILKY_PERMISSIONS': default_permissions,
'SILKY_MAX_REQUEST_BODY_SIZE': -1,
'SILKY_MAX_RESPONSE_BODY_SIZE': -1,
'SILKY_INTERCEPT_PERCENT': 100,
'SILKY_INTERCEPT_FUNC': None,
'SILKY_PYTHON_PROFILER': False,
}
def _setup(self):
from django.conf import settings
options = {option: getattr(settings, option) for option in dir(settings) if option.startswith('SILKY')}
self.attrs = copy(self.defaults)
self.attrs.update(options)
def __init__(self):
super(SilkyConfig, self).__init__()
self._setup()
def __getattr__(self, item):
return self.attrs.get(item, None)
    def __setattr__(self, key, value):
        # __setattribute__ is not a real special method and was never invoked;
        # __setattr__ is the hook Python actually calls. 'attrs' itself must
        # bypass the override to avoid infinite recursion during _setup().
        if key == 'attrs':
            super(SilkyConfig, self).__setattr__(key, value)
        else:
            self.attrs[key] = value
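# Usage sketch (an assumption, not part of silk itself): attribute access falls
# back to the defaults dict, then to None:
#   SilkyConfig().SILKY_META         -> False (from defaults)
#   SilkyConfig().SILKY_UNKNOWN_KEY  -> None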
| Alkalit/silk | silk/config.py | Python | mit | 1,268 | 0.000789 |
class DataTypeError(RuntimeError):
"""
Generic exception class for SDoc1 language errors with data types and expressions.
"""
pass
| SDoc/py-sdoc | sdoc/sdoc1/error.py | Python | mit | 147 | 0.006803 |
""" Here, we need some documentation...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import types
import threading
import time
import six
from DIRAC import gLogger
from DIRAC.Core.Utilities.DIRACSingleton import DIRACSingleton
@six.add_metaclass(DIRACSingleton)
class Devloader(object):
def __init__(self):
self.__log = gLogger.getSubLogger("Devloader")
self.__reloaded = False
self.__enabled = True
self.__reloadTask = False
self.__stuffToClose = []
self.__watchedFiles = []
self.__modifyTimes = {}
def addStuffToClose(self, stuff):
self.__stuffToClose.append(stuff)
@property
def enabled(self):
return self.__enabled
def watchFile(self, fp):
if os.path.isfile(fp):
self.__watchedFiles.append(fp)
return True
return False
def __restart(self):
self.__reloaded = True
for stuff in self.__stuffToClose:
try:
self.__log.always("Closing %s" % stuff)
sys.stdout.flush()
stuff.close()
except Exception:
gLogger.exception("Could not close %s" % stuff)
python = sys.executable
os.execl(python, python, * sys.argv)
def bootstrap(self):
if not self.__enabled:
return False
if self.__reloadTask:
return True
self.__reloadTask = threading.Thread(target=self.__reloadOnUpdate)
self.__reloadTask.setDaemon(1)
self.__reloadTask.start()
def __reloadOnUpdate(self):
while True:
time.sleep(1)
if self.__reloaded:
return
for modName in sys.modules:
modObj = sys.modules[modName]
if not isinstance(modObj, types.ModuleType):
continue
path = getattr(modObj, "__file__", None)
if not path:
continue
if path.endswith(".pyc") or path.endswith(".pyo"):
path = path[:-1]
self.__checkFile(path)
for path in self.__watchedFiles:
self.__checkFile(path)
def __checkFile(self, path):
try:
modified = os.stat(path).st_mtime
except Exception:
return
if path not in self.__modifyTimes:
self.__modifyTimes[path] = modified
return
if self.__modifyTimes[path] != modified:
self.__log.always("File system changed (%s). Restarting..." % (path))
self.__restart()
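# Typical usage (a sketch, not from DIRAC itself):
#   dl = Devloader()
#   dl.watchFile("/path/to/extra.cfg")  # optional extra files to watch
#   dl.bootstrap()                      # start the background reload thread
# When a watched file changes, the process re-execs itself via os.execl().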
| yujikato/DIRAC | src/DIRAC/Core/Utilities/Devloader.py | Python | gpl-3.0 | 2,365 | 0.011839 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('obligarcy', '0006_auto_20151009_1947'),
]
operations = [
migrations.AlterField(
model_name='contract',
name='users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='submission',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
migrations.DeleteModel(
name='User',
),
]
| polypmer/obligarcy | obligarcy/migrations/0007_auto_20151010_2304.py | Python | gpl-3.0 | 696 | 0 |
from collections import defaultdict
class ObservableMemory:
def __init__(self, subject=None, addrWidth=16):
self.physMask = 0xffff
if addrWidth > 16:
# even with 32-bit address space, model only 256k memory
self.physMask = 0x3ffff
if subject is None:
subject = (self.physMask + 1) * [0x00]
self._subject = subject
self._read_subscribers = defaultdict(list)
self._write_subscribers = defaultdict(list)
def __setitem__(self, address, value):
if isinstance(address, slice):
r = range(*address.indices(self.physMask + 1))
for n, v in zip(r, value):
self[n] = v
return
address &= self.physMask
callbacks = self._write_subscribers[address]
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
if isinstance(address, slice):
r = range(*address.indices(self.physMask + 1))
return [ self[n] for n in r ]
address &= self.physMask
callbacks = self._read_subscribers[address]
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
address &= self.physMask
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
address &= self.physMask
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
start_address &= self.physMask
self._subject[start_address:start_address + len(bytes)] = bytes
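# Minimal usage sketch (an assumption, not part of the original module):
# write callbacks may transform the stored value by returning a replacement,
# and read callbacks may override what the underlying memory returns.
if __name__ == "__main__":
    mem = ObservableMemory()

    def trace_write(address, value):
        print("write %04x <- %02x" % (address, value))
        return value  # returning None leaves the written value unchanged

    mem.subscribe_to_write(range(0x0200, 0x0300), trace_write)
    mem[0x0200] = 0xAA   # triggers trace_write
    print(mem[0x0200])   # plain read returns 170 (0xAA)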
| mkeller0815/py65 | py65/memory.py | Python | bsd-3-clause | 2,382 | 0.00084 |
# Generated by Django 2.2.3 on 2019-07-26 13:03
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("payments", "0013_auto_20190724_1628")]
operations = [
migrations.AlterModelOptions(
name="payment",
options={"get_latest_by": "created", "ordering": ("-created",)},
)
]
| lafranceinsoumise/api-django | agir/payments/migrations/0014_auto_20190726_1503.py | Python | agpl-3.0 | 364 | 0 |
from django import template
from django.conf import settings
from django.template.loader import render_to_string
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
register = template.Library()
def get_settings():
"""Utility function to retrieve settings.py values with defaults"""
flavor = getattr(settings, "DJANGO_WYSIWYG_FLAVOR", "yui")
return {
"DJANGO_WYSIWYG_MEDIA_URL": getattr(settings, "DJANGO_WYSIWYG_MEDIA_URL", urljoin(settings.STATIC_URL, flavor) + '/'),
"DJANGO_WYSIWYG_FLAVOR": flavor,
}
@register.simple_tag
def wysiwyg_setup(protocol="http"):
"""
Create the <style> and <script> tags needed to initialize the rich text editor.
Create a local django_wysiwyg/includes.html template if you don't want to use Yahoo's CDN
"""
ctx = {
"protocol": protocol,
}
ctx.update(get_settings())
return render_to_string(
"django_wysiwyg/%s/includes.html" % ctx['DJANGO_WYSIWYG_FLAVOR'],
ctx
)
@register.simple_tag
def wysiwyg_editor(field_id, editor_name=None, config=None):
"""
Turn the textarea #field_id into a rich editor. If you do not specify the
JavaScript name of the editor, it will be derived from the field_id.
If you don't specify the editor_name then you'll have a JavaScript object
named "<field_id>_editor" in the global namespace. We give you control of
    this in case you have a complex JS environment.
"""
if not editor_name:
editor_name = "%s_editor" % field_id
ctx = {
'field_id': field_id,
'editor_name': editor_name,
'config': config
}
ctx.update(get_settings())
return render_to_string(
"django_wysiwyg/%s/editor_instance.html" % ctx['DJANGO_WYSIWYG_FLAVOR'],
ctx
)
@register.simple_tag
def wysiwyg_static_url(appname, prefix, default_path):
"""
    Automatically use a prefix if a given application is installed.
    For example, if django-ckeditor is installed, use its STATIC_URL/ckeditor folder to find the CKEditor distribution.
    When the application is not available, fall back to the default path.
This is a function for the internal templates of *django-wysiwyg*.
"""
if appname in settings.INSTALLED_APPS:
return urljoin(settings.STATIC_URL, prefix)
else:
return default_path
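# Template usage sketch (an assumption based on the tags above):
#   {% load wysiwyg %}
#   {% wysiwyg_setup %}
#   <textarea id="id_body"></textarea>
#   {% wysiwyg_editor "id_body" %}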
| saydulk/django-wysiwyg | django_wysiwyg/templatetags/wysiwyg.py | Python | mit | 2,413 | 0.002072 |
import fsui
from fsgamesys.context import fsgs
from launcher.i18n import gettext
from launcher.launcher_config import LauncherConfig
class CustomOptionsPage(fsui.Panel):
def __init__(self, parent):
fsui.Panel.__init__(self, parent)
self.layout = fsui.VerticalLayout()
label = fsui.MultiLineLabel(
self,
gettext(
"You can write key = value pairs here to set FS-UAE options "
"not currently supported by the user interface. This is only a "
"temporary feature until the GUI supports all options "
"directly."
),
760,
)
self.layout.add(label, fill=True, margin_bottom=10)
label = fsui.MultiLineLabel(
self,
gettext(
"The options specified here will apply to this configuration "
"only."
),
760,
)
self.layout.add(label, fill=True, margin_bottom=10)
hor_layout = fsui.HorizontalLayout()
self.layout.add(hor_layout, fill=True, expand=True)
# hor_layout.add_spacer(20)
self.text_area = fsui.TextArea(self, font_family="monospace")
self.text_area.set_min_width(760)
self.text_area.set_min_height(400)
self.text_area.set_text(initial_text())
hor_layout.add(self.text_area, fill=True, expand=True)
# hor_layout.add_spacer(20)
# self.layout.add_spacer(20)
self.get_window().add_close_listener(self.on_close_window)
def on_close_window(self):
self.update_config()
def on_close_button(self):
self.end_modal(0)
def update_config(self):
text = self.text_area.get_text()
update_config = {}
# First mark all unknown config values as cleared
for key in list(fsgs.config.values.keys()):
if key not in LauncherConfig.default_config:
update_config[key] = ""
# Then we overwrite with specific values
for line in text.split("\n"):
line = line.strip()
parts = line.split("=", 1)
if len(parts) == 2:
key = parts[0].strip()
# if key in Config.no_custom_config:
# continue
value = parts[1].strip()
update_config[key] = value
# Finally, set everything at once
LauncherConfig.set_multiple(update_config.items())
def initial_text():
text = []
keys = fsgs.config.values.keys()
for key in sorted(keys):
# FIXME: Move to LauncherConfig as a method, maybe use
# is_custom_option.
if key in LauncherConfig.no_custom_config:
continue
if key.startswith("__implicit_"):
continue
value = fsgs.config.values[key]
if not value:
continue
text.append("{0} = {1}\n".format(key, value))
return "".join(text)
| FrodeSolheim/fs-uae-launcher | launcher/ui/config/CustomOptionsPage.py | Python | gpl-2.0 | 2,973 | 0.000336 |
import flask
import keras
import numpy as np
import os
from keras.models import load_model
from PIL import Image
from flask import Flask, request
from jinja2 import Template
app = Flask(__name__)
model = load_model('smile.h5')
model._make_predict_function()
def predict_image(image):
image = image.convert(mode="L")
image = image.resize((32, 32))
im = np.asarray(image)
im = im.reshape(1, 32, 32, 1)
    pred = model.predict(im)
    # predict() returns an array of shape (1, n); return the single row so the
    # class probabilities can be indexed as pred[0]/pred[1] below
    # (assumes a two-output model, matching the template labels).
    return pred[0]
@app.route("/predict", methods=["POST"])
def predict():
f = request.files['file']
image = Image.open(f.stream)
pred = predict_image(image)
template = Template("""
<html>
<body>
<p>Probability of Smiling: {{smile_prob}}</p>
<p>Probability of Not Smiling: {{no_smile_prob}}</p>
</body>
</html>
""")
return template.render(smile_prob=pred[0], no_smile_prob=pred[1])
@app.route("/")
def index():
html = """
<html>
<body>
<form action="predict" method="POST" enctype="multipart/form-data">
<input type="file" name="file" accept="image/*;capture=camera">
<input type="submit"/>
</form>
</body>
</html>
"""
return(html)
if __name__ == '__main__' and not os.getenv("FLASK_DEBUG"):
app.run(port=8080)
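# Example request (an assumption: server running locally on the port above):
#   curl -F "file=@face.jpg" http://localhost:8080/predict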
| lukas/ml-class | examples/keras-smile/smile-server-1.py | Python | gpl-2.0 | 1,356 | 0 |
import re
import sqlite3
import csv
import ast
import os
import sys
import fnmatch
import datetime
import xlrd
import win32com.client
def question_marks(st):
    # One SQL '?' placeholder per comma-separated column name.
    return ",".join(["?"] * len(st.split(",")))
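# e.g. question_marks("a,b,c") -> "?,?,?"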
def xlsx_to_arr(xlsx_file, worksheet=0, row_start=0, col_start=0, row_end=-1, col_end=-1):
arr = []
wb = xlrd.open_workbook(xlsx_file)
ws = wb.sheet_by_index(worksheet)
row_end = ws.nrows if row_end == -1 else row_end
col_end = ws.ncols if col_end == -1 else col_end
    arr = [ws.row_values(row, start_colx=col_start, end_colx=col_end-1) for row in range(row_start, row_end)]  # note: xlrd's end_colx is exclusive, so the final column is excluded here
header = ','.join(arr[0])
return re.sub(r"[\*\.#/\$%\"\(\)&\- ]", "", header), arr[1:]
def csv_to_arr(csv_file, start=0, end=0, has_header=True, delim=',', ignore_col=""):
arr = []
with open(csv_file, 'rU') as f:
reader = csv.reader(f, delimiter=delim)
arr = list(reader)
header = ""
if has_header:
header = ','.join(arr[start])
if end == 0:
arr = arr[start+1:]
else:
arr = arr[start+1:end]
return re.sub(r"[\*\.#/\$%\d\" ]", "", header), arr
else:
return arr[start:]
return
def arr_to_csv(file_name, header, data_arr):
csv_file = open(file_name, 'wb')
wr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
wr.writerow(header.split(','))
for data_row in data_arr:
line = []
for ele in data_row:
line.append(str(ele))
wr.writerow(line)
csv_file.close()
return
def arrs_to_xlsx(filename, header=[], arr=[]):
i = 1
xl = win32com.client.Dispatch('Excel.Application')
wb = xl.Workbooks.Add()
for x in range(0, len(header)):
ws = wb.Worksheets(x+1)
for i, cell in enumerate(header[x].split(',')):
ws.Cells(1,i+1).Value = cell
for i, row in enumerate(arr[x]):
for j, cell in enumerate(row):
ws.Cells(i+2,j+1).Value = str(cell)
wb.Worksheets(1).Columns.AutoFit()
wb.Worksheets(1).UsedRange.FormatConditions.Add(win32com.client.constants.xlExpression, "", '=OR(AND(ISNUMBER($C1),$C1<>$D1),AND(ISNUMBER($E1),$E1<>$F1))')
wb.Worksheets(1).UsedRange.FormatConditions(1).Interior.ColorIndex = 6
wb.Worksheets(1).UsedRange.FormatConditions(1).StopIfTrue = False
wb.Worksheets(1).Columns("C:F").NumberFormat = "#,##0_);[Red](#,##0);0;@"
xl.DisplayAlerts = False
wb.SaveAs(filename)
xl.DisplayAlerts = True
wb.Close(True)
return
def db_cur(source = ":memory:"):
conn = sqlite3.connect(source, detect_types=sqlite3.PARSE_DECLTYPES)
# conn.row_factory = sqlite3.Row
cur = conn.cursor()
return conn, cur
def create_tbl(cur, tbl_name, header, arr = [], index_arr = []):
cur.execute("""select count(*) FROM sqlite_master WHERE type='table' AND name = '%s' """ % (tbl_name))
tbl_exists = cur.fetchone()
if tbl_exists[0] == 0:
cur.execute("CREATE TABLE " + tbl_name + " (" + header.replace("id,", "id PRIMARY KEY,") + " );")
for index in index_arr:
cur.execute("CREATE INDEX " + tbl_name + "_" + index + " ON " + tbl_name + " (" + index + ");")
if arr != []:
cur.executemany("INSERT INTO " + tbl_name + " VALUES ("+question_marks(header)+")", arr)
return
def files_lookup(tgt_dir, pattern, recur_list=False, sub_folder=False, most_recent=True):
filepath_arr = []
for fi in os.listdir(tgt_dir):
full_path = os.path.join(tgt_dir, fi)
if sub_folder and os.path.isdir(full_path):
filepath_arr += files_lookup(full_path, pattern, recur_list, sub_folder, most_recent)
if fnmatch.fnmatch(fi, pattern):
filepath_arr.append(full_path)
filepath_arr.sort(reverse=most_recent)
if recur_list:
return filepath_arr
else:
return filepath_arr[0]
def recon_sbl(cur):
sbl_header = "Contract,CPCode,Client,StockCode,G1 O/S,FA O/S,G1 Pending,FA Pending"
create_tbl(cur, "g1_inv", "SBLCODE,CPTY,STOCK,OS,PD")
create_tbl(cur, "fa_inv", "EXT,DESK,STOCK,OS,PD")
cur.execute("""
insert into fa_inv
select ExternalReference, ClientCode, StockCode, sum(case when date('now') > ValueDate then Qty else 0 end), sum(case when date('now') <= ValueDate then Qty else 0 end)
from fasbl
group by ExternalReference, ClientCode, StockCode
""")
cur.execute("""
insert into g1_inv
select SBLCODE, business, STOCK||' HK Equity', sum(case when source = 'os' then QTY else 0 end), sum(case when source = 'pd' then QTY else 0 end)
from (
select sblmap.SBLCode as SBLCODE, sblmap.Name as business, cast(STOCK as int) as STOCK, case when PTYPE = 'B' then -QTY else QTY end as QTY, 'os' as source
from os join sblmap on os.CPTY = sblmap.SBLCode
where cast(STOCK as int) <> 0
UNION ALL
select sblmap.SBLCode as SBLCODE, sblmap.Name as business, cast(STOCK as int) as STOCK, case when (BL = 'L' and STATUS = 'R') or (BL = 'B' and STATUS = 'L') then -QTY else QTY end as QTY, 'pd' as source
from pd join sblmap on pd.CPTY = sblmap.SBLCode
where cast(STOCK as int) <> 0
) aggrg
where STOCK <> ''
group by business, STOCK
""")
cur.execute("""
select EXT, SBLCode, CPTY, STOCK, sbl_os, fa_os, sbl_pd, fa_pd
from (
select EXT, SBLCODE, g1_inv.CPTY as CPTY, g1_inv.STOCK as STOCK, g1_inv.OS as sbl_os, ifnull(fa_inv.OS, 0) as fa_os, g1_inv.PD as sbl_pd, ifnull(fa_inv.PD, 0) as fa_pd
from g1_inv left join fa_inv
on g1_inv.CPTY = fa_inv.DESK
and g1_inv.STOCK = fa_inv.STOCK
union
select EXT, SBLCODE, fa_inv.DESK as CPTY, fa_inv.STOCK as STOCK, ifnull(g1_inv.OS, 0) as sbl_os, fa_inv.OS as fa_os, ifnull(g1_inv.PD, 0) as sbl_pd, fa_inv.PD as fa_pd
from fa_inv left join g1_inv
on g1_inv.CPTY = fa_inv.DESK
and g1_inv.STOCK = fa_inv.STOCK
) consol
where sbl_os <> 0 or fa_os <> 0 or sbl_pd <> 0 or fa_pd <> 0
""")
sbl_arr = cur.fetchall()
# for row in sbl_arr:
# print row
return sbl_header, sbl_arr
def conv_xl_dt(xl_dt):
dt = datetime.datetime.fromordinal(datetime.datetime(1900, 1, 1).toordinal() + int(xl_dt) - 2).date().strftime("%Y-%m-%d")
# tt = dt.timetuple()
return dt
def conv_xl_dt_arr(arr, cols):
return [ [ conv_xl_dt(ele) if idx in cols else ele for idx, ele in enumerate(row) ] for row in arr ]
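# e.g. conv_xl_dt(43831) -> "2020-01-01" (Excel serial dates, 1900 date system)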
def main():
conn, cur = db_cur()
pb_dir = os.path.dirname(os.path.abspath(__file__))
# pb_dir = "\\\\p7fs0003\\nd\\3033-Horizon-FA-Share\\PB_DeltaOne\\Daily_Data"
sbl_dir = os.path.dirname(os.path.abspath(__file__))
# sbl_dir = "\\\\P7FS0001\\ED\\SBL\\Reports\\Daily SBL Report\\ReportData"
output_dir = "\\\\p7fs0003\\nd\\3033-Horizon-FA-Share\\PB_DeltaOne\\SBL FA Deltaone Recon"
sblmap_file = files_lookup(pb_dir, "ClientDetails_????????.xlsx")
fasbl_file = files_lookup(pb_dir, "RepoSBLTrade_????????.xlsx")
os_file = files_lookup(sbl_dir, "OS_Trades_Extract_*.CSV")
pd_file = files_lookup(sbl_dir, "Pending_Trades_Extract_*.CSV")
print (sblmap_file)
print (fasbl_file)
print (os_file)
print (pd_file)
trd_date = sblmap_file[-13:-5]
inv_file = os.path.join(output_dir, "FA_G1_SBL_recon_"+trd_date+".xlsx")
sblmap_header, sblmap_arr = xlsx_to_arr(sblmap_file, row_start=1)
sblmap_header = sblmap_header.replace("ClientId", "ClientId1", 1)
fasbl_header, fasbl_arr = xlsx_to_arr(fasbl_file, row_start=1)
fasbl_arr = conv_xl_dt_arr(fasbl_arr, [3, 4])
os_header, os_arr = csv_to_arr(os_file, 1, -1, True, '\t')
pd_header, pd_arr = csv_to_arr(pd_file, 1, -1, True, '\t')
pd_header = pd_header.replace("BL","B_L",1)
create_tbl(cur, "sblmap", sblmap_header, sblmap_arr)
create_tbl(cur, "os", os_header, os_arr)
create_tbl(cur, "pd", pd_header, pd_arr)
create_tbl(cur, "fasbl", fasbl_header, fasbl_arr)
sbl_header, sbl_arr = recon_sbl(cur)
arrs_to_xlsx(inv_file, [sbl_header], [sbl_arr])
return
if __name__ == "__main__":
print ("D1 G1 SBL Recon")
try:
main()
except KeyboardInterrupt:
print ("Ctrl+C pressed. Stopping...") | frederick623/pb | deltaone/d1_sbl_recon.py | Python | apache-2.0 | 7,739 | 0.030236 |
import os
import unittest
from vsg.rules import ieee
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_500_test_input.vhd'))
lExpected_lower = []
lExpected_lower.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_lower.vhd'), lExpected_lower)
lExpected_upper = []
lExpected_upper.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_upper.vhd'), lExpected_upper)
class test_port_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
def test_rule_500_lower(self):
oRule = ieee.rule_500()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
self.assertEqual(oRule.groups, ['case', 'case::keyword'])
lExpected = [66, 67, 68, 69, 70]
lExpected.extend([73, 74, 76, 77, 78,79])
lExpected.extend(range(87, 89))
lExpected.extend([91])
lExpected.extend(range(93, 95))
lExpected.extend(range(100, 105))
lExpected.extend([107, 108, 110, 111, 112, 113])
oRule.analyze(self.oFile)
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
def test_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
lExpected = []
lExpected.extend(range(5, 10))
lExpected.extend([12, 13, 15, 16, 17, 18])
lExpected.extend(range(26, 28))
lExpected.extend([30])
lExpected.extend(range(32, 34))
lExpected.extend(range(39, 44))
lExpected.extend([46, 47, 49, 50, 51, 52])
oRule.analyze(self.oFile)
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
def test_fix_rule_500_lower(self):
oRule = ieee.rule_500()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_lower, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
def test_fix_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_upper, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
| jeremiah-c-leary/vhdl-style-guide | vsg/tests/ieee/test_rule_500.py | Python | gpl-3.0 | 2,648 | 0.003776 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from Tkinter import *
from ttk import *
class URLDialogView(object):
_LABEL_CONF = {'column': 0, 'padx': 5, 'pady': 5, 'sticky': W}
_ENTRY_CONF = {'padx': 5, 'pady': 5, 'sticky': E+W}
def __init__(self, parent, controller, data=None, edit=False):
self.parent = parent
self.edit = edit
# define callbacks for actions
self.submit_callback = controller.submit
self.cancel_callback = controller.cancel
# create elements
self._create_variables(data)
self._createWindow()
self._createWidgets()
def focus_set(self):
self.window.focus_set()
self.parent.wait_window(self.window)
def destroy(self):
"""Destroys the current window and returns the focus back to the
Treeview widget of the parent."""
self.parent.monitor_widget.focus_set()
self.window.destroy()
def _create_variables(self, data=None):
"""Create the variables and optionally set the data. Data retrieval is
only possible through these variables."""
self.url_var = StringVar()
self.label_var = StringVar()
self.user_var = StringVar()
self.password_var = StringVar()
if data is None:
return
keys = {'url': self.url_var, 'label': self.label_var}
for key, var in keys.iteritems():
if key in data and data[key] is not None:
var.set(data[key])
if 'auth' in data and data['auth'] is not None:
self.user_var.set(data['auth'][0])
self.password_var.set(data['auth'][1])
def _createWindow(self):
"""Create the main window for this dialog and set the instance variable
self.window."""
window = Toplevel(self.parent)
window.title('URL to watch')
window.transient(self.parent)
window.grab_set()
window.geometry("+%d+%d" % (self.parent.winfo_rootx()+50, self.parent.winfo_rooty()+50))
window.protocol("WM_DELETE_WINDOW", self.cancel_callback)
window.bind("<Return>", self.submit_callback)
window.bind("<Escape>", self.cancel_callback)
self.window = window
def _createWidgets(self):
"""Create all the widgets."""
# frame to pack everything
frame = Frame(self.window, padding=10)
frame.pack()
# define labels
Label(frame, text='URL', anchor=W).grid(self._LABEL_CONF)
Label(frame, text='Label').grid(self._LABEL_CONF)
Label(frame, text='User').grid(self._LABEL_CONF)
Label(frame, text='Password').grid(self._LABEL_CONF)
# entries
url = Entry(frame, width=75, textvariable=self.url_var)
url.grid(column=1, row=0, padx=5, pady=5)
if self.edit is True:
url['state'] = DISABLED
else:
url.focus_set()
url_label = Entry(frame, textvariable=self.label_var)
url_label.grid(column=1, row=1, **self._ENTRY_CONF)
if self.edit is True:
url_label.focus_set()
user = Entry(frame, textvariable=self.user_var)
user.grid(column=1, row=2, **self._ENTRY_CONF)
password = Entry(frame, show="*", textvariable=self.password_var)
password.grid(column=1, row=3, **self._ENTRY_CONF)
# define buttons
ok = Button(frame, text="OK", command=self.submit_callback)
ok.grid(column=0, sticky=W, padx=5, pady=5)
cancel = Button(frame, text="Cancel", command=self.cancel_callback)
cancel.grid(column=1, row=4, sticky=W, padx=5, pady=5)
import widget
class ApplicationView(Frame):
"""Class to handle the application window logic."""
def __init__(self, parent, controller):
Frame.__init__(self, parent)
self.pack()
self.monitor_widget = widget.URLMonitorWidget(self, controller)
self.monitor_widget.grid(column=0, padx=5, pady=5, sticky=E+W)
self.statusbar = widget.StatusBar(self)
self.statusbar.grid(column=0, sticky=E+W)
# callback and shortcuts
self.add_url_callback = controller.add_url
self.quit_app_callback = controller.quit_app
self._register_keyboard_shortcuts()
Style().theme_use('xpnative')
def _register_keyboard_shortcuts(self):
self.bind_all('<Control-KeyPress-n>', self.add_url_callback)
        self.bind_all('<Control-Shift-KeyPress-C>', self.quit_app_callback)
| fzuellich/urlmonitor | view.py | Python | gpl-3.0 | 4,690 | 0.002559 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
here = lambda *a: os.path.join(os.path.dirname(__file__), *a)
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
readme = open(here('README.md')).read()
requirements = [x.strip() for x in open(here('requirements.txt')).readlines()]
setup(name='python-tado',
version='0.2.9',
      description='PyTado from chrism0dwk, modified by w.malgadey',
long_description=readme,
keywords='tado',
author='chrism0dwk, w.malgadey',
author_email='chrism0dwk@gmail.com, w.malgadey@gmail.com',
url='https://github.com/wmalgadey/PyTado',
install_requires=requirements,
license="GPL3",
zip_safe=False,
platforms=["any"],
packages=find_packages(),
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Home Automation',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.5'
],
entry_points={
'console_scripts': [
'pytado = pytado.__main__:main'
]
},
)
| chrism0dwk/PyTado | setup.py | Python | gpl-3.0 | 1,442 | 0.001387 |
#!/usr/bin/env python
"""
This script is mainly for running autotests on the build server, however, it
can also be used by engineers to run the tests locally on their machines.
It takes as optional parameters the path to the folder containing the test
executables (which must have names ending in _tests), and a list of tests that
need to be skipped, this list must be comma separated and contain no spaces. E.g.:
./run_desktop_tests.py -f ./omim-build-release -e drape_tests,some_other_tests
The script outputs the console output of the tests. It also checks the error
code of each test suite, and after all the tests are executed, it prints the
list of the failed tests, passed tests, skipped tests and tests that could not
be found, i.e. the tests that were specified in the skip list, but do not exist.
"""
from __future__ import print_function
from optparse import OptionParser
from os import listdir, remove
from random import shuffle
import random
import socket
import subprocess
import testserver
import time
import urllib2
import logging
TO_RUN = "to_run"
SKIP = "skip"
NOT_FOUND = "not_found"
FAILED = "failed"
PASSED = "passed"
WITH_SERVER = "with_server"
PORT = 34568
TESTS_REQUIRING_SERVER = ["downloader_tests", "storage_tests"]
class TestRunner:
def print_pretty(self, result, tests):
if not tests:
return
logging.info("\n{result}".format(result=result.upper()))
for test in tests:
logging.info("- {test}".format(test=test))
def set_global_vars(self):
parser = OptionParser()
parser.add_option("-o", "--output", dest="output", default="testlog.log", help="resulting log file. Default testlog.log")
parser.add_option("-f", "--folder", dest="folder", default="omim-build-release/out/release", help="specify the folder where the tests reside (absolute path or relative to the location of this script)")
parser.add_option("-d", "--data_path", dest="data_path", help="Path to data files (passed to the test executables as --data_path=<value>)")
parser.add_option("-u", "--user_resource_path", dest="resource_path", help="Path to resources, styles and classificators (passed to the test executables as --user_resource_path=<value>)")
parser.add_option("-i", "--include", dest="runlist", action="append", default=[], help="Include test into execution, comma separated list with no spaces or individual tests, or both. E.g.: -i one -i two -i three,four,five")
parser.add_option("-e", "--exclude", dest="skiplist", action="append", default=[], help="Exclude test from execution, comma separated list with no spaces or individual tests, or both. E.g.: -i one -i two -i three,four,five")
parser.add_option("-b", "--boost_tests", dest="boost_tests", action="store_true", default=False, help="Treat all the tests as boost tests (their output is different and it must be processed differently).")
parser.add_option("-k", "--keep_alive", dest="keep_alive", action="store_true", default=False, help="Keep the server alive after the end of the test. Because the server sometimes fails to start, this reduces the probability of false test failures on CI servers.")
(options, args) = parser.parse_args()
self.skiplist = set()
self.runlist = list()
for tests in options.skiplist:
for test in tests.split(","):
self.skiplist.add(test)
for tests in options.runlist:
self.runlist.extend(tests.split(","))
self.boost_tests = options.boost_tests
if self.runlist:
            logging.warn("-i option found, the -e option will be ignored")
self.workspace_path = options.folder
self.logfile = options.output
self.data_path = (" --data_path={0}".format(options.data_path) if options.data_path else "")
self.user_resource_path = (" --user_resource_path={0}".format(options.resource_path) if options.resource_path else "")
self.keep_alive = options.keep_alive
def start_server(self):
server = testserver.TestServer()
server.start_serving()
time.sleep(3)
def stop_server(self):
if self.keep_alive:
return
try:
urllib2.urlopen('http://localhost:{port}/kill'.format(port=PORT), timeout=5)
except (urllib2.URLError, socket.timeout):
logging.info("Failed to stop the server...")
def categorize_tests(self):
tests_to_run = list()
local_skiplist = list()
not_found = list()
test_files_in_dir = filter(lambda x: x.endswith("_tests"), listdir(self.workspace_path))
on_disk = lambda x: x in test_files_in_dir
not_on_disk = lambda x : not on_disk(x)
if not self.runlist:
local_skiplist = filter(on_disk, self.skiplist)
not_found = filter(not_on_disk, self.skiplist)
tests_to_run = filter(lambda x: x not in local_skiplist, test_files_in_dir)
else:
tests_to_run = filter(on_disk, self.runlist)
shuffle(tests_to_run)
not_found = filter(not_on_disk, self.runlist)
# now let's move the tests that need a server either to the beginning or the end of the tests_to_run list
tests_with_server = list(TESTS_REQUIRING_SERVER)
for test in TESTS_REQUIRING_SERVER:
if test in tests_to_run:
tests_to_run.remove(test)
else:
tests_with_server.remove(test)
return {TO_RUN:tests_to_run, SKIP:local_skiplist, NOT_FOUND:not_found, WITH_SERVER:tests_with_server}
def test_file_with_keys(self, test_file):
boost_keys = " --report_format=xml --report_level=detailed --log_level=test_suite --log_format=xml " if self.boost_tests else ""
return "{test_file}{boost_keys}{data}{resources}".format(test_file=test_file, boost_keys=boost_keys, data=self.data_path, resources=self.user_resource_path)
def run_tests(self, tests_to_run):
failed = list()
passed = list()
for test_file in tests_to_run:
self.log_exec_file(test_file)
test_file_with_keys = self.test_file_with_keys(test_file)
logging.info(test_file_with_keys)
process = subprocess.Popen("{tests_path}/{test_file} 2>> {logfile}".
format(tests_path=self.workspace_path, test_file=test_file_with_keys, logfile=self.logfile),
shell=True,
stdout=subprocess.PIPE)
logging.info("Pid: {0}".format(process.pid))
process.wait()
if process.returncode > 0:
failed.append(test_file)
else:
passed.append(test_file)
self.log_exec_file(test_file, result=process.returncode)
return {FAILED: failed, PASSED: passed}
def log_exec_file(self, filename, result=None):
if self.boost_tests:
return
logstring = "BEGIN" if result is None else "END" #can be 0 or None. If we omit the explicit check for None, we get wrong result
resstring = (" | result: {returncode}".format(returncode=result) if result is not None else "")
with open(self.logfile, "a") as logf:
logf.write("\n{logstring}: {filename}{resstring}\n".format(logstring=logstring, filename=filename, resstring=resstring))
def rm_log_file(self):
try:
remove(self.logfile)
except OSError:
pass
def __init__(self):
self.set_global_vars()
self.rm_log_file()
def merge_dicts_of_lists(self, one, two):
if not one:
return two
if not two:
return one
ret = one.copy()
        for key, value in two.iteritems():
            if key in one:
                # list.append() returns None (and would nest the list);
                # concatenate to actually merge the two result lists.
                ret[key] = ret[key] + value
            else:
                ret[key] = value
return ret
def execute(self):
categorized_tests = self.categorize_tests()
to_run_and_with_server_keys = [TO_RUN, WITH_SERVER]
random.shuffle(to_run_and_with_server_keys)
results = dict()
for key in to_run_and_with_server_keys:
if key == WITH_SERVER and categorized_tests[WITH_SERVER]:
self.start_server()
results = self.merge_dicts_of_lists(results, self.run_tests(categorized_tests[key]))
if key == WITH_SERVER and categorized_tests[WITH_SERVER]:
self.stop_server()
self.print_pretty("failed", results[FAILED])
self.print_pretty("skipped", categorized_tests[SKIP])
self.print_pretty("passed", results[PASSED])
self.print_pretty("not found", categorized_tests[NOT_FOUND])
def tests_on_disk(path):
return filter(lambda x: x.endswith("_tests"), listdir(path))
if __name__ == "__main__":
runner = TestRunner()
runner.execute()
| stangls/omim | tools/run_desktop_tests.py | Python | apache-2.0 | 9,109 | 0.007355 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-argument
"""Tensor Expression for identity"""
from tvm import te
from .dma import read_compute, write_compute
def identity_compute(
ifm: te.Tensor,
lut: te.Tensor,
ifm_scale: float,
ifm_zero_point: int,
ofm_scale: float,
ofm_zero_point: int,
activation: str,
) -> te.Tensor:
"""A compute operator for the NPU identity operator.
Parameters
----------
ifm : te.Tensor
The Input Feature Map tensor (IFM).
lut : te.Tensor
The look-up table values to use if activation is "LUT", "TANH" or "SIGMOID".
ifm_scale : float
The quantization scale for the Input Feature Map tensor.
ifm_zero_point : int
The quantization zero point for the Input Feature Map tensor.
ofm_scale : float
The quantization scale for the Output Feature Map tensor.
ofm_zero_point : int
The quantization zero point for the Output Feature Map tensor.
activation : str
The activation function to use.
"NONE" - no activation function.
"TANH" - tanh activation function.
"SIGMOID" - sigmoid activation function.
"LUT" - use a look-up table to perform the activation function.
Returns
-------
te.Tensor
The Output Feature Map tensor.
"""
dmaed_ifm = read_compute(ifm, ifm_zero_point, ifm_scale)
id_attrs = {"op": "ethosu_identity", "activation": activation}
has_lut = activation in ("TANH", "LUT", "SIGMOID")
# This is a trick to insert the LUT tensor into the TE graph if LUT is present
lut_expr = (lut[0] + lut[255]).astype(ifm.dtype) if has_lut else 0
# Add the LUT tensor to the attributes to be able to later tell which tensor is the LUT
if has_lut:
id_attrs["lut"] = lut
identity = te.compute(
ifm.shape,
lambda *i: (dmaed_ifm(*i) + lut_expr).astype(ifm.dtype),
name="ethosu_identity",
attrs=id_attrs,
)
dmaed_ofm = write_compute(identity, ofm_zero_point, ofm_scale)
return dmaed_ofm
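# Usage sketch (an assumption; shapes, dtypes and scales are illustrative only):
#   ifm = te.placeholder((1, 8, 8, 4), dtype="int8", name="ifm")
#   lut = te.placeholder((256,), dtype="int8", name="lut")
#   ofm = identity_compute(ifm, lut, 1.0, 0, 1.0, 0, activation="NONE")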
| Laurawly/tvm-1 | python/tvm/relay/backend/contrib/ethosu/te/identity.py | Python | apache-2.0 | 2,862 | 0.001048 |
import time
import datetime
from MyPy.core.exceptions import (
Warning, Error, InterfaceError, DatabaseError, DataError, OperationalError,
IntegrityError, InternalError, ProgrammingError, NotSupportedError
)
from MyPy.constants import fieldtypes
apilevel = '2.0'
threadsafety = 1
paramstyle = 'format'
def Connect(*args, **kwargs):
from MyPy.core.connection import Connection
return Connection(*args, **kwargs)
connect = Connection = Connect
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DateFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
Binary = str
class DBAPITypeObject:
def __init__(self, *values):
self.values = values
def __cmp__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
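# PEP 249 type-object idiom (Python 2 __cmp__): e.g. `STRING == FIELD_TYPE_STRING`
# compares equal for any member type code of the groups defined below.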
STRING = DBAPITypeObject(fieldtypes.FIELD_TYPE_ENUM, fieldtypes.FIELD_TYPE_STRING,
fieldtypes.FIELD_TYPE_VAR_STRING)
BINARY = DBAPITypeObject(fieldtypes.FIELD_TYPE_BLOB, fieldtypes.FIELD_TYPE_LONG_BLOB,
fieldtypes.FIELD_TYPE_MEDIUM_BLOB, fieldtypes.FIELD_TYPE_TINY_BLOB)
NUMBER = DBAPITypeObject(fieldtypes.FIELD_TYPE_DECIMAL, fieldtypes.FIELD_TYPE_DOUBLE,
fieldtypes.FIELD_TYPE_FLOAT, fieldtypes.FIELD_TYPE_INT24,
fieldtypes.FIELD_TYPE_LONG, fieldtypes.FIELD_TYPE_LONGLONG,
fieldtypes.FIELD_TYPE_TINY, fieldtypes.FIELD_TYPE_YEAR)
DATETIME = DBAPITypeObject(fieldtypes.FIELD_TYPE_DATETIME, fieldtypes.FIELD_TYPE_TIMESTAMP)
ROWID = DBAPITypeObject()
__all__ = [
'Connect', 'Connection', 'connect', 'apilevel', 'threadsafety', 'paramstyle',
'Error', 'Warning', 'InterfaceError', 'DatabaseError', 'DataError',
'OperationalError', 'IntegrityError', 'InternalError', 'ProgrammingError',
'NotSupportedError', 'Date', 'Time', 'Timestamp', 'Binary', 'DateFromTicks',
'DateFromTicks', 'TimestampFromTicks', 'STRING', 'BINARY', 'NUMBER',
'DATETIME', 'ROWID',
]
| nasi/MyPy | MyPy/__init__.py | Python | bsd-3-clause | 2,276 | 0.010984 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class Marketplace(Resource):
"""An marketplace resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
:ivar billing_period_id: The id of the billing period resource that the
usage belongs to.
:vartype billing_period_id: str
:ivar usage_start: The start of the date time range covered by the usage
detail.
:vartype usage_start: datetime
:ivar usage_end: The end of the date time range covered by the usage
detail.
:vartype usage_end: datetime
:ivar resource_rate: The marketplace resource rate.
:vartype resource_rate: decimal.Decimal
:ivar offer_name: The type of offer.
:vartype offer_name: str
:ivar resource_group: The name of resource group.
:vartype resource_group: str
:ivar order_number: The order number.
:vartype order_number: str
:ivar instance_name: The name of the resource instance that the usage is
about.
:vartype instance_name: str
:ivar instance_id: The uri of the resource instance that the usage is
about.
:vartype instance_id: str
:ivar currency: The ISO currency in which the meter is charged, for
example, USD.
:vartype currency: str
:ivar consumed_quantity: The quantity of usage.
:vartype consumed_quantity: decimal.Decimal
:ivar unit_of_measure: The unit of measure.
:vartype unit_of_measure: str
:ivar pretax_cost: The amount of cost before tax.
:vartype pretax_cost: decimal.Decimal
:ivar is_estimated: The estimated usage is subject to change.
:vartype is_estimated: bool
:ivar meter_id: The meter id.
:vartype meter_id: str
:ivar subscription_guid: Subscription guid.
:vartype subscription_guid: str
:ivar subscription_name: Subscription name.
:vartype subscription_name: str
:ivar account_name: Account name.
:vartype account_name: str
:ivar department_name: Department name.
:vartype department_name: str
:ivar consumed_service: Consumed service name.
:vartype consumed_service: str
    :ivar cost_center: The cost center of this department, if it is a
     department and a cost center exists.
:vartype cost_center: str
:ivar additional_properties: Additional details of this usage item. By
default this is not populated, unless it's specified in $expand.
:vartype additional_properties: str
:ivar publisher_name: The name of publisher.
:vartype publisher_name: str
:ivar plan_name: The name of plan.
:vartype plan_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'tags': {'readonly': True},
'billing_period_id': {'readonly': True},
'usage_start': {'readonly': True},
'usage_end': {'readonly': True},
'resource_rate': {'readonly': True},
'offer_name': {'readonly': True},
'resource_group': {'readonly': True},
'order_number': {'readonly': True},
'instance_name': {'readonly': True},
'instance_id': {'readonly': True},
'currency': {'readonly': True},
'consumed_quantity': {'readonly': True},
'unit_of_measure': {'readonly': True},
'pretax_cost': {'readonly': True},
'is_estimated': {'readonly': True},
'meter_id': {'readonly': True},
'subscription_guid': {'readonly': True},
'subscription_name': {'readonly': True},
'account_name': {'readonly': True},
'department_name': {'readonly': True},
'consumed_service': {'readonly': True},
'cost_center': {'readonly': True},
'additional_properties': {'readonly': True},
'publisher_name': {'readonly': True},
'plan_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'billing_period_id': {'key': 'properties.billingPeriodId', 'type': 'str'},
'usage_start': {'key': 'properties.usageStart', 'type': 'iso-8601'},
'usage_end': {'key': 'properties.usageEnd', 'type': 'iso-8601'},
'resource_rate': {'key': 'properties.resourceRate', 'type': 'decimal'},
'offer_name': {'key': 'properties.offerName', 'type': 'str'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'order_number': {'key': 'properties.orderNumber', 'type': 'str'},
'instance_name': {'key': 'properties.instanceName', 'type': 'str'},
'instance_id': {'key': 'properties.instanceId', 'type': 'str'},
'currency': {'key': 'properties.currency', 'type': 'str'},
'consumed_quantity': {'key': 'properties.consumedQuantity', 'type': 'decimal'},
'unit_of_measure': {'key': 'properties.unitOfMeasure', 'type': 'str'},
'pretax_cost': {'key': 'properties.pretaxCost', 'type': 'decimal'},
'is_estimated': {'key': 'properties.isEstimated', 'type': 'bool'},
'meter_id': {'key': 'properties.meterId', 'type': 'str'},
'subscription_guid': {'key': 'properties.subscriptionGuid', 'type': 'str'},
'subscription_name': {'key': 'properties.subscriptionName', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
'department_name': {'key': 'properties.departmentName', 'type': 'str'},
'consumed_service': {'key': 'properties.consumedService', 'type': 'str'},
'cost_center': {'key': 'properties.costCenter', 'type': 'str'},
'additional_properties': {'key': 'properties.additionalProperties', 'type': 'str'},
'publisher_name': {'key': 'properties.publisherName', 'type': 'str'},
'plan_name': {'key': 'properties.planName', 'type': 'str'},
}
def __init__(self):
super(Marketplace, self).__init__()
self.billing_period_id = None
self.usage_start = None
self.usage_end = None
self.resource_rate = None
self.offer_name = None
self.resource_group = None
self.order_number = None
self.instance_name = None
self.instance_id = None
self.currency = None
self.consumed_quantity = None
self.unit_of_measure = None
self.pretax_cost = None
self.is_estimated = None
self.meter_id = None
self.subscription_guid = None
self.subscription_name = None
self.account_name = None
self.department_name = None
self.consumed_service = None
self.cost_center = None
self.additional_properties = None
self.publisher_name = None
self.plan_name = None
| lmazuel/azure-sdk-for-python | azure-mgmt-consumption/azure/mgmt/consumption/models/marketplace.py | Python | mit | 7,474 | 0.000803 |
from django.contrib import admin
# Register your models here.
from statistic import models
| shownotes/snotes20-restapi | statistic/admin.py | Python | agpl-3.0 | 91 | 0.010989 |
import os
import unittest
def load_tests(loader, standard_tests, pattern):
# top level directory cached on loader instance
this_dir = os.path.dirname(__file__)
pattern = pattern or "test*.py"
# We are inside unittest.test.testmock, so the top-level is three notches up
top_level_dir = os.path.dirname(os.path.dirname(os.path.dirname(this_dir)))
package_tests = loader.discover(start_dir=this_dir, pattern=pattern,
top_level_dir=top_level_dir)
standard_tests.addTests(package_tests)
return standard_tests
if __name__ == '__main__':
unittest.main()
| Orav/kbengine | kbe/src/lib/python/Lib/unittest/test/testmock/__main__.py | Python | lgpl-3.0 | 641 | 0.00156 |
from CommonServerPython import *
''' STANDALONE FUNCTION '''
def get_threat_indicator_list(args: Dict[str, Any]) -> list:
"""
Executes cofense-threat-indicator-list command for given arguments.
:type args: ``Dict[str, Any]``
:param args: The script arguments provided by the user.
:return: List of responses.
:rtype: ``list``
"""
# Fetch threat indicators based on threat value provided in the argument.
# cofense-threat-indicator-list command will enrich the information based on value.
threat_indicator = execute_command('cofense-threat-indicator-list',
{'threat_value': f"{args.get('threat_value')}"},
extract_contents=False)
# Populate response
return threat_indicator
''' MAIN FUNCTION '''
def main():
try:
return_results(get_threat_indicator_list(demisto.args()))
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute CofenseTriageThreatEnrichment. Error: {str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| VirusTotal/content | Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.py | Python | mit | 1,197 | 0.002506 |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_libfann', [dirname(__file__)])
except ImportError:
import _libfann
return _libfann
if fp is not None:
try:
_mod = imp.load_module('_libfann', fp, pathname, description)
finally:
fp.close()
return _mod
_libfann = swig_import_helper()
del swig_import_helper
else:
import _libfann
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
class SwigPyIterator(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _libfann.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
return _libfann.SwigPyIterator_value(self)
def incr(self, n=1):
return _libfann.SwigPyIterator_incr(self, n)
def decr(self, n=1):
return _libfann.SwigPyIterator_decr(self, n)
def distance(self, x):
return _libfann.SwigPyIterator_distance(self, x)
def equal(self, x):
return _libfann.SwigPyIterator_equal(self, x)
def copy(self):
return _libfann.SwigPyIterator_copy(self)
def next(self):
return _libfann.SwigPyIterator_next(self)
def __next__(self):
return _libfann.SwigPyIterator___next__(self)
def previous(self):
return _libfann.SwigPyIterator_previous(self)
def advance(self, n):
return _libfann.SwigPyIterator_advance(self, n)
def __eq__(self, x):
return _libfann.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
return _libfann.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
return _libfann.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
return _libfann.SwigPyIterator___isub__(self, n)
def __add__(self, n):
return _libfann.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
return _libfann.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _libfann.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
_libfann.ERRORFUNC_LINEAR_swigconstant(_libfann)
ERRORFUNC_LINEAR = _libfann.ERRORFUNC_LINEAR
_libfann.ERRORFUNC_TANH_swigconstant(_libfann)
ERRORFUNC_TANH = _libfann.ERRORFUNC_TANH
_libfann.STOPFUNC_MSE_swigconstant(_libfann)
STOPFUNC_MSE = _libfann.STOPFUNC_MSE
_libfann.STOPFUNC_BIT_swigconstant(_libfann)
STOPFUNC_BIT = _libfann.STOPFUNC_BIT
_libfann.TRAIN_INCREMENTAL_swigconstant(_libfann)
TRAIN_INCREMENTAL = _libfann.TRAIN_INCREMENTAL
_libfann.TRAIN_BATCH_swigconstant(_libfann)
TRAIN_BATCH = _libfann.TRAIN_BATCH
_libfann.TRAIN_RPROP_swigconstant(_libfann)
TRAIN_RPROP = _libfann.TRAIN_RPROP
_libfann.TRAIN_QUICKPROP_swigconstant(_libfann)
TRAIN_QUICKPROP = _libfann.TRAIN_QUICKPROP
_libfann.LINEAR_swigconstant(_libfann)
LINEAR = _libfann.LINEAR
_libfann.THRESHOLD_swigconstant(_libfann)
THRESHOLD = _libfann.THRESHOLD
_libfann.THRESHOLD_SYMMETRIC_swigconstant(_libfann)
THRESHOLD_SYMMETRIC = _libfann.THRESHOLD_SYMMETRIC
_libfann.SIGMOID_swigconstant(_libfann)
SIGMOID = _libfann.SIGMOID
_libfann.SIGMOID_STEPWISE_swigconstant(_libfann)
SIGMOID_STEPWISE = _libfann.SIGMOID_STEPWISE
_libfann.SIGMOID_SYMMETRIC_swigconstant(_libfann)
SIGMOID_SYMMETRIC = _libfann.SIGMOID_SYMMETRIC
_libfann.SIGMOID_SYMMETRIC_STEPWISE_swigconstant(_libfann)
SIGMOID_SYMMETRIC_STEPWISE = _libfann.SIGMOID_SYMMETRIC_STEPWISE
_libfann.GAUSSIAN_swigconstant(_libfann)
GAUSSIAN = _libfann.GAUSSIAN
_libfann.GAUSSIAN_SYMMETRIC_swigconstant(_libfann)
GAUSSIAN_SYMMETRIC = _libfann.GAUSSIAN_SYMMETRIC
_libfann.GAUSSIAN_STEPWISE_swigconstant(_libfann)
GAUSSIAN_STEPWISE = _libfann.GAUSSIAN_STEPWISE
_libfann.ELLIOT_swigconstant(_libfann)
ELLIOT = _libfann.ELLIOT
_libfann.ELLIOT_SYMMETRIC_swigconstant(_libfann)
ELLIOT_SYMMETRIC = _libfann.ELLIOT_SYMMETRIC
_libfann.LINEAR_PIECE_swigconstant(_libfann)
LINEAR_PIECE = _libfann.LINEAR_PIECE
_libfann.LINEAR_PIECE_SYMMETRIC_swigconstant(_libfann)
LINEAR_PIECE_SYMMETRIC = _libfann.LINEAR_PIECE_SYMMETRIC
_libfann.SIN_SYMMETRIC_swigconstant(_libfann)
SIN_SYMMETRIC = _libfann.SIN_SYMMETRIC
_libfann.COS_SYMMETRIC_swigconstant(_libfann)
COS_SYMMETRIC = _libfann.COS_SYMMETRIC
_libfann.LAYER_swigconstant(_libfann)
LAYER = _libfann.LAYER
_libfann.SHORTCUT_swigconstant(_libfann)
SHORTCUT = _libfann.SHORTCUT
class training_data_parent(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, training_data_parent, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, training_data_parent, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _libfann.new_training_data_parent(*args)
try:
self.this.append(this)
except:
self.this = this
__swig_destroy__ = _libfann.delete_training_data_parent
__del__ = lambda self: None
def destroy_train(self):
return _libfann.training_data_parent_destroy_train(self)
def read_train_from_file(self, filename):
return _libfann.training_data_parent_read_train_from_file(self, filename)
def save_train(self, filename):
return _libfann.training_data_parent_save_train(self, filename)
def save_train_to_fixed(self, filename, decimal_point):
return _libfann.training_data_parent_save_train_to_fixed(self, filename, decimal_point)
def shuffle_train_data(self):
return _libfann.training_data_parent_shuffle_train_data(self)
def merge_train_data(self, data):
return _libfann.training_data_parent_merge_train_data(self, data)
def length_train_data(self):
return _libfann.training_data_parent_length_train_data(self)
def num_input_train_data(self):
return _libfann.training_data_parent_num_input_train_data(self)
def num_output_train_data(self):
return _libfann.training_data_parent_num_output_train_data(self)
def get_input(self):
return _libfann.training_data_parent_get_input(self)
def get_output(self):
return _libfann.training_data_parent_get_output(self)
def set_train_data(self, num_data, num_input, input, num_output, output):
return _libfann.training_data_parent_set_train_data(self, num_data, num_input, input, num_output, output)
def create_train_from_callback(self, num_data, num_input, num_output, user_function):
return _libfann.training_data_parent_create_train_from_callback(self, num_data, num_input, num_output, user_function)
def scale_input_train_data(self, new_min, new_max):
return _libfann.training_data_parent_scale_input_train_data(self, new_min, new_max)
def scale_output_train_data(self, new_min, new_max):
return _libfann.training_data_parent_scale_output_train_data(self, new_min, new_max)
def scale_train_data(self, new_min, new_max):
return _libfann.training_data_parent_scale_train_data(self, new_min, new_max)
def subset_train_data(self, pos, length):
return _libfann.training_data_parent_subset_train_data(self, pos, length)
training_data_parent_swigregister = _libfann.training_data_parent_swigregister
training_data_parent_swigregister(training_data_parent)
class neural_net_parent(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, neural_net_parent, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, neural_net_parent, name)
__repr__ = _swig_repr
def __init__(self):
this = _libfann.new_neural_net_parent()
try:
self.this.append(this)
except:
self.this = this
__swig_destroy__ = _libfann.delete_neural_net_parent
__del__ = lambda self: None
def destroy(self):
return _libfann.neural_net_parent_destroy(self)
def create_standard(self, num_layers):
return _libfann.neural_net_parent_create_standard(self, num_layers)
def create_standard_array(self, num_layers, layers):
return _libfann.neural_net_parent_create_standard_array(self, num_layers, layers)
def create_sparse(self, connection_rate, num_layers):
return _libfann.neural_net_parent_create_sparse(self, connection_rate, num_layers)
def create_sparse_array(self, connection_rate, num_layers, layers):
return _libfann.neural_net_parent_create_sparse_array(self, connection_rate, num_layers, layers)
def create_shortcut(self, num_layers):
return _libfann.neural_net_parent_create_shortcut(self, num_layers)
def create_shortcut_array(self, num_layers, layers):
return _libfann.neural_net_parent_create_shortcut_array(self, num_layers, layers)
def run(self, input):
return _libfann.neural_net_parent_run(self, input)
def randomize_weights(self, min_weight, max_weight):
return _libfann.neural_net_parent_randomize_weights(self, min_weight, max_weight)
def init_weights(self, data):
return _libfann.neural_net_parent_init_weights(self, data)
def print_connections(self):
return _libfann.neural_net_parent_print_connections(self)
def create_from_file(self, configuration_file):
return _libfann.neural_net_parent_create_from_file(self, configuration_file)
def save(self, configuration_file):
return _libfann.neural_net_parent_save(self, configuration_file)
def save_to_fixed(self, configuration_file):
return _libfann.neural_net_parent_save_to_fixed(self, configuration_file)
def train(self, input, desired_output):
return _libfann.neural_net_parent_train(self, input, desired_output)
def train_epoch(self, data):
return _libfann.neural_net_parent_train_epoch(self, data)
def train_on_data(self, data, max_epochs, epochs_between_reports, desired_error):
return _libfann.neural_net_parent_train_on_data(self, data, max_epochs, epochs_between_reports, desired_error)
def train_on_file(self, filename, max_epochs, epochs_between_reports, desired_error):
return _libfann.neural_net_parent_train_on_file(self, filename, max_epochs, epochs_between_reports, desired_error)
def test(self, input, desired_output):
return _libfann.neural_net_parent_test(self, input, desired_output)
def test_data(self, data):
return _libfann.neural_net_parent_test_data(self, data)
def get_MSE(self):
return _libfann.neural_net_parent_get_MSE(self)
def reset_MSE(self):
return _libfann.neural_net_parent_reset_MSE(self)
def set_callback(self, callback, user_data):
return _libfann.neural_net_parent_set_callback(self, callback, user_data)
def print_parameters(self):
return _libfann.neural_net_parent_print_parameters(self)
def get_training_algorithm(self):
return _libfann.neural_net_parent_get_training_algorithm(self)
def set_training_algorithm(self, training_algorithm):
return _libfann.neural_net_parent_set_training_algorithm(self, training_algorithm)
def get_learning_rate(self):
return _libfann.neural_net_parent_get_learning_rate(self)
def set_learning_rate(self, learning_rate):
return _libfann.neural_net_parent_set_learning_rate(self, learning_rate)
def get_activation_function(self, layer, neuron):
return _libfann.neural_net_parent_get_activation_function(self, layer, neuron)
def set_activation_function(self, activation_function, layer, neuron):
return _libfann.neural_net_parent_set_activation_function(self, activation_function, layer, neuron)
def set_activation_function_layer(self, activation_function, layer):
return _libfann.neural_net_parent_set_activation_function_layer(self, activation_function, layer)
def set_activation_function_hidden(self, activation_function):
return _libfann.neural_net_parent_set_activation_function_hidden(self, activation_function)
def set_activation_function_output(self, activation_function):
return _libfann.neural_net_parent_set_activation_function_output(self, activation_function)
def get_activation_steepness(self, layer, neuron):
return _libfann.neural_net_parent_get_activation_steepness(self, layer, neuron)
def set_activation_steepness(self, steepness, layer, neuron):
return _libfann.neural_net_parent_set_activation_steepness(self, steepness, layer, neuron)
def set_activation_steepness_layer(self, steepness, layer):
return _libfann.neural_net_parent_set_activation_steepness_layer(self, steepness, layer)
def set_activation_steepness_hidden(self, steepness):
return _libfann.neural_net_parent_set_activation_steepness_hidden(self, steepness)
def set_activation_steepness_output(self, steepness):
return _libfann.neural_net_parent_set_activation_steepness_output(self, steepness)
def get_train_error_function(self):
return _libfann.neural_net_parent_get_train_error_function(self)
def set_train_error_function(self, train_error_function):
return _libfann.neural_net_parent_set_train_error_function(self, train_error_function)
def get_quickprop_decay(self):
return _libfann.neural_net_parent_get_quickprop_decay(self)
def set_quickprop_decay(self, quickprop_decay):
return _libfann.neural_net_parent_set_quickprop_decay(self, quickprop_decay)
def get_quickprop_mu(self):
return _libfann.neural_net_parent_get_quickprop_mu(self)
def set_quickprop_mu(self, quickprop_mu):
return _libfann.neural_net_parent_set_quickprop_mu(self, quickprop_mu)
def get_rprop_increase_factor(self):
return _libfann.neural_net_parent_get_rprop_increase_factor(self)
def set_rprop_increase_factor(self, rprop_increase_factor):
return _libfann.neural_net_parent_set_rprop_increase_factor(self, rprop_increase_factor)
def get_rprop_decrease_factor(self):
return _libfann.neural_net_parent_get_rprop_decrease_factor(self)
def set_rprop_decrease_factor(self, rprop_decrease_factor):
return _libfann.neural_net_parent_set_rprop_decrease_factor(self, rprop_decrease_factor)
def get_rprop_delta_min(self):
return _libfann.neural_net_parent_get_rprop_delta_min(self)
def set_rprop_delta_min(self, rprop_delta_min):
return _libfann.neural_net_parent_set_rprop_delta_min(self, rprop_delta_min)
def get_rprop_delta_max(self):
return _libfann.neural_net_parent_get_rprop_delta_max(self)
def set_rprop_delta_max(self, rprop_delta_max):
return _libfann.neural_net_parent_set_rprop_delta_max(self, rprop_delta_max)
def get_num_input(self):
return _libfann.neural_net_parent_get_num_input(self)
def get_num_output(self):
return _libfann.neural_net_parent_get_num_output(self)
def get_total_neurons(self):
return _libfann.neural_net_parent_get_total_neurons(self)
def get_total_connections(self):
return _libfann.neural_net_parent_get_total_connections(self)
def get_network_type(self):
return _libfann.neural_net_parent_get_network_type(self)
def get_connection_rate(self):
return _libfann.neural_net_parent_get_connection_rate(self)
def get_num_layers(self):
return _libfann.neural_net_parent_get_num_layers(self)
def get_layer_array(self, layers):
return _libfann.neural_net_parent_get_layer_array(self, layers)
def get_bias_array(self, bias):
return _libfann.neural_net_parent_get_bias_array(self, bias)
def get_connection_array(self, connections):
return _libfann.neural_net_parent_get_connection_array(self, connections)
def set_weight_array(self, connections, num_connections):
return _libfann.neural_net_parent_set_weight_array(self, connections, num_connections)
def set_weight(self, from_neuron, to_neuron, weight):
return _libfann.neural_net_parent_set_weight(self, from_neuron, to_neuron, weight)
def get_learning_momentum(self):
return _libfann.neural_net_parent_get_learning_momentum(self)
def set_learning_momentum(self, learning_momentum):
return _libfann.neural_net_parent_set_learning_momentum(self, learning_momentum)
def get_train_stop_function(self):
return _libfann.neural_net_parent_get_train_stop_function(self)
def set_train_stop_function(self, train_stop_function):
return _libfann.neural_net_parent_set_train_stop_function(self, train_stop_function)
def get_bit_fail_limit(self):
return _libfann.neural_net_parent_get_bit_fail_limit(self)
def set_bit_fail_limit(self, bit_fail_limit):
return _libfann.neural_net_parent_set_bit_fail_limit(self, bit_fail_limit)
def get_bit_fail(self):
return _libfann.neural_net_parent_get_bit_fail(self)
def cascadetrain_on_data(self, data, max_neurons, neurons_between_reports, desired_error):
return _libfann.neural_net_parent_cascadetrain_on_data(self, data, max_neurons, neurons_between_reports, desired_error)
def cascadetrain_on_file(self, filename, max_neurons, neurons_between_reports, desired_error):
return _libfann.neural_net_parent_cascadetrain_on_file(self, filename, max_neurons, neurons_between_reports, desired_error)
def get_cascade_output_change_fraction(self):
return _libfann.neural_net_parent_get_cascade_output_change_fraction(self)
def set_cascade_output_change_fraction(self, cascade_output_change_fraction):
return _libfann.neural_net_parent_set_cascade_output_change_fraction(self, cascade_output_change_fraction)
def get_cascade_output_stagnation_epochs(self):
return _libfann.neural_net_parent_get_cascade_output_stagnation_epochs(self)
def set_cascade_output_stagnation_epochs(self, cascade_output_stagnation_epochs):
return _libfann.neural_net_parent_set_cascade_output_stagnation_epochs(self, cascade_output_stagnation_epochs)
def get_cascade_candidate_change_fraction(self):
return _libfann.neural_net_parent_get_cascade_candidate_change_fraction(self)
def set_cascade_candidate_change_fraction(self, cascade_candidate_change_fraction):
return _libfann.neural_net_parent_set_cascade_candidate_change_fraction(self, cascade_candidate_change_fraction)
def get_cascade_candidate_stagnation_epochs(self):
return _libfann.neural_net_parent_get_cascade_candidate_stagnation_epochs(self)
def set_cascade_candidate_stagnation_epochs(self, cascade_candidate_stagnation_epochs):
return _libfann.neural_net_parent_set_cascade_candidate_stagnation_epochs(self, cascade_candidate_stagnation_epochs)
def get_cascade_weight_multiplier(self):
return _libfann.neural_net_parent_get_cascade_weight_multiplier(self)
def set_cascade_weight_multiplier(self, cascade_weight_multiplier):
return _libfann.neural_net_parent_set_cascade_weight_multiplier(self, cascade_weight_multiplier)
def get_cascade_candidate_limit(self):
return _libfann.neural_net_parent_get_cascade_candidate_limit(self)
def set_cascade_candidate_limit(self, cascade_candidate_limit):
return _libfann.neural_net_parent_set_cascade_candidate_limit(self, cascade_candidate_limit)
def get_cascade_max_out_epochs(self):
return _libfann.neural_net_parent_get_cascade_max_out_epochs(self)
def set_cascade_max_out_epochs(self, cascade_max_out_epochs):
return _libfann.neural_net_parent_set_cascade_max_out_epochs(self, cascade_max_out_epochs)
def get_cascade_max_cand_epochs(self):
return _libfann.neural_net_parent_get_cascade_max_cand_epochs(self)
def set_cascade_max_cand_epochs(self, cascade_max_cand_epochs):
return _libfann.neural_net_parent_set_cascade_max_cand_epochs(self, cascade_max_cand_epochs)
def get_cascade_num_candidates(self):
return _libfann.neural_net_parent_get_cascade_num_candidates(self)
def get_cascade_activation_functions_count(self):
return _libfann.neural_net_parent_get_cascade_activation_functions_count(self)
def get_cascade_activation_functions(self):
return _libfann.neural_net_parent_get_cascade_activation_functions(self)
def set_cascade_activation_functions(self, cascade_activation_functions, cascade_activation_functions_count):
return _libfann.neural_net_parent_set_cascade_activation_functions(self, cascade_activation_functions, cascade_activation_functions_count)
def get_cascade_activation_steepnesses_count(self):
return _libfann.neural_net_parent_get_cascade_activation_steepnesses_count(self)
def get_cascade_activation_steepnesses(self):
return _libfann.neural_net_parent_get_cascade_activation_steepnesses(self)
def set_cascade_activation_steepnesses(self, cascade_activation_steepnesses, cascade_activation_steepnesses_count):
return _libfann.neural_net_parent_set_cascade_activation_steepnesses(self, cascade_activation_steepnesses, cascade_activation_steepnesses_count)
def get_cascade_num_candidate_groups(self):
return _libfann.neural_net_parent_get_cascade_num_candidate_groups(self)
def set_cascade_num_candidate_groups(self, cascade_num_candidate_groups):
return _libfann.neural_net_parent_set_cascade_num_candidate_groups(self, cascade_num_candidate_groups)
def scale_train(self, data):
return _libfann.neural_net_parent_scale_train(self, data)
def descale_train(self, data):
return _libfann.neural_net_parent_descale_train(self, data)
def set_input_scaling_params(self, data, new_input_min, new_input_max):
return _libfann.neural_net_parent_set_input_scaling_params(self, data, new_input_min, new_input_max)
def set_output_scaling_params(self, data, new_output_min, new_output_max):
return _libfann.neural_net_parent_set_output_scaling_params(self, data, new_output_min, new_output_max)
def set_scaling_params(self, data, new_input_min, new_input_max, new_output_min, new_output_max):
return _libfann.neural_net_parent_set_scaling_params(self, data, new_input_min, new_input_max, new_output_min, new_output_max)
def clear_scaling_params(self):
return _libfann.neural_net_parent_clear_scaling_params(self)
def scale_input(self, input_vector):
return _libfann.neural_net_parent_scale_input(self, input_vector)
def scale_output(self, output_vector):
return _libfann.neural_net_parent_scale_output(self, output_vector)
def descale_input(self, input_vector):
return _libfann.neural_net_parent_descale_input(self, input_vector)
def descale_output(self, output_vector):
return _libfann.neural_net_parent_descale_output(self, output_vector)
def set_error_log(self, log_file):
return _libfann.neural_net_parent_set_error_log(self, log_file)
def get_errno(self):
return _libfann.neural_net_parent_get_errno(self)
def reset_errno(self):
return _libfann.neural_net_parent_reset_errno(self)
def reset_errstr(self):
return _libfann.neural_net_parent_reset_errstr(self)
def get_errstr(self):
return _libfann.neural_net_parent_get_errstr(self)
def print_error(self):
return _libfann.neural_net_parent_print_error(self)
neural_net_parent_swigregister = _libfann.neural_net_parent_swigregister
neural_net_parent_swigregister(neural_net_parent)
class training_data(training_data_parent):
__swig_setmethods__ = {}
for _s in [training_data_parent]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, training_data, name, value)
__swig_getmethods__ = {}
for _s in [training_data_parent]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, training_data, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _libfann.new_training_data(*args)
try:
self.this.append(this)
except:
self.this = this
__swig_destroy__ = _libfann.delete_training_data
__del__ = lambda self: None
def get_input(self):
return _libfann.training_data_get_input(self)
def get_output(self):
return _libfann.training_data_get_output(self)
def set_train_data(self, input, output):
return _libfann.training_data_set_train_data(self, input, output)
training_data_swigregister = _libfann.training_data_swigregister
training_data_swigregister(training_data)
class neural_net(neural_net_parent):
__swig_setmethods__ = {}
for _s in [neural_net_parent]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, neural_net, name, value)
__swig_getmethods__ = {}
for _s in [neural_net_parent]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, neural_net, name)
__repr__ = _swig_repr
def __init__(self):
this = _libfann.new_neural_net()
try:
self.this.append(this)
except:
self.this = this
__swig_destroy__ = _libfann.delete_neural_net
__del__ = lambda self: None
def create_standard_array(self, layers):
return _libfann.neural_net_create_standard_array(self, layers)
def create_sparse_array(self, connection_rate, layers):
return _libfann.neural_net_create_sparse_array(self, connection_rate, layers)
def create_shortcut_array(self, layers):
return _libfann.neural_net_create_shortcut_array(self, layers)
def run(self, input):
return _libfann.neural_net_run(self, input)
def train(self, input, desired_output):
return _libfann.neural_net_train(self, input, desired_output)
def test(self, input, desired_output):
return _libfann.neural_net_test(self, input, desired_output)
def get_layer_array(self, ARGOUT):
return _libfann.neural_net_get_layer_array(self, ARGOUT)
def get_bias_array(self, ARGOUT):
return _libfann.neural_net_get_bias_array(self, ARGOUT)
def get_connection_array(self, ARGOUT):
return _libfann.neural_net_get_connection_array(self, ARGOUT)
def set_weight_array(self, connections):
return _libfann.neural_net_set_weight_array(self, connections)
def get_cascade_activation_steepnesses(self):
return _libfann.neural_net_get_cascade_activation_steepnesses(self)
def set_cascade_activation_steepnesses(self, cascade_activation_steepnesses):
return _libfann.neural_net_set_cascade_activation_steepnesses(self, cascade_activation_steepnesses)
neural_net_swigregister = _libfann.neural_net_swigregister
neural_net_swigregister(neural_net)
# This file is compatible with both classic and new-style classes.
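# A minimal usage sketch (not part of the generated wrapper; the file name and
# training parameters below are illustrative): the typical FANN workflow is to
# create a network, choose activation functions, train it, then run it.
#
#   net = neural_net()
#   net.create_standard_array([2, 3, 1])          # 2 inputs, 3 hidden, 1 output
#   net.set_activation_function_hidden(SIGMOID_SYMMETRIC)
#   net.set_activation_function_output(SIGMOID_SYMMETRIC)
#   net.train_on_file("xor.data", 1000, 100, 0.001)
#   print net.run([1.0, -1.0])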
| jeffames-cs/nnot | pyfann/libfann.py | Python | mit | 29,340 | 0.00426 |
"""Expand target names in an aggregated coverage file."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from .... import types as t
from ....io import (
SortedSetEncoder,
write_json_file,
)
from . import (
CoverageAnalyzeTargetsConfig,
expand_indexes,
format_arc,
read_report,
)
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
def __init__(self, args): # type: (t.Any) -> None
super(CoverageAnalyzeTargetsExpandConfig, self).__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None
"""Expand target names in an aggregated coverage file."""
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
report = dict(
arcs=expand_indexes(covered_path_arcs, covered_targets, format_arc),
lines=expand_indexes(covered_path_lines, covered_targets, str),
)
if not args.explain:
write_json_file(args.output_file, report, encoder=SortedSetEncoder)
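# Shape sketch (keys and values illustrative; the exact layout is defined by
# read_report() and expand_indexes()): aggregated reports refer to targets by
# integer index, and expansion substitutes the target names back in, roughly
#   {"path.py": {(1, 2): [0, 3]}}  ->  {"path.py": {"1:2": ["target_a", "target_b"]}}
# with format_arc/str rendering the arc or line keys.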
| azaghal/ansible | test/lib/ansible_test/_internal/coverage/analyze/targets/expand.py | Python | gpl-3.0 | 1,272 | 0.001572 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os, locale, sys
from core import controller
locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8')
debug = False
if len(sys.argv) > 1: debug = sys.argv[1] == '-d'
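# Usage (inferred from the argument handling above): python3 piserver.py [-d]
# where -d enables debug mode.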
# Initialize the main controller
ctrl = controller.Controller(debug)
# Start the server
ctrl.run()
| btouchard/piserver | src/piserver.py | Python | gpl-3.0 | 320 | 0.009404 |
#!/usr/bin/python
# build_native.py
# Build native codes
import sys
import os, os.path
import shutil
from optparse import OptionParser
import cocos
from MultiLanguage import MultiLanguage
import cocos_project
import json
import re
from xml.dom import minidom
import project_compile
BUILD_CFIG_FILE="build-cfg.json"
class AndroidBuilder(object):
CFG_KEY_COPY_TO_ASSETS = "copy_to_assets"
CFG_KEY_MUST_COPY_TO_ASSERTS = "must_copy_to_assets"
CFG_KEY_STORE = "key_store"
CFG_KEY_STORE_PASS = "key_store_pass"
CFG_KEY_ALIAS = "alias"
CFG_KEY_ALIAS_PASS = "alias_pass"
ANT_KEY_STORE = "key.store"
ANT_KEY_ALIAS = "key.alias"
ANT_KEY_STORE_PASS = "key.store.password"
ANT_KEY_ALIAS_PASS = "key.alias.password"
GRADLE_KEY_STORE = "RELEASE_STORE_FILE"
GRADLE_KEY_ALIAS = "RELEASE_KEY_ALIAS"
GRADLE_KEY_STORE_PASS = "RELEASE_STORE_PASSWORD"
GRADLE_KEY_ALIAS_PASS = "RELEASE_KEY_PASSWORD"
def __init__(self, verbose, app_android_root, no_res, proj_obj, use_studio=False):
self._verbose = verbose
self.app_android_root = app_android_root
self._no_res = no_res
self._project = proj_obj
self.use_studio = use_studio
# check environment variable
if self.use_studio:
self.ant_root = None
self.sign_prop_file = os.path.join(self.app_android_root, 'app', "gradle.properties")
else:
self.ant_root = cocos.check_environment_variable('ANT_ROOT')
self.sign_prop_file = os.path.join(self.app_android_root, "ant.properties")
self.sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
self._parse_cfg()
def _run_cmd(self, command, cwd=None):
cocos.CMDRunner.run_cmd(command, self._verbose, cwd=cwd)
def _parse_cfg(self):
        self.cfg_path = os.path.join(self.app_android_root, BUILD_CFG_FILE)
try:
f = open(self.cfg_path)
cfg = json.load(f, encoding='utf8')
f.close()
except Exception:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_PARSE_CFG_FAILED_FMT', self.cfg_path),
cocos.CCPluginError.ERROR_PARSE_FILE)
if cfg.has_key(project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES):
if self._no_res:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES] + cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
self.ndk_module_paths = cfg['ndk_module_path']
# get the properties for sign release apk
if self.use_studio:
self.key_store_str = AndroidBuilder.GRADLE_KEY_STORE
self.key_alias_str = AndroidBuilder.GRADLE_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.GRADLE_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.GRADLE_KEY_ALIAS_PASS
else:
self.key_store_str = AndroidBuilder.ANT_KEY_STORE
self.key_alias_str = AndroidBuilder.ANT_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.ANT_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.ANT_KEY_ALIAS_PASS
move_cfg = {}
self.key_store = None
if cfg.has_key(AndroidBuilder.CFG_KEY_STORE):
self.key_store = cfg[AndroidBuilder.CFG_KEY_STORE]
move_cfg[self.key_store_str] = self.key_store
del cfg[AndroidBuilder.CFG_KEY_STORE]
self.key_store_pass = None
if cfg.has_key(AndroidBuilder.CFG_KEY_STORE_PASS):
self.key_store_pass = cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
move_cfg[self.key_store_pass_str] = self.key_store_pass
del cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
self.alias = None
if cfg.has_key(AndroidBuilder.CFG_KEY_ALIAS):
self.alias = cfg[AndroidBuilder.CFG_KEY_ALIAS]
move_cfg[self.key_alias_str] = self.alias
del cfg[AndroidBuilder.CFG_KEY_ALIAS]
self.alias_pass = None
if cfg.has_key(AndroidBuilder.CFG_KEY_ALIAS_PASS):
self.alias_pass = cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
move_cfg[self.key_alias_pass_str] = self.alias_pass
del cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
if len(move_cfg) > 0:
            # move the signing config into ant.properties (or gradle.properties for studio builds)
self._move_cfg(move_cfg)
with open(self.cfg_path, 'w') as outfile:
json.dump(cfg, outfile, sort_keys = True, indent = 4)
outfile.close()
def has_keystore_in_signprops(self):
keystore = None
if self.use_studio:
pattern = re.compile(r"^RELEASE_STORE_FILE=(.+)")
else:
pattern = re.compile(r"^key\.store=(.+)")
try:
file_obj = open(self.sign_prop_file)
for line in file_obj:
str1 = line.replace(' ', '')
str2 = str1.replace('\t', '')
match = pattern.match(str2)
if match is not None:
keystore = match.group(1)
break
file_obj.close()
except:
pass
        return keystore is not None
def _write_sign_properties(self, cfg):
file_obj = open(self.sign_prop_file, "a+")
for key in cfg.keys():
str_cfg = "%s=%s\n" % (key, cfg[key])
file_obj.write(str_cfg)
file_obj.close()
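    # The entries written above look like this (values illustrative):
    #   RELEASE_STORE_FILE=release.keystore      # gradle.properties (studio)
    #   RELEASE_KEY_ALIAS=mykey
    # or key.store= / key.alias= lines in ant.properties for classic ant builds.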
def _move_cfg(self, cfg):
if not self.has_keystore_in_signprops():
self._write_sign_properties(cfg)
def remove_c_libs(self, libs_dir):
for file_name in os.listdir(libs_dir):
lib_file = os.path.join(libs_dir, file_name)
if os.path.isfile(lib_file):
ext = os.path.splitext(lib_file)[1]
if ext == ".a" or ext == ".so":
os.remove(lib_file)
def update_project(self, android_platform):
if self.use_studio:
manifest_path = os.path.join(self.app_android_root, 'app')
else:
manifest_path = self.app_android_root
sdk_tool_path = os.path.join(self.sdk_root, "tools", "android")
# check the android platform
target_str = self.check_android_platform(self.sdk_root, android_platform, manifest_path, False)
# update project
command = "%s update project -t %s -p %s" % (cocos.CMDRunner.convert_path_to_cmd(sdk_tool_path), target_str, manifest_path)
self._run_cmd(command)
# update lib-projects
property_path = manifest_path
self.update_lib_projects(self.sdk_root, sdk_tool_path, android_platform, property_path)
if self.use_studio:
# copy the local.properties to the app_android_root
file_name = 'local.properties'
src_path = os.path.normpath(os.path.join(manifest_path, file_name))
dst_path = os.path.normpath(os.path.join(self.app_android_root, file_name))
if src_path != dst_path:
if os.path.isfile(dst_path):
os.remove(dst_path)
shutil.copy(src_path, dst_path)
def get_toolchain_version(self, ndk_root, compile_obj):
ret_version = "4.8"
version_file_path = os.path.join(ndk_root, "RELEASE.TXT")
try:
versionFile = open(version_file_path)
lines = versionFile.readlines()
versionFile.close()
version_num = None
version_char = None
pattern = r'^[a-zA-Z]+(\d+)(\w)'
for line in lines:
str_line = line.lstrip()
match = re.match(pattern, str_line)
if match:
version_num = int(match.group(1))
version_char = match.group(2)
break
if version_num is None:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_GET_NDK_VER_FAILED_FMT',
version_file_path))
else:
version_char = version_char.lower()
if version_num > 10 or (version_num == 10 and cmp(version_char, 'c') >= 0):
ret_version = "4.9"
else:
compile_obj.add_warning_at_end(MultiLanguage.get_string('COMPILE_WARNING_NDK_VERSION'))
except:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_GET_NDK_VER_FAILED_FMT', version_file_path))
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_NDK_TOOLCHAIN_VER_FMT', ret_version))
if ret_version == "4.8":
compile_obj.add_warning_at_end(MultiLanguage.get_string('COMPILE_WARNING_TOOLCHAIN_FMT', ret_version))
return ret_version
def do_ndk_build(self, ndk_build_param, build_mode, compile_obj):
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_NDK_MODE', build_mode))
ndk_root = cocos.check_environment_variable('NDK_ROOT')
toolchain_version = self.get_toolchain_version(ndk_root, compile_obj)
if self.use_studio:
ndk_work_dir = os.path.join(self.app_android_root, 'app')
else:
ndk_work_dir = self.app_android_root
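        # Python 2-only workaround: force the process-wide default encoding to
        # UTF-8 so non-ASCII paths do not break the command strings built below.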
reload(sys)
sys.setdefaultencoding('utf8')
ndk_path = cocos.CMDRunner.convert_path_to_cmd(os.path.join(ndk_root, "ndk-build"))
module_paths = []
for cfg_path in self.ndk_module_paths:
if cfg_path.find("${ENGINE_ROOT}") >= 0:
cocos_root = cocos.check_environment_variable("COCOS_X_ROOT")
module_paths.append(cfg_path.replace("${ENGINE_ROOT}", cocos_root))
elif cfg_path.find("${COCOS_FRAMEWORKS}") >= 0:
cocos_frameworks = cocos.check_environment_variable("COCOS_FRAMEWORKS")
module_paths.append(cfg_path.replace("${COCOS_FRAMEWORKS}", cocos_frameworks))
else:
module_paths.append(os.path.join(self.app_android_root, cfg_path))
        # delete previously generated static and dynamic libraries (.a / .so)
obj_local_dir = os.path.join(ndk_work_dir, "obj", "local")
if os.path.isdir(obj_local_dir):
for abi_dir in os.listdir(obj_local_dir):
static_file_path = os.path.join(ndk_work_dir, "obj", "local", abi_dir)
if os.path.isdir(static_file_path):
self.remove_c_libs(static_file_path)
        # Windows uses ";" to separate module paths
if cocos.os_is_win32():
ndk_module_path = ';'.join(module_paths)
else:
ndk_module_path = ':'.join(module_paths)
        ndk_module_path = 'NDK_MODULE_PATH=' + ndk_module_path
if ndk_build_param is None:
ndk_build_cmd = '%s -C %s %s' % (ndk_path, ndk_work_dir, ndk_module_path)
else:
ndk_build_cmd = '%s -C %s %s %s' % (ndk_path, ndk_work_dir, ' '.join(ndk_build_param), ndk_module_path)
ndk_build_cmd = '%s NDK_TOOLCHAIN_VERSION=%s' % (ndk_build_cmd, toolchain_version)
if build_mode == 'debug':
ndk_build_cmd = '%s NDK_DEBUG=1' % ndk_build_cmd
self._run_cmd(ndk_build_cmd)
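    # A typical assembled command (paths illustrative):
    #   <NDK_ROOT>/ndk-build -C proj.android NDK_MODULE_PATH=/engine:/frameworks \
    #       NDK_TOOLCHAIN_VERSION=4.9 NDK_DEBUG=1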
def _xml_attr(self, dir, file_name, node_name, attr):
doc = minidom.parse(os.path.join(dir, file_name))
return doc.getElementsByTagName(node_name)[0].getAttribute(attr)
def update_lib_projects(self, sdk_root, sdk_tool_path, android_platform, property_path):
property_file = os.path.join(property_path, "project.properties")
if not os.path.isfile(property_file):
return
        pattern = re.compile(r'^android\.library\.reference\.[\d]+=(.+)')
for line in open(property_file):
str1 = line.replace(' ', '')
str2 = str1.replace('\t', '')
            match = pattern.match(str2)
if match is not None:
# a lib project is found
lib_path = match.group(1)
abs_lib_path = os.path.join(property_path, lib_path)
abs_lib_path = os.path.normpath(abs_lib_path)
if os.path.isdir(abs_lib_path):
target_str = self.check_android_platform(sdk_root, android_platform, abs_lib_path, True)
command = "%s update lib-project -p %s -t %s" % (cocos.CMDRunner.convert_path_to_cmd(sdk_tool_path), abs_lib_path, target_str)
self._run_cmd(command)
self.update_lib_projects(sdk_root, sdk_tool_path, android_platform, abs_lib_path)
def select_default_android_platform(self, min_api_level):
''' select a default android platform in SDK_ROOT
'''
sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
platforms_dir = os.path.join(sdk_root, "platforms")
ret_num = -1
ret_platform = ""
if os.path.isdir(platforms_dir):
for dir_name in os.listdir(platforms_dir):
if not os.path.isdir(os.path.join(platforms_dir, dir_name)):
continue
num = self.get_api_level(dir_name, raise_error=False)
if num >= min_api_level:
if ret_num == -1 or ret_num > num:
ret_num = num
ret_platform = dir_name
if ret_num != -1:
return ret_platform
else:
return None
def get_api_level(self, target_str, raise_error=True):
        special_targets_info = {
            "android-4.2" : 17,
            "android-L" : 20
        }
        if special_targets_info.has_key(target_str):
            ret = special_targets_info[target_str]
else:
match = re.match(r'android-(\d+)', target_str)
if match is not None:
ret = int(match.group(1))
else:
if raise_error:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NOT_VALID_AP_FMT', target_str),
cocos.CCPluginError.ERROR_PARSE_FILE)
else:
ret = -1
return ret
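    # For example: get_api_level("android-19") returns 19, while "android-4.2"
    # and "android-L" map to 17 and 20 via the special-case table; anything
    # unrecognized raises, or returns -1 when raise_error is False.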
def get_target_config(self, proj_path):
property_file = os.path.join(proj_path, "project.properties")
if not os.path.isfile(property_file):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_FILE_NOT_FOUND_FMT', property_file),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        pattern = re.compile(r'^target=(.+)')
for line in open(property_file):
str1 = line.replace(' ', '')
str2 = str1.replace('\t', '')
            match = pattern.match(str2)
if match is not None:
target = match.group(1)
target_num = self.get_api_level(target)
if target_num > 0:
return target_num
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_TARGET_NOT_FOUND_FMT', property_file),
cocos.CCPluginError.ERROR_PARSE_FILE)
# check the selected android platform
def check_android_platform(self, sdk_root, android_platform, proj_path, auto_select):
ret = android_platform
min_platform = self.get_target_config(proj_path)
if android_platform is None:
# not specified platform, found one
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_AUTO_SELECT_AP'))
ret = self.select_default_android_platform(min_platform)
else:
# check whether it's larger than min_platform
select_api_level = self.get_api_level(android_platform)
if select_api_level < min_platform:
if auto_select:
# select one for project
ret = self.select_default_android_platform(min_platform)
else:
# raise error
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_AP_TOO_LOW_FMT',
(proj_path, min_platform, select_api_level)),
cocos.CCPluginError.ERROR_WRONG_ARGS)
if ret is None:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_AP_NOT_FOUND_FMT',
(proj_path, min_platform)),
cocos.CCPluginError.ERROR_PARSE_FILE)
ret_path = os.path.join(cocos.CMDRunner.convert_path_to_python(sdk_root), "platforms", ret)
if not os.path.isdir(ret_path):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NO_AP_IN_SDK_FMT', ret),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
special_platforms_info = {
"android-4.2" : "android-17"
}
if special_platforms_info.has_key(ret):
ret = special_platforms_info[ret]
return ret
def ant_build_apk(self, build_mode, custom_step_args):
app_android_root = self.app_android_root
# run ant build
ant_path = os.path.join(self.ant_root, 'ant')
buildfile_path = os.path.join(app_android_root, "build.xml")
        # generate parameters for the custom step
args_ant_copy = custom_step_args.copy()
target_platform = cocos_project.Platforms.ANDROID
# invoke custom step: pre-ant-build
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_PRE_ANT_BUILD,
target_platform, args_ant_copy)
command = "%s clean %s -f %s -Dsdk.dir=%s" % (cocos.CMDRunner.convert_path_to_cmd(ant_path),
build_mode, buildfile_path,
cocos.CMDRunner.convert_path_to_cmd(self.sdk_root))
self._run_cmd(command)
# invoke custom step: post-ant-build
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_POST_ANT_BUILD,
target_platform, args_ant_copy)
def gradle_build_apk(self, build_mode):
# check the compileSdkVersion & buildToolsVersion
check_file = os.path.join(self.app_android_root, 'app', 'build.gradle')
f = open(check_file)
lines = f.readlines()
f.close()
compile_sdk_ver = None
build_tools_ver = None
compile_sdk_pattern = r'compileSdkVersion[ \t]+([\d]+)'
build_tools_pattern = r'buildToolsVersion[ \t]+"(.+)"'
for line in lines:
line_str = line.strip()
match1 = re.match(compile_sdk_pattern, line_str)
if match1:
compile_sdk_ver = match1.group(1)
match2 = re.match(build_tools_pattern, line_str)
if match2:
build_tools_ver = match2.group(1)
if compile_sdk_ver is not None:
# check the compileSdkVersion
check_folder_name = 'android-%s' % compile_sdk_ver
check_path = os.path.join(self.sdk_root, 'platforms', check_folder_name)
if not os.path.isdir(check_path):
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_COMPILE_SDK_FMT',
(compile_sdk_ver, check_path)))
if build_tools_ver is not None:
# check the buildToolsVersion
check_path = os.path.join(self.sdk_root, 'build-tools', build_tools_ver)
if not os.path.isdir(check_path):
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_BUILD_TOOLS_FMT',
(build_tools_ver, check_path)))
# invoke gradlew for gradle building
if cocos.os_is_win32():
gradle_path = os.path.join(self.app_android_root, 'gradlew.bat')
else:
gradle_path = os.path.join(self.app_android_root, 'gradlew')
if not os.path.isfile(gradle_path):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_GRALEW_NOT_EXIST_FMT', gradle_path),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
mode_str = 'Debug' if build_mode == 'debug' else 'Release'
cmd = '"%s" --parallel --info assemble%s' % (gradle_path, mode_str)
self._run_cmd(cmd, cwd=self.app_android_root)
def do_build_apk(self, build_mode, no_apk, output_dir, custom_step_args, compile_obj):
if self.use_studio:
assets_dir = os.path.join(self.app_android_root, "app", "assets")
project_name = None
setting_file = os.path.join(self.app_android_root, 'settings.gradle')
if os.path.isfile(setting_file):
# get project name from settings.gradle
f = open(setting_file)
lines = f.readlines()
f.close()
pattern = r"project\(':(.*)'\)\.projectDir[ \t]*=[ \t]*new[ \t]*File\(settingsDir, 'app'\)"
for line in lines:
line_str = line.strip()
match = re.match(pattern, line_str)
if match:
project_name = match.group(1)
break
if project_name is None:
# use default project name
project_name = 'app'
gen_apk_folder = os.path.join(self.app_android_root, 'app/build/outputs/apk')
else:
assets_dir = os.path.join(self.app_android_root, "assets")
project_name = self._xml_attr(self.app_android_root, 'build.xml', 'project', 'name')
gen_apk_folder = os.path.join(self.app_android_root, 'bin')
# copy resources
self._copy_resources(custom_step_args, assets_dir)
# check the project config & compile the script files
if self._project._is_lua_project():
compile_obj.compile_lua_scripts(assets_dir, assets_dir)
if self._project._is_js_project():
compile_obj.compile_js_scripts(assets_dir, assets_dir)
if not no_apk:
# gather the sign info if necessary
if build_mode == "release" and not self.has_keystore_in_signprops():
self._gather_sign_info()
# build apk
if self.use_studio:
self.gradle_build_apk(build_mode)
else:
self.ant_build_apk(build_mode, custom_step_args)
# copy the apk to output dir
if output_dir:
apk_name = '%s-%s.apk' % (project_name, build_mode)
gen_apk_path = os.path.join(gen_apk_folder, apk_name)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
shutil.copy(gen_apk_path, output_dir)
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_MOVE_APK_FMT', output_dir))
if build_mode == "release":
signed_name = "%s-%s-signed.apk" % (project_name, build_mode)
apk_path = os.path.join(output_dir, signed_name)
if os.path.exists(apk_path):
os.remove(apk_path)
os.rename(os.path.join(output_dir, apk_name), apk_path)
else:
apk_path = os.path.join(output_dir, apk_name)
return apk_path
else:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NOT_SPECIFY_OUTPUT'),
cocos.CCPluginError.ERROR_WRONG_ARGS)
def _gather_sign_info(self):
user_cfg = {}
# get the path of keystore file
while True:
inputed = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_KEYSTORE'))
inputed = inputed.strip()
if not os.path.isabs(inputed):
if self.use_studio:
start_path = os.path.join(self.app_android_root, 'app')
else:
start_path = self.app_android_root
abs_path = os.path.join(start_path, inputed)
else:
abs_path = inputed
if os.path.isfile(abs_path):
user_cfg[self.key_store_str] = inputed.replace('\\', '/')
break
else:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_INFO_NOT_A_FILE'))
# get the alias of keystore file
user_cfg[self.key_alias_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_ALIAS'))
# get the keystore password
user_cfg[self.key_store_pass_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_KEY_PASS'))
# get the alias password
user_cfg[self.key_alias_pass_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_ALIAS_PASS'))
        # write the config into ant.properties (or gradle.properties for studio builds)
self._write_sign_properties(user_cfg)
def _get_user_input(self, tip_msg):
cocos.Logging.warning(tip_msg)
        return raw_input()
def _copy_resources(self, custom_step_args, assets_dir):
app_android_root = self.app_android_root
res_files = self.res_files
# remove app_android_root/assets if it exists
if os.path.isdir(assets_dir):
shutil.rmtree(assets_dir)
# generate parameters for custom steps
target_platform = cocos_project.Platforms.ANDROID
cur_custom_step_args = custom_step_args.copy()
cur_custom_step_args["assets-dir"] = assets_dir
# make dir
os.mkdir(assets_dir)
# invoke custom step : pre copy assets
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_PRE_COPY_ASSETS, target_platform, cur_custom_step_args)
# copy resources
for cfg in res_files:
cocos.copy_files_with_config(cfg, app_android_root, assets_dir)
# invoke custom step : post copy assets
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_POST_COPY_ASSETS, target_platform, cur_custom_step_args)
def get_apk_info(self):
if self.use_studio:
manifest_path = os.path.join(self.app_android_root, 'app')
gradle_cfg_path = os.path.join(manifest_path, 'build.gradle')
package = None
if os.path.isfile(gradle_cfg_path):
# get package name from build.gradle
f = open(gradle_cfg_path)
for line in f.readlines():
line_str = line.strip()
pattern = r'applicationId[ \t]+"(.*)"'
match = re.match(pattern, line_str)
if match:
package = match.group(1)
break
if package is None:
# get package name from AndroidManifest.xml
package = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'manifest', 'package')
else:
manifest_path = self.app_android_root
package = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'manifest', 'package')
activity_name = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'activity', 'android:name')
if activity_name.startswith('.'):
activity = package + activity_name
else:
activity = activity_name
ret = (package, activity)
return ret
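# Rough driver sketch (arguments and call order illustrative; the real entry
# point is the cocos compile plugin, and ANDROID_SDK_ROOT/NDK_ROOT must be set):
#
#   builder = AndroidBuilder(verbose=True, app_android_root='proj.android-studio',
#                            no_res=False, proj_obj=project, use_studio=True)
#   builder.do_ndk_build(None, 'debug', compile_obj)
#   apk_path = builder.do_build_apk('debug', False, 'bin', {}, compile_obj)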
| dios-game/dios-cocos | src/oslibs/cocos/cocos-src/tools/cocos2d-console/plugins/project_compile/build_android.py | Python | mit | 28,191 | 0.003689 |