text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---|
from django.contrib import admin
from .models import Post
class PostAdmin(admin.ModelAdmin):
list_display = ('user', 'mood', 'mood_positive', 'mood_negative', 'mood_neutral', 'created')
list_filter = ('mood', 'created')
admin.site.register(Post, PostAdmin)
| pavlenko-volodymyr/codingmood | codemood/social/admin.py | Python | mit | 270 | 0.003704 |
"""Support for Wink fans."""
import pywink
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SUPPORT_DIRECTION,
SUPPORT_SET_SPEED,
FanEntity,
)
from . import DOMAIN, WinkDevice
SPEED_AUTO = "auto"
SPEED_LOWEST = "lowest"
SUPPORTED_FEATURES = SUPPORT_DIRECTION | SUPPORT_SET_SPEED
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink platform."""
for fan in pywink.get_fans():
if fan.object_id() + fan.name() not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkFanDevice(fan, hass)])
class WinkFanDevice(WinkDevice, FanEntity):
"""Representation of a Wink fan."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["fan"].append(self)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
self.wink.set_fan_direction(direction)
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
self.wink.set_state(True, speed)
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the fan."""
self.wink.set_state(True, speed)
def turn_off(self, **kwargs) -> None:
"""Turn off the fan."""
self.wink.set_state(False)
@property
def is_on(self):
"""Return true if the entity is on."""
return self.wink.state()
@property
def speed(self) -> str:
"""Return the current speed."""
current_wink_speed = self.wink.current_fan_speed()
if SPEED_AUTO == current_wink_speed:
return SPEED_AUTO
if SPEED_LOWEST == current_wink_speed:
return SPEED_LOWEST
if SPEED_LOW == current_wink_speed:
return SPEED_LOW
if SPEED_MEDIUM == current_wink_speed:
return SPEED_MEDIUM
if SPEED_HIGH == current_wink_speed:
return SPEED_HIGH
return None
@property
def current_direction(self):
"""Return direction of the fan [forward, reverse]."""
return self.wink.current_fan_direction()
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
wink_supported_speeds = self.wink.fan_speeds()
supported_speeds = []
if SPEED_AUTO in wink_supported_speeds:
supported_speeds.append(SPEED_AUTO)
if SPEED_LOWEST in wink_supported_speeds:
supported_speeds.append(SPEED_LOWEST)
if SPEED_LOW in wink_supported_speeds:
supported_speeds.append(SPEED_LOW)
if SPEED_MEDIUM in wink_supported_speeds:
supported_speeds.append(SPEED_MEDIUM)
if SPEED_HIGH in wink_supported_speeds:
supported_speeds.append(SPEED_HIGH)
return supported_speeds
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORTED_FEATURES
| sdague/home-assistant | homeassistant/components/wink/fan.py | Python | apache-2.0 | 3,014 | 0 |
# file eulxml/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version_info__ = (0, 22, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| dannyroberts/eulxml | eulxml/__init__.py | Python | apache-2.0 | 903 | 0 |
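A minimal sketch of the version-formatting rule above, using an illustrative tuple whose last element is not None so the dash-connected branch fires:
version_info = (0, 22, 0, 'dev')  # illustrative values, not the package's real version
version = '.'.join([str(i) for i in version_info[:-1]])
if version_info[-1] is not None:
    version += ('-%s' % (version_info[-1],))
print(version)  # -> 0.22.0-dev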
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-04 23:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('courses', '0013_auto_20160903_0212'),
]
operations = [
migrations.RenameField(
model_name='section',
old_name='approachable_rating',
new_name='cached_approachable_rating',
),
migrations.RenameField(
model_name='section',
old_name='competency_rating',
new_name='cached_competency_rating',
),
migrations.RenameField(
model_name='section',
old_name='difficulty_rating',
new_name='cached_difficulty_rating',
),
migrations.RenameField(
model_name='section',
old_name='engagement_rating',
new_name='cached_engagement_rating',
),
migrations.RenameField(
model_name='section',
old_name='enthusiasm_rating',
new_name='cached_enthusiasm_rating',
),
migrations.RenameField(
model_name='section',
old_name='lecturing_rating',
new_name='cached_lecturing_rating',
),
migrations.RenameField(
model_name='section',
old_name='rating',
new_name='cached_rating',
),
migrations.RenameField(
model_name='section',
old_name='useful_rating',
new_name='cached_useful_rating',
),
]
| aspc/mainsite | aspc/courses/migrations/0014_auto_20160904_2350.py | Python | mit | 1,605 | 0 |
# volunteers/urls.py
from django.conf.urls import *
from django.contrib.auth.decorators import login_required
from volunteers.views import *
urlpatterns = patterns('',
#(r'^$', login_required(ShowsInProcessing.as_view()), {}, 'volunteer_show_list'),
#(r'^(?P<show_slug>\[-\w]+)/$', login_required(ShowReview.as_view()), {}, 'volunteer_show_review'),
(r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\d+)/$', login_required(ExpandCutList.as_view()), {}, 'volunteer_expand_cutlist'),
(r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\d+)/(?P<edit_key>\w+)/$', ExpandCutList.as_view(), {}, 'guest_expand_cutlist'),
(r'^reopen/(?P<episode_id>\d+)/$', login_required(ReopenEpisode.as_view()), {}, 'volunteer_reopen'),
(r'^reopen/(?P<episode_id>\d+)/(?P<edit_key>\w+)/$', ReopenEpisode.as_view(), {}, 'guest_reopen'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view()), {}, 'volunteer_episode_review'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {}, 'guest_episode_review'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view()), {'advanced': True}, 'volunteer_episode_review_advanced'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {'advanced': True}, 'guest_episode_review_advanced'),
)
| EricSchles/veyepar | dj/volunteers/urls.py | Python | mit | 1,412 | 0.010623 |
from decimal import Decimal as D
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from oscar.core import loading
Scale = loading.get_class('shipping.scales', 'Scale')
weight_precision = getattr(settings, 'OSCAR_SHIPPING_WEIGHT_PRECISION', D('0.000'))
volume_precision = getattr(settings, 'OSCAR_SHIPPING_VOLUME_PRECISION', D('0.000'))
# per product defaults
# 0.1m x 0.1m x 0.1m
DEFAULT_BOX = getattr(settings, 'OSCAR_SHIPPING_DEFAULT_BOX', {'width': float('0.1'),
'height': float('0.1'),
'length': float('0.1')})
# 1 Kg
DEFAULT_WEIGHT = getattr(settings, 'OSCAR_SHIPPING_DEFAULT_WEIGHT', 1)
# basket volue * VOLUME_RATIO = estimated container(s) volume
# very simple method
VOLUME_RATIO = getattr(settings, 'OSCAR_SHIPPING_VOLUME_RATIO', D('1.3'))
class Box(object):
height = 0
width = 0
length = 0
def __init__(self, h, w, l):
self.height, self.width, self.length = h, w, l
@property
def volume(self):
return D(self.height*self.width*self.length).quantize(volume_precision)
class Container(Box):
name = ''
def __init__(self, h, w, l, name):
self.name = name
super(Container, self).__init__(h, w, l)
class ProductBox(Box):
"""
    'Packs' the given product into a virtual box and weighs it.
    Takes size and weight from the product's attributes (if present).
"""
weight = 0
def __init__(self,
product,
size_codes=('width', 'height', 'length'),
weight_code='weight',
default_weight=DEFAULT_WEIGHT):
self.attributes = size_codes
attr_vals = {}
scale = Scale(attribute_code=weight_code,
default_weight=default_weight)
try:
for attr in self.attributes:
attr_vals[attr] = product.attribute_values.get(
attribute__code=attr).value
except ObjectDoesNotExist:
attr_vals = DEFAULT_BOX
self.weight = scale.weigh_product(product)
for attr in attr_vals.keys():
setattr(self, attr, attr_vals[attr])
class Packer(object):
"""
    To calculate the shipping charge we need the set of containers required
    to hold all items of the basket, each of which should have the
    appropriate attributes (height, width, length).
    This is the problem known as the Bin Packing Problem.
"""
def __init__(self, containers, **kwargs):
self.containers = containers
self.attributes = kwargs.get('attribute_codes', ('width', 'height', 'length'))
self.weight_code = kwargs.get('weight_code', 'weight')
self.default_weight = kwargs.get('default_weight', DEFAULT_WEIGHT)
def get_default_container(self, volume):
"""Generates _virtual_ cube container which does not exists in the db
but enough to calculate estimated shipping charge
for the basket's volume given
"""
side = float(volume) ** (1 / 3.0)
return Container(side, side, side, _('virtual volume (%s)') % volume)
def box_product(self, product):
return ProductBox(product, self.attributes, self.weight_code, self.default_weight)
def pack_basket(self, basket):
# First attempt but very weird
volume = 0
weight = 0
box = container = matched = None
for line in basket.lines.all():
box = self.box_product(line.product)
volume += box.volume * line.quantity
weight += box.weight * line.quantity
del box
volume = volume * VOLUME_RATIO
        # Calc container volume during DB query execution
# source: http://stackoverflow.com/questions/1652577/django-ordering-queryset-by-a-calculated-field
# as we can't use computed values in the WHERE clause
# we will filter containers as python list
# container = self.containers.extra(select={'volume': 'height*width*lenght'})\
# .extra(order_by=['volume'])\
# .extra(where=['"volume">%s'], params=[volume])[0]
# select containers which volumes greater than summarized basket volume
matched = [c for c in self.containers.all() if c.volume >= volume]
if len(matched) > 0:
container = matched[0]
# TODO: count container's weight - add it to model
else:
container = self.get_default_container(volume)
return [{'weight': D(weight).quantize(weight_precision), 'container': container}]
| okfish/django-oscar-shipping | oscar_shipping/packers.py | Python | bsd-3-clause | 4,871 | 0.006775 |
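A standalone sketch of the cube-root fallback in Packer.get_default_container, with an assumed basket volume; the numbers are illustrative:
from decimal import Decimal
volume = Decimal('0.125')          # assumed basket volume in cubic metres
side = float(volume) ** (1 / 3.0)  # edge of a cube holding that volume
print(round(side, 3))              # -> 0.5, i.e. a 0.5 x 0.5 x 0.5 virtual container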
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import task, use_plugin
from pybuilder.errors import BuildFailedException
from subprocess import PIPE, Popen
import sys
use_plugin("core")
@task
def run_unit_tests(project, logger):
run_command('run_unit_tests', project, logger)
@task
def run_integration_tests(project, logger):
run_command('run_integration_tests', project, logger)
@task
def analyze(project, logger):
run_command('analyze', project, logger)
@task
def package(project, logger):
run_command('package', project, logger)
@task
def publish(project, logger):
run_command('publish', project, logger)
def _write_command_report(project, stdout, stderr, command_line, phase, process_return_code):
project.write_report('exec_%s' % phase, stdout)
project.write_report('exec_%s.err' % phase, stderr)
def _log_quoted_output(logger, output_type, output, phase):
separator = '-' * 5
logger.info('{0} verbatim {1} output of {2} {0}'.format(separator, output_type, phase))
for line in output.split('\n'):
logger.info(line)
logger.info('{0} end of verbatim {1} output {0}'.format(separator, output_type))
def run_command(phase, project, logger):
command_line = project.get_property('%s_command' % phase)
if not command_line:
return
process_handle = Popen(command_line, stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = process_handle.communicate()
stdout, stderr = stdout.decode(sys.stdout.encoding or 'utf-8'), stderr.decode(sys.stderr.encoding or 'utf-8')
process_return_code = process_handle.returncode
_write_command_report(project,
stdout,
stderr,
command_line,
phase,
process_return_code)
if project.get_property('%s_propagate_stdout' % phase) and stdout:
_log_quoted_output(logger, '', stdout, phase)
if project.get_property('%s_propagate_stderr' % phase) and stderr:
_log_quoted_output(logger, 'error', stderr, phase)
if process_return_code != 0:
raise BuildFailedException(
'exec plugin command {0} for {1} exited with nonzero code {2}'.format(command_line,
phase,
process_return_code))
| Danielweber7624/pybuilder | src/main/python/pybuilder/plugins/exec_plugin.py | Python | apache-2.0 | 3,101 | 0.00258 |
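A hedged sketch of a build.py that drives this plugin; the '<phase>_command' and '<phase>_propagate_stdout' property names mirror the lookups in run_command above, while the shell command itself is an assumed placeholder:
from pybuilder.core import use_plugin, init
use_plugin('exec')
@init
def set_properties(project):
    project.set_property('run_unit_tests_command', 'make test')    # assumed command
    project.set_property('run_unit_tests_propagate_stdout', True)  # echo its stdout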
# Copyright (C) 2012 Aaron Krebs akrebs@ualberta.ca
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
from django.views.generic.simple import direct_to_template
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from registration.views import register
urlpatterns = patterns('',
# urls for simple one-step registration
url(r'^register/$',
register,
{'backend': 'registration.backends.simple.SimpleBackend',
'template_name': 'registration/registration_form.hamlpy',
},
name='registration_register'
),
url(r'^register/closed/$',
direct_to_template,
{'template': 'registration/registration_closed.hamlpy'},
name='registration_disallowed'
),
url(r'^login/$',
auth_views.login,
{'template_name': 'registration/login.hamlpy'},
name='auth_login'
),
url(r'^logout/$',
auth_views.logout,
{'template_name': 'registration/logout.hamlpy'},
name='auth_logout'
),
url(r'^password/change/$',
auth_views.password_change,
{'template_name': 'registration/password_change_form.hamlpy',
# ugh, this is tied to the namespace; needs to be namespace-agnostic
         # since the namespace is determined by the importing app
# TODO: see Issue #1
'post_change_redirect': reverse_lazy('registration:auth_password_change_done')
},
name='auth_password_change'
),
url(r'^password/change/done/$',
auth_views.password_change_done,
{'template_name': 'registration/password_change_done.hamlpy'},
name='auth_password_change_done'
),
url(r'^password/reset/$',
auth_views.password_reset,
{'template_name': 'registration/password_reset_form.hamlpy',
# same issue as above
'post_reset_redirect': reverse_lazy('registration:auth_password_reset_done'),
'email_template_name': 'registration/password_reset_email.hamlpy',
'subject_template_name': 'registration/password_reset_subject.hamlpy',
},
name='auth_password_reset'
),
url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
{'template_name': 'registration/password_reset_confirm.hamlpy',
# same issue as above
'post_reset_redirect': reverse_lazy('registration:auth_password_reset_complete'),
},
name='auth_password_reset_confirm'
),
url(r'^password/reset/complete/$',
auth_views.password_reset_complete,
{'template_name': 'registration/password_reset_complete.hamlpy'},
name='auth_password_reset_complete'
),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
{'template_name': 'registration/password_reset_done.hamlpy'},
name='auth_password_reset_done'
),
)
| a-krebs/finances | finances/django_registration/urls.py | Python | gpl-3.0 | 3,598 | 0.005837 |
from colorsys import rgb_to_hsv
import time
def id_generator():
timestr = time.strftime("%Y%m%d-%H%M%S")
return timestr
def lightness(pixel):
# For backwards compatibility with python2
return rgb_to_hsv(pixel[0], pixel[1], pixel[2])[2] / 255.0
def hue(pixel):
return rgb_to_hsv(pixel[0], pixel[1], pixel[2])[0] / 255.0
def saturation(pixel):
return rgb_to_hsv(pixel[0], pixel[1], pixel[2])[1] / 255.0
def crop_to(image_to_crop, reference_image):
"""
Crops image to the size of a reference image. This function assumes that the relevant image is located in the center
and you want to crop away equal sizes on both the left and right as well on both the top and bottom.
    :param image_to_crop:
    :param reference_image:
:return: image cropped to the size of the reference image
"""
reference_size = reference_image.size
current_size = image_to_crop.size
dx = current_size[0] - reference_size[0]
dy = current_size[1] - reference_size[1]
left = dx / 2
upper = dy / 2
right = dx / 2 + reference_size[0]
lower = dy / 2 + reference_size[1]
return image_to_crop.crop(
box=(
int(left),
int(upper),
int(right),
int(lower)))
| satyarth/pixelsort | pixelsort/util.py | Python | mit | 1,262 | 0.001585 |
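A minimal usage sketch for crop_to, assuming Pillow is installed and the helper is imported from the module above; the image sizes are illustrative:
from PIL import Image
from pixelsort.util import crop_to
big = Image.new('RGB', (120, 100))  # image to crop
ref = Image.new('RGB', (100, 80))   # reference image
print(crop_to(big, ref).size)       # -> (100, 80), trimmed evenly on each side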
# This file is part of HEPData.
# Copyright (C) 2016 CERN.
#
# HEPData is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# HEPData is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HEPData; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
import os.path
import responses
import zipfile
from hepdata.config import CFG_CONVERTER_URL
from hepdata.modules.converter.tasks import convert_and_store
from hepdata.modules.records.utils.old_hepdata import mock_import_old_record
def test_convert_and_store_invalid(app, capsys):
with app.app_context():
convert_and_store('12345678', 'test_format', True)
captured = capsys.readouterr()
assert(captured.out == "Unable to find a matching submission for 12345678\n")
@responses.activate
def test_convert_and_store_valid_yaml(app, capsys, load_submission):
with app.app_context():
# Open a .tar.gz file to mock the call to the converter
base_dir = os.path.dirname(os.path.realpath(__file__))
test_tar_gz_file = os.path.join(base_dir, 'test_data', '1396331.tar.gz')
with open(test_tar_gz_file, "rb") as stream:
responses.add(responses.GET, CFG_CONVERTER_URL + '/convert',
status=200, headers={'mimetype': 'application/x-gzip'},
body=stream.read(), stream=True)
capsys.readouterr()
convert_and_store('1487726', 'yaml', True)
captured_lines = capsys.readouterr().out.splitlines()
assert(captured_lines[0] == "Creating yaml conversion for ins1487726")
print(captured_lines)
assert(captured_lines[1].startswith("File for ins1487726 created successfully"))
file_path = captured_lines[1].split()[-1]
assert(file_path.endswith("HEPData-ins1487726-v1-yaml.tar.gz"))
assert(os.path.isfile(file_path))
def test_convert_and_store_valid_original(app, capsys, load_submission):
with app.app_context():
capsys.readouterr()
convert_and_store('1487726', 'original', True)
captured_lines = capsys.readouterr().out.splitlines()
assert(captured_lines[0] == "Creating original conversion for ins1487726")
assert(captured_lines[1].startswith("File created at "))
file_path = captured_lines[1].split()[-1]
assert(file_path.endswith("HEPData-ins1487726-v1.zip"))
assert(os.path.isfile(file_path))
def test_convert_and_store_valid_original_with_old_resources(app, capsys):
with app.app_context():
# Create submission with resources
mock_import_old_record()
capsys.readouterr()
convert_and_store('1299143', 'original', True)
captured_lines = capsys.readouterr().out.splitlines()
assert(captured_lines[0] == 'Creating original conversion for ins1299143')
assert(captured_lines[1].startswith("Creating archive at "))
file_path = captured_lines[1].split()[-1]
assert('/converted/' in file_path)
assert(file_path.endswith("HEPData-ins1299143-v1.zip"))
assert(captured_lines[2] == 'File created at %s' % file_path)
assert(os.path.isfile(file_path))
# Check contents of zip
with zipfile.ZipFile(file_path) as zip:
contents = zip.namelist()
assert(len(contents) == 99)
# Check for a sample of filenames from yaml and resources
for f in ['submission.yaml', 'Table_1.yaml', 'figFigure7a.png']:
assert(f in contents)
# Check submission file has been updated with new resource location
with zip.open('submission.yaml') as f:
for line in f.readlines():
line_str = line.decode()
if 'location' in line_str:
assert('/resource/' not in line_str)
| HEPData/hepdata | tests/converter_test.py | Python | gpl-2.0 | 4,511 | 0.00133 |
"""
Copyright 2009 55 Minutes (http://www.55minutes.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
test_timestamp = time.strftime('%a %Y-%m-%d %H:%M %Z')
TOP = """\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />
<title>Test coverage report: %(title)s</title>
<style type="text/css" media="screen">
body
{
font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif;
font-size: 13px;
}
#content-header
{
margin-left: 50px;
}
#content-header h1
{
font-size: 18px;
margin-bottom: 0;
}
#content-header p
{
font-size: 13px;
margin: 0;
color: #909090;
}
#result-list
{
margin: 0 50px;
}
#result-list ul
{
padding-left: 13px;
list-style-position: inside;
}
</style>
</head>
<body>
"""
CONTENT_HEADER = """\
<div id="content-header">
<h1>Test Coverage Report: %(title)s</h1>"""
CONTENT_HEADER += "<p>Generated: %(test_timestamp)s</p>" % vars()
CONTENT_HEADER += "</div>"
CONTENT_BODY = """\
<div id="result-list">
<p>%(long_desc)s</p>
<ul>
%(exception_list)s
</ul>
Back to <a href="index.html">index</a>.
</div>
"""
EXCEPTION_LINE = "<li>%(module_name)s</li>"
BOTTOM = """\
</body>
</html>
"""
| UT-Austin-FIS/django-coverage | django_coverage/utils/coverage_report/templates/default_module_exceptions.py | Python | apache-2.0 | 2,058 | 0.00243 |
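A minimal sketch of how these fragments are meant to be stitched together, assuming the names are imported from the module above; the substitution values are illustrative:
html = TOP % {'title': 'my_project'}
html += CONTENT_HEADER % {'title': 'my_project'}  # timestamp was baked in at import time
html += CONTENT_BODY % {
    'long_desc': 'Modules excluded from the coverage run.',
    'exception_list': EXCEPTION_LINE % {'module_name': 'my_project.settings'},
}
html += BOTTOM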
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TFDecorator-aware replacements for the inspect module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import inspect as _inspect
import six
from tensorflow.python.util import tf_decorator
ArgSpec = _inspect.ArgSpec
if hasattr(_inspect, 'FullArgSpec'):
FullArgSpec = _inspect.FullArgSpec # pylint: disable=invalid-name
else:
FullArgSpec = namedtuple('FullArgSpec', [
'args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults',
'annotations'
])
def _convert_maybe_argspec_to_fullargspec(argspec):
if isinstance(argspec, FullArgSpec):
return argspec
return FullArgSpec(
args=argspec.args,
varargs=argspec.varargs,
varkw=argspec.keywords,
defaults=argspec.defaults,
kwonlyargs=[],
kwonlydefaults=None,
annotations={})
if hasattr(_inspect, 'getfullargspec'):
_getfullargspec = _inspect.getfullargspec # pylint: disable=invalid-name
def _getargspec(target):
"""A python3 version of getargspec.
Calls `getfullargspec` and assigns args, varargs,
varkw, and defaults to a python 2/3 compatible `ArgSpec`.
The parameter name 'varkw' is changed to 'keywords' to fit the
`ArgSpec` struct.
Args:
target: the target object to inspect.
Returns:
An ArgSpec with args, varargs, keywords, and defaults parameters
from FullArgSpec.
"""
fullargspecs = getfullargspec(target)
argspecs = ArgSpec(
args=fullargspecs.args,
varargs=fullargspecs.varargs,
keywords=fullargspecs.varkw,
defaults=fullargspecs.defaults)
return argspecs
else:
_getargspec = _inspect.getargspec
def _getfullargspec(target):
"""A python2 version of getfullargspec.
Args:
target: the target object to inspect.
Returns:
A FullArgSpec with empty kwonlyargs, kwonlydefaults and annotations.
"""
return _convert_maybe_argspec_to_fullargspec(getargspec(target))
def currentframe():
"""TFDecorator-aware replacement for inspect.currentframe."""
return _inspect.stack()[1][0]
def getargspec(obj):
"""TFDecorator-aware replacement for `inspect.getargspec`.
Note: `getfullargspec` is recommended as the python 2/3 compatible
replacement for this function.
Args:
obj: A function, partial function, or callable object, possibly decorated.
Returns:
The `ArgSpec` that describes the signature of the outermost decorator that
changes the callable's signature, or the `ArgSpec` that describes
the object if not decorated.
Raises:
ValueError: When callable's signature can not be expressed with
ArgSpec.
TypeError: For objects of unsupported types.
"""
if isinstance(obj, functools.partial):
return _get_argspec_for_partial(obj)
decorators, target = tf_decorator.unwrap(obj)
spec = next((d.decorator_argspec
for d in decorators
if d.decorator_argspec is not None), None)
if spec:
return spec
try:
# Python3 will handle most callables here (not partial).
return _getargspec(target)
except TypeError:
pass
if isinstance(target, type):
try:
return _getargspec(target.__init__)
except TypeError:
pass
try:
return _getargspec(target.__new__)
except TypeError:
pass
# The `type(target)` ensures that if a class is received we don't return
# the signature of it's __call__ method.
return _getargspec(type(target).__call__)
def _get_argspec_for_partial(obj):
"""Implements `getargspec` for `functools.partial` objects.
Args:
    obj: The `functools.partial` object
Returns:
An `inspect.ArgSpec`
Raises:
ValueError: When callable's signature can not be expressed with
ArgSpec.
"""
# When callable is a functools.partial object, we construct its ArgSpec with
# following strategy:
# - If callable partial contains default value for positional arguments (ie.
# object.args), then final ArgSpec doesn't contain those positional arguments.
# - If callable partial contains default value for keyword arguments (ie.
# object.keywords), then we merge them with wrapped target. Default values
# from callable partial takes precedence over those from wrapped target.
#
# However, there is a case where it is impossible to construct a valid
# ArgSpec. Python requires arguments that have no default values must be
# defined before those with default values. ArgSpec structure is only valid
# when this presumption holds true because default values are expressed as a
# tuple of values without keywords and they are always assumed to belong to
# last K arguments where K is number of default values present.
#
# Since functools.partial can give default value to any argument, this
# presumption may no longer hold in some cases. For example:
#
# def func(m, n):
# return 2 * m + n
# partialed = functools.partial(func, m=1)
#
# This example will result in m having a default value but n doesn't. This is
# usually not allowed in Python and can not be expressed in ArgSpec correctly.
#
# Thus, we must detect cases like this by finding first argument with default
# value and ensures all following arguments also have default values. When
# this is not true, a ValueError is raised.
n_prune_args = len(obj.args)
partial_keywords = obj.keywords or {}
args, varargs, keywords, defaults = getargspec(obj.func)
# Pruning first n_prune_args arguments.
args = args[n_prune_args:]
# Partial function may give default value to any argument, therefore length
# of default value list must be len(args) to allow each argument to
# potentially be given a default value.
all_defaults = [None] * len(args)
if defaults:
all_defaults[-len(defaults):] = defaults
# Fill in default values provided by partial function in all_defaults.
for kw, default in six.iteritems(partial_keywords):
idx = args.index(kw)
all_defaults[idx] = default
# Find first argument with default value set.
first_default = next((idx for idx, x in enumerate(all_defaults) if x), None)
# If no default values are found, return ArgSpec with defaults=None.
if first_default is None:
return ArgSpec(args, varargs, keywords, None)
# Checks if all arguments have default value set after first one.
invalid_default_values = [
args[i] for i, j in enumerate(all_defaults)
if j is None and i > first_default
]
if invalid_default_values:
raise ValueError('Some arguments %s do not have default value, but they '
'are positioned after those with default values. This can '
'not be expressed with ArgSpec.' % invalid_default_values)
return ArgSpec(args, varargs, keywords, tuple(all_defaults[first_default:]))
def getfullargspec(obj):
"""TFDecorator-aware replacement for `inspect.getfullargspec`.
  This wrapper emulates `inspect.getfullargspec` in Python2.
Args:
obj: A callable, possibly decorated.
Returns:
The `FullArgSpec` that describes the signature of
the outermost decorator that changes the callable's signature. If the
callable is not decorated, `inspect.getfullargspec()` will be called
directly on the callable.
"""
decorators, target = tf_decorator.unwrap(obj)
return next((_convert_maybe_argspec_to_fullargspec(d.decorator_argspec)
for d in decorators
if d.decorator_argspec is not None), _getfullargspec(target))
def getcallargs(func, *positional, **named):
"""TFDecorator-aware replacement for inspect.getcallargs.
Args:
func: A callable, possibly decorated
*positional: The positional arguments that would be passed to `func`.
**named: The named argument dictionary that would be passed to `func`.
Returns:
A dictionary mapping `func`'s named arguments to the values they would
receive if `func(*positional, **named)` were called.
`getcallargs` will use the argspec from the outermost decorator that provides
it. If no attached decorators modify argspec, the final unwrapped target's
argspec will be used.
"""
argspec = getfullargspec(func)
call_args = named.copy()
this = getattr(func, 'im_self', None) or getattr(func, '__self__', None)
if ismethod(func) and this:
positional = (this,) + positional
remaining_positionals = [arg for arg in argspec.args if arg not in call_args]
call_args.update(dict(zip(remaining_positionals, positional)))
default_count = 0 if not argspec.defaults else len(argspec.defaults)
if default_count:
for arg, value in zip(argspec.args[-default_count:], argspec.defaults):
if arg not in call_args:
call_args[arg] = value
return call_args
def getframeinfo(*args, **kwargs):
return _inspect.getframeinfo(*args, **kwargs)
def getdoc(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getdoc.
Args:
object: An object, possibly decorated.
Returns:
The docstring associated with the object.
The outermost-decorated object is intended to have the most complete
documentation, so the decorated parameter is not unwrapped.
"""
return _inspect.getdoc(object)
def getfile(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getfile."""
unwrapped_object = tf_decorator.unwrap(object)[1]
# Work around for the case when object is a stack frame
# and only .pyc files are used. In this case, getfile
# might return incorrect path. So, we get the path from f_globals
# instead.
if (hasattr(unwrapped_object, 'f_globals') and
'__file__' in unwrapped_object.f_globals):
return unwrapped_object.f_globals['__file__']
return _inspect.getfile(unwrapped_object)
def getmembers(object, predicate=None): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getmembers."""
return _inspect.getmembers(object, predicate)
def getmodule(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getmodule."""
return _inspect.getmodule(object)
def getmro(cls):
"""TFDecorator-aware replacement for inspect.getmro."""
return _inspect.getmro(cls)
def getsource(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getsource."""
return _inspect.getsource(tf_decorator.unwrap(object)[1])
def getsourcefile(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getsourcefile."""
return _inspect.getsourcefile(tf_decorator.unwrap(object)[1])
def getsourcelines(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.getsourcelines."""
return _inspect.getsourcelines(tf_decorator.unwrap(object)[1])
def isbuiltin(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.isbuiltin."""
return _inspect.isbuiltin(tf_decorator.unwrap(object)[1])
def isclass(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.isclass."""
return _inspect.isclass(tf_decorator.unwrap(object)[1])
def isfunction(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.isfunction."""
return _inspect.isfunction(tf_decorator.unwrap(object)[1])
def isframe(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.ismodule."""
return _inspect.isframe(tf_decorator.unwrap(object)[1])
def isgenerator(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.isgenerator."""
return _inspect.isgenerator(tf_decorator.unwrap(object)[1])
def ismethod(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.ismethod."""
return _inspect.ismethod(tf_decorator.unwrap(object)[1])
def ismodule(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.ismodule."""
return _inspect.ismodule(tf_decorator.unwrap(object)[1])
def isroutine(object): # pylint: disable=redefined-builtin
"""TFDecorator-aware replacement for inspect.isroutine."""
return _inspect.isroutine(tf_decorator.unwrap(object)[1])
def stack(context=1):
"""TFDecorator-aware replacement for inspect.stack."""
return _inspect.stack(context)[1:]
def getsource_no_unwrap(obj):
"""Return source code for an object. Does not unwrap TFDecorators.
The source code is returned literally, including indentation for functions not
at the top level. This function is analogous to inspect.getsource, with one
key difference - it doesn't unwrap decorators. For simplicity, support for
some Python object types is dropped (tracebacks, frames, code objects).
Args:
obj: a class, method, or function object.
Returns:
source code as a string
"""
lines, lnum = _inspect.findsource(obj)
return ''.join(_inspect.getblock(lines[lnum:]))
| jbedorf/tensorflow | tensorflow/python/util/tf_inspect.py | Python | apache-2.0 | 13,772 | 0.01002 |
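A standalone sketch (plain functools, no TensorFlow required) of the pitfall _get_argspec_for_partial guards against: a partial that gives a default to an earlier argument but not a later one has no valid ArgSpec:
import functools
def func(m, n):
    return 2 * m + n
partialed = functools.partial(func, m=1)
# m now has a default while n, which follows it, does not -- that ordering
# cannot be expressed as an ArgSpec, so the module above raises ValueError.
print(partialed(n=3))  # -> 5; the call works, only the spec is inexpressible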
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-16 04:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Stat',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category_count', models.IntegerField()),
],
),
]
| hariharaselvam/djangotraining | products/migrations/0002_stat.py | Python | apache-2.0 | 566 | 0.001767 |
"""Various utilities"""
| UNICT-DMI/Telegram-DMI-Bot | module/utils/__init__.py | Python | gpl-3.0 | 24 | 0 |
# Copyright (c) 2020 Ultimaker B.V.
# Copyright (c) 2013 David Braam
# Uranium is released under the terms of the LGPLv3 or higher.
import os
from UM.Job import Job
from UM.Logger import Logger
from UM.Mesh.MeshReader import MeshReader
from UM.Mesh.MeshBuilder import MeshBuilder
from UM.Scene.SceneNode import SceneNode
class OBJReader(MeshReader):
def __init__(self) -> None:
super().__init__()
self._supported_extensions = [".obj"]
    def _toAbsoluteIndex(self, max, data):
        """ Handle negative indices (these are relative to the end of the list so far, so -2 is the second one defined before the face). """
return [index if index > 0 else 1 + max + index for index in data]
def _read(self, file_name):
scene_node = None
extension = os.path.splitext(file_name)[1]
if extension.lower() in self._supported_extensions:
vertex_list = []
normal_list = []
uv_list = []
face_list = []
scene_node = SceneNode()
mesh_builder = MeshBuilder()
mesh_builder.setFileName(file_name)
previous_line_parts = []
f = open(file_name, "rt", encoding = "utf-8")
for line in f:
parts = previous_line_parts + line.split()
previous_line_parts = []
if len(parts) < 1:
continue
if parts[-1] == "\\":
del parts[-1]
previous_line_parts = parts
continue
if parts[0] == "f":
parts = [i for i in map(lambda p: p.split("/"), parts)]
for idx in range(1, len(parts) - 2):
data = self._toAbsoluteIndex(len(vertex_list), [int(parts[1][0]), int(parts[idx + 1][0]), int(parts[idx + 2][0])])
if len(parts[1]) > 1 and parts[1][1] and parts[idx + 1][1] and parts[idx + 2][1]:
data += self._toAbsoluteIndex(len(normal_list), [int(parts[1][1]), int(parts[idx + 1][1]), int(parts[idx + 2][1])])
else:
data += [0, 0, 0]
if len(parts[1]) > 2:
data += self._toAbsoluteIndex(len(uv_list), [int(parts[1][2]), int(parts[idx + 1][2]), int(parts[idx + 2][2])])
else:
data += [0, 0, 0]
face_list.append(data)
elif parts[0] == "v":
vertex_list.append([float(parts[1]), float(parts[3]), -float(parts[2])])
elif parts[0] == "vn":
normal_list.append([float(parts[1]), float(parts[3]), -float(parts[2])])
elif parts[0] == "vt":
uv_list.append([float(parts[1]), float(parts[2])])
Job.yieldThread()
f.close()
mesh_builder.reserveVertexCount(3 * len(face_list))
num_vertices = len(vertex_list)
for face in face_list:
                # Subtract 1 from index, as obj starts counting at 1 instead of 0
i = face[0] - 1
j = face[1] - 1
k = face[2] - 1
ui = face[3] - 1
uj = face[4] - 1
uk = face[5] - 1
ni = face[6] - 1
nj = face[7] - 1
nk = face[8] - 1
if i < 0 or i >= num_vertices:
i = 0
if j < 0 or j >= num_vertices:
j = 0
if k < 0 or k >= num_vertices:
k = 0
if ni != -1 and nj != -1 and nk != -1:
mesh_builder.addFaceWithNormals(vertex_list[i][0], vertex_list[i][1], vertex_list[i][2], normal_list[ni][0], normal_list[ni][1], normal_list[ni][2], vertex_list[j][0], vertex_list[j][1], vertex_list[j][2], normal_list[nj][0], normal_list[nj][1], normal_list[nj][2], vertex_list[k][0], vertex_list[k][1], vertex_list[k][2],normal_list[nk][0], normal_list[nk][1], normal_list[nk][2])
else:
mesh_builder.addFaceByPoints(vertex_list[i][0], vertex_list[i][1], vertex_list[i][2], vertex_list[j][0], vertex_list[j][1], vertex_list[j][2], vertex_list[k][0], vertex_list[k][1], vertex_list[k][2])
if ui != -1 and len(uv_list) > ui:
mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 3, uv_list[ui][0], uv_list[ui][1])
if uj != -1 and len(uv_list) > uj:
mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 2, uv_list[uj][0], uv_list[uj][1])
if uk != -1 and len(uv_list) > uk:
mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 1, uv_list[uk][0], uv_list[uk][1])
Job.yieldThread()
if not mesh_builder.hasNormals():
mesh_builder.calculateNormals(fast = True)
# make sure that the mesh data is not empty
if mesh_builder.getVertexCount() == 0:
Logger.log("d", "File did not contain valid data, unable to read.")
return None # We didn't load anything.
scene_node.setMeshData(mesh_builder.build())
return scene_node
| Ultimaker/Uranium | plugins/FileHandlers/OBJReader/OBJReader.py | Python | lgpl-3.0 | 5,356 | 0.003547 |
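A standalone sketch of the negative-index rule _toAbsoluteIndex implements (OBJ indices are 1-based, and negative values count back from the vertices defined so far):
def to_absolute_index(count, data):
    # mirrors OBJReader._toAbsoluteIndex for a list of `count` vertices
    return [index if index > 0 else 1 + count + index for index in data]
print(to_absolute_index(8, [-1, -2, -3]))  # a face "f -1 -2 -3" -> [8, 7, 6]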
from grid.models import Grid
from django.contrib.auth.models import Group, User, Permission
from package.models import Category, PackageExample, Package
from grid.models import Element, Feature, GridPackage
from core.tests import datautil
def load():
category, created = Category.objects.get_or_create(
pk=1,
slug=u'apps',
title=u'App',
description=u'Small components used to build projects.',
)
package1, created = Package.objects.get_or_create(
pk=1,
category=category,
repo_watchers=0,
title=u'Testability',
pypi_url='',
participants=u'malcomt,jacobian',
pypi_downloads=0,
repo_url=u'https://github.com/pydanny/django-la-facebook',
repo_commits=0,
repo_forks=0,
slug=u'testability',
repo_description=u'Increase your testing ability with this steroid free supplement.',
)
package2, created = Package.objects.get_or_create(
pk=2,
category=category,
repo_watchers=0,
title=u'Supertester',
pypi_url='',
participants=u'thetestman',
pypi_downloads=0,
repo_url=u'https://github.com/pydanny/django-uni-form',
repo_commits=0,
repo_forks=0,
slug=u'supertester',
repo_description=u'Test everything under the sun with one command!',
)
package3, created = Package.objects.get_or_create(
pk=3,
category=category,
repo_watchers=0,
title=u'Serious Testing',
pypi_url='',
participants=u'pydanny',
pypi_downloads=0,
repo_url=u'https://github.com/cartwheelweb/packaginator',
repo_commits=0,
repo_forks=0,
slug=u'serious-testing',
repo_description=u'Make testing as painless as waxing your legs.',
)
package4, created = Package.objects.get_or_create(
pk=4,
category=category,
repo_watchers=0,
title=u'Another Test',
pypi_url='',
participants=u'pydanny',
pypi_downloads=0,
repo_url=u'https://github.com/djangopackages/djangopackages',
repo_commits=0,
repo_forks=0,
slug=u'another-test',
repo_description=u'Yet another test package, with no grid affiliation.',
)
grid1, created = Grid.objects.get_or_create(
pk=1,
description=u'A grid for testing.',
title=u'Testing',
is_locked=False,
slug=u'testing',
)
grid2, created = Grid.objects.get_or_create(
pk=2,
description=u'Another grid for testing.',
title=u'Another Testing',
is_locked=False,
slug=u'another-testing',
)
gridpackage1, created = GridPackage.objects.get_or_create(
pk=1,
package=package1,
grid=grid1,
)
gridpackage2, created = GridPackage.objects.get_or_create(
pk=2,
package=package1,
grid=grid1,
)
gridpackage3, created = GridPackage.objects.get_or_create(
pk=3,
package=package3,
grid=grid1,
)
gridpackage4, created = GridPackage.objects.get_or_create(
pk=4,
package=package3,
grid=grid2,
)
gridpackage5, created = GridPackage.objects.get_or_create(
pk=5,
package=package2,
grid=grid1,
)
feature1, created = Feature.objects.get_or_create(
pk=1,
title=u'Has tests?',
grid=grid1,
description=u'Does this package come with tests?',
)
feature2, created = Feature.objects.get_or_create(
pk=2,
title=u'Coolness?',
grid=grid1,
description=u'Is this package cool?',
)
element, created = Element.objects.get_or_create(
pk=1,
text=u'Yes',
feature=feature1,
grid_package=gridpackage1,
)
group1, created = Group.objects.get_or_create(
pk=1,
name=u'Moderators',
#permissions=[[u'delete_gridpackage', u'grid', u'gridpackage'], [u'delete_feature', u'grid', u'feature']],
)
group1.permissions.clear()
group1.permissions = [
Permission.objects.get(codename='delete_gridpackage'),
Permission.objects.get(codename='delete_feature')
]
user1, created = User.objects.get_or_create(
pk=1,
username=u'user',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=False,
last_login=u'2010-01-01 12:00:00',
password=u'sha1$644c9$347f3dd85fb609a5745ebe33d0791929bf08f22e',
email='',
date_joined=u'2010-01-01 12:00:00',
)
user2, created = User.objects.get_or_create(
pk=2,
username=u'cleaner',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=False,
last_login=u'2010-01-01 12:00:00',
#groups=[group1],
password=u'sha1$e6fe2$78b744e21cddb39117997709218f4c6db4e91894',
email='',
date_joined=u'2010-01-01 12:00:00',
)
user2.groups = [group1]
user3, created = User.objects.get_or_create(
pk=3,
username=u'staff',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=True,
last_login=u'2010-01-01 12:00:00',
password=u'sha1$8894d$c4814980edd6778f0ab1632c4270673c0fd40efe',
email='',
date_joined=u'2010-01-01 12:00:00',
)
user4, created = User.objects.get_or_create(
pk=4,
username=u'admin',
first_name='',
last_name='',
is_active=True,
is_superuser=True,
is_staff=True,
last_login=u'2010-01-01 12:00:00',
password=u'sha1$52c7f$59b4f64ffca593e6abd23f90fd1f95cf71c367a4',
email='',
date_joined=u'2010-01-01 12:00:00',
)
packageexample, created = PackageExample.objects.get_or_create(
pk=1,
package=package1,
url=u'http://www.example.com/',
active=True,
title=u'www.example.com',
)
datautil.reset_sequences(Grid, Group, User, Permission, Category, PackageExample,
Package, Element, Feature, GridPackage)
| audreyr/opencomparison | apiv1/tests/data.py | Python | mit | 6,264 | 0.000958 |
import sys
from flexmock import flexmock
import inject
from mcloud.events import EventBus
from mcloud.txdocker import IDockerClient, DockerTwistedClient
from mcloud.util import txtimeout
import pytest
from mcloud.remote import Server, Client, ApiError, Task, ApiRpcServer
from twisted.internet import reactor, defer
from twisted.python import log
import txredisapi as redis
class MockServer(Server):
message = None
def on_message(self, client, message, isBinary=False):
self.message = message
class MockClient(Client):
message = None
def on_message(self, message, isBinary=False):
self.message = message
def sleep(secs):
d = defer.Deferred()
reactor.callLater(secs, d.callback, None)
return d
#@pytest.inlineCallbacks
#def test_exchange():
# inject.clear()
#
# #log.startLogging(sys.stdout)
#
# server = MockServer(port=9999)
# server.bind()
#
# assert len(server.clients) == 0
#
# client = MockClient(port=9999)
# yield client.connect()
#
# assert len(server.clients) == 1
#
# log.msg('Sending data')
# yield client.send('boo')
#
# yield sleep(0.1)
#
# assert server.message == 'boo'
#
# yield server.clients[0].sendMessage('baz')
#
# yield sleep(0.1)
#
# assert client.message == 'baz'
#
# client.shutdown()
# server.shutdown()
#
# yield sleep(0.1)
@pytest.inlineCallbacks
def test_request_response():
#-----------------------------------
# preparations
#-----------------------------------
# cleanup a bit
inject.clear()
def my_config(binder):
binder.bind('settings', None)
inject.configure(my_config)
# log.startLogging(sys.stdout)
server = Server(port=9998, no_ssl=True)
server.bind()
client = Client(port=9998, no_ssl=True)
yield client.connect()
response = yield client.call_sync('ping')
assert response == 'pong'
client.shutdown()
server.shutdown()
@pytest.inlineCallbacks
def test_request_response_no_such_command():
#-----------------------------------
# preparations
#-----------------------------------
# cleanup a bit
inject.clear()
def my_config(binder):
binder.bind('settings', None)
inject.configure(my_config)
log.startLogging(sys.stdout)
server = Server(port=9996, no_ssl=True)
server.bind()
client = Client(port=9996, no_ssl=True)
yield client.connect()
with pytest.raises(ApiError):
yield client.call_sync('hoho')
client.shutdown()
server.shutdown()
@pytest.inlineCallbacks
def test_tasks():
#-----------------------------------
# preparations
#-----------------------------------
# cleanup a bit
inject.clear()
rc = yield redis.Connection(dbid=2)
eb = EventBus(rc)
yield eb.connect()
def my_config(binder):
binder.bind(redis.Connection, rc)
binder.bind(EventBus, eb)
binder.bind('settings', None)
inject.configure(my_config)
yield rc.flushdb()
api = inject.instance(ApiRpcServer)
#-----------------------------------
# Test itself
#-----------------------------------
# this will emulate some long-running process
task_defered = defer.Deferred()
# this is mock that will execute our long-running process
task = flexmock()
task.should_receive('foo').with_args(int, 123, 'test').once().and_return(task_defered)
# register our task
api.tasks['baz'] = task.foo
# start server -> real server on tcp port
server = Server(port=9997, no_ssl=True)
server.bind()
# real client connecton here
client = Client(port=9997, no_ssl=True)
yield client.connect()
# client calls a task
task = Task('baz')
yield client.call(task, 123, 'test')
yield sleep(0.1)
assert task.id > 0
assert task.name == 'baz'
assert task.is_running is True
assert len(server.rpc_server.tasks_running) == 1
assert server.rpc_server.tasks_running[task.id]['name'] == 'baz'
assert len(server.rpc_server.task_list()) == 1
# no data should be on client
yield sleep(0.1)
assert task.data == []
assert task.response is None
# now server sends some progress
yield server.clients[0].send_event('task.progress.%s' % task.id, 'nami-nami')
# and client should receive this data
yield sleep(0.1)
assert task.data == ['nami-nami']
assert task.is_running is True
assert task.response is None
# now our long-running process stopped and returned some result
    yield task_defered.callback('this is response')
    # and client should receive this result
    yield sleep(0.1)
    assert task.data == ['nami-nami']
    assert task.is_running == False
    assert task.response == 'this is response'
assert len(server.rpc_server.tasks_running) == 0
assert len(server.rpc_server.task_list()) == 0
#-----------------------------------
# Cleanup
#-----------------------------------
client.shutdown()
server.shutdown()
yield sleep(0.1)
@pytest.inlineCallbacks
def test_task_terminate():
#-----------------------------------
# preparations
#-----------------------------------
# cleanup a bit
inject.clear()
rc = yield redis.Connection(dbid=2)
eb = EventBus(rc)
yield eb.connect()
def my_config(binder):
binder.bind(redis.Connection, rc)
binder.bind(EventBus, eb)
binder.bind('settings', None)
inject.configure(my_config)
yield rc.flushdb()
api = inject.instance(ApiRpcServer)
#-----------------------------------
# Test itself
#-----------------------------------
# this will emulate some long-running process
task_defered = defer.Deferred()
# this is mock that will execute our long-running process
task = flexmock()
task.should_receive('foo').with_args(int, 123, 'test').once().and_return(task_defered)
# register our task
api.tasks['baz'] = task.foo
# start server -> real server on tcp port
server = Server(port=9987, no_ssl=True)
server.bind()
# real client connecton here
client = Client(port=9987, no_ssl=True)
yield client.connect()
# client calls a task
task = Task('baz')
yield client.call(task, 123, 'test')
yield sleep(0.1)
assert task.id > 0
assert task.name == 'baz'
assert task.is_running is True
# now client terminates the task
yield sleep(0.1)
client.terminate_task(task.id)
yield sleep(0.1)
assert task.is_running is False
#-----------------------------------
# Cleanup
#-----------------------------------
client.shutdown()
server.shutdown()
yield sleep(0.1) | modera/mcloud | tests/test_remote.py | Python | apache-2.0 | 6,741 | 0.005192 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
    import urllib.parse as urlparse # Very slow in PY2. Native in PY3
else:
    import urlparse # Use the PY2 native module, which is faster
import re
from platformcode import config, logger
from core import scrapertools
from core.item import Item
from core import servertools
from core import httptools
host = 'https://frprn.com'
def mainlist(item):
logger.info()
itemlist = []
itemlist.append(item.clone(title="Nuevas" , action="lista", url=host))
itemlist.append(item.clone(title="Mejor valorada" , action="lista", url=host + "/top-rated/"))
itemlist.append(item.clone(title="Mas largo" , action="lista", url=host + "/longest/"))
itemlist.append(item.clone(title="Modelos" , action="categorias", url=host + "/models/most-popular/"))
itemlist.append(item.clone(title="Categorias" , action="categorias", url=host + "/categories/"))
itemlist.append(item.clone(title="Buscar", action="search"))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "%20")
item.url = "%s/search/%s/?mode=async&action=get_block&block_id=list_videos_videos&from2=%s&fromStart=1&fromEnd=%s" % (host, texto,1,1)
try:
return lista(item)
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<li class="thumb thumb-\w+">.*?'
patron += '<a href="([^"]+)">.*?'
patron += '<img class="lazy" data-original="([^"]+)".*?'
patron += '<div class="title">(.*?)</a>'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedthumbnail,scrapedtitle in matches:
scrapedplot = ""
title = scrapertools.find_single_match(scrapedtitle,'<div class="text">([^<]+)<')
if "/categories/" in item.url:
cantidad = scrapertools.find_single_match(scrapedtitle,'<div class="count">(\d+)</div>')
scrapedtitle = scrapertools.find_single_match(scrapedtitle,'<div class="name">([^<]+)</div>')
title = "%s (%s)" %(scrapedtitle, cantidad)
scrapedurl = urlparse.urljoin(item.url,scrapedurl)
itemlist.append(item.clone(action="lista", title=title, url=scrapedurl,
fanart=scrapedthumbnail, thumbnail=scrapedthumbnail, plot=scrapedplot) )
next_page = scrapertools.find_single_match(data,'<li class="pagination-next"><a href="([^"]+)">')
if next_page!="":
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append(item.clone(action="categorias", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) )
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<div class="thumb">.*?'
patron += '<a href="([^"]+)".*?'
patron += '<img class="lazy" data-original="([^"]+)" alt="([^"]+)".*?'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedthumbnail,scrapedtitle in matches:
url = urlparse.urljoin(item.url,scrapedurl)
duracion = ""
title = "[COLOR yellow]%s[/COLOR] %s" % (duracion, scrapedtitle)
contentTitle = title
thumbnail = scrapedthumbnail
plot = ""
year = ""
action = "play"
if logger.info() == False:
action = "findvideos"
itemlist.append(item.clone(action=action, title=title, url=url, thumbnail=thumbnail,
fanart=thumbnail, plot=plot, contentTitle = contentTitle))
patron = 'data-from="([^"]+)" data-id="([^"]+)" data-total="([^"]+)" data-page="([^"]+)" data-url="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data)
for dfrom,id,total,page,purl in matches:
page = int(page)
page += page
next_page = "%s?action=get_block&block_id=%s&%s=%s" %(purl, id, dfrom, page)
itemlist.append(item.clone(action="lista", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) )
return itemlist
def findvideos(item):
logger.info()
itemlist = []
itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
def play(item):
logger.info()
itemlist = []
itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
| alfa-addon/addon | plugin.video.alfa/channels/freeporn.py | Python | gpl-3.0 | 5,059 | 0.016024 |
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit tests for the classes and functions in pkg.py.
"""

import os
import re
import subprocess

from mock import Mock, MagicMock, patch
import pexpect

from trove.common import exception
from trove.common import utils
from trove.guestagent import pkg
from trove.tests.unittests import trove_testtools
class PkgDEBInstallTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgDEBInstallTestCase, self).setUp()
self.pkg = pkg.DebianPackagerMixin()
self.pkg_fix = self.pkg._fix
self.pkg_fix_package_selections = self.pkg._fix_package_selections
p0 = patch('pexpect.spawn')
p0.start()
self.addCleanup(p0.stop)
p1 = patch('trove.common.utils.execute')
p1.start()
self.addCleanup(p1.stop)
self.pkg._fix = Mock(return_value=None)
self.pkg._fix_package_selections = Mock(return_value=None)
self.pkgName = 'packageName'
def tearDown(self):
super(PkgDEBInstallTestCase, self).tearDown()
self.pkg._fix = self.pkg_fix
self.pkg._fix_package_selections = self.pkg_fix_package_selections
def test_pkg_is_installed_no_packages(self):
packages = []
self.assertTrue(self.pkg.pkg_is_installed(packages))
def test_pkg_is_installed_yes(self):
packages = ["package1=1.0", "package2"]
self.pkg.pkg_version = MagicMock(side_effect=["1.0", "2.0"])
self.assertTrue(self.pkg.pkg_is_installed(packages))
def test_pkg_is_installed_no(self):
packages = ["package1=1.0", "package2", "package3=3.1"]
self.pkg.pkg_version = MagicMock(side_effect=["1.0", "2.0", "3.0"])
self.assertFalse(self.pkg.pkg_is_installed(packages))
def test_success_install(self):
# test
pexpect.spawn.return_value.expect.return_value = 7
pexpect.spawn.return_value.match = False
self.assertTrue(self.pkg.pkg_install(self.pkgName, {}, 5000) is None)
def test_success_install_with_config_opts(self):
# test
config_opts = {'option': 'some_opt'}
pexpect.spawn.return_value.expect.return_value = 7
pexpect.spawn.return_value.match = False
self.assertTrue(
self.pkg.pkg_install(self.pkgName, config_opts, 5000) is None)
def test_permission_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 0
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_not_found_1(self):
# test
pexpect.spawn.return_value.expect.return_value = 1
pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName)
# test and verify
self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_not_found_2(self):
# test
pexpect.spawn.return_value.expect.return_value = 2
pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName)
# test and verify
self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_run_DPKG_bad_State(self):
# test _fix method is called and PackageStateError is thrown
pexpect.spawn.return_value.expect.return_value = 4
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
self.assertTrue(self.pkg._fix.called)
def test_admin_lock_error(self):
# test 'Unable to lock the administration directory' error
pexpect.spawn.return_value.expect.return_value = 5
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgAdminLockError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_broken_error(self):
pexpect.spawn.return_value.expect.return_value = 6
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgBrokenError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_timeout_error(self):
# test timeout error
pexpect.spawn.return_value.expect.side_effect = (
pexpect.TIMEOUT('timeout error'))
# test and verify
self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_install,
self.pkgName, {}, 5000)
class PkgDEBRemoveTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgDEBRemoveTestCase, self).setUp()
self.pkg = pkg.DebianPackagerMixin()
self.pkg_version = self.pkg.pkg_version
self.pkg_install = self.pkg._install
self.pkg_fix = self.pkg._fix
p0 = patch('pexpect.spawn')
p0.start()
self.addCleanup(p0.stop)
p1 = patch('trove.common.utils.execute')
p1.start()
self.addCleanup(p1.stop)
self.pkg.pkg_version = Mock(return_value="OK")
self.pkg._install = Mock(return_value=None)
self.pkg._fix = Mock(return_value=None)
self.pkgName = 'packageName'
def tearDown(self):
super(PkgDEBRemoveTestCase, self).tearDown()
self.pkg.pkg_version = self.pkg_version
self.pkg._install = self.pkg_install
self.pkg._fix = self.pkg_fix
def test_remove_no_pkg_version(self):
# test
pexpect.spawn.return_value.expect.return_value = 6
pexpect.spawn.return_value.match = False
with patch.object(self.pkg, 'pkg_version', return_value=None):
self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None)
def test_success_remove(self):
# test
pexpect.spawn.return_value.expect.return_value = 6
pexpect.spawn.return_value.match = False
self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None)
def test_permission_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 0
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_remove,
self.pkgName, 5000)
def test_package_not_found(self):
# test
pexpect.spawn.return_value.expect.return_value = 1
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_remove,
self.pkgName, 5000)
def test_package_reinstall_first_1(self):
# test
pexpect.spawn.return_value.expect.return_value = 2
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_remove,
self.pkgName, 5000)
self.assertTrue(self.pkg._install.called)
self.assertFalse(self.pkg._fix.called)
def test_package_reinstall_first_2(self):
# test
pexpect.spawn.return_value.expect.return_value = 3
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_remove,
self.pkgName, 5000)
self.assertTrue(self.pkg._install.called)
self.assertFalse(self.pkg._fix.called)
def test_package_DPKG_first(self):
# test
pexpect.spawn.return_value.expect.return_value = 4
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_remove,
self.pkgName, 5000)
self.assertFalse(self.pkg._install.called)
self.assertTrue(self.pkg._fix.called)
def test_admin_lock_error(self):
# test 'Unable to lock the administration directory' error
pexpect.spawn.return_value.expect.return_value = 5
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgAdminLockError, self.pkg.pkg_remove,
self.pkgName, 5000)
def test_timeout_error(self):
# test timeout error
pexpect.spawn.return_value.expect.side_effect = (
pexpect.TIMEOUT('timeout error'))
# test and verify
self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_remove,
self.pkgName, 5000)
@patch.object(subprocess, 'call')
def test_timeout_error_with_exception(self, mock_call):
# test timeout error
pexpect.spawn.return_value.expect.side_effect = (
pexpect.TIMEOUT('timeout error'))
pexpect.spawn.return_value.close.side_effect = (
pexpect.ExceptionPexpect('error'))
# test and verify
self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_remove,
self.pkgName, 5000)
self.assertEqual(1, mock_call.call_count)
class PkgDEBVersionTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgDEBVersionTestCase, self).setUp()
self.pkgName = 'mysql-server-5.5'
self.pkgVersion = '5.5.28-0'
self.getoutput = pkg.getoutput
def tearDown(self):
super(PkgDEBVersionTestCase, self).tearDown()
pkg.getoutput = self.getoutput
def test_version_success(self):
cmd_out = "%s:\n Installed: %s\n" % (self.pkgName, self.pkgVersion)
pkg.getoutput = Mock(return_value=cmd_out)
version = pkg.DebianPackagerMixin().pkg_version(self.pkgName)
self.assertTrue(version)
self.assertEqual(self.pkgVersion, version)
def test_version_unknown_package(self):
cmd_out = "N: Unable to locate package %s" % self.pkgName
pkg.getoutput = Mock(return_value=cmd_out)
self.assertFalse(pkg.DebianPackagerMixin().pkg_version(self.pkgName))
def test_version_no_version(self):
cmd_out = "%s:\n Installed: %s\n" % (self.pkgName, "(none)")
pkg.getoutput = Mock(return_value=cmd_out)
self.assertFalse(pkg.DebianPackagerMixin().pkg_version(self.pkgName))
class PkgRPMVersionTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgRPMVersionTestCase, self).setUp()
self.pkgName = 'python-requests'
self.pkgVersion = '0.14.2-1.el6'
self.getoutput = pkg.getoutput
def tearDown(self):
super(PkgRPMVersionTestCase, self).tearDown()
pkg.getoutput = self.getoutput
@patch('trove.guestagent.pkg.LOG')
def test_version_no_output(self, mock_logging):
cmd_out = ''
pkg.getoutput = Mock(return_value=cmd_out)
self.assertIsNone(pkg.RedhatPackagerMixin().pkg_version(self.pkgName))
def test_version_success(self):
cmd_out = self.pkgVersion
pkg.getoutput = Mock(return_value=cmd_out)
version = pkg.RedhatPackagerMixin().pkg_version(self.pkgName)
self.assertTrue(version)
self.assertEqual(self.pkgVersion, version)
class PkgRPMInstallTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgRPMInstallTestCase, self).setUp()
self.pkg = pkg.RedhatPackagerMixin()
self.getoutput = pkg.getoutput
self.pkgName = 'packageName'
p0 = patch('pexpect.spawn')
p0.start()
self.addCleanup(p0.stop)
p1 = patch('trove.common.utils.execute')
p1.start()
self.addCleanup(p1.stop)
def tearDown(self):
super(PkgRPMInstallTestCase, self).tearDown()
pkg.getoutput = self.getoutput
def test_pkg_is_installed_no_packages(self):
packages = []
self.assertTrue(self.pkg.pkg_is_installed(packages))
def test_pkg_is_installed_yes(self):
packages = ["package1=1.0", "package2"]
with patch.object(pkg, 'getoutput', MagicMock(
return_value="package1=1.0\n" "package2=2.0")):
self.assertTrue(self.pkg.pkg_is_installed(packages))
def test_pkg_is_installed_no(self):
packages = ["package1=1.0", "package2", "package3=3.0"]
with patch.object(pkg, 'getoutput', MagicMock(
return_value="package1=1.0\n" "package2=2.0")):
self.assertFalse(self.pkg.pkg_is_installed(packages))
def test_permission_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 0
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_not_found(self):
# test
pexpect.spawn.return_value.expect.return_value = 1
pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName)
# test and verify
self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_conflict_remove(self):
# test
pexpect.spawn.return_value.expect.return_value = 2
pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName)
self.pkg._rpm_remove_nodeps = Mock()
# test and verify
self.pkg._install(self.pkgName, 5000)
self.assertTrue(self.pkg._rpm_remove_nodeps.called)
def test_package_conflict_remove_install(self):
with patch.object(self.pkg, '_install', side_effect=[3, 3, 0]):
self.assertTrue(
self.pkg.pkg_install(self.pkgName, {}, 5000) is None)
self.assertEqual(3, self.pkg._install.call_count)
@patch.object(utils, 'execute')
def test__rpm_remove_nodeps(self, mock_execute):
self.pkg._rpm_remove_nodeps(self.pkgName)
mock_execute.assert_called_with('rpm', '-e', '--nodeps', self.pkgName,
run_as_root=True, root_helper='sudo')
def test_package_scriptlet_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 5
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgScriptletError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_http_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 6
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgDownloadError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_nomirrors_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 7
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgDownloadError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_sign_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 8
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgSignError, self.pkg.pkg_install,
self.pkgName, {}, 5000)
def test_package_already_installed(self):
# test
pexpect.spawn.return_value.expect.return_value = 9
pexpect.spawn.return_value.match = False
# test and verify
self.assertTrue(self.pkg.pkg_install(self.pkgName, {}, 5000) is None)
def test_package_success_updated(self):
# test
pexpect.spawn.return_value.expect.return_value = 10
pexpect.spawn.return_value.match = False
# test and verify
self.assertTrue(self.pkg.pkg_install(self.pkgName, {}, 5000) is None)
def test_package_success_installed(self):
# test
pexpect.spawn.return_value.expect.return_value = 11
pexpect.spawn.return_value.match = False
# test and verify
self.assertTrue(self.pkg.pkg_install(self.pkgName, {}, 5000) is None)
def test_timeout_error(self):
# test timeout error
pexpect.spawn.return_value.expect.side_effect = (
pexpect.TIMEOUT('timeout error'))
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_install,
self.pkgName, {}, 5000)
class PkgRPMRemoveTestCase(trove_testtools.TestCase):
def setUp(self):
super(PkgRPMRemoveTestCase, self).setUp()
self.pkg = pkg.RedhatPackagerMixin()
self.pkg_version = self.pkg.pkg_version
self.pkg_install = self.pkg._install
p0 = patch('pexpect.spawn')
p0.start()
self.addCleanup(p0.stop)
p1 = patch('trove.common.utils.execute')
p1.start()
self.addCleanup(p1.stop)
self.pkg.pkg_version = Mock(return_value="OK")
self.pkg._install = Mock(return_value=None)
self.pkgName = 'packageName'
def tearDown(self):
super(PkgRPMRemoveTestCase, self).tearDown()
self.pkg.pkg_version = self.pkg_version
self.pkg._install = self.pkg_install
def test_permission_error(self):
# test
pexpect.spawn.return_value.expect.return_value = 0
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_remove,
self.pkgName, 5000)
def test_package_not_found(self):
# test
pexpect.spawn.return_value.expect.return_value = 1
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_remove,
self.pkgName, 5000)
def test_remove_no_pkg_version(self):
# test
pexpect.spawn.return_value.expect.return_value = 2
pexpect.spawn.return_value.match = False
with patch.object(self.pkg, 'pkg_version', return_value=None):
self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None)
def test_success_remove(self):
# test
pexpect.spawn.return_value.expect.return_value = 2
pexpect.spawn.return_value.match = False
self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None)
def test_timeout_error(self):
# test timeout error
pexpect.spawn.return_value.expect.side_effect = (
pexpect.TIMEOUT('timeout error'))
pexpect.spawn.return_value.match = False
# test and verify
self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_remove,
self.pkgName, 5000)
class PkgDEBFixPackageSelections(trove_testtools.TestCase):
def setUp(self):
super(PkgDEBFixPackageSelections, self).setUp()
self.pkg = pkg.DebianPackagerMixin()
self.getoutput = pkg.getoutput
def tearDown(self):
super(PkgDEBFixPackageSelections, self).tearDown()
pkg.getoutput = self.getoutput
@patch.object(os, 'remove')
@patch.object(pkg, 'NamedTemporaryFile')
@patch.object(utils, 'execute')
def test__fix_package_selections(self, mock_execute, mock_temp_file,
mock_remove):
packages = ["package1"]
config_opts = {'option': 'some_opt'}
pkg.getoutput = Mock(
return_value="* package1/option: some_opt")
self.pkg._fix_package_selections(packages, config_opts)
self.assertEqual(2, mock_execute.call_count)
self.assertEqual(1, mock_remove.call_count)
@patch.object(os, 'remove')
@patch.object(pkg, 'NamedTemporaryFile')
@patch.object(utils, 'execute',
side_effect=exception.ProcessExecutionError)
def test_fail__fix_package_selections(self, mock_execute, mock_temp_file,
mock_remove):
packages = ["package1"]
config_opts = {'option': 'some_opt'}
pkg.getoutput = Mock(
return_value="* package1/option: some_opt")
self.assertRaises(pkg.PkgConfigureError,
self.pkg._fix_package_selections,
packages, config_opts)
self.assertEqual(1, mock_remove.call_count)
@patch.object(utils, 'execute')
def test__fix(self, mock_execute):
self.pkg._fix(30)
mock_execute.assert_called_with('dpkg', '--configure', '-a',
run_as_root=True, root_helper='sudo')
| mmasaki/trove | trove/tests/unittests/guestagent/test_pkg.py | Python | apache-2.0 | 21,209 | 0 |
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE
VARCHAR2 = VARCHAR
class CCommodityFuturesEODPrices(BaseModel):
"""
4.182 中国商品期货日行情
Attributes
----------
object_id: VARCHAR2(100)
对象ID
s_info_windcode: VARCHAR2(40)
Wind代码
trade_dt: VARCHAR2(8)
交易日期
s_dq_presettle: NUMBER(20,4)
前结算价(元)
s_dq_open: NUMBER(20,4)
开盘价(元)
s_dq_high: NUMBER(20,4)
最高价(元)
s_dq_low: NUMBER(20,4)
最低价(元)
s_dq_close: NUMBER(20,4)
收盘价(元)
s_dq_settle: NUMBER(20,4)
结算价(元)
s_dq_volume: NUMBER(20,4)
成交量(手)
s_dq_amount: NUMBER(20,4)
成交金额(万元)
s_dq_oi: NUMBER(20,4)
持仓量(手)
s_dq_change: NUMBER(20,4)
涨跌(元) 收盘价-前结算价
s_dq_oichange: NUMBER(20,4)
持仓量变化
fs_info_type: VARCHAR2(10)
合约类型 1:主力合约2:真实合约3:连续合约
opdate: DATETIME
opdate
opmode: VARCHAR(1)
opmode
"""
__tablename__ = "CCommodityFuturesEODPrices"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_windcode = Column(VARCHAR2(40))
trade_dt = Column(VARCHAR2(8))
s_dq_presettle = Column(NUMBER(20,4))
s_dq_open = Column(NUMBER(20,4))
s_dq_high = Column(NUMBER(20,4))
s_dq_low = Column(NUMBER(20,4))
s_dq_close = Column(NUMBER(20,4))
s_dq_settle = Column(NUMBER(20,4))
s_dq_volume = Column(NUMBER(20,4))
s_dq_amount = Column(NUMBER(20,4))
s_dq_oi = Column(NUMBER(20,4))
s_dq_change = Column(NUMBER(20,4))
s_dq_oichange = Column(NUMBER(20,4))
fs_info_type = Column(VARCHAR2(10))
opdate = Column(DATETIME)
opmode = Column(VARCHAR(1))
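
# Minimal usage sketch (hedged): assumes a configured SQLAlchemy session
# named `session`; adjust the import path to your checkout.
#
#   rows = (session.query(CCommodityFuturesEODPrices)
#           .filter(CCommodityFuturesEODPrices.trade_dt == "20200102")
#           .all())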
| SnowWalkerJ/quantlib | quant/data/wind/tables/ccommodityfutureseodprices.py | Python | gpl-3.0 | 1,944 | 0.015819 |
from django.db import models
from django.dispatch import receiver
from django.utils.encoding import python_2_unicode_compatible
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MaxValueValidator, MinValueValidator
from projects.models import Project
from orders.models import LineFollowerStage, LineFollowerJuniorStage
class BaseResult(models.Model):
score = models.FloatField(verbose_name=_('Score'), blank=True)
minutes = models.PositiveSmallIntegerField(verbose_name=_("Minutes"))
seconds = models.PositiveSmallIntegerField(verbose_name=_("Seconds"))
milliseconds = models.PositiveSmallIntegerField(
verbose_name=_("Milliseconds"))
disqualification = models.BooleanField(
verbose_name=_('Disqualification'), default=False)
is_best = models.BooleanField(
verbose_name=_("Is best result?"), default=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
    @property
    def duration(self):
        # Duration in seconds. Note the 0.01 factor: "milliseconds" is
        # effectively treated as hundredths of a second (a true millisecond
        # field would be scaled by 0.001).
        return self.minutes * 60 + self.seconds + self.milliseconds * 0.01
@property
def duration_pretty(self):
return "{} minutes, {} seconds, {} milliseconds".format(
self.minutes, self.seconds, self.milliseconds)
@python_2_unicode_compatible
class LineFollowerResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "line_follower"})
stage = models.ForeignKey(
LineFollowerStage, verbose_name=_("Line Follower Stage"))
runway_out = models.PositiveSmallIntegerField(
verbose_name=_("Runway Out Count"), default=0)
class Meta:
verbose_name = _("Line Follower Result")
verbose_name_plural = _("Line Follower Results")
ordering = ['disqualification', 'score']
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=LineFollowerResult)
def line_follower_result_calculate_score(sender, instance, *args, **kwargs):
instance.score = instance.duration * (1 + 0.2 * instance.runway_out)
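
# Worked example (hedged): a 90-second run with two runway-outs scores
# 90 * (1 + 0.2 * 2) = 126.0; lower scores rank first per Meta.ordering.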
@python_2_unicode_compatible
class LineFollowerJuniorResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "line_follower_junior"})
stage = models.ForeignKey(
LineFollowerJuniorStage, verbose_name=_("Line Follower Junior Stage"))
runway_out = models.PositiveSmallIntegerField(
verbose_name=_("Runway Out Count"), default=0)
class Meta:
verbose_name = _("Line Follower Junior Result")
verbose_name_plural = _("Line Follower Junior Results")
ordering = ['disqualification', 'score']
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=LineFollowerJuniorResult)
def line_follower_junior_result_calculate_score(sender, instance, *args,
**kwargs):
instance.score = instance.duration * (1 + 0.2 * instance.runway_out)
@python_2_unicode_compatible
class ConstructionResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "construction"})
class Meta:
verbose_name = _("Construction Result")
verbose_name_plural = _("Construction Results")
ordering = [
"disqualification", "-score", "minutes", "seconds", "milliseconds"]
def __str__(self):
return self.project.name
@python_2_unicode_compatible
class DroneResult(models.Model):
project = models.ForeignKey(
Project, limit_choices_to={"category": "drone"})
score = models.FloatField(verbose_name=_('Score'), blank=True)
disqualification = models.BooleanField(
verbose_name=_('Disqualification'), default=False)
is_best = models.BooleanField(
verbose_name=_("Is best result?"), default=True)
created_at = models.DateTimeField(auto_now_add=True)
laps = models.FloatField(verbose_name=_("Laps"), default=0)
shortcuts = models.PositiveSmallIntegerField(
verbose_name=_("Shortcuts"), default=0)
class Meta:
verbose_name = _("Drone Result")
verbose_name_plural = _("Drone Results")
ordering = [
"disqualification", "-score"]
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=DroneResult)
def drone_result_calculate_score(sender, instance, *args, **kwargs):
instance.score = instance.laps * 100 - instance.shortcuts * 50
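
# Worked example (hedged): 3 laps with 1 shortcut scores
# 3 * 100 - 1 * 50 = 250; higher scores rank first per Meta.ordering.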
@python_2_unicode_compatible
class StairClimbingResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "stair_climbing"})
stair1 = models.BooleanField(verbose_name=_("Stair #1"), default=False)
stair2 = models.BooleanField(verbose_name=_("Stair #2"), default=False)
stair3 = models.BooleanField(verbose_name=_("Stair #3"), default=False)
stair4 = models.BooleanField(verbose_name=_("Stair #4"), default=False)
stair5 = models.BooleanField(verbose_name=_("Stair #5"), default=False)
stair6 = models.BooleanField(verbose_name=_("Stair #6"), default=False)
stair7 = models.BooleanField(verbose_name=_("Stair #7"), default=False)
down6 = models.BooleanField(verbose_name=_("Down #6"), default=False)
down5 = models.BooleanField(verbose_name=_("Down #5"), default=False)
down4 = models.BooleanField(verbose_name=_("Down #4"), default=False)
down3 = models.BooleanField(verbose_name=_("Down #3"), default=False)
down2 = models.BooleanField(verbose_name=_("Down #2"), default=False)
down1 = models.BooleanField(verbose_name=_("Down #1"), default=False)
plexi_touch = models.PositiveSmallIntegerField(
verbose_name=_("Plexi Touch Count"), default=0)
is_complete = models.BooleanField(
verbose_name=_("Is finish?"), default=False)
class Meta:
verbose_name = _("Stair Climbing Result")
verbose_name_plural = _("Stair Climbing Results")
ordering = [
"disqualification", "-score", "minutes", "seconds", "milliseconds"]
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=StairClimbingResult)
def stair_climbing_result_calculate_score(sender, instance, *args, **kwargs):
instance.score = sum((
(int(instance.stair1) + int(instance.stair2) +
int(instance.stair3)) * 10,
int(instance.stair4) * 40,
int(instance.stair5) * 80,
int(instance.stair6) * 40,
int(instance.stair7) * 50,
(int(instance.down6) + int(instance.down5) + int(instance.down4)) * 30,
(int(instance.down3)) * 50,
(int(instance.down1) + int(instance.down2)) * 20,
(int(instance.is_complete)) * 40,
instance.plexi_touch * (-10)
))
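
# Worked example (hedged): clearing every stair and descent with a complete
# run and no plexi touches scores
# 3*10 + 40 + 80 + 40 + 50 + 3*30 + 50 + 2*20 + 40 = 460 points.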
@python_2_unicode_compatible
class ColorSelectingResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "color_selecting"})
obtain = models.PositiveSmallIntegerField(
verbose_name=_("Cylinder Obtain Count"))
place_success = models.PositiveSmallIntegerField(
verbose_name=_("Cylinder Successful Placement Count"))
place_failure = models.PositiveSmallIntegerField(
verbose_name=_("Cylinder Unsuccessful Placement Count"))
class Meta:
verbose_name = _("Color Selecting Result")
verbose_name_plural = _("Color Selecting Results")
ordering = [
"disqualification", "-score", "minutes", "seconds", "milliseconds"]
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=ColorSelectingResult)
def color_selecting_result_calculate_score(sender, instance, *args, **kwargs):
instance.score = sum((
instance.obtain * 100,
instance.place_success * 200,
instance.place_failure * (-50)))
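
# Worked example (hedged): 3 cylinders obtained, 2 placed successfully and
# 1 unsuccessfully scores 3*100 + 2*200 - 1*50 = 650.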
@python_2_unicode_compatible
class ScenarioResult(BaseResult):
project = models.ForeignKey(
Project, limit_choices_to={"category": "scenario"})
is_stopped = models.BooleanField(
verbose_name=_("Is parked wrongly?"), default=False)
is_parked = models.BooleanField(
verbose_name=_("Is parked?"), default=False)
sign_succeed = models.PositiveSmallIntegerField(
verbose_name=_("Succeed Signs"), default=0)
sign_failed = models.PositiveSmallIntegerField(
verbose_name=_("Failed Signs"), default=0)
class Meta:
verbose_name = _("Scenario Result")
verbose_name_plural = _("Scenario Results")
ordering = ["disqualification", "-score",
"minutes", "seconds", "milliseconds"]
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=ScenarioResult)
def scenario_result_calculate_score(sender, instance, *args, **kwargs):
instance.score = sum((
int(instance.is_stopped) * (-20),
int(instance.is_parked) * 60,
instance.sign_succeed * 10,
instance.sign_failed * (-10)))
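
# Worked example (hedged): a parked robot (not stopped wrongly) with 4
# succeeded signs and 1 failed sign scores 60 + 4*10 - 1*10 = 90.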
@python_2_unicode_compatible
class InnovativeJury(models.Model):
jury = models.CharField(max_length=30, unique=True)
class Meta:
verbose_name = _("Innovative Jury")
verbose_name_plural = _("Innovative Juries")
ordering = ["jury"]
def __str__(self):
return self.jury
@python_2_unicode_compatible
class InnovativeJuryResult(models.Model):
project = models.ForeignKey(
Project, limit_choices_to={"category": "innovative"})
jury = models.ForeignKey(InnovativeJury)
jury_score = models.FloatField(
verbose_name=_('Jury Score'), blank=True, default=0)
design = models.FloatField(
validators=[MinValueValidator(0.0), MaxValueValidator(10.0)],
verbose_name=_("Design"), default=0)
innovative = models.FloatField(
validators=[MinValueValidator(0.0), MaxValueValidator(10.0)],
verbose_name=_("Innovative"), default=0)
technical = models.FloatField(
validators=[MinValueValidator(0.0), MaxValueValidator(10.0)],
verbose_name=_("Technical"), default=0)
presentation = models.FloatField(
validators=[MinValueValidator(0.0), MaxValueValidator(10.0)],
verbose_name=_("Presentation"), default=0)
opinion = models.FloatField(
validators=[MinValueValidator(0.0), MaxValueValidator(10.0)],
verbose_name=_("Opinion"), default=0)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
verbose_name = _("Innovative Result")
verbose_name_plural = _("Innovative Results")
ordering = ["project", "jury"]
unique_together = ('project', 'jury')
def __str__(self):
return self.project.name
@python_2_unicode_compatible
class InnovativeTotalResult(models.Model):
project = models.ForeignKey(
Project, limit_choices_to={"category": "innovative"}, unique=True)
score = models.FloatField(verbose_name=_('Score'), default=0)
class Meta:
verbose_name = _("Innovative Total Result")
verbose_name_plural = _("Innovative Total Results")
ordering = ["project"]
def __str__(self):
return self.project.name
@receiver(models.signals.pre_save, sender=InnovativeJuryResult)
def innovative_jury_result_calculate_score(sender, instance, *args, **kwargs):
instance.jury_score = sum((
instance.design * 0.2,
instance.innovative * 0.3,
instance.technical * 0.25,
instance.presentation * 0.1,
instance.opinion * 0.05))
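
# Note (hedged): the weights above sum to 0.9, so with every criterion at
# its 10.0 maximum the jury_score tops out at 9.0 rather than 10.0.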
| bilbeyt/ituro | ituro/results/models.py | Python | mit | 11,573 | 0.000432 |
import sys
import time
import lexer
import parser
import cfg
import typecheck
import codegen
import errors
import exc
from threading import Lock
compilelock = Lock()
#------------------------------------------------------------------------
# Pipeline
#------------------------------------------------------------------------
class CompileError(RuntimeError):
pass
class Pipeline(object):
def __init__(self, name, passes):
self.name = name
self.__name__ = name
self.passes = passes
def __call__(self, ast, env):
for ppass in self.passes:
ast, env = ppass(ast, env)
if errors.occurred():
errors.reset()
raise CompileError, ppass.__name__
return ast, env
#------------------------------------------------------------------------
# Passes
#------------------------------------------------------------------------
def ppass(name):
def wrapper(fn):
fn.__name__ = name
return fn
return wrapper
# ------------------------------
@ppass("Syntax Parser")
def parse_pass(ast, env):
parse = parser.make_parser()
ast = parse(ast)
return ast, env
# ------------------------------
@ppass("Type checker")
def typecheck_pass(ast, env):
symtab = typecheck.typecheck(ast)
env['symtab'] = symtab
return ast, env
# ------------------------------
@ppass("Rewriter")
def rewrite_pass(ast, env):
return ast, env
# ------------------------------
@ppass("Single static assignment")
def ssa_pass(ast, env):
functions = cfg.ssa_pass(ast)
env['functions'] = functions
return ast, env
# ------------------------------
@ppass("Code generation")
def codegen_pass(ast, env):
cgen = codegen.LLVMEmitter()
blockgen = codegen.BlockEmitter(cgen)
env['cgen'] = cgen
env['blockgen'] = blockgen
functions = env['functions']
lfunctions = []
for name, retty, argtys, start_block in functions:
function = blockgen.generate_function(
name,
retty,
argtys,
start_block
)
function.verify()
lfunctions.append(function)
env['lfunctions'] = lfunctions
return ast, env
# ------------------------------
@ppass("LLVM Optimizer")
def optimizer_pass(ast, env):
cgen = env['cgen']
lfunctions = env['lfunctions']
opt_level = env['args']['O']
optimizer = codegen.LLVMOptimizer(cgen.module, opt_level)
# function-level optimize
#for lfunc in lfunctions:
#optimizer.run(lfunc)
#lfunc.verify()
# module-level optimization
optimizer.runmodule(cgen.module)
cgen.module.verify()
env['lmodule'] = cgen.module
return ast, env
# ------------------------------
@ppass("Linker")
def linker_pass(ast, env):
return ast, env
#------------------------------------------------------------------------
# Pipeline Structure
#------------------------------------------------------------------------
frontend = Pipeline('frontend', [parse_pass,
typecheck_pass,
rewrite_pass
])
backend = Pipeline('backend', [ssa_pass,
codegen_pass,
optimizer_pass,
linker_pass,
])
compiler = Pipeline('compile', [frontend,
backend
])
#------------------------------------------------------------------------
# Toplevel
#------------------------------------------------------------------------
def compile(source, **opts):
opts.setdefault('O', 2)
env = {'args': opts}
with compilelock:
ast, env = compiler(source, env)
return ast, env
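
# Minimal usage sketch (hedged): `source` holds a blir program as a string
# and the keyword options mirror the CLI flags parsed in main() below.
#
#   ast, env = compile(source, O=2)
#   print env['lmodule']        # same output as --emit-llvm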
#------------------------------------------------------------------------
# Command Line Interface
#------------------------------------------------------------------------
def main():
import argparse
argp = argparse.ArgumentParser('blirc')
argp.add_argument('file', metavar="file", nargs='?', help='Source file')
argp.add_argument('-O', metavar="opt", nargs='?', type=int, help='Optimization level', default=2)
argp.add_argument('--ddump-parse', action='store_true', help='Dump parse tree')
argp.add_argument('--ddump-lex', action='store_true', help='Dump token stream')
argp.add_argument('--ddump-blocks', action='store_true', help='Dump the block structure')
argp.add_argument('--ddump-tc', action='store_true', help='Dump the type checker state')
argp.add_argument('--ddump-optimizer', action='store_true', help='Dump diff of the LLVM optimizer pass')
argp.add_argument('--noprelude', action='store_true', help='Don\'t link against the prelude')
argp.add_argument('--nooptimize', action='store_true', help='Don\'t run LLVM optimization pass')
argp.add_argument('--emit-llvm', action='store_true', help=' Generate output files in LLVM formats ')
argp.add_argument('--emit-x86', action='store_true', help=' Generate output files in x86 assembly ')
argp.add_argument('--run', action='store_true', help='Execute generated code ')
args = argp.parse_args()
if args.file:
source = open(args.file).read()
else:
sys.stderr.write('No input\n')
sys.exit(1)
if args.ddump_lex:
lexer.ddump_lex(source)
if args.ddump_parse:
parser.ddump_parse(source)
if args.ddump_blocks:
cfg.ddump_blocks(source)
if args.ddump_optimizer:
codegen.ddump_optimizer(source)
if args.ddump_tc:
typecheck.ddump_tc(source)
try:
# =====================================
start = time.time()
with errors.listen():
opts = vars(args)
ast, env = compile(source, **opts)
timing = time.time() - start
# =====================================
if args.emit_llvm:
print env['lmodule']
elif args.emit_x86:
print env['lmodule'].to_native_assembly()
elif args.run:
ctx = exc.Context(env)
exc.execute(ctx, fname='main')
else:
print 'Compile time %.3fs' % timing
except CompileError as e:
sys.stderr.write('FAIL: Failure in compiler phase: %s\n' % e.args[0])
sys.exit(1)
errors.reset()
if __name__ == '__main__':
main()
| seibert/blaze-core | blaze/blir/passes.py | Python | bsd-2-clause | 6,473 | 0.006334 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^signin/', views.signin, name='signin'),
url(r'^signout/', views.signout, name='signout'),
url(r'^change_password/', views.change_password, name='change_password'),
] | wasit7/visionmarker | beta/wl_auth/urls.py | Python | mit | 244 | 0.016393 |
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <phil@secdev.org>
## This program is published under a GPLv2 license
"""
Customizations needed to support Microsoft Windows.
"""
import os,re,sys,socket,time
from glob import glob
from scapy.config import conf,ConfClass
from scapy.error import Scapy_Exception,log_loading,log_runtime
from scapy.utils import atol, inet_aton, inet_ntoa, PcapReader
from scapy.base_classes import Gen, Net, SetGen
import scapy.plist as plist
from scapy.sendrecv import debug, srp1
from scapy.layers.l2 import Ether, ARP
from scapy.data import MTU, ETHER_BROADCAST, ETH_P_ARP
conf.use_pcap = 1
conf.use_dnet = 1
from scapy.arch import pcapdnet
from scapy.arch.pcapdnet import *
LOOPBACK_NAME="lo0"
WINDOWS = True
def _where(filename, dirs=[], env="PATH"):
"""Find file in current dir or system path"""
if not isinstance(dirs, list):
dirs = [dirs]
if glob(filename):
return filename
paths = [os.curdir] + os.environ[env].split(os.path.pathsep) + dirs
for path in paths:
for match in glob(os.path.join(path, filename)):
if match:
return os.path.normpath(match)
raise IOError("File not found: %s" % filename)
def win_find_exe(filename, installsubdir=None, env="ProgramFiles"):
"""Find executable in current dir, system path or given ProgramFiles subdir"""
for fn in [filename, filename+".exe"]:
try:
if installsubdir is None:
path = _where(fn)
else:
path = _where(fn, dirs=[os.path.join(os.environ[env], installsubdir)])
except IOError:
path = filename
else:
break
return path
class WinProgPath(ConfClass):
_default = "<System default>"
# We try some magic to find the appropriate executables
pdfreader = win_find_exe("AcroRd32")
psreader = win_find_exe("gsview32.exe", "Ghostgum/gsview")
dot = win_find_exe("dot", "ATT/Graphviz/bin")
tcpdump = win_find_exe("windump")
tcpreplay = win_find_exe("tcpreplay")
display = _default
hexedit = win_find_exe("hexer")
wireshark = win_find_exe("wireshark", "wireshark")
conf.prog = WinProgPath()
import _winreg
class PcapNameNotFoundError(Scapy_Exception):
pass
class NetworkInterface(object):
"""A network interface of your local host"""
def __init__(self, dnetdict=None):
self.name = None
self.ip = None
self.mac = None
self.pcap_name = None
self.win_name = None
self.uuid = None
self.dnetdict = dnetdict
if dnetdict is not None:
self.update(dnetdict)
def update(self, dnetdict):
"""Update info about network interface according to given dnet dictionary"""
self.name = dnetdict["name"]
# Other attributes are optional
try:
self.ip = socket.inet_ntoa(dnetdict["addr"].ip)
except (KeyError, AttributeError, NameError):
pass
try:
self.mac = dnetdict["link_addr"]
except KeyError:
pass
self._update_pcapdata()
def _update_pcapdata(self):
"""Supplement more info from pypcap and the Windows registry"""
# XXX: We try eth0 - eth29 by bruteforce and match by IP address,
# because only the IP is available in both pypcap and dnet.
# This may not work with unorthodox network configurations and is
# slow because we have to walk through the Windows registry.
for n in range(30):
guess = "eth%s" % n
win_name = pcapdnet.pcap.ex_name(guess)
if win_name.endswith("}"):
try:
uuid = win_name[win_name.index("{"):win_name.index("}")+1]
keyname = r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces\%s" % uuid
try:
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, keyname)
except WindowsError:
log_loading.debug("Couldn't open 'HKEY_LOCAL_MACHINE\\%s' (for guessed pcap iface name '%s')." % (keyname, guess))
continue
try:
fixed_ip = _winreg.QueryValueEx(key, "IPAddress")[0][0].encode("utf-8")
except (WindowsError, UnicodeDecodeError, IndexError):
fixed_ip = None
try:
dhcp_ip = _winreg.QueryValueEx(key, "DhcpIPAddress")[0].encode("utf-8")
except (WindowsError, UnicodeDecodeError, IndexError):
dhcp_ip = None
# "0.0.0.0" or None means the value is not set (at least not correctly).
# If both fixed_ip and dhcp_ip are set, fixed_ip takes precedence
if fixed_ip is not None and fixed_ip != "0.0.0.0":
ip = fixed_ip
elif dhcp_ip is not None and dhcp_ip != "0.0.0.0":
ip = dhcp_ip
else:
continue
except IOError:
continue
else:
if ip == self.ip:
self.pcap_name = guess
self.win_name = win_name
self.uuid = uuid
break
else:
raise PcapNameNotFoundError
def __repr__(self):
return "<%s: %s %s %s pcap_name=%s win_name=%s>" % (self.__class__.__name__,
self.name, self.ip, self.mac, self.pcap_name, self.win_name)
from UserDict import IterableUserDict
class NetworkInterfaceDict(IterableUserDict):
"""Store information about network interfaces and convert between names"""
def load_from_dnet(self):
"""Populate interface table via dnet"""
for i in pcapdnet.dnet.intf():
try:
# XXX: Only Ethernet for the moment: localhost is not supported by dnet and pcap
# We only take interfaces that have an IP address, because the IP
# is used for the mapping between dnet and pcap interface names
# and this significantly improves Scapy's startup performance
if i["name"].startswith("eth") and "addr" in i:
self.data[i["name"]] = NetworkInterface(i)
except (KeyError, PcapNameNotFoundError):
pass
if len(self.data) == 0:
log_loading.warning("No match between your pcap and dnet network interfaces found. "
"You probably won't be able to send packets. "
"Deactivating unneeded interfaces and restarting Scapy might help.")
def pcap_name(self, devname):
"""Return pypcap device name for given libdnet/Scapy device name
This mapping is necessary because pypcap numbers the devices differently."""
try:
pcap_name = self.data[devname].pcap_name
except KeyError:
raise ValueError("Unknown network interface %r" % devname)
else:
return pcap_name
def devname(self, pcap_name):
"""Return libdnet/Scapy device name for given pypcap device name
This mapping is necessary because pypcap numbers the devices differently."""
for devname, iface in self.items():
if iface.pcap_name == pcap_name:
return iface.name
raise ValueError("Unknown pypcap network interface %r" % pcap_name)
def show(self, resolve_mac=True):
"""Print list of available network interfaces in human readable form"""
print "%s %s %s" % ("IFACE".ljust(5), "IP".ljust(15), "MAC")
for iface_name in sorted(self.data.keys()):
dev = self.data[iface_name]
mac = str(dev.mac)
if resolve_mac:
mac = conf.manufdb._resolve_MAC(mac)
print "%s %s %s" % (str(dev.name).ljust(5), str(dev.ip).ljust(15), mac)
ifaces = NetworkInterfaceDict()
ifaces.load_from_dnet()
def pcap_name(devname):
"""Return pypcap device name for given libdnet/Scapy device name"""
try:
pcap_name = ifaces.pcap_name(devname)
except ValueError:
# pcap.pcap() will choose a sensible default for sniffing if iface=None
pcap_name = None
return pcap_name
def devname(pcap_name):
"""Return libdnet/Scapy device name for given pypcap device name"""
return ifaces.devname(pcap_name)
def show_interfaces(resolve_mac=True):
"""Print list of available network interfaces"""
return ifaces.show(resolve_mac)
_orig_open_pcap = pcapdnet.open_pcap
pcapdnet.open_pcap = lambda iface,*args,**kargs: _orig_open_pcap(pcap_name(iface),*args,**kargs)
def read_routes():
ok = 0
routes = []
ip = '(\d+\.\d+\.\d+\.\d+)'
# On Vista and Windows 7 the gateway can be IP or 'On-link'.
# But the exact 'On-link' string depends on the locale, so we allow any text.
gw_pattern = '(.+)'
metric_pattern = "(\d+)"
delim = "\s+" # The columns are separated by whitespace
netstat_line = delim.join([ip, ip, gw_pattern, ip, metric_pattern])
pattern = re.compile(netstat_line)
f=os.popen("netstat -rn")
for l in f.readlines():
match = re.search(pattern,l)
if match:
dest = match.group(1)
mask = match.group(2)
gw = match.group(3)
netif = match.group(4)
metric = match.group(5)
try:
intf = pcapdnet.dnet.intf().get_dst(pcapdnet.dnet.addr(type=2, addrtxt=dest))
except OSError:
log_loading.warning("Building Scapy's routing table: Couldn't get outgoing interface for destination %s" % dest)
continue
if not intf.has_key("addr"):
break
addr = str(intf["addr"])
addr = addr.split("/")[0]
dest = atol(dest)
mask = atol(mask)
# If the gateway is no IP we assume it's on-link
gw_ipmatch = re.search('\d+\.\d+\.\d+\.\d+', gw)
if gw_ipmatch:
gw = gw_ipmatch.group(0)
else:
gw = netif
routes.append((dest,mask,gw, str(intf["name"]), addr))
f.close()
return routes
def read_routes6():
return []
def getmacbyip(ip, chainCC=0):
"""Return MAC address corresponding to a given IP address"""
if isinstance(ip,Net):
ip = iter(ip).next()
tmp = map(ord, inet_aton(ip))
if (tmp[0] & 0xf0) == 0xe0: # mcast @
return "01:00:5e:%.2x:%.2x:%.2x" % (tmp[1]&0x7f,tmp[2],tmp[3])
iff,a,gw = conf.route.route(ip)
if ( (iff == LOOPBACK_NAME) or (ip == conf.route.get_if_bcast(iff)) ):
return "ff:ff:ff:ff:ff:ff"
# Windows uses local IP instead of 0.0.0.0 to represent locally reachable addresses
ifip = str(pcapdnet.dnet.intf().get(iff)['addr'])
if gw != ifip.split('/')[0]:
ip = gw
mac = conf.netcache.arp_cache.get(ip)
if mac:
return mac
res = srp1(Ether(dst=ETHER_BROADCAST)/ARP(op="who-has", pdst=ip),
type=ETH_P_ARP,
iface = iff,
timeout=2,
verbose=0,
chainCC=chainCC,
nofilter=1)
if res is not None:
mac = res.payload.hwsrc
conf.netcache.arp_cache[ip] = mac
return mac
return None
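
# Usage sketch (hedged): ARP-resolve a neighbour on the local network;
# returns None when nothing answers within the timeout.
#
#   mac = getmacbyip("192.168.1.1")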
import scapy.layers.l2
scapy.layers.l2.getmacbyip = getmacbyip
try:
import readline
console = readline.GetOutputFile()
except (ImportError, AttributeError):
log_loading.info("Could not get readline console. Will not interpret ANSI color codes.")
else:
conf.readfunc = readline.rl.readline
orig_stdout = sys.stdout
sys.stdout = console
def sndrcv(pks, pkt, timeout = 2, inter = 0, verbose=None, chainCC=0, retry=0, multi=0):
if not isinstance(pkt, Gen):
pkt = SetGen(pkt)
if verbose is None:
verbose = conf.verb
debug.recv = plist.PacketList([],"Unanswered")
debug.sent = plist.PacketList([],"Sent")
debug.match = plist.SndRcvList([])
nbrecv=0
ans = []
# do it here to fix random fields, so that parent and child have the same
all_stimuli = tobesent = [p for p in pkt]
notans = len(tobesent)
hsent={}
for i in tobesent:
h = i.hashret()
if h in hsent:
hsent[h].append(i)
else:
hsent[h] = [i]
if retry < 0:
retry = -retry
autostop=retry
else:
autostop=0
while retry >= 0:
found=0
if timeout < 0:
timeout = None
pid=1
try:
if WINDOWS or pid == 0:
try:
try:
i = 0
if verbose:
print "Begin emission:"
for p in tobesent:
pks.send(p)
i += 1
time.sleep(inter)
if verbose:
print "Finished to send %i packets." % i
except SystemExit:
pass
except KeyboardInterrupt:
pass
except:
log_runtime.exception("--- Error sending packets")
log_runtime.info("--- Error sending packets")
finally:
try:
sent_times = [p.sent_time for p in all_stimuli if p.sent_time]
except:
pass
if WINDOWS or pid > 0:
# Timeout starts after last packet is sent (as in Unix version)
if timeout:
stoptime = time.time()+timeout
else:
stoptime = 0
remaintime = None
inmask = [pks.ins.fd]
try:
try:
while 1:
if stoptime:
remaintime = stoptime-time.time()
if remaintime <= 0:
break
r = pks.recv(MTU)
if r is None:
continue
ok = 0
h = r.hashret()
if h in hsent:
hlst = hsent[h]
for i in range(len(hlst)):
if r.answers(hlst[i]):
ans.append((hlst[i],r))
if verbose > 1:
os.write(1, "*")
ok = 1
if not multi:
del(hlst[i])
notans -= 1;
else:
if not hasattr(hlst[i], '_answered'):
notans -= 1;
hlst[i]._answered = 1;
break
if notans == 0 and not multi:
break
if not ok:
if verbose > 1:
os.write(1, ".")
nbrecv += 1
if conf.debug_match:
debug.recv.append(r)
except KeyboardInterrupt:
if chainCC:
raise
finally:
if WINDOWS:
for p,t in zip(all_stimuli, sent_times):
p.sent_time = t
finally:
pass
remain = reduce(list.__add__, hsent.values(), [])
if multi:
remain = filter(lambda p: not hasattr(p, '_answered'), remain);
if autostop and len(remain) > 0 and len(remain) != len(tobesent):
retry = autostop
tobesent = remain
if len(tobesent) == 0:
break
retry -= 1
if conf.debug_match:
debug.sent=plist.PacketList(remain[:],"Sent")
debug.match=plist.SndRcvList(ans[:])
#clean the ans list to delete the field _answered
if (multi):
for s,r in ans:
if hasattr(s, '_answered'):
del(s._answered)
if verbose:
print "\nReceived %i packets, got %i answers, remaining %i packets" % (nbrecv+len(ans), len(ans), notans)
return plist.SndRcvList(ans),plist.PacketList(remain,"Unanswered")
import scapy.sendrecv
scapy.sendrecv.sndrcv = sndrcv
def sniff(count=0, store=1, offline=None, prn = None, lfilter=None, L2socket=None, timeout=None, *arg, **karg):
"""Sniff packets
sniff([count=0,] [prn=None,] [store=1,] [offline=None,] [lfilter=None,] + L2ListenSocket args) -> list of packets
count: number of packets to capture. 0 means infinity
store: wether to store sniffed packets or discard them
prn: function to apply to each packet. If something is returned,
it is displayed. Ex:
ex: prn = lambda x: x.summary()
lfilter: python function applied to each packet to determine
if further action may be done
ex: lfilter = lambda x: x.haslayer(Padding)
offline: pcap file to read packets from, instead of sniffing them
timeout: stop sniffing after a given time (default: None)
L2socket: use the provided L2socket
"""
c = 0
if offline is None:
if L2socket is None:
L2socket = conf.L2listen
s = L2socket(type=ETH_P_ALL, *arg, **karg)
else:
s = PcapReader(offline)
lst = []
if timeout is not None:
stoptime = time.time()+timeout
remain = None
while 1:
try:
if timeout is not None:
remain = stoptime-time.time()
if remain <= 0:
break
try:
p = s.recv(MTU)
except PcapTimeoutElapsed:
continue
if p is None:
break
if lfilter and not lfilter(p):
continue
if store:
lst.append(p)
c += 1
if prn:
r = prn(p)
if r is not None:
print r
if count > 0 and c >= count:
break
except KeyboardInterrupt:
break
s.close()
return plist.PacketList(lst,"Sniffed")
import scapy.sendrecv
scapy.sendrecv.sniff = sniff
def get_if_list():
return sorted(ifaces.keys())
def get_working_if():
try:
return devname(pcap.lookupdev())
except Exception:
return 'lo0'
| mytliulei/DCNRobotInstallPackages | windows/win32/scapy-2/scapy/arch/windows/__init__.py | Python | apache-2.0 | 19,506 | 0.009382 |
from django import template
from django.template.loader import render_to_string
from isi_mip.core.models import FooterLinks
register = template.Library()
@register.simple_tag(takes_context=True)
def footer(context, **kwargs):
request = context['request']
settings = FooterLinks.for_site(request.site)
page = context.get('page')
links = []
for link in settings.footer_links.all():
name = link.name
target = link.target.specific
if page and target == page:
active = True
else:
active = False
if target.url:
links.append({'url': target.url + (link.anchor or ''), 'text': name, 'active': active})
context = {
'links': links
}
template = 'widgets/footer.html'
return render_to_string(template, context=context)
| bruecksen/isimip | isi_mip/core/templatetags/footer.py | Python | mit | 833 | 0.0012 |
import datetime
import decimal
from django.db import models
from django.db.models.sql.constants import LOOKUP_SEP
from django.db.models.deletion import Collector
from django.db.models.related import RelatedObject
from django.forms.forms import pretty_name
from django.utils import formats
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils import timezone
from django.utils.encoding import force_unicode, smart_unicode, smart_str
from django.utils.translation import ungettext
from django.core.urlresolvers import reverse
def lookup_needs_distinct(opts, lookup_path):
"""
Returns True if 'distinct()' should be used to query the given lookup path.
"""
field_name = lookup_path.split('__', 1)[0]
field = opts.get_field_by_name(field_name)[0]
if ((hasattr(field, 'rel') and
isinstance(field.rel, models.ManyToManyRel)) or
(isinstance(field, models.related.RelatedObject) and
not field.field.unique)):
return True
return False
def prepare_lookup_value(key, value):
"""
Returns a lookup value prepared to be used in queryset filtering.
"""
# if key ends with __in, split parameter into separate values
if key.endswith('__in'):
value = value.split(',')
# if key ends with __isnull, special case '' and false
if key.endswith('__isnull'):
if value.lower() in ('', 'false'):
value = False
else:
value = True
return value
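
# Examples: prepare_lookup_value('id__in', '1,2,3') returns ['1', '2', '3'];
# prepare_lookup_value('pub_date__isnull', 'false') returns False.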
def quote(s):
"""
Ensure that primary key values do not confuse the admin URLs by escaping
any '/', '_' and ':' characters. Similar to urllib.quote, except that the
quoting is slightly different so that it doesn't get automatically
unquoted by the Web browser.
"""
if not isinstance(s, basestring):
return s
res = list(s)
for i in range(len(res)):
c = res[i]
if c in """:/_#?;@&=+$,"<>%\\""":
res[i] = '_%02X' % ord(c)
return ''.join(res)
def unquote(s):
"""
Undo the effects of quote(). Based heavily on urllib.unquote().
"""
mychr = chr
myatoi = int
list = s.split('_')
res = [list[0]]
myappend = res.append
del list[0]
for item in list:
if item[1:2]:
try:
myappend(mychr(myatoi(item[:2], 16)) + item[2:])
except ValueError:
myappend('_' + item)
else:
myappend('_' + item)
return "".join(res)
def flatten_fieldsets(fieldsets):
"""Returns a list of field names from an admin fieldsets structure."""
field_names = []
for name, opts in fieldsets:
for field in opts['fields']:
# type checking feels dirty, but it seems like the best way here
if type(field) == tuple:
field_names.extend(field)
else:
field_names.append(field)
return field_names
def get_deleted_objects(objs, opts, user, admin_site, using):
"""
Find all objects related to ``objs`` that should also be deleted. ``objs``
must be a homogenous iterable of objects (e.g. a QuerySet).
Returns a nested list of strings suitable for display in the
template with the ``unordered_list`` filter.
"""
collector = NestedObjects(using=using)
collector.collect(objs)
perms_needed = set()
def format_callback(obj):
has_admin = obj.__class__ in admin_site._registry
opts = obj._meta
if has_admin:
admin_url = reverse('%s:%s_%s_change'
% (admin_site.name,
opts.app_label,
opts.object_name.lower()),
None, (quote(obj._get_pk_val()),))
p = '%s.%s' % (opts.app_label,
opts.get_delete_permission())
if not user.has_perm(p):
perms_needed.add(opts.verbose_name)
# Display a link to the admin page.
return mark_safe(u'%s: <a href="%s">%s</a>' %
(escape(capfirst(opts.verbose_name)),
admin_url,
escape(obj)))
else:
# Don't display link to edit, because it either has no
# admin or is edited inline.
return u'%s: %s' % (capfirst(opts.verbose_name),
force_unicode(obj))
to_delete = collector.nested(format_callback)
protected = [format_callback(obj) for obj in collector.protected]
return to_delete, perms_needed, protected
class NestedObjects(Collector):
def __init__(self, *args, **kwargs):
super(NestedObjects, self).__init__(*args, **kwargs)
self.edges = {} # {from_instance: [to_instances]}
self.protected = set()
def add_edge(self, source, target):
self.edges.setdefault(source, []).append(target)
def collect(self, objs, source_attr=None, **kwargs):
for obj in objs:
if source_attr:
self.add_edge(getattr(obj, source_attr), obj)
else:
self.add_edge(None, obj)
try:
return super(NestedObjects, self).collect(objs, source_attr=source_attr, **kwargs)
except models.ProtectedError as e:
self.protected.update(e.protected_objects)
def related_objects(self, related, objs):
qs = super(NestedObjects, self).related_objects(related, objs)
return qs.select_related(related.field.name)
def _nested(self, obj, seen, format_callback):
if obj in seen:
return []
seen.add(obj)
children = []
for child in self.edges.get(obj, ()):
children.extend(self._nested(child, seen, format_callback))
if format_callback:
ret = [format_callback(obj)]
else:
ret = [obj]
if children:
ret.append(children)
return ret
def nested(self, format_callback=None):
"""
Return the graph as a nested list.
"""
seen = set()
roots = []
for root in self.edges.get(None, ()):
roots.extend(self._nested(root, seen, format_callback))
return roots
def model_format_dict(obj):
"""
Return a `dict` with keys 'verbose_name' and 'verbose_name_plural',
typically for use with string formatting.
`obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance.
"""
if isinstance(obj, (models.Model, models.base.ModelBase)):
opts = obj._meta
elif isinstance(obj, models.query.QuerySet):
opts = obj.model._meta
else:
opts = obj
return {
'verbose_name': force_unicode(opts.verbose_name),
'verbose_name_plural': force_unicode(opts.verbose_name_plural)
}
def model_ngettext(obj, n=None):
"""
Return the appropriate `verbose_name` or `verbose_name_plural` value for
`obj` depending on the count `n`.
`obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance.
If `obj` is a `QuerySet` instance, `n` is optional and the length of the
`QuerySet` is used.
"""
if isinstance(obj, models.query.QuerySet):
if n is None:
n = obj.count()
obj = obj.model
d = model_format_dict(obj)
singular, plural = d["verbose_name"], d["verbose_name_plural"]
return ungettext(singular, plural, n or 0)
def lookup_field(name, obj, model_admin=None):
opts = obj._meta
try:
f = opts.get_field(name)
except models.FieldDoesNotExist:
# For non-field values, the value is either a method, property or
# returned via a callable.
if callable(name):
attr = name
value = attr(obj)
elif (model_admin is not None and hasattr(model_admin, name) and
not name == '__str__' and not name == '__unicode__'):
attr = getattr(model_admin, name)
value = attr(obj)
else:
attr = getattr(obj, name)
if callable(attr):
value = attr()
else:
value = attr
f = None
else:
attr = None
value = getattr(obj, name)
return f, attr, value
def label_for_field(name, model, model_admin=None, return_attr=False):
"""
Returns a sensible label for a field name. The name can be a callable or the
name of an object attributes, as well as a genuine fields. If return_attr is
True, the resolved attribute (which could be a callable) is also returned.
This will be None if (and only if) the name refers to a field.
"""
attr = None
try:
field = model._meta.get_field_by_name(name)[0]
if isinstance(field, RelatedObject):
label = field.opts.verbose_name
else:
label = field.verbose_name
except models.FieldDoesNotExist:
if name == "__unicode__":
label = force_unicode(model._meta.verbose_name)
attr = unicode
elif name == "__str__":
label = smart_str(model._meta.verbose_name)
attr = str
else:
if callable(name):
attr = name
elif model_admin is not None and hasattr(model_admin, name):
attr = getattr(model_admin, name)
elif hasattr(model, name):
attr = getattr(model, name)
else:
message = "Unable to lookup '%s' on %s" % (name, model._meta.object_name)
if model_admin:
message += " or %s" % (model_admin.__class__.__name__,)
raise AttributeError(message)
if hasattr(attr, "short_description"):
label = attr.short_description
elif callable(attr):
if attr.__name__ == "<lambda>":
label = "--"
else:
label = pretty_name(attr.__name__)
else:
label = pretty_name(name)
if return_attr:
return (label, attr)
else:
return label
def help_text_for_field(name, model):
try:
help_text = model._meta.get_field_by_name(name)[0].help_text
except models.FieldDoesNotExist:
help_text = ""
return smart_unicode(help_text)
def display_for_field(value, field):
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
if field.flatchoices:
return dict(field.flatchoices).get(value, EMPTY_CHANGELIST_VALUE)
# NullBooleanField needs special-case null-handling, so it comes
# before the general null test.
elif isinstance(field, models.BooleanField) or isinstance(field, models.NullBooleanField):
return _boolean_icon(value)
elif value is None:
return EMPTY_CHANGELIST_VALUE
elif isinstance(field, models.DateTimeField):
return formats.localize(timezone.template_localtime(value))
elif isinstance(field, (models.DateField, models.TimeField)):
return formats.localize(value)
elif isinstance(field, models.DecimalField):
return formats.number_format(value, field.decimal_places)
elif isinstance(field, models.FloatField):
return formats.number_format(value)
else:
return smart_unicode(value)
def display_for_value(value, boolean=False):
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
if boolean:
return _boolean_icon(value)
elif value is None:
return EMPTY_CHANGELIST_VALUE
elif isinstance(value, datetime.datetime):
return formats.localize(timezone.template_localtime(value))
elif isinstance(value, (datetime.date, datetime.time)):
return formats.localize(value)
elif isinstance(value, (decimal.Decimal, float, int, long)):
return formats.number_format(value)
else:
return smart_unicode(value)
class NotRelationField(Exception):
pass
def get_model_from_relation(field):
if isinstance(field, models.related.RelatedObject):
return field.model
    elif getattr(field, 'rel', None): # or isinstance?
return field.rel.to
else:
raise NotRelationField
def reverse_field_path(model, path):
""" Create a reversed field path.
E.g. Given (Order, "user__groups"),
return (Group, "user__order").
Final field must be a related model, not a data field.
"""
reversed_path = []
parent = model
pieces = path.split(LOOKUP_SEP)
for piece in pieces:
field, model, direct, m2m = parent._meta.get_field_by_name(piece)
# skip trailing data field if extant:
if len(reversed_path) == len(pieces)-1: # final iteration
try:
get_model_from_relation(field)
except NotRelationField:
break
if direct:
related_name = field.related_query_name()
parent = field.rel.to
else:
related_name = field.field.name
parent = field.model
reversed_path.insert(0, related_name)
return (parent, LOOKUP_SEP.join(reversed_path))
def get_fields_from_path(model, path):
""" Return list of Fields given path relative to model.
e.g. (ModelX, "user__groups__name") -> [
<django.db.models.fields.related.ForeignKey object at 0x...>,
<django.db.models.fields.related.ManyToManyField object at 0x...>,
<django.db.models.fields.CharField object at 0x...>,
]
"""
pieces = path.split(LOOKUP_SEP)
fields = []
for piece in pieces:
if fields:
parent = get_model_from_relation(fields[-1])
else:
parent = model
fields.append(parent._meta.get_field_by_name(piece)[0])
return fields
def remove_trailing_data_field(fields):
""" Discard trailing non-relation field if extant. """
try:
get_model_from_relation(fields[-1])
except NotRelationField:
fields = fields[:-1]
return fields
def get_limit_choices_to_from_path(model, path):
""" Return Q object for limiting choices if applicable.
If final model in path is linked via a ForeignKey or ManyToManyField which
has a `limit_choices_to` attribute, return it as a Q object.
"""
fields = get_fields_from_path(model, path)
fields = remove_trailing_data_field(fields)
limit_choices_to = (
fields and hasattr(fields[-1], 'rel') and
getattr(fields[-1].rel, 'limit_choices_to', None))
if not limit_choices_to:
return models.Q() # empty Q
elif isinstance(limit_choices_to, models.Q):
return limit_choices_to # already a Q
else:
return models.Q(**limit_choices_to) # convert dict to Q
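# Hedged example for get_limit_choices_to_from_path (models are hypothetical):
#
#     # Given Book.author = ForeignKey(Author, limit_choices_to={'active': True})
#     get_limit_choices_to_from_path(Book, 'author')
#     # -> models.Q(active=True); paths without a restriction yield models.Q()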
| lzw120/django | django/contrib/admin/util.py | Python | bsd-3-clause | 15,005 | 0.0012 |
import os
import Image
from flask import Module, request, current_app, render_template, jsonify, send_file, abort
from werkzeug import secure_filename
from urbanjungle.models import db
from urbanjungle.models.report import Report
from sqlalchemy.ext.serializer import dumps
frontend = Module(__name__)
def allowed_file(filename):
return '.' in filename and filename.rsplit('.', 1)[1].lower() in current_app.config['ALLOWED_EXTENSIONS']
@frontend.route('/report/<latitude>,<longitude>', methods=['GET', 'PUT'])
def upload(latitude, longitude):
'''Handle file upload'''
if request.method == 'PUT':
file = request.files['file']
if file and allowed_file(file.filename):
r = Report(latitude, longitude)
db.session.add(r)
db.session.commit()
filename = '%s.jpg' % r.id
file.save(os.path.join(current_app.config['UPLOAD_FOLDER'], filename))
return ''
else:
abort(403)
else:
return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form action="" method="put" enctype=multipart/form-data>
<p><input type=file name=file>
<input type=submit value=Upload>
</form>
'''
@frontend.route('/report/thumbnail/<report_id>.jpg')
def generate_thumbnail(report_id):
'''
Generate thumbnail for given image.
This uri should be passed through flask only if the thumb file does not exists.
Otherwise, it should be served as a static file.
'''
image_path = os.path.join(current_app.config['UPLOAD_FOLDER'], report_id + '.jpg')
thumb_path = os.path.join(current_app.config['THUMBS_FOLDER'], report_id + '.jpg')
if '..' in image_path or not os.path.exists(image_path):
abort(404)
if not os.path.exists(thumb_path):
image = Image.open(image_path)
image.thumbnail((current_app.config['THUMB_WIDTH'], current_app.config['THUMB_HEIGHT']), \
Image.ANTIALIAS)
image.save(thumb_path)
return send_file(thumb_path, mimetype="image/jpeg")
@frontend.route('/map')
def map():
'''Render the main map page'''
return render_template('map.html')
@frontend.route('/map/markers/<ne_lat>,<ne_lng>,<sw_lat>,<sw_lng>.json')
def get_markers(ne_lat, ne_lng, sw_lat, sw_lng):
'''
Return markers related to the given frame.
Send them in a json format
'''
markers = Report.query \
.filter(Report.latitude < ne_lat) \
.filter(Report.latitude > sw_lat) \
.filter(Report.longitude < ne_lng) \
.filter(Report.longitude > sw_lng) \
.all()
json_markers = { 'markers' : [ marker.__json__() for marker in markers ] }
return jsonify(json_markers)
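# Hedged client sketch: browsers cannot send PUT from a plain HTML form (the
# demo form in upload() degrades to GET), so uploads would normally come from
# an HTTP client. Host, port and coordinates below are invented:
#
#     curl -X PUT -F file=@report.jpg http://localhost:5000/report/48.85,2.35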
| thibault/UrbanJungle | site/urbanjungle/controllers/frontend.py | Python | gpl-3.0 | 2,808 | 0.006766 |
import click
__all__ = [
'info'
]
INFO_MESSAGE = """
Odooku
--------------------------------------
available modules: {num_modules}
"""
@click.command()
@click.pass_context
def info(ctx):
logger = (
ctx.obj['logger']
)
from odoo.modules import get_modules
print INFO_MESSAGE.format(
num_modules=len(get_modules())
)
| adaptivdesign/odooku-compat | odooku/cli/commands/info.py | Python | apache-2.0 | 365 | 0 |
import sys
import numpy
import scipy
import json
import itertools
import random
import os
from sys import stderr, exit, argv
from scipy.sparse.linalg import spsolve
from sklearn.metrics.pairwise import euclidean_distances
from nltk.stem import SnowballStemmer
def load_data_sparse(prefix) :
return scipy.sparse.csr_matrix((numpy.load(prefix + '.data.npy'),
numpy.load(prefix + '.indices.npy'),
numpy.load(prefix + '.indptr.npy')),
shape=tuple(numpy.load(prefix + '.shape.npy')))
def load_data() :
return load_data_sparse('linrel')
def load_features_json(fname) :
with open(fname) as f :
return json.load(f)
def load_features() :
return load_features_json('linrel_features.json')
def load_topics() :
return load_features_json('linrel_topics.json')
def get_machine_learning_articles() :
return [ int(k) for k,v in load_topics().iteritems() if 'stat.ML' in v ]
def order_keys_by_value(d) :
return [ i[0] for i in sorted(d.items(), key=lambda x : x[1], reverse=True) ]
def okapi_bm25(query, n, data, features) :
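    # NOTE: despite its name, this scores each document by multiplying the
    # stored matrix weights of the stemmed query terms (a tf-idf-style
    # product), not by the full Okapi BM25 formula; it returns the row
    # indices of the top ``n`` scoring documents.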
stemmer = SnowballStemmer('english')
query_terms = [ stemmer.stem(term) for term in query.lower().split() ]
tmp = {}
for qt in query_terms :
if qt not in features :
continue
findex = features[qt]
for aindex in numpy.nonzero(data[:, findex])[0] :
akey = aindex.item()
if akey not in tmp :
tmp[akey] = 1.0
tmp[akey] *= data[aindex,findex]
return order_keys_by_value(tmp)[:n]
def linrel(articles, feedback, n, data, features, mew=1.0, exploration_rate=0.1) :
assert len(articles) == len(feedback), "articles and feedback are not the same length"
X = data
num_articles = X.shape[0]
num_features = X.shape[1]
X_t = X[ numpy.array(articles) ]
X_tt = X_t.transpose()
I = mew * scipy.sparse.identity(num_features, format='dia')
W = spsolve((X_tt * X_t) + I, X_tt)
A = X * W
Y_t = numpy.matrix(feedback).transpose()
tmpA = numpy.array(A.todense())
normL2 = numpy.matrix(numpy.sqrt(numpy.sum(tmpA * tmpA, axis=1))).transpose()
    # W * Y_t gives the keyword weights
K = W * Y_t
mean = A * Y_t
variance = (exploration_rate / 2.0) * normL2
I_t = mean + variance
linrel_ordered = numpy.argsort(I_t.transpose()[0]).tolist()[0]
top_n = []
for i in linrel_ordered[::-1] :
if i not in articles :
top_n.append(i)
if len(top_n) == n :
break
return top_n, \
mean[ numpy.array(top_n) ].transpose().tolist()[0], \
variance[ numpy.array(top_n) ].transpose().tolist()[0]
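# Hedged summary of the LinRel score computed above (notation mirrors the
# variable names; this restates the code rather than extending it):
#
#     W   = (X_t' X_t + mew * I)^-1 X_t'             regularized least squares
#     I_t = X W Y_t + (c / 2) * ||row of X W||_2     mean + exploration bonus
#
# where c is ``exploration_rate``; articles already shown are skipped when
# taking the top n by I_t.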
def average_distance_to_target(articles, target, distances) :
return numpy.min(distances[ numpy.array(articles) ])
def main() :
if len(argv) != 4 :
print >> stderr, "Usage: %s <article index|random> <output dir> <exploration rate>" % argv[0]
exit(1)
# parse input
try :
experiment_target = int(argv[1]) if argv[1] != 'random' else None
except ValueError :
print >> stderr, "Error, %s is not an integer!" % argv[1]
exit(1)
results_dir = argv[2]
if not os.path.isdir(results_dir) :
print >> stderr, "Error, %s is not a directory/does not exist!" % results_dir
exit(1)
try :
test_explore_rate = float(argv[3])
except ValueError :
print >> stderr, "Error, %s is not a float!" % argv[3]
exit(1)
# constants
num_shown = 10
num_iterations = 10
num_selections = range(num_shown + 1)
#test_explore_rate = 0.1
experiment_query = "machine learning"
# load the data
data = load_data()
num_articles = data.shape[0]
num_features = data.shape[1]
print "loaded %d articles x %d features" % (num_articles, num_features)
features = load_features()
print "loaded %d features" % len(features)
machine_learning_articles = get_machine_learning_articles()
num_ml_articles = len(machine_learning_articles)
print "loaded %d stat.ML articles" % num_ml_articles
# make sure the data is consistent
assert len(features) == num_features, \
"the number of features differed in the matrix vs the feature list"
# make sure the input is correct
assert experiment_target is None or experiment_target in machine_learning_articles, \
"article %d is not a machine learning article!" % experiment_target
# pick a random target document if needed
if not experiment_target :
experiment_target = machine_learning_articles[random.randint(0, num_ml_articles-1)]
print "random selection of target article %d" % experiment_target
# test if this has been done before
out_filename = results_filename(results_dir, experiment_target)
if os.path.exists(out_filename) :
print "%s exists, exiting..." % out_filename
exit(0)
# precalculate all the distances between all documents and the target
print "calculating distances to article %d" % experiment_target
experiment_distances = euclidean_distances(data, data[experiment_target, :])
# run an initial query using tfidf
print "running okapi bm25 with query '%s'" % experiment_query
experiment_articles = okapi_bm25(experiment_query, num_shown, data, features)
experiment_feedback = []
experiment_means = []
experiment_variances = []
# run for X iterations
for iteration in range(num_iterations) :
# count = 0
# print >> stderr, "iter %d - %d" % (iteration, count),
#
# best_feedback = None
# best_average_distance = sys.float_info.max
# best_version = -1
# user can pick 0 -> 10 articles
# for i in num_selections :
# # go through all possible combinations of feedback
# # to select what the user does
# for selections in itertools.combinations(range(num_shown), i) :
# feedback = [ 1.0 if i in selections else 0.0 for i in range(num_shown) ]
#
# # run linrel without exploration using generated feedback
# articles,means,variances = linrel(experiment_articles,
# experiment_feedback + feedback,
# num_shown,
# data,
# features,
# exploration_rate=0.0)
#
# # test if these documents are better than the 'current best feedback'
# # based on average (?) distance to target
# average_distance = average_distance_to_target(articles,
# experiment_target,
# experiment_distances)
#
# if average_distance < best_average_distance :
# best_version = count
# best_feedback = feedback
# best_average_distance = average_distance
#
# count += 1
# print >> stderr, "\riter %d - %d (best = %d, distance = %f)" % (iteration, count, best_version, best_average_distance),
remaining_articles = range(num_shown)
selected_articles = []
# BASE AVERAGE SHOULD BE WITH NO SELECTIONS
articles,means,variances = linrel(experiment_articles,
experiment_feedback + ([0.0] * num_shown),
num_shown,
data,
features,
exploration_rate=0.0)
current_average_distance = average_distance_to_target(articles,
experiment_target,
experiment_distances)
print >> stderr, "test %d: distance=%.3f selections=%s" % (iteration, current_average_distance, str(selected_articles))
for i in num_selections :
best_article = None
best_average_distance = sys.float_info.max
for a in remaining_articles :
tmp = selected_articles + [a]
feedback = [ 1.0 if i in tmp else 0.0 for i in range(num_shown) ]
# run linrel without exploration using generated feedback
articles,means,variances = linrel(experiment_articles,
experiment_feedback + feedback,
num_shown,
data,
features,
exploration_rate=0.0)
# test if these documents are better than the 'current best feedback'
# based on average (?) distance to target
average_distance = average_distance_to_target(articles,
experiment_target,
experiment_distances)
# keep a note of the article selection that resulted in the min distance to the target
if average_distance < best_average_distance :
best_article = a
best_average_distance = average_distance
print >> stderr, "test %d: distance=%.3f selections=%s" % (iteration, best_average_distance, str(selected_articles + [a]))
# test to see if the best article to add actually increases the distance
# to the target
if best_average_distance >= current_average_distance :
print >> stderr, "stop %d: distance=%.3f selections=%s" % (iteration, current_average_distance, str(selected_articles))
break
selected_articles.append(best_article)
remaining_articles.remove(best_article)
current_average_distance = best_average_distance
print >> stderr, ""
best_feedback = [ 1.0 if i in selected_articles else 0.0 for i in range(num_shown) ]
# we now know what to select, run the actual linrel code with
# actual exploration rate
experiment_feedback += best_feedback
articles,means,variances = linrel(experiment_articles,
experiment_feedback,
num_shown,
data,
features,
exploration_rate=test_explore_rate)
true_average_distance = average_distance_to_target(articles,
experiment_target,
experiment_distances)
print >> stderr, "iter %d: distance=%.3f selections=%s" % (iteration, true_average_distance, str(selected_articles))
print >> stderr, ""
# store everything
experiment_articles.extend(articles)
experiment_means.extend(means)
experiment_variances.extend(variances)
#print experiment_articles
#print [ int(i) for i in experiment_feedback ]
#print experiment_means
#print experiment_variances
guff = {
"out_filename" : out_filename,
"target" : experiment_target,
"query" : experiment_query,
"exploration_rate" : test_explore_rate,
"num_shown" : num_shown,
"num_iterations" : num_iterations,
"num_articles" : num_articles,
"num_features" : num_features
}
# save to file
write_pulp_results(guff,
experiment_articles,
experiment_feedback,
experiment_means,
experiment_variances)
return 0
def results_filename(results_dir, target) :
return os.path.join(results_dir, "results%d.txt" % target)
def write_pulp_results(settings, articles, feedback, means, variances) :
delimit = " "
header = ["iteration", "article", "feedback", "mean", "variance"]
filename = settings["out_filename"]
with open(filename, 'w') as f :
print >> f, "# " + " ".join([ "%s=%s" % (k, '"%s"' % v if isinstance(v, str) else str(v)) for k,v in settings.items() ])
print >> f, delimit.join(header)
iterations = sorted(range(settings["num_iterations"]) * settings["num_shown"])
feedback = [ int(i) for i in feedback ]
for i in zip(iterations, articles, feedback, means, variances) :
print >> f, "%d %d %d %e %e" % i
print "wrote %s" % filename
if __name__ == '__main__' :
try :
exit(main())
except KeyboardInterrupt :
print >> stderr, "Killed by User...\n"
exit(1)
| genie9/pulp | pulp_simulator.py | Python | gpl-3.0 | 13,319 | 0.015166 |
"""Predicate: output token."""
from gtd.utils import ComparableMixin
class Predicate(ComparableMixin):
"""Represents a step in the logical form (i.e., an output token)."""
__slots__ = ['_name', '_original_string', '_types']
def __init__(self, name, original_string=None, types=None):
"""Create Predicate.
Args:
name (unicode)
original_string (unicode)
types (tuple[unicode])
"""
self._name = name
self._original_string = original_string
self._types = types or tuple()
def __eq__(self, other):
return (isinstance(other, Predicate)
and self._name == other._name)
def __hash__(self):
return hash(self._name)
@property
def _cmpkey(self):
return self._name
def __str__(self):
return self._name
__repr__ = __str__
@property
def name(self):
"""Name of the predicate.
Should be unique among the predicates in the same context.
Returns:
unicode
"""
return self._name
@property
def original_string(self):
"""Original string of the predicate. Can be None.
Returns:
unicode or None
"""
return self._original_string
@property
def types(self):
"""A collection of types.
Returns:
tuple[unicode]
"""
return self._types
@property
def delexicalized_name(self):
"""A placeholder used in a delexicalized utterance.
Can be None if the predicate should not be used for delexicalization.
A subclass can customize this method to return different placeholders
for different predicate types.
Returns:
unicode or None
"""
return 'PRED'
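# Hedged usage sketch (the predicate names are invented):
#
# p = Predicate(u'max', original_string=u'maximum', types=(u'builtin',))
# q = Predicate(u'max')
# assert p == q and hash(p) == hash(q)  # equality and hash use only the name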
| kelvinguu/lang2program | strongsup/predicate.py | Python | apache-2.0 | 1,835 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_defrag
version_added: '2.4'
short_description: Consolidate fragmented files on local volumes
description:
- Locates and consolidates fragmented files on local volumes to improve system performance.
- 'More information regarding C(win_defrag) is available from: U(https://technet.microsoft.com/en-us/library/cc731650(v=ws.11).aspx)'
options:
include_volumes:
description:
- A list of drive letters or mount point paths of the volumes to be defragmented.
      - If this parameter is omitted, all volumes (not excluded) will be defragmented.
type: list
exclude_volumes:
description:
- A list of drive letters or mount point paths to exclude from defragmentation.
type: list
freespace_consolidation:
description:
- Perform free space consolidation on the specified volumes.
priority:
description:
- Run the operation at low or normal priority.
choices: [ low, normal ]
default: low
parallel:
description:
- Run the operation on each volume in parallel in the background.
type: bool
default: 'no'
requirements:
- defrag.exe
author:
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
- name: Defragment all local volumes (in parallel)
win_defrag:
parallel: yes
- name: 'Defragment all local volumes, except C: and D:'
win_defrag:
exclude_volumes: [ C, D ]
- name: 'Defragment volume D: with normal priority'
win_defrag:
include_volumes: D
priority: normal
- name: Consolidate free space (useful when reducing volumes)
win_defrag:
freespace_consolidation: yes
'''
RETURN = r'''
cmd:
description: The complete command line used by the module
returned: always
type: string
sample: defrag.exe /C /V
rc:
description: The return code for the command
returned: always
type: int
sample: 0
stdout:
description: The standard output from the command
returned: always
type: string
sample: Success.
stderr:
description: The error output from the command
returned: always
type: string
sample:
msg:
description: Possible error message on failure
returned: failed
type: string
sample: Command 'defrag.exe' not found in $env:PATH.
changed:
description: Whether or not any changes were made.
returned: always
type: bool
sample: True
'''
| hryamzik/ansible | lib/ansible/modules/windows/win_defrag.py | Python | gpl-3.0 | 2,682 | 0.002237 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
from imaplib import IMAP4
from imaplib import IMAP4_SSL
from poplib import POP3
from poplib import POP3_SSL
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import zipfile
import base64
import addons
import netsvc
from osv import osv, fields
import tools
from tools.translate import _
logger = logging.getLogger('fetchmail')
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
_name = 'fetchmail.server'
_description = "POP/IMAP Server"
_order = 'priority'
_columns = {
'name':fields.char('Name', size=256, required=True, readonly=False),
'active':fields.boolean('Active', required=False),
'state':fields.selection([
('draft', 'Not Confirmed'),
('done', 'Confirmed'),
], 'State', select=True, readonly=True),
'server' : fields.char('Server Name', size=256, readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}),
'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}),
'type':fields.selection([
('pop', 'POP Server'),
('imap', 'IMAP Server'),
('local', 'Local Server'),
], 'Server Type', select=True, required=True, readonly=False),
'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"),
'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. "
"If not enabled, incoming emails will be stripped of any attachments before being processed"),
        'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference "
"and attached to each processed message. This will usually double the size of your message database."),
'date': fields.datetime('Last Fetch Date', readonly=True),
'user' : fields.char('Username', size=256, readonly=True, states={'draft':[('readonly', False)]}),
'password' : fields.char('Password', size=1024, readonly=True, states={'draft':[('readonly', False)]}),
'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, "
"on the record that was created or updated by this mail"),
'object_id': fields.many2one('ir.model', "Create a New Record", required=True, help="Process each incoming mail as part of a conversation "
"corresponding to this document type. This will create "
"new documents for new conversations, or attach follow-up "
"emails to the existing conversations (documents)."),
'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, "
"lower values mean higher priority"),
'message_ids': fields.one2many('mail.message', 'fetchmail_server_id', 'Messages', readonly=True),
'configuration' : fields.text('Configuration'),
'script' : fields.char('Script', readonly=True, size=64),
}
_defaults = {
'state': "draft",
'type': "pop",
'active': True,
'priority': 5,
'attach': True,
'script': '/mail/static/scripts/openerp_mailgate.py',
}
def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False):
port = 0
values = {}
if server_type == 'pop':
port = ssl and 995 or 110
elif server_type == 'imap':
port = ssl and 993 or 143
else:
values['server'] = ''
values['port'] = port
conf = {
'dbname' : cr.dbname,
'uid' : uid,
'model' : 'MODELNAME',
}
if object_id:
m = self.pool.get('ir.model')
r = m.read(cr,uid,[object_id],['model'])
conf['model']=r[0]['model']
values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA)
openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOSTNAME --port=PORT
""" % conf
return {'value':values}
def set_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids , {'state':'draft'})
return True
def connect(self, cr, uid, server_id, context=None):
if isinstance(server_id, (list,tuple)):
server_id = server_id[0]
server = self.browse(cr, uid, server_id, context)
if server.type == 'imap':
if server.is_ssl:
connection = IMAP4_SSL(server.server, int(server.port))
else:
connection = IMAP4(server.server, int(server.port))
connection.login(server.user, server.password)
elif server.type == 'pop':
if server.is_ssl:
connection = POP3_SSL(server.server, int(server.port))
else:
connection = POP3(server.server, int(server.port))
#TODO: use this to remove only unread messages
#connection.user("recent:"+server.user)
connection.user(server.user)
connection.pass_(server.password)
return connection
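    # Hedged note: connect() hands back a live, authenticated IMAP4(_SSL)
    # or POP3(_SSL) handle; callers such as button_confirm_login() and
    # fetch_mail() below are responsible for closing/quitting it.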
def button_confirm_login(self, cr, uid, ids, context=None):
if context is None:
context = {}
for server in self.browse(cr, uid, ids, context=context):
try:
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
logger.exception("Failed to connect to %s server %s", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
finally:
try:
if connection:
if server.type == 'imap':
connection.close()
elif server.type == 'pop':
connection.quit()
except Exception:
# ignored, just a consequence of the previous exception
pass
return True
def _fetch_mails(self, cr, uid, ids=False, context=None):
if not ids:
ids = self.search(cr, uid, [('state','=','done')])
return self.fetch_mail(cr, uid, ids, context=context)
def fetch_mail(self, cr, uid, ids, context=None):
"""WARNING: meant for cron usage only - will commit() after each email!"""
if context is None:
context = {}
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count = 0
imap_server = False
pop_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
imap_server.select()
result, data = imap_server.search(None, '(UNSEEN)')
for num in data[0].split():
result, data = imap_server.fetch(num, '(RFC822)')
res_id = mail_thread.message_process(cr, uid, server.object_id.model, data[0][1],
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
except Exception, e:
logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
elif server.type == 'pop':
try:
pop_server = server.connect()
(numMsgs, totalSize) = pop_server.stat()
pop_server.list()
for num in range(1, numMsgs + 1):
(header, msges, octets) = pop_server.retr(num)
msg = '\n'.join(msges)
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
msg,
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
pop_server.dele(num)
cr.commit()
logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
except Exception, e:
logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if pop_server:
pop_server.quit()
server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
return True
class mail_message(osv.osv):
_inherit = "mail.message"
_columns = {
'fetchmail_server_id': fields.many2one('fetchmail.server', "Inbound Mail Server",
readonly=True,
select=True,
oldname='server_id'),
}
def create(self, cr, uid, values, context=None):
if context is None:
context={}
fetchmail_server_id = context.get('fetchmail_server_id')
if fetchmail_server_id:
values['fetchmail_server_id'] = fetchmail_server_id
res = super(mail_message,self).create(cr, uid, values, context=context)
return res
def write(self, cr, uid, ids, values, context=None):
if context is None:
context={}
fetchmail_server_id = context.get('fetchmail_server_id')
if fetchmail_server_id:
            values['fetchmail_server_id'] = fetchmail_server_id
res = super(mail_message,self).write(cr, uid, ids, values, context=context)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Johnzero/erp | openerp/addons/fetchmail/fetchmail.py | Python | agpl-3.0 | 12,859 | 0.006766 |
# -*- encoding: utf-8 -*-
from __future__ import print_function
import abc
import configobj
import os
import time
import validate
from abjad.tools.abctools.AbjadObject import AbjadObject
class Configuration(AbjadObject):
r'''A configuration object.
'''
### CLASS VARIABLES ###
__slots__ = (
'_settings',
)
### INITIALIZER ###
def __init__(self):
from abjad.tools import systemtools
# verify configuration directory
if not os.path.exists(self.configuration_directory):
os.makedirs(self.configuration_directory)
# attempt to load config from disk, and validate
# a config object will be created if none is found on disk
config = configobj.ConfigObj(
self.configuration_file_path,
configspec=self._config_specification
)
# validate
validator = validate.Validator()
validation = config.validate(validator, copy=True)
# replace failing key:value pairs with default values
if validation is not True:
for key, valid in validation.items():
if not valid:
default = config.default_values[key]
message = 'Warning: config key {!r} failed validation,'
message += ' setting to default: {!r}.'
message = message.format(key, default)
print(message)
config[key] = default
# setup output formatting
config.write_empty_values = True
config.comments.update(self._option_comments)
config.initial_comment = self._initial_comment
# write to disk if doesn't exist
if not os.path.exists(self.configuration_file_path):
if not os.path.exists(self.configuration_directory):
os.makedirs(self.configuration_directory)
config.write()
# write to disk if different from current
else:
# prevent ConfigObj from automatically writing
config.filename = None
with open(self.configuration_file_path, 'r') as f:
old_config_lines = f.read()
old_config_lines = old_config_lines.splitlines()
old_config_lines = [line for line in old_config_lines
if 'configuration file created on' not in line]
old_config_lines = '\n'.join(old_config_lines)
new_config_lines = config.write(None)
new_config_lines = [line for line in new_config_lines
if 'configuration file created on' not in line]
new_config_lines = '\n'.join(new_config_lines)
lines_are_equal = systemtools.TestManager.compare(
old_config_lines,
new_config_lines,
)
# print('----------------------------------------')
# print('TESTING:', type(self))
# print()
# print('OLD:')
# print()
# print(old_config_lines)
# print()
# print('NEW:')
# print()
# print(new_config_lines)
# print()
# print('EQUAL?', lines_are_equal)
# print()
if not lines_are_equal:
# print('WRITING')
# print()
with open(self.configuration_file_path, 'w') as file_pointer:
config.write(file_pointer)
# turn the ConfigObj instance into a standard dict,
# and replace its empty string values with Nones,
# caching the result on this AbjadConfiguration instance.
self._settings = dict(config)
for key, value in self._settings.items():
if value == '' or value == 'None':
self._settings[key] = None
### SPECIAL METHODS ###
def __delitem__(self, i):
r'''Deletes item `i` from configuration.
Returns none.
'''
del(self._settings[i])
def __getitem__(self, i):
r'''Gets item `i` from configuration.
Returns none.
'''
return self._settings[i]
def __iter__(self):
r'''Iterates configuration settings.
Returns generator.
'''
for key in self._settings:
yield key
def __len__(self):
r'''Gets the number of settings in configuration.
Returns nonnegative integer.
'''
return len(self._settings)
def __setitem__(self, i, arg):
r'''Sets configuration item `i` to `arg`.
Returns none.
'''
self._settings[i] = arg
### PRIVATE METHODS ###
@abc.abstractmethod
def _get_option_definitions(self):
raise NotImplementedError
### PRIVATE PROPERTIES ###
@property
def _config_specification(self):
specs = self._option_specification
return ['{} = {}'.format(key, value)
for key, value in sorted(specs.items())]
@property
def _current_time(self):
return time.strftime("%d %B %Y %H:%M:%S")
@abc.abstractproperty
def _initial_comment(self):
raise NotImplementedError
@property
def _option_comments(self):
options = self._get_option_definitions()
comments = [(key, options[key]['comment']) for key in options]
return dict(comments)
@property
def _option_specification(self):
options = self._get_option_definitions()
specs = [(key, options[key]['spec']) for key in options]
return dict(specs)
### PUBLIC PROPERTIES ###
@abc.abstractproperty
def configuration_directory(self):
r'''Gets configuration directory.
Returns string.
'''
raise NotImplementedError
@abc.abstractproperty
def configuration_file_name(self):
r'''Gets configuration file name.
Returns string.
'''
raise NotImplementedError
@property
def configuration_file_path(self):
r'''Gets configuration file path.
Returns string.
'''
return os.path.join(
self.configuration_directory,
self.configuration_file_name,
)
@property
def home_directory(self):
r'''Gets home directory.
Returns string.
'''
path = os.environ.get('HOME') or \
os.environ.get('HOMEPATH') or \
os.environ.get('APPDATA')
return os.path.abspath(path) | andrewyoung1991/abjad | abjad/tools/systemtools/Configuration.py | Python | gpl-3.0 | 6,475 | 0.001853 |
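# Hedged subclass sketch (all names below are hypothetical; a concrete
# configuration supplies the abstract members consumed by __init__ above):
#
#     class MyConfiguration(Configuration):
#         def _get_option_definitions(self):
#             return {'editor': {
#                 'comment': 'Text editor to open files with.',
#                 'spec': "string(default='vim')",
#                 }}
#         @property
#         def _initial_comment(self):
#             return ['My configuration file.']
#         @property
#         def configuration_directory(self):
#             return os.path.join(self.home_directory, '.myapp')
#         @property
#         def configuration_file_name(self):
#             return 'my.cfg'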
# Copyright 2013, Big Switch Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import validators
from openstack_dashboard import api
port_validator = validators.validate_port_or_colon_separated_port_range
LOG = logging.getLogger(__name__)
class UpdateRule(forms.SelfHandlingForm):
name = forms.CharField(max_length=80, label=_("Name"), required=False)
description = forms.CharField(
required=False,
max_length=80, label=_("Description"))
protocol = forms.ChoiceField(
label=_("Protocol"), required=False,
choices=[('TCP', _('TCP')), ('UDP', _('UDP')), ('ICMP', _('ICMP')),
('ANY', _('ANY'))],
help_text=_('Protocol for the firewall rule'))
action = forms.ChoiceField(
label=_("Action"), required=False,
choices=[('ALLOW', _('ALLOW')), ('DENY', _('DENY'))],
help_text=_('Action for the firewall rule'))
source_ip_address = forms.IPField(
label=_("Source IP Address/Subnet"),
version=forms.IPv4 | forms.IPv6,
required=False, mask=True,
help_text=_('Source IP address or subnet'))
destination_ip_address = forms.IPField(
label=_('Destination IP Address/Subnet'),
version=forms.IPv4 | forms.IPv6,
required=False, mask=True,
help_text=_('Destination IP address or subnet'))
source_port = forms.CharField(
max_length=80,
label=_("Source Port/Port Range"),
required=False,
validators=[port_validator],
help_text=_('Source port (integer in [1, 65535] or range in a:b)'))
destination_port = forms.CharField(
max_length=80,
label=_("Destination Port/Port Range"),
required=False,
validators=[port_validator],
help_text=_('Destination port (integer in [1, 65535] or range'
' in a:b)'))
shared = forms.BooleanField(label=_("Shared"), required=False)
enabled = forms.BooleanField(label=_("Enabled"), required=False)
failure_url = 'horizon:project:firewalls:index'
def handle(self, request, context):
rule_id = self.initial['rule_id']
name_or_id = context.get('name') or rule_id
if context['protocol'] == 'ANY':
context['protocol'] = None
for f in ['source_ip_address', 'destination_ip_address',
'source_port', 'destination_port']:
if not context[f]:
context[f] = None
try:
rule = api.fwaas.rule_update(request, rule_id, **context)
msg = _('Rule %s was successfully updated.') % name_or_id
LOG.debug(msg)
messages.success(request, msg)
return rule
except Exception as e:
msg = (_('Failed to update rule %(name)s: %(reason)s') %
{'name': name_or_id, 'reason': e})
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class UpdatePolicy(forms.SelfHandlingForm):
name = forms.CharField(max_length=80, label=_("Name"), required=False)
description = forms.CharField(required=False,
max_length=80, label=_("Description"))
shared = forms.BooleanField(label=_("Shared"), required=False)
audited = forms.BooleanField(label=_("Audited"), required=False)
failure_url = 'horizon:project:firewalls:index'
def handle(self, request, context):
policy_id = self.initial['policy_id']
name_or_id = context.get('name') or policy_id
try:
policy = api.fwaas.policy_update(request, policy_id, **context)
msg = _('Policy %s was successfully updated.') % name_or_id
LOG.debug(msg)
messages.success(request, msg)
return policy
except Exception as e:
msg = _('Failed to update policy %(name)s: %(reason)s') % {
'name': name_or_id, 'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class UpdateFirewall(forms.SelfHandlingForm):
name = forms.CharField(max_length=80,
label=_("Name"),
required=False)
description = forms.CharField(max_length=80,
label=_("Description"),
required=False)
firewall_policy_id = forms.ChoiceField(label=_("Policy"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:firewalls:index'
def __init__(self, request, *args, **kwargs):
super(UpdateFirewall, self).__init__(request, *args, **kwargs)
try:
tenant_id = self.request.user.tenant_id
policies = api.fwaas.policy_list_for_tenant(request, tenant_id)
policies = sorted(policies, key=lambda policy: policy.name)
except Exception:
exceptions.handle(request,
_('Unable to retrieve policy list.'))
policies = []
policy_id = kwargs['initial']['firewall_policy_id']
policy_name = [p.name for p in policies if p.id == policy_id][0]
firewall_policy_id_choices = [(policy_id, policy_name)]
for p in policies:
if p.id != policy_id:
firewall_policy_id_choices.append((p.id, p.name_or_id))
self.fields['firewall_policy_id'].choices = firewall_policy_id_choices
def handle(self, request, context):
firewall_id = self.initial['firewall_id']
name_or_id = context.get('name') or firewall_id
context['admin_state_up'] = (context['admin_state_up'] == 'True')
try:
firewall = api.fwaas.firewall_update(request, firewall_id,
**context)
msg = _('Firewall %s was successfully updated.') % name_or_id
LOG.debug(msg)
messages.success(request, msg)
return firewall
except Exception as e:
msg = _('Failed to update firewall %(name)s: %(reason)s') % {
'name': name_or_id, 'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class InsertRuleToPolicy(forms.SelfHandlingForm):
firewall_rule_id = forms.ChoiceField(label=_("Insert Rule"))
insert_before = forms.ChoiceField(label=_("Before"),
required=False)
insert_after = forms.ChoiceField(label=_("After"),
required=False)
failure_url = 'horizon:project:firewalls:index'
def __init__(self, request, *args, **kwargs):
super(InsertRuleToPolicy, self).__init__(request, *args, **kwargs)
try:
tenant_id = self.request.user.tenant_id
all_rules = api.fwaas.rule_list_for_tenant(request, tenant_id)
all_rules = sorted(all_rules, key=lambda rule: rule.name_or_id)
available_rules = [r for r in all_rules
if not r.firewall_policy_id]
current_rules = []
for r in kwargs['initial']['firewall_rules']:
r_obj = [rule for rule in all_rules if r == rule.id][0]
current_rules.append(r_obj)
available_choices = [(r.id, r.name_or_id) for r in available_rules]
current_choices = [(r.id, r.name_or_id) for r in current_rules]
except Exception as e:
msg = _('Failed to retrieve available rules: %s') % e
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
self.fields['firewall_rule_id'].choices = available_choices
self.fields['insert_before'].choices = [('', '')] + current_choices
self.fields['insert_after'].choices = [('', '')] + current_choices
def handle(self, request, context):
policy_id = self.initial['policy_id']
policy_name_or_id = self.initial['name'] or policy_id
try:
insert_rule_id = context['firewall_rule_id']
insert_rule = api.fwaas.rule_get(request, insert_rule_id)
body = {'firewall_rule_id': insert_rule_id,
'insert_before': context['insert_before'],
'insert_after': context['insert_after']}
policy = api.fwaas.policy_insert_rule(request, policy_id, **body)
msg = _('Rule %(rule)s was successfully inserted to policy '
'%(policy)s.') % {
'rule': insert_rule.name or insert_rule.id,
'policy': policy_name_or_id}
LOG.debug(msg)
messages.success(request, msg)
return policy
except Exception as e:
msg = _('Failed to insert rule to policy %(name)s: %(reason)s') % {
'name': policy_id, 'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class RemoveRuleFromPolicy(forms.SelfHandlingForm):
firewall_rule_id = forms.ChoiceField(label=_("Remove Rule"))
failure_url = 'horizon:project:firewalls:index'
def __init__(self, request, *args, **kwargs):
super(RemoveRuleFromPolicy, self).__init__(request, *args, **kwargs)
try:
tenant_id = request.user.tenant_id
all_rules = api.fwaas.rule_list_for_tenant(request, tenant_id)
current_rules = []
for r in kwargs['initial']['firewall_rules']:
r_obj = [rule for rule in all_rules if r == rule.id][0]
current_rules.append(r_obj)
current_choices = [(r.id, r.name_or_id) for r in current_rules]
except Exception as e:
msg = _('Failed to retrieve current rules in policy %(name)s: '
'%(reason)s') % {'name': self.initial['name'], 'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
self.fields['firewall_rule_id'].choices = current_choices
def handle(self, request, context):
policy_id = self.initial['policy_id']
policy_name_or_id = self.initial['name'] or policy_id
try:
remove_rule_id = context['firewall_rule_id']
remove_rule = api.fwaas.rule_get(request, remove_rule_id)
body = {'firewall_rule_id': remove_rule_id}
policy = api.fwaas.policy_remove_rule(request, policy_id, **body)
msg = _('Rule %(rule)s was successfully removed from policy '
'%(policy)s.') % {
'rule': remove_rule.name or remove_rule.id,
'policy': policy_name_or_id}
LOG.debug(msg)
messages.success(request, msg)
return policy
except Exception as e:
msg = _('Failed to remove rule from policy %(name)s: '
'%(reason)s') % {'name': self.initial['name'],
'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class RouterInsertionFormBase(forms.SelfHandlingForm):
def __init__(self, request, *args, **kwargs):
super(RouterInsertionFormBase, self).__init__(request, *args, **kwargs)
try:
router_choices = self.get_router_choices(request, kwargs)
self.fields['router_ids'].choices = router_choices
except Exception as e:
msg = self.init_failure_msg % {'name': self.initial['name'],
'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
@abc.abstractmethod
def get_router_choices(self, request, kwargs):
"""Return a list of selectable routers."""
@abc.abstractmethod
def get_new_router_ids(self, context):
"""Return a new list of router IDs associated with the firewall."""
def handle(self, request, context):
firewall_id = self.initial['firewall_id']
firewall_name_or_id = self.initial['name'] or firewall_id
try:
body = {'router_ids': self.get_new_router_ids(context)}
firewall = api.fwaas.firewall_update(request, firewall_id, **body)
msg = self.success_msg % {'firewall': firewall_name_or_id}
LOG.debug(msg)
messages.success(request, msg)
return firewall
except Exception as e:
msg = self.failure_msg % {'name': firewall_name_or_id, 'reason': e}
LOG.error(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
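# Hedged note on the template-method contract above: concrete forms such as
# the two below supply only get_router_choices() (the selectable routers)
# and get_new_router_ids() (the full router id list to persist); the shared
# handle() then issues the single firewall_update call.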
class AddRouterToFirewall(RouterInsertionFormBase):
router_ids = forms.MultipleChoiceField(
label=_("Add Routers"),
required=False,
widget=forms.CheckboxSelectMultiple(),
help_text=_("Add selected router(s) to the firewall."))
failure_url = 'horizon:project:firewalls:index'
success_msg = _('Router(s) was/were successfully added to firewall '
'%(firewall)s.')
failure_msg = _('Failed to add router(s) to firewall %(name)s: %(reason)s')
init_failure_msg = _('Failed to retrieve available routers: %(reason)s')
def get_router_choices(self, request, kwargs):
tenant_id = self.request.user.tenant_id
routers_list = api.fwaas.firewall_unassociated_routers_list(
request, tenant_id)
return [(r.id, r.name_or_id) for r in routers_list]
def get_new_router_ids(self, context):
existing_router_ids = self.initial['router_ids']
add_router_ids = context['router_ids']
return add_router_ids + existing_router_ids
class RemoveRouterFromFirewall(RouterInsertionFormBase):
router_ids = forms.MultipleChoiceField(
label=_("Remove Routers"),
required=False,
widget=forms.CheckboxSelectMultiple(),
help_text=_("Unselect the router(s) to be removed from firewall."))
failure_url = 'horizon:project:firewalls:index'
success_msg = _('Router(s) was successfully removed from firewall '
'%(firewall)s.')
failure_msg = _('Failed to remove router(s) from firewall %(name)s: '
'%(reason)s')
init_failure_msg = _('Failed to retrieve current routers in firewall '
'%(name)s: %(reason)s')
def get_router_choices(self, request, kwargs):
tenant_id = self.request.user.tenant_id
all_routers = api.neutron.router_list(request, tenant_id=tenant_id)
current_routers = [r for r in all_routers
if r['id'] in kwargs['initial']['router_ids']]
return [(r.id, r.name_or_id) for r in current_routers]
def get_new_router_ids(self, context):
# context[router_ids] is router IDs to be kept.
return context['router_ids']
| wangxiangyu/horizon | openstack_dashboard/dashboards/project/firewalls/forms.py | Python | apache-2.0 | 16,187 | 0 |
# -*- coding: utf-8 -*-
"""Family module for Vikidia."""
#
# (C) Pywikibot team, 2010-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily):
"""Family class for Vikidia."""
name = 'vikidia'
domain = 'vikidia.org'
codes = ['ca', 'de', 'el', 'en', 'es', 'eu', 'fr', 'hy', 'it', 'ru', 'scn']
# Sites we want to edit but not count as real languages
test_codes = ['central', 'test']
def protocol(self, code):
"""Return https as the protocol for this family."""
return 'https'
| PersianWikipedia/pywikibot-core | pywikibot/families/vikidia_family.py | Python | mit | 661 | 0 |
"""
Module containing functions to differentiate functions using tensorflow.
"""
try:
import tensorflow as tf
from tensorflow.python.ops.gradients import _hessian_vector_product
except ImportError:
tf = None
from ._backend import Backend, assert_backend_available
class TensorflowBackend(Backend):
def __init__(self):
if tf is not None:
self._session = tf.Session()
def __str__(self):
return "tensorflow"
def is_available(self):
return tf is not None
@assert_backend_available
def is_compatible(self, objective, argument):
if isinstance(objective, tf.Tensor):
if (argument is None or not
isinstance(argument, tf.Variable) and not
all([isinstance(arg, tf.Variable)
for arg in argument])):
raise ValueError(
"Tensorflow backend requires an argument (or sequence of "
"arguments) with respect to which compilation is to be "
"carried out")
return True
return False
@assert_backend_available
def compile_function(self, objective, argument):
if not isinstance(argument, list):
def func(x):
feed_dict = {argument: x}
return self._session.run(objective, feed_dict)
else:
def func(x):
feed_dict = {i: d for i, d in zip(argument, x)}
return self._session.run(objective, feed_dict)
return func
@assert_backend_available
def compute_gradient(self, objective, argument):
"""
Compute the gradient of 'objective' and return as a function.
"""
tfgrad = tf.gradients(objective, argument)
if not isinstance(argument, list):
def grad(x):
feed_dict = {argument: x}
return self._session.run(tfgrad[0], feed_dict)
else:
def grad(x):
feed_dict = {i: d for i, d in zip(argument, x)}
return self._session.run(tfgrad, feed_dict)
return grad
@assert_backend_available
def compute_hessian(self, objective, argument):
if not isinstance(argument, list):
argA = tf.Variable(tf.zeros(tf.shape(argument)))
tfhess = _hessian_vector_product(objective, [argument], [argA])
def hess(x, a):
feed_dict = {argument: x, argA: a}
return self._session.run(tfhess[0], feed_dict)
else:
argA = [tf.Variable(tf.zeros(tf.shape(arg)))
for arg in argument]
tfhess = _hessian_vector_product(objective, argument, argA)
def hess(x, a):
feed_dict = {i: d for i, d in zip(argument+argA, x+a)}
return self._session.run(tfhess, feed_dict)
return hess
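# Hedged usage sketch (shapes and names invented for illustration; uses the
# TF1-era API this module targets):
#
# import tensorflow as tf
# x = tf.Variable(tf.zeros([3]))
# cost = tf.reduce_sum(tf.square(x))
# backend = TensorflowBackend()
# f = backend.compile_function(cost, x)   # f(numpy_vector) -> float
# g = backend.compute_gradient(cost, x)   # g(numpy_vector) -> gradient array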
| j-towns/pymanopt | pymanopt/tools/autodiff/_tensorflow.py | Python | bsd-3-clause | 2,913 | 0 |
"""
@author: ArcGIS for Intelligence
@contact: defensesolutions@esri.com
@company: Esri
@version: 1.0
@description: Used to stage the apps for Movement Analysis
@requirements: Python 2.7.x, ArcGIS 10.3.1
@copyright: Esri, 2015
"""
import arcresthelper
from arcresthelper import portalautomation
log_file='./logs/DamageAssessment.log'
configFiles= ['./configs/StageApp.json']
globalLoginInfo = './configs/GlobalLoginInfo.json'
dateTimeFormat = '%Y-%m-%d %H:%M'
pa = portalautomation.portalautomation(globalLoginInfo)
pa.setLog(log_file=log_file)
pa.publishfromconfig(configFiles=configFiles,
combinedApp=None,
dateTimeFormat=dateTimeFormat)
del pa | conklinbd/MovementAnalysis | TemplateInstall/PortalDeploy/StageApp.py | Python | apache-2.0 | 726 | 0.009642 |
__doc__ = """External interface to the BeautifulSoup HTML parser.
"""
__all__ = ["fromstring", "parse", "convert_tree"]
from lxml import etree, html
from BeautifulSoup import \
BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString
def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a string of HTML data into an Element tree using the
BeautifulSoup parser.
Returns the root ``<html>`` Element of the tree.
You can pass a different BeautifulSoup parser through the
`beautifulsoup` keyword, and a diffent Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
return _parse(data, beautifulsoup, makeelement, **bsargs)
def parse(file, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a file into an ElemenTree using the BeautifulSoup parser.
You can pass a different BeautifulSoup parser through the
`beautifulsoup` keyword, and a diffent Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
if not hasattr(file, 'read'):
file = open(file)
root = _parse(file, beautifulsoup, makeelement, **bsargs)
return etree.ElementTree(root)
def convert_tree(beautiful_soup_tree, makeelement=None):
"""Convert a BeautifulSoup tree to a list of Element trees.
Returns a list instead of a single root Element to support
HTML-like soup with more than one root element.
You can pass a different Element factory through the `makeelement`
keyword.
"""
if makeelement is None:
makeelement = html.html_parser.makeelement
root = _convert_tree(beautiful_soup_tree, makeelement)
children = root.getchildren()
for child in children:
root.remove(child)
return children
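# Hedged usage sketch (the markup and file name are invented):
#
# root = fromstring('<p>Hello <b>world</b></p>')               # <html> Element
# doc = parse('page.html')                                     # ElementTree
# subtrees = convert_tree(BeautifulSoup('<p>a</p><p>b</p>'))   # list of roots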
# helpers
def _parse(source, beautifulsoup, makeelement, **bsargs):
if beautifulsoup is None:
beautifulsoup = BeautifulSoup
if makeelement is None:
makeelement = html.html_parser.makeelement
if 'convertEntities' not in bsargs:
bsargs['convertEntities'] = 'html'
tree = beautifulsoup(source, **bsargs)
root = _convert_tree(tree, makeelement)
# from ET: wrap the document in a html root element, if necessary
if len(root) == 1 and root[0].tag == "html":
return root[0]
root.tag = "html"
return root
def _convert_tree(beautiful_soup_tree, makeelement):
root = makeelement(beautiful_soup_tree.name,
attrib=dict(beautiful_soup_tree.attrs))
_convert_children(root, beautiful_soup_tree, makeelement)
return root
def _convert_children(parent, beautiful_soup_tree, makeelement):
SubElement = etree.SubElement
et_child = None
for child in beautiful_soup_tree:
if isinstance(child, Tag):
et_child = SubElement(parent, child.name, attrib=dict(
[(k, unescape(v)) for (k,v) in child.attrs]))
_convert_children(et_child, child, makeelement)
elif type(child) is NavigableString:
_append_text(parent, et_child, unescape(child))
else:
if isinstance(child, Comment):
parent.append(etree.Comment(child))
elif isinstance(child, ProcessingInstruction):
parent.append(etree.ProcessingInstruction(
*child.split(' ', 1)))
else: # CData
_append_text(parent, et_child, unescape(child))
def _append_text(parent, element, text):
if element is None:
parent.text = (parent.text or '') + text
else:
element.tail = (element.tail or '') + text
# copied from ET's ElementSoup
try:
from html.entities import name2codepoint # Python 3
except ImportError:
from htmlentitydefs import name2codepoint
import re
handle_entities = re.compile(r"&(\w+);").sub
def unescape(string):
if not string:
return ''
# work around oddities in BeautifulSoup's entity handling
def unescape_entity(m):
try:
return unichr(name2codepoint[m.group(1)])
except KeyError:
return m.group(0) # use as is
return handle_entities(unescape_entity, string)
| cortext/crawtextV2 | ~/venvs/crawler/lib/python2.7/site-packages/lxml/html/soupparser.py | Python | mit | 4,360 | 0.002982 |
"""
This module holds all of the regular expression patterns that pykol uses. It makes sense
to store them all in the same place since many patterns are used by multiple requests.
The 'patterns' data object is a dictionary mapping patternId to pattern. If pattern is a tuple,
then the first element of the tuple should be the pattern while the second element is a flag
to pass to re.compile (like re.DOTALL).
"""
import re
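# Entries are either a bare pattern string or a (pattern, flags) tuple, as
# described above. A consumer might compile an entry with a helper along
# these lines (illustrative sketch only, not part of pykol's API):
#
#   def compile_pattern(entry):
#       if isinstance(entry, tuple):
#           return re.compile(entry[0], entry[1])
#       return re.compile(entry)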
patterns = {
# General patterns.
"whitespace" : r'([\t ]+)',
"results" : r'<b>Results:<\/b><\/td><\/tr><tr><td[^<>]*><center><table><tr><td>(.*?)</td></tr></table></center></td></tr>',
"htmlComment" : r'<!--.*?-->',
"htmlTag" : r'<[^>]*?>',
# Login-related patterns.
"accountPwd" : r'var pwdhash = "([0-9a-f]+)";',
"accountId" : r'var playerid = ([0-9]+);',
"accountName" : r'<a [^<>]*href="charsheet\.php">(?:<b>)?([^<>]+)<',
"badPassword" : r'<b>Login failed\. Bad password\.<\/b>',
"loginChallenge" : r'name="?challenge"?\s+value="?([0-9a-f]+)"?',
"loginURL" : r'^(.*)login\.php\?loginid=([0-9a-f]+)',
"mainFrameset" : r'<frameset id="?rootset"?',
"tooManyLoginsFailuresFromThisIP" : r'Too many login failures from this IP',
"waitOneMinuteLoginError" : r'Please wait a minute',
"waitTwoMinutesLoginError" : r"you'll need to wait a couple of minutes before you can log in again\.",
"waitFiveMinutesLoginError" : r"Please wait five minutes and try again\.",
"waitFifteenMinutesLoginError" : r'Please wait fifteen minutes and try again\.',
# Item-related patterns.
"menuItem" : r'<input type=radio name=whichitem value="?(-?[0-9]+)"?></td><td><img .*? onclick=\'descitem\("?([^"]+)"?\);\'>',
"acquireSingleItem" : r'<td[^>]*><img src="[^"]*" alt="[^"]*" title="[^"]*"[^>]*descitem\(([0-9]+)\)[^>]*><\/td><td[^>]*>You acquire an item',
"acquireMultipleItems" : r'<td[^>]*><img src="[^"]*" alt="[^"]*" title="[^"]*"[^>]*descitem\(([0-9]+)\)[^>]*><\/td><td[^>]*>You acquire <b>([0-9,]*) ',
"gainMeat" : r'<td><img src="[^"]*meat\.gif"[^>]*><\/td><td[^>]*>You gain ([0-9,]*?) Meat\.<\/td>',
"loseMeat" : r'You lose ([0-9,]*?) Meat',
"isCocktailcraftingIngredient" : (r'<br>\(Cocktailcrafting ingredient\)<br>'),
"isCookingIngredient" : r'<br>\(Cooking ingredient\)<br>',
"isJewelrymakingComponent" : r'<br>\(Jewelrymaking component\)<br>',
"isMeatsmithingComponent" : r'<br>\(Meatsmithing component\)<br>',
"inventorySingleItem" : r'<img [^>]*descitem\(([0-9]+)[^>]*></td><td[^>]*><b[^>]*>([^<>]+)</b> <span><\/span>',
"inventoryMultipleItems" : r'<img [^>]*descitem\(([0-9]+)[^>]*></td><td[^>]*><b[^>]*>([^<>]+)</b> <span>\(([0-9]+)\)<\/span>',
"itemAutosell" : r'<br>Selling Price: <b>(\d*) Meat\.<\/b>',
"itemImage" : r'<img src="http:\/\/images\.kingdomofloathing\.com\/itemimages\/(.*?)"',
"itemName" : r'<b>(.+?)<\/b>',
"itemType" : r'<br>Type: <b>([^<]*)<.*\/b><br>',
"tooFull" : r"You're too full to eat that\.",
"tooDrunk" : r"You're way too drunk already\.",
"notBooze" : r"That's not booze\.",
"notFood" : r"That's not something you can eat\.",
"notEquip" : r"That's not something you can equip\. And stop screwing with the URLs\.",
"notEnoughToUse" : r"<table><tr><td>You don't have that many of that item.</td></tr></table>",
"notMultiUse" : r"<table><tr><td>That item isn't usable in quantity.</td></tr></table>",
# Message-related patterns.
"brickMessage" : r"http:\/\/images\.kingdomofloathing\.com\/adventureimages\/(brokewin|bigbrick)\.gif",
"candyHeartMessage" : r"http:\/\/images\.kingdomofloathing\.com\/otherimages\/heart\/hearttop\.gif",
"coffeeMessage" : r"http:\/\/images\.kingdomofloathing\.com\/otherimages\/heart\/cuptop\.gif",
"fullMessage" : ('<tr><td[^>]*><input type=checkbox name="sel([0-9]+)".*?<b>[^<]*<\/b> <a href="showplayer\.php\?who=([0-9]+)">([^<]*)<\/a>.*?<b>Date:<\/b>([^<]*?)</b>.*?<blockquote>(.*?)<\/blockquote>', re.DOTALL),
"userInHardcoreRonin" : r'<center><table><tr><td>That player cannot receive Meat or items from other players right now\.',
"userIgnoringUs" : r"<center><table><tr><td>This message could not be sent, because you are on that player's ignore list\.<\/td><\/tr><\/table><\/center>",
"notEnoughItemsToSend" : r"<center><table><tr><td>You don't have enough of one of the items you're trying to send\.<\/td><\/tr><\/table><\/center>",
"messageSent" : r"<td><center>Message sent\.<\/center><\/td>",
"kmailNotSentUserTrendy" : r"<center><table><tr><td>That player would never use something as old and outmoded as",
"weAreIgnoringUser" : r"<td>This message could not be sent, because that player is on your ignore list\.<\/td>",
# Error patterns.
"cantPulverizeItem" : r"<td>That's not something you can pulverize\.<\/td>",
"notEnoughItems" : r"(?:<td>You haven't got that many\.<\/td>)|(?:You don't have the item you're trying to use\.)|(?:You don't have the item you're trying to equip\.)",
# Chat patterns.
"currentChatChannel" : r'<font color="?#?\w+"?>Currently in channel: ([^<>]+)<',
"chatLastSeen" : r"lastseen:([0-9]+)",
"chatChannel" : r'^<font color="?#?\w+"?>\[([^<>]+)\]<\/font> ',
"chatMessage" : r'<b><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font>(?:<\/b>|<\/a>|:)* (.*)$',
"chatEmote" : r'<b><i><a target="?mainpane"? href="showplayer\.php\?who=([0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/b><\/font><\/a> (.*)<\/i>$',
"privateChat" : r'<a target="?mainpane"? href="showplayer\.php\?who=([0-9]+)"><font color="?blue"?><b>([^)]+) \(private\):<\/b><\/font><\/a> <font color="?blue"?>(.*)</font>$',
"chatNewKmailNotification" : r'<a target="?mainpane"? href="messages\.php"><font color="?green"?>New message received from <a target="?mainpane"? href=\'showplayer\.php\?who=([0-9]+)\'><font color="?green"?>([^<>]+)<\/font><\/a>\.<\/font><\/a>$',
"chatLink" : r'<a target="?_blank"? href="([^"]+)"><font color="?blue"?>\[link\]<\/font><\/a> ',
"chatWhoResponse" : r'<table><tr><td class=tiny><center><b>Players in (?:this channel|channel \w+):',
"chatWhoPerson" : r'<a (?:class="([^"]+)" )?target="?mainpane"? href="showplayer\.php\?who=([0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font><\/a>',
"chatLinkedPlayer" : r"<a style='color: #?\w+' href='showplayer\.php\?who=([0-9]+)' target=mainpane>([^<]+)<\/a>",
"newChatChannel" : r"<font color=[^>]+>You are now talking in channel: ([^\,]+?)\.<p><p>(.*?)</font>",
"chatListenResponse" : r"<font color=[^>]+>Currently listening to channels:(.*?<b>.*?</b>.*?)</font>",
"chatListenCurrent" : r"<br> <b>(.*?)</b>",
"chatListenOthers" : r" ([^<>]*?)<br>",
"chatStartListen" : r'<font color=[^>]+>Now listening to channel: ([^>]+)</font>',
"chatStopListen" : r'<font color=[^>]+>No longer listening to channel: ([^>]+)</font>',
"chatMultiLineStart" : r'<b><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font><\/b><\/a>:$',
"chatMultiLineEmote" : r'<b><i><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/b><\/font><\/a>$',
"outgoingPrivate" : r'<font color="?blue"?><b>private to <a class=nounder target="?mainpane"? href="?showplayer.php\?who=([0-9]+)"?><font color="?blue"?>(.*?)</font></a></b>:(.*?)</font></br>',
"chatPlayerLoggedOn" : r'<font color=green><a target=mainpane href=\'showplayer\.php\?who=([0-9]+)\'><font color=green><b>([^<>]+)<\/b><\/font><\/a> logged on\.<\/font>$',
"chatPlayerLoggedOff" : r'<font color=green><a target=mainpane href=\'showplayer\.php\?who=([0-9]+)\'><font color=green><b>([^<>]+)<\/b><\/font><\/a> logged off\.<\/font>$',
"chatTalkieFrequency" : r'<font color=green>The frequency is (.*?), Mr. Rather\.<\/font>',
# Clan dungeon patterns.
"dungeonActivity" : r'(?:^|<br>|<br><b>|<b>)([^<>]+) \(#([0-9,]+)\) ([^<>]+) \(([0-9,]+) turns?\)',
"dungeonLootDistribution" : r'(?:<blockquote>|<br>)([^<>]+) \(#([0-9,]+)\) distributed <b>([^<>]+)</b> to ([^<>]+) \(#([0-9,]+)\)<br>',
"dungeonUndistributedLoot" : r'<td valign="center"><b>([^<>]+)<\/b> <\/td><td class="small" valign="center">Acquired by: ([^<>]+) \</td>',
"dungeonPreviousRun" : r'<tr><td class="?small"?>([^<>]+) <\/td><td class="?small"?>([^<>]+) <\/td><td class="?small"?>([^<>]+) <\/td><td class="?small"?>([0-9,]+)<\/td><td class="?tiny"?>\[<a href="clan_raidlogs\.php\?viewlog=([0-9]+)">view logs<\/a>\]<\/td><\/tr>',
"dungeonLogCategory" : r'<b>([^<>]+):?<\/b><blockquote>(.*?)<\/blockquote>',
"imprisonedByChums" : r'^(.*) has been imprisoned by the C\. H\. U\. M\.s!$',
"freedFromChums" : r'^(.*) has rescued (.*) from the C\. H\. U\. M\.s\.$',
# Cocktailcrafting patterns.
"itemsDontMakeCocktail" : r"<td>Those two items don't combine to make a refreshing cocktail\.</td>",
"dontHaveSkillToMixCocktail" : r"<td>You don't have the skill necessary to make that cocktail\.</td>",
"dontHaveItemsForThatCocktail" : r"<td>You don't have enough of one of the necessary items to make a cocktail that tasty\.</td>",
"dontHaveAdventuresToMixCocktail" : r"<td>You don't have that many adventures left\. +It takes <i>time<\/i> to make a good cocktail, man\.</td>",
"bartenderExplosion" : r"Smoke begins to pour from the head of your bartender-in-the-box. It begins to vibrate noisily, spits out a few drinks at random, and then explodes\.",
# Cooking patterns.
"itemsDontCook" : r"<td>Those two items don't combine to make anything tasty\.</td>",
"dontHaveSkillToCook" : r"<td>You don't have the skill necessary to cook this item\.</td>",
"dontHaveItemsForCook" : r"<td>You don't have enough of one of the ingredients of that dish\.</td>",
"dontHaveAdventuresToCook" : r"<td>You don't have that many adventures left\. +It takes <i>time<\/i> to cook stuff, man\.</td>",
"chefExplosion" : r"Smoke begins to pour from the head of your chef-in-the-box. It begins to vibrate noisily, spits out a few dishes at random, and then explodes\.",
# Campground patterns.
"campgroundHasOven" : r"You've got an E-Z Cook™ oven installed in your kitchen.",
"campgroundHasRange" : r"You've got a Dramatic™ range installed in your kitchen.",
"campgroundHasChef" : r"You've got a (clockwork )?Chef-in-the-box. He'll help you cook fancy dishes, and make it so cooking doesn't cost an Adventure!",
"campgroundHasShaker" : r"You've got a My First Shaker™ cocktailcrafting kit in your kitchen.",
"campgroundHasKit" : r"You've got a Queue Du Coq cocktailcrafting kit in your kitchen.",
"campgroundHasBartender" : r"You've got a (clockwork )?Bartender-in-the-box. He'll help you mix up fancy cocktails, and make it so cocktailcrafting doesn't cost an Adventure!",
"campgroundHasMat" : r"Your kitchen is equipped with a sushi-rolling mat.",
# Character Pane patterns.
'characterLevel' : r'<br>Level ([0-9]+)<br>(.*?)<table',
'characterMuscle' : r'Muscle:</td><td align=left><b>(?:<font color=blue>([0-9]+)</font>)?(?: )?\(?([0-9]+)\)?</b>',
'characterMoxie' : r'Moxie:</td><td align=left><b>(?:<font color=blue>([0-9]+)</font>)?(?: )?\(?([0-9]+)\)?</b>',
'characterMysticality' : r'Mysticality:</td><td align=left><b>(?:<font color=blue>([0-9]+)</font>)?(?: )?\(?([0-9]+)\)?</b>',
'characterHP' : r'onclick=\'doc\("hp"\);\'[^<>]*><br><span class=[^>]+>([0-9]+) / ([0-9]+)</span>',
'characterMP' : r'onclick=\'doc\("mp"\);\'[^<>]*><br><span class=[^>]+>([0-9]+) / ([0-9]+)</span>',
'characterMeat' : r'onclick=\'doc\("meat"\);\'[^<>]*><br><span class=black>([0-9,]+)</span>',
'characterAdventures' : r'onclick=\'doc\("adventures"\);\'[^<>]*><br><span class=black>([0-9]+)</span>',
'currentFamiliar' : r'href="familiar.php">(?:<b>)?<font size=[0-9]+>(.*?)</a>(?:</b>)?, the ([0-9]+)-pound (.*?)</font></td></tr></table>',
'characterEffect' : r'eff\("[a-fA-F0-9]+"\);\'.*?></td><td valign=center><font size=[0-9]+>(.*?) ?\(([0-9]+)\)</font><br></td>',
'characterRonin' : r'>Ronin</a>: <b>([0-9]+)</b>',
'characterMindControl' : r'>Mind Control</a>: <b>([0-9]{1,2})</b>',
'characterDrunk' : r'>(?:Inebriety|Temulency|Tipsiness|Drunkenness):</td><td><b>([0-9]{1,2})</b>',
# Stat, Substat, Leveling, HP, and MP patterns. Will fail in Haiku Dungeon.
'muscleGainLoss' : r'You (gain|lose) ([0-9,]+) (?:Beefiness|Fortitude|Muscleboundness|Strengthliness|Strongness)',
'mysticalityGainLoss' : r'You (gain|lose) ([0-9,]+) (?:Enchantedness|Magicalness|Mysteriousness|Wizardliness)',
'moxieGainLoss' : r'You (gain|lose) ([0-9,]+) (?:Cheek|Chutzpah|Roguishness|Sarcasm|Smarm)',
'musclePointGainLoss' : r'You (gain|lose) (a|some) Muscle points?',
'mystPointGainLoss' : r'You (gain|lose) (a|some) Mysticality points?',
'moxiePointGainLoss' : r'You (gain|lose) (a|some) Moxie points?',
'levelGain' : r'You gain (a|some) (?:L|l)evels?',
'hpGainLoss' : r'You (gain|lose) ([0-9,]+) hit points?',
'mpGainLoss' : r'You (gain|lose) ([0-9,]+) (?:Muscularity|Mana|Mojo) (?:P|p)oints?',
# Drunkenness, Adventures, and Effect patterns.
'gainDrunk' : r'You gain ([0-9]+) Drunkenness',
'gainAdventures' : r'You gain ([0-9,]+) Adventures',
'gainEffect' : r'<td valign=center class=effect>You acquire an effect: <b>(.*?)</b><br>\(duration: ([0-9,]+) Adventures\)</td>',
# Meatpasting patterns.
'noMeatpaste' : (r"<b>Results:</b>.*You don't have any meat paste.*<b>Combine Items:</b>", re.DOTALL),
'itemsDontMeatpaste' : r"<td>Those two items don't combine to make anything interesting\.</td>",
'dontHaveItemsMeatpaste' : r"<td>You don't have enough of one the necessary items to make that combination\.</td>",
'noMeatForMeatpasting' : r"<td>You don't have enough Meat to make that many\.</td>",
# Store patterns.
'meatSpent' : r'You spent ([0-9,]+) Meat',
'noMeatForStore' : r"(?:You can't afford that many of that item)|(?:You can't afford that item)|(?:You can't afford to purchase that)",
'invalidStore' : r"You've been sent back here by some kind of bug",
'notSoldHere' : r"(?:This store doesn't sell that item)|(?:Invalid item selected)",
"storeInventory" : r'<tr class="deets" rel="([0-9]+)" after="([0-9]+)">(.*?)<b>(.*?)</b></td><td valign="center" align="center">([0-9]+)</td(.*?)name="price\[([0-9]+)\]" value="([0-9,]+)"(.*?)name="limit\[[0-9]+\]" value="([0-9]+)"(.*?)cheapest: ([0-9]+)</span>',
# Hermit patterns.
'noTrinkets' : r"You don't have enough stuff",
'noHermitPermits' : r"You don't have enough Hermit Permits to trade for that many",
'notEnoughClovers' : r"you are able to infer that he doesn't have enough clovers to make that trade",
'notHermitItem' : r"The Hermit doesn't have that item",
# Adventure patterns.
"twiddlingThumbs" : r"You twiddle your thumbs\.",
"userShouldNotBeHere" : r"(?:>You shouldn't be here\.<)|(?:)>This is not currently available to you\.<",
"monsterName" : r"<span id='monname'>(.*?)<\/span>",
"choiceIdentifier" : r'<input type="?hidden"? name="?whichchoice"? value="?([0-9]+)"?>',
"choiceName" : r"<b>([^<>]+?)<\/b><\/td><\/tr>",
"noncombatName" : r"<center><table><tr><td><center><b>([^<>]+)<\/b><br><img",
"fightWon" : r"<center>You win the fight!<!--WINWINWIN--><p>",
"fightLost" : r"<p>You lose\. +You slink away, dejected and defeated\.<p>",
"usedBarrel" : r"KOMPRESSOR does not smash",
"noAdventures" : r"You're out of adventures",
# Rumpus Room patterns.
'rumpusRoomFurniture' : r'rump([0-9])_([0-9])\.gif',
# Mall search patterns.
"mallItemSearchResult" : r'<tr class="graybelow(.*?)<\/tr>',
"mallItemSearchDetails" : r'<a[^<>]*href="mallstore\.php\?whichstore=(?P<storeId>[0-9]+)&searchitem=(?P<itemId>[0-9]+)&searchprice=(?P<price>[0-9]+)"><b>(?P<storeName>.*?)<\/b><\/a>[^<>]*<\/td><td[^<>]*>(?P<quantity>[0-9,]+)<\/td><td[^<>]*>(?: )*(?P<limit>[0-9,]*)[^<>]*<\/td>',
# Mall purchase patterns.
"cantAffordItem" : r"<td>You can't afford that item\.<\/td>",
"mallNoItemAtThatPrice" : r"<td>This store doesn't have that item at that price\.",
"cantBuyItemIgnoreList" : r"<td>That player will not sell to you, because you are on his or her ignore list\.<\/td>",
"mallHitLimit" : r"You may only buy ([0-9,]+) of this item per day from this store\. You have already purchased ([0-9,]+) in the last 24 hours\.",
# Canadia patterns.
"noAdvInstitue" : r">You don't have that many Adventures\. Take off, eh\?<",
"invalidAdvInstitute" : r">That doesn't make any sense, you hoser\.<",
# Guild patterns.
'skillNotTrainable' : r'>Invalid skill selected\.<',
'skillTooWeak' : r">You're not powerful enough to train that skill\.<",
'skillTooPoor' : r">You can't afford to train that skill\.<",
'skillLearned' : r">You learn a new skill: <b>(.*?)</b>",
'skillHaveAlready' : r">You've already got that skill\.<",
# Equipment patterns
"currentHat" : r"Hat</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentWeapon" : r"Weapon</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentOffhand" : r"Offhand</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentShirt" : r"Shirt</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentPants" : r"Pants</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentAcc" : r"Accessory</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentAcc1" : r"Accessory</a> 1:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentAcc2" : r"Accessory</a> 2:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentAcc3" : r"Accessory</a> 3:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
"currentFam" : r"Familiar</a>:</td><td><img src=\"[^\"]+\" class=hand onClick='descitem\(([0-9]+)\)'",
# Autosell patterns.
"autosellResponse" : r"You sell your (.*?) to (?:.*?) for ([0-9,]+) Meat.",
"autosellItems" : r" ([0-9,]*) ?(.*?),",
"autosellLastTwo" : r" ([0-9,]*) ?(.*?) and your ([0-9,]*) ?(.*?)$",
"autosellOne" : r"([0-9,]*) ?(.*?)$",
# Uneffect patterns.
"effectRemoved" : r"<td>Effect removed\.<\/td>",
"youDontHaveThatEffect" : r"<td>You don't have that effect\.",
"youDontHaveSGEEA" : r"<td>You don't have a green soft eyedrop echo antidote\.",
# Ascension History patterns.
"fullAscension" : r'</tr><td[^>]*>([0-9]+).*?</td><td[^>]*>([0-9/]+).*?</td><td[^>]*><span[^>]*>([0-9,]+).*?</span>.*?</td><td[^>]*><img [^>]*title="(.*?)"[^>]*></td><td[^>]*>(.*?)</td><td[^>]*>(<span[^>]*>)?([0-9,]+)(</span>)?</td><td[^>]*>(<span[^>]*>)?([0-9,]+)(</span>)?</td><td[^>]*>(?:<img [^>]*title="(.*?)"[^>]*>)?</td><td[^>]*>(<img [^>]*title="(.*?)"[^>]*>|<img src="http://images.kingdomofloathing.com/otherimages/spacer.gif" width=30 height=30>)(<img [^>]*title="(.*?)"[^>]*>|</td>)',
"familiarAscension" : r'^(.*?) \(([0-9.]+)%\)',
"playerName" : r'Ascension History \(<a[^>]*><font[^>]*>(.*?)<\/font><\/a>\)',
# User Profile patterns.
"profileUserName" : r'<td valign="?center"?>(?:<center>)?<b>([^<>]+)<\/b> \(#[0-9]+\)<br>',
"profileClan" : r'Clan: <b><a class=nounder href="showclan\.php\?whichclan=([0-9]+)">(.*?)<\/a>',
"profileNumAscensions" : r'Ascensions<\/a>:<\/b><\/td><td>([0-9,]+)<\/td>',
"profileNumTrophies" : r'Trophies Collected:<\/b><\/td><td>([0-9,]+)<\/td>',
"profileNumTattoos" : r'Tattoos Collected:<\/b><\/td><td>([0-9,]+)<\/td>',
# Quest Log patterns.
"questsCompleted" : r'<b>([\w\s,\.\'\?!]+)<\/b>(?!<\/td>)<br>([\w\s,\.\'\?!]+)<p>',
# Clan patterns.
"clanName" : r'<a href="clan_hall\.php">([^<>]*)<\/a>',
"clanCredo" : r'<textarea name=newcredo[^<>]*>([^<>]*)</textarea>',
"clanWebsite" : r'<input type=text class=text name=website value="([^"]*)" size=60 maxlength=255>',
"clanAcceptingApps" : r'<p>Your clan is currently accepting applications\.<br>',
"clanRankContainer" : r'<select name=level[0-9]+>(.*?)<\/select>',
"clanRank" : r'<option value=([0-9]+)(?: selected)?>(.*?) \(°([0-9]+)\)<\/option>',
"clanWhitelistMember" : r'''<tr><td><input type=hidden name=(?:player[0-9]+|who) value=[0-9]+><a href='showplayer\.php\?who=(?P<userId>[0-9]+)' class=nounder><b>(?P<userName>[^<>]+)</b> \(#[0-9]+\)<\/a><\/td><td>(?:<select.*?<option value=(?P<clanRankId>[0-9]+) selected>.*?<\/select>|(?P<clanRankName>[^<>]+))<\/td><td>(?:<input type=text class=text size=[0-9]+ name=title[0-9]+ value=")?(?P<clanTitle>[^<>]*)(?:">)?<\/td>''',
"clanLogEntry" : r'>(?P<date>[0-9/]+, [0-9:]+(?:AM|PM)): (?:<a class=nounder href=\'showplayer\.php\?who=[0-9]+\'>)?(?P<userName>[^<>]+) \(#(?P<userId>[0-9]+)\)(?:<\/a>)? (?P<action>.*?)(?=<br>)',
"clanLogFax" : r'faxed in a (?P<monsterName>.*)$',
"clanLogAttack" : r'launched an attack against (?P<clanName>.*)\.$',
"clanLogWhitelistAdd" : r'added <a class=nounder href=\'showplayer\.php\?who=[0-9]+\'>(?P<userName>.*) \(#(?P<userId>[0-9]+)\)<\/a> to the clan\'s whitelist\.$',
"clanLogPlayerJoinedAnotherClan" : r'joined another clan\.$',
"clanLogPlayerJoinedClanWhitelist" : r'was accepted into the clan \(whitelist\)$',
"clanLogStashItemAdd" : r'added (?P<quantity>[0-9,]+) (?P<itemName>.*)\.$',
"clanLogStashItemRemove" : r'took (?P<quantity>[0-9,]+) (?P<itemName>.*)\.$',
"clanLogMeatSpentArmy" : r'spent (?P<meat>[0-9,]+) Meat on the clan army\.$',
"clanLogChangedRank" : r'changed Rank for <a class=nounder href=\'showplayer\.php\?who=[0-9]+\'>(?P<userName>.*) \(#(?P<userId>[0-9]+)\)<\/a>\.$',
"clanLogChangedTitle" : r'changed title for <a class=nounder href=\'showplayer\.php\?who=[0-9]+\'>(?P<userName>.*) \(#(?P<userId>[0-9]+)\)<\/a>\. \((?P<clanTitle>.*)\)$',
# Search player Patterns
"searchPlayers" : r'showplayer\.php\?who=([0-9]+)">([^<]*)<\/a>',
# Traveling Trader Patterns
"traderNotTradingForThatItem" : r'<td>The trader is not trading for that item\.<\/td>',
"traderCantTradeForThatMany" : r'<td>You can\'t trade for that many ',
"traderNotEnoughWads" : r'<td>You don\'t have enough twinkly wads to trade for that many ',
# Crimbo Patterns
"crimboItemIsNotCandy" : r"<td>That's not candy!<\/td>",
"crimboNotEnoughCandy" : r"<td>You don't have that much candy!<\/td>",
"crimboCandyCreditsReceived" : r"That gives you ([0-9,]+) Candy Credits?\. You can trade in your",
"crimboInvalidGift" : r"<td>Invalid gift selected\. Bah Humbug!<\/td>",
"crimboInvalidPlayer" : r"<td>Sorry, I couldn't find the player ",
"crimboUserAlreadyReceivedGift" : r"<td>The factory workers inform you that your intended recipient already has one of those\.<\/td>",
# Curse Patterns
"dontHaveThatItem" : r"<td>You don't have that item\.<\/td>",
"cantFireArrowAtSelf" : r"<td>You can't fire that at yourself\. +Your accuracy at point-blank range is terrible\.<\/td>",
"userAlreadyHitWithArrow" : r"<td>That player has already been hit with a time's arrow today\.<\/td>",
"cantFireArrowAtHardcoreRonin": r"<td>You can't fire a time's arrow at somebody in Ronin or Hardcore\.<\/td>",
"cantCursePlayerNotFound" : r"<td>That player could not be found\. +Confound it\.<\/td>",
"fireArrowSuccess" : r"It hits with a satisfying <i>thwock<\/i>",
# Nash Crosby's Still Patterns
"wrongStillProfession": r"<td>No still for you\.<\/td>",
"invalidStillItem": r"<td>Nash Crosby doesn\'t want anything to do with that item\.<\/td>",
"stillItemNotFound": r"<td>You don\'t have that many of that item, Chief.<\/td>",
"stillMaxLimit": r"<td>The still can\'t handle that much action today\.<\/td>",
# Bounty Hunter Hunter patterns.
'bountyAvailable' : r"These are the things I'm currently paying bounties on",
'dailyBountyItem' : r'<input type=hidden name=action value="takebounty"><input type=hidden name=whichitem value=(?P<itemid>[0-9]+)>',
'bountyChosen' : r'Get out there and collect those',
'bountyActive1' : r"I'm still waiting for you to bring me",
'bountyActive2' : r"You have (.*) collected (.*) so far",
# Wok related patterns.
"dontHaveItemsForWok" : r"<td>You don't have the materials for that amount of wokkage\.</td>",
"dontHaveAdventuresForWok" : r"<td>You don't have that many adventures left\.",
"noWokAccess" : "What wok are you tokking about\?",
"dontHaveSkillForWok" : r"<td>You don't have the skill necessary",
# Sept 2013 Mall interface patterns
"dontHaveEnoughOfItem" : r"<td>You don't have enough of those",
"dontHaveThatItem" : r"<td>You don't have that item.([0-9]+)",
"itemAddedSuccessfully" : r"<td>\(([0-9]+)\) (.*) for ([0-9,]+) meat each",
"dontHaveThatManyInStore" : "You don't have that many in your store.",
"itemTakenSuccessfully" : "You acquire",
"mallPricesUnlimited" : r"<tr><td>unlimited:</td><td><b>([0-9,]+)</b> x([0-9]+).*?</td><td><b>([0-9,]+)</b> x([0-9]+).*?</td><td><b>([0-9,]+)</b> x([0-9]+).*?</td><td><b>([0-9,]+)</b> x([0-9]+).*?</td></tr>",
"mallPricesLimited" : r"<tr><td>limited:</td><td><b>([0-9,]+)</b>\(([0-9]+)/day\) x([0-9]+).*?</td><td><b>([0-9,]+)</b>\(([0-9]+)/day\) x([0-9]+).*?</td><td><b>([0-9,]+)</b>\(([0-9]+)/day\) x([0-9]+).*?</td></tr>",
"mallPriceNotUpdated" : "Nothing updated",
"mallTransactions" : r"([1-9][1-9]/[1-9][1-9]/[1-9][1-9] [1-9][1-9]:[1-9][1-9]:[1-9][1-9]) <a class=nounder href=\"showplayer.php\?who=([0-9]+)\"><b>(.*?)<\/b><\/a> bought ([0-9]+) \((.*?)\) for ([0-9]+) Meat.<br>",
#Trade related patterns
"traderIgnoringUs" : r"<td>You can't make an offer to a player who has placed you on his or her ignore list\.",
"traderIsInRoninHC" : r"<td>That player cannot receive Meat or items from other players\.",
"traderHasNotEnoughMeat" : r"<td>You don't have that much Meat\.",
"traderHasNotEnoughItems" : r"<td>You don't have enough of one of the items you're trying to send.",
"traderBannedFromChat" : r"<td>You can't send offers while you're banned from the chat\.",
"tradeSentSuccessfully" : r"<td>Your trade offer has been sent\.",
"tradeItem" : (r"<tr><td><img onclick\='descitem\((?P<itemdescid>[0-9]+)\).*?<b>(?P<itemname>.*?)\((?P<quantity>[0-9,]+])\)</td>'", re.DOTALL),
"tradeMessage" : (r'<table cellpadding=5><tr><td.*?><b>Note:</b><Br><?/?c?e?n?t?e?r?>?(?P<message>.*?)</td></tr></table>', re.DOTALL),
"tradePendingResponseIncoming" : (r'You will give (?P<playername>.*?) \(#(?P<playerid>[0-9]+)\):<br><table>(?P<outgoingitems>.*?)</table><img src="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<outgoingmeat>[0-9,]+)<br><hr>.*? will give you:<br><table>(?P<incomingitems>.*?)</table><img src\="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<incomingmeat>[0-9,]+)<br>(?P<message>.*?)<a href="makeoffer\.php\?action\=accept&pwd\=.*?&whichoffer\=(?P<tradeid>[0-9]+)">accept', re.DOTALL),
"tradePendingResponseOutgoing" : (r'You will give [^()]*?:<br><table>(?P<outgoingitems>.*?)</table><img src="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<outgoingmeat>[0-9,]+?)<br><hr>(?P<playername>.*?) \(#(?P<playerid>[0-9]+?)\) will give you:<br><table>(?P<incomingitems>.*?)</table><img src="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<incomingmeat>[0-9,]+)<br>(?P<message>.*?)<a href="makeoffer\.php\?action=cancel2&whichoffer=(?P<tradeid>[0-9]+?)">cancel', re.DOTALL),
"tradePendingOfferIncoming" : (r'Offer from (?P<playername>.*?) \(#(?P<playerid>[0-9]+)\)<br><table>(?P<incomingitems>.*?)</table><img src="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<incomingmeat>[0-9,]+)<br>(?P<message>.*?)<a href="counteroffer\.php\?whichoffer=(?P<tradeid>[0-9]+)">respond', re.DOTALL),
"tradePendingOfferOutgoing" : (r'Offered to (?P<playername>.*?) \(#(?P<playerid>[0-9]+)\)<br><table>(?P<outgoingitems>.*?)</table><img src="http://images\.kingdomofloathing\.com/itemimages/meat\.gif" width=30 height=30><b>:</b> (?P<outgoingmeat>[0-9,]+)<br>(?P<message>.*?)<a href="makeoffer\.php\?action=cancel&whichoffer=(?P<tradeid>[0-9]+)">cancel this offer', re.DOTALL),
"tradeResponseSentSuccessfully" : r'Pending Responses \(Outgoing\)',
"tradeCancelledSuccessfully" : r'Offer cancelled\.',
"tradeAccepted" : r'Offer Accepted\.',
}
| ijzer/cwbot-ndy | kol/data/Patterns.py | Python | bsd-3-clause | 28,912 | 0.014596 |
"""
To run these tests, execute "kolibri manage test -- kolibri.core.content" from the command line.
"""
import datetime
import unittest
import uuid
import mock
import requests
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from le_utils.constants import content_kinds
from rest_framework import status
from rest_framework.test import APITestCase
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.test.helpers import provision_device
from kolibri.core.content import models as content
from kolibri.core.content.test.test_channel_upgrade import ChannelBuilder
from kolibri.core.device.models import DevicePermissions
from kolibri.core.device.models import DeviceSettings
from kolibri.core.logger.models import ContentSessionLog
from kolibri.core.logger.models import ContentSummaryLog
DUMMY_PASSWORD = "password"
class ContentNodeTestBase(object):
"""
    Base class providing shared tests for content metadata methods
"""
def test_get_prerequisites_for(self):
"""
test the directional characteristic of prerequisite relationship
"""
c1 = content.ContentNode.objects.get(title="c1")
root = content.ContentNode.objects.get(title="root")
# if root is the prerequisite of c1
expected_output = content.ContentNode.objects.filter(title__in=["root"])
actual_output = content.ContentNode.objects.filter(prerequisite_for=c1)
self.assertEqual(set(expected_output), set(actual_output))
# then c1 should not be the prerequisite of root
unexpected_output = content.ContentNode.objects.filter(title__in=["c1"])
actual_output = content.ContentNode.objects.filter(prerequisite_for=root)
self.assertNotEqual(set(actual_output), set(unexpected_output))
def test_get_has_prerequisites(self):
"""
test the directional characteristic of prerequisite relationship
"""
c1 = content.ContentNode.objects.get(title="c1")
root = content.ContentNode.objects.get(title="root")
# if root is the prerequisite of c1
expected_output = content.ContentNode.objects.filter(title__in=["c1"])
actual_output = content.ContentNode.objects.filter(has_prerequisite=root)
self.assertEqual(set(expected_output), set(actual_output))
# then c1 should not be the prerequisite of root
unexpected_output = content.ContentNode.objects.filter(title__in=["root"])
actual_output = content.ContentNode.objects.filter(has_prerequisite=c1)
self.assertNotEqual(set(actual_output), set(unexpected_output))
def test_get_all_related(self):
"""
test the nondirectional characteristic of related relationship
"""
c1 = content.ContentNode.objects.get(title="c1")
c2 = content.ContentNode.objects.get(title="c2")
# if c1 is related to c2
expected_output = content.ContentNode.objects.filter(title__in=["c2"])
actual_output = content.ContentNode.objects.filter(related=c1)
self.assertEqual(set(expected_output), set(actual_output))
# then c2 should be related to c1
expected_output = content.ContentNode.objects.filter(title__in=["c1"])
actual_output = content.ContentNode.objects.filter(related=c2)
self.assertEqual(set(expected_output), set(actual_output))
def test_descendants_of_kind(self):
p = content.ContentNode.objects.get(title="root")
expected_output = content.ContentNode.objects.filter(title__in=["c1"])
actual_output = p.get_descendants(include_self=False).filter(
kind=content_kinds.VIDEO
)
self.assertEqual(set(expected_output), set(actual_output))
def test_get_top_level_topics(self):
p = content.ContentNode.objects.get(title="root")
expected_output = content.ContentNode.objects.filter(
parent=p, kind=content_kinds.TOPIC
)
actual_output = (
content.ContentNode.objects.get(title="root")
.get_children()
.filter(kind=content_kinds.TOPIC)
)
self.assertEqual(set(expected_output), set(actual_output))
def test_tag_str(self):
# test for ContentTag __str__
p = content.ContentTag.objects.get(tag_name="tag_2")
self.assertEqual(str(p), "tag_2")
def test_lang_str(self):
# test for Language __str__
p = content.Language.objects.get(lang_code="en")
self.assertEqual(str(p), "English-Test")
def test_channelmetadata_str(self):
# test for ChannelMetadata __str__
p = content.ChannelMetadata.objects.get(name="testing")
self.assertEqual(str(p), "testing")
def test_tags(self):
root_tag_count = content.ContentNode.objects.get(title="root").tags.count()
self.assertEqual(root_tag_count, 3)
c1_tag_count = content.ContentNode.objects.get(title="c1").tags.count()
self.assertEqual(c1_tag_count, 1)
c2_tag_count = content.ContentNode.objects.get(title="c2").tags.count()
self.assertEqual(c2_tag_count, 1)
c2c1_tag_count = content.ContentNode.objects.get(title="c2c1").tags.count()
self.assertEqual(c2c1_tag_count, 0)
def test_local_files(self):
self.assertTrue(
content.LocalFile.objects.filter(
id="9f9438fe6b0d42dd8e913d7d04cfb2b2"
).exists()
)
self.assertTrue(
content.LocalFile.objects.filter(
id="725257a0570044acbd59f8cf6a68b2be"
).exists()
)
self.assertTrue(
content.LocalFile.objects.filter(
id="e00699f859624e0f875ac6fe1e13d648"
).exists()
)
self.assertTrue(
content.LocalFile.objects.filter(
id="4c30dc7619f74f97ae2ccd4fffd09bf2"
).exists()
)
self.assertTrue(
content.LocalFile.objects.filter(
id="8ad3fffedf144cba9492e16daec1e39a"
).exists()
)
def test_delete_tree(self):
channel = content.ChannelMetadata.objects.first()
channel_id = channel.id
channel.delete_content_tree_and_files()
self.assertFalse(
content.ContentNode.objects.filter(channel_id=channel_id).exists()
)
self.assertFalse(content.File.objects.all().exists())
class ContentNodeQuerysetTestCase(TestCase):
fixtures = ["content_test.json"]
the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
@classmethod
def setUpTestData(cls):
provision_device()
cls.facility = Facility.objects.create(name="facility")
cls.admin = FacilityUser.objects.create(username="admin", facility=cls.facility)
cls.admin.set_password(DUMMY_PASSWORD)
cls.admin.save()
cls.facility.add_admin(cls.admin)
def test_filter_uuid(self):
content_ids = content.ContentNode.objects.values_list("id", flat=True)
self.assertEqual(
content.ContentNode.objects.filter_by_uuids(content_ids).count(),
len(content_ids),
)
def test_filter_uuid_bad_uuid(self):
content_ids = list(content.ContentNode.objects.values_list("id", flat=True))
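        # Swap in a malformed, SQL-injection-style id; filter_by_uuids should
        # treat it as invalid and match nothing.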
content_ids[0] = '7d1bOR"1"="1"d08e29c36115f1af3da99'
self.assertEqual(
content.ContentNode.objects.filter_by_uuids(content_ids).count(), 0
)
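# Maps le_utils content kinds to the learning-activity labels the API is
# expected to infer for them; see infer_learning_activity() below.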
kind_activity_map = {
content_kinds.EXERCISE: "practice",
content_kinds.VIDEO: "watch",
content_kinds.AUDIO: "listen",
content_kinds.DOCUMENT: "read",
content_kinds.HTML5: "explore",
}
def infer_learning_activity(kind):
activity = kind_activity_map.get(kind)
if activity:
return [activity]
return []
class ContentNodeAPITestCase(APITestCase):
"""
    Test case for content API methods
"""
fixtures = ["content_test.json"]
the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
@classmethod
def setUpTestData(cls):
provision_device()
cls.facility = Facility.objects.create(name="facility")
cls.admin = FacilityUser.objects.create(username="admin", facility=cls.facility)
cls.admin.set_password(DUMMY_PASSWORD)
cls.admin.save()
cls.facility.add_admin(cls.admin)
def test_prerequisite_for_filter(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"prerequisite_for": c1_id}
)
self.assertEqual(response.data[0]["title"], "root")
def test_has_prerequisite_filter(self):
root_id = content.ContentNode.objects.get(title="root").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"has_prerequisite": root_id}
)
self.assertEqual(response.data[0]["title"], "c1")
def test_related_filter(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"related": c1_id}
)
self.assertEqual(response.data[0]["title"], "c2")
def map_language(self, lang):
if lang:
return {
f: getattr(lang, f)
for f in [
"id",
"lang_code",
"lang_subcode",
"lang_name",
"lang_direction",
]
}
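    # Compares a serialized node from the API against the expected
    # ContentNode instance, field by field, including files, language,
    # ancestors, tags, and assessment metadata.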
def _assert_node(self, actual, expected):
assessmentmetadata = (
expected.assessmentmetadata.all()
.values(
"assessment_item_ids",
"number_of_assessments",
"mastery_model",
"randomize",
"is_manipulable",
"contentnode",
)
.first()
)
files = []
for f in expected.files.all():
"local_file__id",
"local_file__available",
"local_file__file_size",
"local_file__extension",
"lang_id",
file = {}
for field in [
"id",
"priority",
"preset",
"supplementary",
"thumbnail",
]:
file[field] = getattr(f, field)
file["checksum"] = f.local_file_id
for field in [
"available",
"file_size",
"extension",
]:
file[field] = getattr(f.local_file, field)
file["lang"] = self.map_language(f.lang)
file["storage_url"] = f.get_storage_url()
files.append(file)
self.assertEqual(
actual,
{
"id": expected.id,
"available": expected.available,
"author": expected.author,
"channel_id": expected.channel_id,
"coach_content": expected.coach_content,
"content_id": expected.content_id,
"description": expected.description,
"duration": None,
"learning_activities": infer_learning_activity(expected.kind),
"kind": expected.kind,
"lang": self.map_language(expected.lang),
"license_description": expected.license_description,
"license_name": expected.license_name,
"license_owner": expected.license_owner,
"num_coach_contents": expected.num_coach_contents,
"options": expected.options,
"parent": expected.parent_id,
"sort_order": expected.sort_order,
"title": expected.title,
"lft": expected.lft,
"rght": expected.rght,
"tree_id": expected.tree_id,
"ancestors": list(expected.get_ancestors().values("id", "title")),
"tags": list(
expected.tags.all()
.order_by("tag_name")
.values_list("tag_name", flat=True)
),
"assessmentmetadata": assessmentmetadata,
"is_leaf": expected.kind != "topic",
"files": files,
},
)
def _assert_nodes(self, data, nodes):
for actual, expected in zip(data, nodes):
self._assert_node(actual, expected)
def test_contentnode_list(self):
root = content.ContentNode.objects.get(title="root")
nodes = root.get_descendants(include_self=True).filter(available=True)
expected_output = len(nodes)
response = self.client.get(reverse("kolibri:core:contentnode-list"))
self.assertEqual(len(response.data), expected_output)
self._assert_nodes(response.data, nodes)
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_list_long(self):
# This will make > 1000 nodes which should test our ancestor batching behaviour
builder = ChannelBuilder(num_children=10)
builder.insert_into_default_db()
content.ContentNode.objects.update(available=True)
nodes = content.ContentNode.objects.filter(available=True)
expected_output = len(nodes)
self.assertGreater(expected_output, 1000)
response = self.client.get(reverse("kolibri:core:contentnode-list"))
self.assertEqual(len(response.data), expected_output)
self._assert_nodes(response.data, nodes)
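    # Walks the nested "children" payload from the contentnode_tree endpoint.
    # The assertions below imply that children are paginated 25 at a time:
    # when a "more" cursor is present it carries the parent id, the lft__gt
    # of the 25th child, and the remaining recursion depth.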
def _recurse_and_assert(self, data, nodes, recursion_depth=0):
for actual, expected in zip(data, nodes):
children = actual.pop("children", None)
self._assert_node(actual, expected)
if children:
child_nodes = content.ContentNode.objects.filter(
available=True, parent=expected
)
if children["more"] is None:
self.assertEqual(len(child_nodes), len(children["results"]))
else:
self.assertGreater(len(child_nodes), len(children["results"]))
self.assertEqual(children["more"]["id"], expected.id)
self.assertEqual(
children["more"]["params"]["lft__gt"], child_nodes[24].rght
)
self.assertEqual(
children["more"]["params"]["depth"], 2 - recursion_depth
)
self._recurse_and_assert(
children["results"],
child_nodes,
recursion_depth=recursion_depth + 1,
)
def test_contentnode_tree(self):
root = content.ContentNode.objects.get(title="root")
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
self._recurse_and_assert([response.data], [root])
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_long(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
self._recurse_and_assert([response.data], [root])
def test_contentnode_tree_depth_1(self):
root = content.ContentNode.objects.get(title="root")
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id}),
data={"depth": 1},
)
self._recurse_and_assert([response.data], [root])
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_lft__gt(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
lft__gt = content.ContentNode.objects.filter(parent=root)[24].rght
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id}),
data={"lft__gt": lft__gt},
)
self.assertEqual(len(response.data["children"]["results"]), 5)
self.assertIsNone(response.data["children"]["more"])
first_node = content.ContentNode.objects.filter(parent=root)[25]
self._recurse_and_assert(
[response.data["children"]["results"][0]], [first_node], recursion_depth=1
)
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_more(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
first_child = response.data["children"]["results"][0]
self.assertEqual(first_child["children"]["more"]["params"]["depth"], 1)
nested_page_response = self.client.get(
reverse(
"kolibri:core:contentnode_tree-detail",
kwargs={"pk": first_child["children"]["more"]["id"]},
),
data=first_child["children"]["more"]["params"],
)
self.assertEqual(len(nested_page_response.data["children"]["results"]), 5)
self.assertIsNone(nested_page_response.data["children"]["more"])
@mock.patch("kolibri.core.content.api.get_channel_stats_from_studio")
def test_contentnode_granular_network_import(self, stats_mock):
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
content.ContentNode.objects.all().update(available=False)
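        # Per-node channel stats as the mocked get_channel_stats_from_studio
        # call will report them.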
stats = {
c1_id: {
"total_resources": 2,
"coach_content": False,
"num_coach_contents": 0,
},
c2_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id})
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 2,
"on_device_resources": 0,
"coach_content": False,
"importable": True,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
@mock.patch("kolibri.core.content.api.get_channel_stats_from_disk")
def test_contentnode_granular_local_import(self, stats_mock):
content.LocalFile.objects.update(available=False)
content.ContentNode.objects.update(available=False)
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
stats = {
c1_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
{"importing_from_drive_id": "123"},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": False,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
@mock.patch("kolibri.core.content.api.get_channel_stats_from_peer")
def test_contentnode_granular_remote_import(self, stats_mock):
content.LocalFile.objects.update(available=False)
content.ContentNode.objects.update(available=False)
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
stats = {
c1_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
{"importing_from_peer_id": "test"},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": False,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
def test_contentnode_granular_export_available(self):
c1 = content.ContentNode.objects.get(title="c1")
c1_id = c1.id
content.ContentNode.objects.filter(title="c1").update(on_device_resources=1)
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
data={"for_export": True},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": True,
"total_resources": 1,
"on_device_resources": 1,
"importable": None,
"children": [],
"coach_content": False,
"num_coach_contents": 0,
"new_resource": None,
"num_new_resources": None,
"updated_resource": None,
"ancestors": list(c1.get_ancestors().values("id", "title")),
},
)
def test_contentnode_granular_export_unavailable(self):
c1 = content.ContentNode.objects.get(title="c1")
c1_id = c1.id
content.ContentNode.objects.filter(title="c1").update(available=False)
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
data={"for_export": True},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": None,
"children": [],
"coach_content": False,
"num_coach_contents": 0,
"new_resource": None,
"num_new_resources": None,
"updated_resource": None,
"ancestors": list(c1.get_ancestors().values("id", "title")),
},
)
def test_contentnode_retrieve(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-detail", kwargs={"pk": c1_id})
)
self.assertEqual(response.data["id"], c1_id.__str__())
def test_contentnode_descendants_assessments_exercise_node(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
c1_id = c1.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": c1_id},
)
self.assertEqual(
next(
item["num_assessments"] for item in response.data if item["id"] == c1_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
parent = c1.parent
parent_id = parent.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_root(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
root = content.ContentNode.objects.get(parent__isnull=True)
root_id = root.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": root_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == root_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent_sum_siblings(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
parent = c1.parent
parent_id = parent.id
sibling = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id=c1.channel_id,
content_id="ded4a083e75f4689b386fd2b706e792a",
kind=content_kinds.EXERCISE,
parent=parent,
title="sibling exercise",
available=True,
)
sibling_assessment_metadata = content.AssessmentMetaData.objects.create(
id="6a406ac66b224106aa2e93f73a94333d",
contentnode=sibling,
number_of_assessments=5,
)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
c1.assessmentmetadata.first().number_of_assessments
+ sibling_assessment_metadata.number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent_sum_siblings_one_unavailable(
self,
):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
c1.available = False
c1.save()
parent = c1.parent
parent_id = parent.id
sibling = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id=c1.channel_id,
content_id="ded4a083e75f4689b386fd2b706e792a",
kind=content_kinds.EXERCISE,
parent=parent,
title="sibling exercise",
available=True,
)
sibling_assessment_metadata = content.AssessmentMetaData.objects.create(
id="6a406ac66b224106aa2e93f73a94333d",
contentnode=sibling,
number_of_assessments=5,
)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
sibling_assessment_metadata.number_of_assessments,
)
def test_contentnode_descendants_topic_siblings_ancestor_ids(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join(topic_ids)},
)
for datum in response.data:
topic = topics.get(id=datum["ancestor_id"])
self.assertTrue(topic.get_descendants().filter(id=datum["id"]).exists())
def test_contentnode_descendants_topic_siblings_kind_filter(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={
"ids": ",".join(topic_ids),
"descendant_kind": content_kinds.EXERCISE,
},
)
for datum in response.data:
topic = topics.get(id=datum["ancestor_id"])
self.assertTrue(
topic.get_descendants()
.filter(id=datum["id"], kind=content_kinds.EXERCISE)
.exists()
)
def test_contentnode_descendants_topic_parent_child_ancestor_ids(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topic = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC, children__isnull=False
).first()
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join((root.id, topic.id))},
)
topic_items = [
datum for datum in response.data if datum["ancestor_id"] == topic.id
]
for node in topic.get_descendants(include_self=False).filter(available=True):
self.assertTrue(next(item for item in topic_items if item["id"] == node.id))
root_items = [
datum for datum in response.data if datum["ancestor_id"] == root.id
]
for node in root.get_descendants(include_self=False).filter(available=True):
self.assertTrue(next(item for item in root_items if item["id"] == node.id))
def test_contentnode_descendants_availability(self):
content.ContentNode.objects.all().update(available=False)
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join(topic_ids)},
)
self.assertEqual(len(response.data), 0)
def test_contentnode_node_assessments_available(self):
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(parent__isnull=True)
exercise_ids = (
root.get_descendants()
.filter(kind=content_kinds.EXERCISE)
.values_list("id", flat=True)
)
response = self.client.get(
reverse("kolibri:core:contentnode-node-assessments"),
data={"ids": ",".join(exercise_ids)},
)
self.assertEqual(response.data, 1)
def test_contentnode_node_assessments_not_available(self):
content.ContentNode.objects.all().update(available=False)
root = content.ContentNode.objects.get(parent__isnull=True)
exercise_ids = (
root.get_descendants()
.filter(kind=content_kinds.EXERCISE)
.values_list("id", flat=True)
)
response = self.client.get(
reverse("kolibri:core:contentnode-node-assessments"),
data={"ids": ",".join(exercise_ids)},
)
self.assertEqual(response.data, 0)
def test_contentnode_recommendations(self):
node_id = content.ContentNode.objects.get(title="c2c2").id
response = self.client.get(
reverse(
"kolibri:core:contentnode-recommendations-for", kwargs={"pk": node_id}
)
)
self.assertEqual(len(response.data), 2)
def test_contentnode_recommendations_does_error_for_unavailable_node(self):
node = content.ContentNode.objects.get(title="c2c2")
node.available = False
node.save()
node_id = node.id
response = self.client.get(
reverse(
"kolibri:core:contentnode-recommendations-for", kwargs={"pk": node_id}
)
)
self.assertEqual(response.status_code, 404)
def test_contentnode_ids(self):
titles = ["c2c2", "c2c3"]
nodes = [content.ContentNode.objects.get(title=title) for title in titles]
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"ids": ",".join([n.id for n in nodes])},
)
self.assertEqual(len(response.data), 2)
for i in range(len(titles)):
self.assertEqual(response.data[i]["title"], titles[i])
def test_contentnode_parent(self):
parent = content.ContentNode.objects.get(title="c2")
children = parent.get_children()
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"parent": parent.id, "include_coach_content": False},
)
self.assertEqual(len(response.data), children.count())
for i in range(len(children)):
self.assertEqual(response.data[i]["title"], children[i].title)
def test_contentnode_tags(self):
expected = {
"root": ["tag_1", "tag_2", "tag_3"],
"c1": ["tag_1"],
"c2": ["tag_2"],
}
for title, tags in expected.items():
node = content.ContentNode.objects.get(title=title)
response = self.client.get(
reverse("kolibri:core:contentnode-detail", kwargs={"pk": node.id})
)
self.assertEqual(set(response.data["tags"]), set(tags))
def test_channelmetadata_list(self):
response = self.client.get(reverse("kolibri:core:channel-list", kwargs={}))
self.assertEqual(response.data[0]["name"], "testing")
def test_channelmetadata_retrieve(self):
data = content.ChannelMetadata.objects.values()[0]
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data["id"]})
)
self.assertEqual(response.data["name"], "testing")
def test_channelmetadata_langfield(self):
data = content.ChannelMetadata.objects.first()
root_lang = content.Language.objects.get(pk=1)
data.root.lang = root_lang
data.root.save()
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data.id})
)
self.assertEqual(response.data["lang_code"], root_lang.lang_code)
self.assertEqual(response.data["lang_name"], root_lang.lang_name)
def test_channelmetadata_langfield_none(self):
data = content.ChannelMetadata.objects.first()
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data.id})
)
self.assertEqual(response.data["lang_code"], None)
self.assertEqual(response.data["lang_name"], None)
def test_channelmetadata_content_available_param_filter_lowercase_true(self):
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": "true"}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_available_param_filter_uppercase_true(self):
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": True}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_unavailable_param_filter_false(self):
content.ContentNode.objects.filter(title="root").update(available=False)
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": False}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_available_field_true(self):
response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(response.data[0]["available"], True)
def test_channelmetadata_content_available_field_false(self):
content.ContentNode.objects.filter(title="root").update(available=False)
response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(response.data[0]["available"], False)
def test_channelmetadata_has_exercises_filter(self):
        # This channel has no exercises (nor any other content, for that matter)
no_exercise_channel = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id="f8ec4a5d14cd4716890999da596032d2",
content_id="ded4a083e75f4689b386fd2b706e792a",
kind="topic",
title="no exercise channel",
)
content.ChannelMetadata.objects.create(
id="63acff41781543828861ade41dbdd7ff",
name="no exercise channel metadata",
root=no_exercise_channel,
)
no_filter_response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(len(no_filter_response.data), 2)
with_filter_response = self.client.get(
reverse("kolibri:core:channel-list"), {"has_exercise": True}
)
self.assertEqual(len(with_filter_response.data), 1)
self.assertEqual(with_filter_response.data[0]["name"], "testing")
def test_file_list(self):
response = self.client.get(reverse("kolibri:core:file-list"))
self.assertEqual(len(response.data), 5)
def test_file_retrieve(self):
response = self.client.get(
reverse(
"kolibri:core:file-detail",
kwargs={"pk": "6bdfea4a01830fdd4a585181c0b8068c"},
)
)
self.assertEqual(response.data["preset"], "high_res_video")
def _setup_contentnode_progress(self):
# set up data for testing progress_fraction field on content node endpoint
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="learner", facility=facility)
user.set_password("pass")
user.save()
root = content.ContentNode.objects.get(title="root")
c1 = content.ContentNode.objects.get(title="c1")
c2 = content.ContentNode.objects.get(title="c2")
c2c1 = content.ContentNode.objects.get(title="c2c1")
c2c3 = content.ContentNode.objects.get(title="c2c3")
for node, progress in [(c2c1, 0.7), (c2c3, 0.5)]:
ContentSummaryLog.objects.create(
user=user,
content_id=node.content_id,
progress=progress,
channel_id=self.the_channel_id,
start_timestamp=datetime.datetime.now(),
)
return facility, root, c1, c2, c2c1, c2c3
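    # For the topic assertions below, progress_fraction is the mean over leaf
    # descendants: 0.4 = (0.7 + 0 + 0.5) / 3 for c2's three children, and
    # 0.24 = (0.7 + 0.5) / 5 for root (assuming five leaves in the fixture).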
def test_contentnode_progress_detail_endpoint(self):
facility, root, c1, c2, c2c1, c2c3 = self._setup_contentnode_progress()
def assert_progress(node, progress):
response = self.client.get(
reverse(
"kolibri:core:contentnodeprogress-detail", kwargs={"pk": node.id}
)
)
self.assertEqual(response.data["progress_fraction"], progress)
# check that there is no progress when not logged in
assert_progress(root, 0)
assert_progress(c1, 0)
assert_progress(c2, 0)
assert_progress(c2c1, 0)
# check that progress is calculated appropriately when user is logged in
self.client.login(username="learner", password="pass", facility=facility)
        # The progress endpoint is used, so it should report progress for topics
assert_progress(root, 0.24)
assert_progress(c1, 0)
assert_progress(c2, 0.4)
assert_progress(c2c1, 0.7)
def test_contentnode_progress_list_endpoint(self):
facility, root, c1, c2, c2c1, c2c3 = self._setup_contentnode_progress()
response = self.client.get(reverse("kolibri:core:contentnodeprogress-list"))
def get_progress_fraction(node):
return list(filter(lambda x: x["id"] == node.id, response.data))[0][
"progress_fraction"
]
# check that there is no progress when not logged in
self.assertEqual(get_progress_fraction(root), 0)
self.assertEqual(get_progress_fraction(c1), 0)
self.assertEqual(get_progress_fraction(c2), 0)
self.assertEqual(get_progress_fraction(c2c1), 0)
# check that progress is calculated appropriately when user is logged in
self.client.login(username="learner", password="pass", facility=facility)
response = self.client.get(reverse("kolibri:core:contentnodeprogress-list"))
        # The progress endpoint is used, so it should report progress for topics
self.assertEqual(get_progress_fraction(root), 0.24)
self.assertEqual(get_progress_fraction(c1), 0)
self.assertEqual(get_progress_fraction(c2), 0.4)
self.assertEqual(get_progress_fraction(c2c1), 0.7)
def test_filtering_coach_content_anon(self):
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"include_coach_content": False},
)
        # TODO: make the content_test.json fixture more organized. For now, hardcode the correct count.
self.assertEqual(len(response.data), 7)
def test_filtering_coach_content_admin(self):
self.client.login(username=self.admin.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"include_coach_content": True},
)
expected_output = content.ContentNode.objects.exclude(
available=False
).count() # coach_content node should be returned
self.assertEqual(len(response.data), expected_output)
def test_copies(self):
# the pk is actually a content id
response = self.client.get(
reverse(
"kolibri:core:contentnode-copies",
kwargs={"pk": "c6f49ea527824f398f4d5d26faf19396"},
)
)
expected_titles = set(["root", "c1", "copy"])
response_titles = set()
for node in response.data[0]:
response_titles.add(node["title"])
self.assertSetEqual(expected_titles, response_titles)
def test_available_copies(self):
# the pk is actually a content id
response = self.client.get(
reverse(
"kolibri:core:contentnode-copies",
kwargs={"pk": "f2332710c2fd483386cdeb5dcbdda81a"},
)
)
# no results should be returned for unavailable content node
self.assertEqual(len(response.data), 0)
def test_copies_count(self):
response = self.client.get(
reverse("kolibri:core:contentnode-copies-count"),
data={
"content_ids": "f2332710c2fd483386cdeb5dcbdda81f,c6f49ea527824f398f4d5d26faf15555,f2332710c2fd483386cdeb5dcbdda81a"
},
)
        # assert that the nonexistent content id does not show up in the results
        # and that no results are returned for the unavailable content node
self.assertEqual(len(response.data), 1)
self.assertEqual(
response.data[0]["count"],
content.ContentNode.objects.filter(
content_id="f2332710c2fd483386cdeb5dcbdda81f"
).count(),
)
def test_search_total_results(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(response.data["total_results"], 1)
def test_search_kinds(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(list(response.data["content_kinds"]), [content_kinds.TOPIC])
def test_search_repeated_kinds(self):
# Ensure that each kind is only returned once.
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "c"}
)
kinds = response.data["content_kinds"][:]
self.assertEqual(len(kinds), len(set(kinds)))
def test_search_channels(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(response.data["channel_ids"][:], [self.the_channel_id])
def test_search_repeated_channels(self):
# Ensure that each channel_id is only returned once.
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "c"}
)
channel_ids = response.data["channel_ids"][:]
self.assertEqual(len(channel_ids), len(set(channel_ids)))
def test_search(self):
        # ensure search returns no results when the query contains no recognizable words
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "!?,"}
)
self.assertEqual(len(response.data["results"]), 0)
        # ensure search returns no results when the query contains only stopwords
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "or"}
)
self.assertEqual(len(response.data["results"]), 0)
# regular search
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(len(response.data["results"]), 1)
def _create_session_logs(self):
content_ids = (
"f2332710c2fd483386cdeb5ecbdda81f",
"ce603df7c46b424b934348995e1b05fb",
"481e1bda1faa445d801ceb2afbd2f42f",
)
channel_id = "6199dde695db4ee4ab392222d5af1e5c"
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[0],
start_timestamp=timezone.now(),
kind="audio",
)
for _ in range(50)
]
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[1],
start_timestamp=timezone.now(),
kind="exercise",
)
for _ in range(25)
]
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[2],
start_timestamp=timezone.now(),
kind="document",
)
for _ in range(1)
]
        # create a log for a nonexistent content id;
        # it should not show up in the API response
ContentSessionLog.objects.create(
channel_id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
start_timestamp=timezone.now(),
kind="content",
)
return content_ids
def test_popular(self):
expected_content_ids = self._create_session_logs()
response = self.client.get(reverse("kolibri:core:contentnode-popular"))
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_no_coach_content(self):
expected_content_ids = self._create_session_logs()
node = content.ContentNode.objects.get(content_id=expected_content_ids[0])
node.coach_content = True
node.save()
expected_content_ids = expected_content_ids[1:]
response = self.client.get(
reverse("kolibri:core:contentnode-popular"),
data={"include_coach_content": False},
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_coach_has_coach_content(self):
coach = FacilityUser.objects.create(username="coach", facility=self.facility)
coach.set_password(DUMMY_PASSWORD)
coach.save()
self.facility.add_coach(coach)
expected_content_ids = self._create_session_logs()
node = content.ContentNode.objects.get(content_id=expected_content_ids[0])
node.coach_content = True
node.save()
self.client.login(username="coach", password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-popular"),
data={"include_coach_content": True},
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_ten_minute_cache(self):
self._create_session_logs()
response = self.client.get(reverse("kolibri:core:contentnode-popular"))
self.assertEqual(response["Cache-Control"], "max-age=600")
def _create_summary_logs(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
content_ids = ("f2332710c2fd483386cdeb5ecbdda81f",)
channel_id = "6199dde695db4ee4ab392222d5af1e5c"
ContentSummaryLog.objects.create(
channel_id=channel_id,
content_id=content_ids[0],
user_id=user.id,
start_timestamp=timezone.now(),
kind="audio",
)
        # create a log with progress of 1;
        # it should not show up in the API response
ContentSummaryLog.objects.create(
channel_id=channel_id,
content_id="ce603df7c46b424b934348995e1b05fb",
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
        # create a log for a nonexistent content id;
        # it should not show up in the API response
ContentSummaryLog.objects.create(
channel_id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
user_id=user.id,
start_timestamp=timezone.now(),
kind="content",
)
user.set_password(DUMMY_PASSWORD)
user.save()
return user, content_ids
def test_resume(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_resume_wrong_id(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": "wrong"})
)
response_content_ids = [node["content_id"] for node in response.json()]
self.assertEqual([], response_content_ids)
def test_resume_zero_cache(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": user.id})
)
self.assertEqual(response["Cache-Control"], "max-age=0")
def test_next_steps_prereq(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
expected_content_ids = (post_req.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_prereq_zero_cache(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
self.assertEqual(response["Cache-Control"], "max-age=0")
def test_next_steps_prereq_wrong_id(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": "wrong"})
)
response_content_ids = [node["content_id"] for node in response.json()]
self.assertEqual([], response_content_ids)
def test_next_steps_prereq_in_progress(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
ContentSummaryLog.objects.create(
channel_id=post_req.channel_id,
content_id=post_req.content_id,
user_id=user.id,
progress=0.5,
start_timestamp=timezone.now(),
kind="audio",
)
expected_content_ids = []
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_prereq_coach_content_not_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
post_req.coach_content = True
post_req.save()
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(), response_content_ids)
def test_next_steps_prereq_coach_content_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
facility.add_coach(user)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
post_req.coach_content = True
post_req.save()
expected_content_ids = (post_req.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
expected_content_ids = (sibling.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling_in_progress(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
ContentSummaryLog.objects.create(
channel_id=sibling.channel_id,
content_id=sibling.content_id,
user_id=user.id,
progress=0.5,
start_timestamp=timezone.now(),
kind="audio",
)
expected_content_ids = []
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling_coach_content_not_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
sibling.coach_content = True
sibling.save()
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(), response_content_ids)
def test_next_steps_sibling_coach_content_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
facility.add_coach(user)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
sibling.coach_content = True
sibling.save()
expected_content_ids = (sibling.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def tearDown(self):
"""
clean up files/folders created during the test
"""
cache.clear()
super(ContentNodeAPITestCase, self).tearDown()
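# Test helper: patches requests.get so that any HTTP call made inside the
# decorated test returns a canned JSON payload of [{"id": 1, "name": "studio"}].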
def mock_patch_decorator(func):
def wrapper(*args, **kwargs):
mock_object = mock.Mock()
mock_object.json.return_value = [{"id": 1, "name": "studio"}]
with mock.patch.object(requests, "get", return_value=mock_object):
return func(*args, **kwargs)
return wrapper
class KolibriStudioAPITestCase(APITestCase):
@classmethod
def setUpTestData(cls):
DeviceSettings.objects.create(is_provisioned=True)
cls.facility = Facility.objects.create(name="facility")
superuser = FacilityUser.objects.create(
username="superuser", facility=cls.facility
)
superuser.set_password(DUMMY_PASSWORD)
superuser.save()
cls.superuser = superuser
DevicePermissions.objects.create(user=superuser, is_superuser=True)
def setUp(self):
self.client.login(username=self.superuser.username, password=DUMMY_PASSWORD)
@mock_patch_decorator
def test_channel_list(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-list"), format="json"
)
self.assertEqual(response.data[0]["id"], 1)
@mock_patch_decorator
def test_channel_retrieve_list(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-retrieve-list", kwargs={"pk": 1}),
format="json",
)
self.assertEqual(response.data[0]["id"], 1)
@mock_patch_decorator
def test_no_permission_non_superuser_channel_list(self):
user = FacilityUser.objects.create(username="user", facility=self.facility)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.logout()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:remotechannel-list"), format="json"
)
self.assertEqual(response.status_code, 403)
@mock_patch_decorator
def test_channel_retrieve(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-detail", kwargs={"pk": "abc"}),
format="json",
)
self.assertEqual(response.data["name"], "studio")
@mock_patch_decorator
def test_channel_info_404(self):
mock_object = mock.Mock()
mock_object.status_code = 404
requests.get.return_value = mock_object
response = self.client.get(
reverse("kolibri:core:remotechannel-detail", kwargs={"pk": "abc"}),
format="json",
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def tearDown(self):
cache.clear()
| indirectlylit/kolibri | kolibri/core/content/test/test_content_app.py | Python | mit | 72,718 | 0.001595 |
from django.contrib import admin
from django.utils.text import truncate_words
from django.core import urlresolvers
from django.utils.html import escape
from infosys.models import *
def uni_tr_10(field_name):
def func(obj):
return truncate_words(unicode(getattr(obj, field_name)), 10)
func.short_description = field_name
func.admin_order_field = field_name
return func
def uni_fk_tr_10(field_name, order_field=None):
fnparts = field_name.split('__')
def func(obj):
f = getattr(obj, fnparts[0])
for part in fnparts[1:]:
f = getattr(f, part)
url_name = 'admin:%s_%s_change' % (f._meta.app_label,
f._meta.module_name)
url = urlresolvers.reverse(url_name, args=(f.pk,))
name = escape(truncate_words(unicode(f), 10))
return u'<a href="%s">%s</a>' % (url, name)
func.allow_tags = True
func.short_description = fnparts[-1]
if order_field is not False:
func.admin_order_field = order_field or field_name
return func
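# Illustrative usage sketch (hypothetical admin, not part of this module):
# both factories are meant for ModelAdmin.list_display, e.g.
#
#   class ExampleAdmin(admin.ModelAdmin):
#       list_display = ['id', uni_tr_10('ime'), uni_fk_tr_10('uporabnik')]
#
# uni_tr_10 truncates the column text to 10 words, while uni_fk_tr_10 also
# renders the related object as a link to its admin change page.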
class NaslovAdmin(admin.ModelAdmin):
search_fields = ['ulica', 'hisna_stevilka', 'posta', 'kraj']
list_display = ['id', 'ulica', 'hisna_stevilka', 'posta', 'kraj']
class SolskoLetoAdmin(admin.ModelAdmin):
search_fields = []
list_display = ['id', 'zacetno_leto', 'koncno_leto', 'aktivno']
raw_id_fields = []
class ProfesorAdmin(admin.ModelAdmin):
search_fields = ['uporabnik__username', 'uporabnik__first_name',
'uporabnik__last_name']
list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'),
'ime', 'priimek']
raw_id_fields = ['uporabnik']
def ime(self, obj):
return obj.uporabnik.first_name
ime.admin_order_field = 'uporabnik__first_name'
def priimek(self, obj):
return obj.uporabnik.last_name
priimek.admin_order_field = 'uporabnik__last_name'
class SmerAdmin(admin.ModelAdmin):
search_fields = ['smer']
list_display = ['id', 'smer']
class PredmetAdmin(admin.ModelAdmin):
search_fields = ['predmet', 'ime']
list_display = ['id', 'ime', 'predmet']
class StarsAdmin(admin.ModelAdmin):
search_fields = ['uporabnik__username', 'uporabnik__first_name',
'uporabnik__last_name']
list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'),
'ime', 'priimek', uni_fk_tr_10('prebivalisce')]
raw_id_fields = ['uporabnik', 'prebivalisce']
def ime(self, obj):
return obj.uporabnik.first_name
ime.admin_order_field = 'uporabnik__first_name'
def priimek(self, obj):
return obj.uporabnik.last_name
priimek.admin_order_field = 'uporabnik__last_name'
class DijakAdmin(admin.ModelAdmin):
search_fields = ['uporabnik__username', 'uporabnik__first_name',
'uporabnik__last_name', 'emso']
list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'),
'ime', 'priimek', 'emso']
raw_id_fields = ['uporabnik', 'stalno_prebivalisce',
'zacasno_prebivalisce', 'oce', 'mati']
list_filter = ['v_dijaskem_domu']
def ime(self, obj):
return obj.uporabnik.first_name
ime.admin_order_field = 'uporabnik__first_name'
def priimek(self, obj):
return obj.uporabnik.last_name
priimek.admin_order_field = 'uporabnik__last_name'
class RazredAdmin(admin.ModelAdmin):
search_fields = ['ime']
list_display = ['id', 'ime', uni_fk_tr_10('solsko_leto'), uni_fk_tr_10('smer'), uni_fk_tr_10('razrednik')]
raw_id_fields = ['razrednik']
filter_horizontal = ['dijaki']
class PoucujeAdmin(admin.ModelAdmin):
search_fields = []
list_display = ['id', uni_fk_tr_10('profesor'), uni_fk_tr_10('razred'),
uni_fk_tr_10('predmet')]
raw_id_fields = ['profesor', 'razred', 'predmet']
class OcenjevalnoObdobjeAdmin(admin.ModelAdmin):
search_fields = ['ime']
list_display = ['id', 'ime', uni_fk_tr_10('solsko_leto'),
'zacetek', 'konec']
class DogodekAdmin(admin.ModelAdmin):
search_fields = ['ime']
list_display = ['id', uni_tr_10('ime'), uni_tr_10('datum'),
uni_fk_tr_10('poucuje__predmet', 'poucuje__predmet__ime'),
uni_fk_tr_10('poucuje__profesor', False),
'ocenjevalno_obdobje']
raw_id_fields = ['poucuje']
class OcenaAdmin(admin.ModelAdmin):
search_fields = ['ocena', 'opomba', 'dijak__uporabnik__first_name',
'dijak__uporabnik__last_name',
'dijak__uporabnik__username']
list_display = ['id', uni_fk_tr_10('dijak', False),
uni_fk_tr_10('poucuje__profesor', False),
uni_fk_tr_10('poucuje__razred', False),
'ocena', 'datum_pridobitve',
uni_fk_tr_10('ocenjevalno_obdobje'),
uni_fk_tr_10('dogodek')]
raw_id_fields = ['dijak', 'poucuje', 'dogodek']
class ZakljucenaOcenaAdmin(admin.ModelAdmin):
search_fields = ['ocena', 'dijak__uporabnik__first_name',
'dijak__uporabnik__last_name',
'dijak__uporabnik__username']
list_display = ['id', uni_fk_tr_10('dijak'),
uni_fk_tr_10('poucuje__profesor', False),
uni_fk_tr_10('poucuje__razred', False),
'ocena', 'datum_pridobitve']
raw_id_fields = ['dijak', 'poucuje']
admin.site.register(Naslov, NaslovAdmin)
admin.site.register(SolskoLeto, SolskoLetoAdmin)
admin.site.register(Profesor, ProfesorAdmin)
admin.site.register(Smer, SmerAdmin)
admin.site.register(Predmet, PredmetAdmin)
admin.site.register(Stars, StarsAdmin)
admin.site.register(Dijak, DijakAdmin)
admin.site.register(Razred, RazredAdmin)
admin.site.register(Poucuje, PoucujeAdmin)
admin.site.register(OcenjevalnoObdobje, OcenjevalnoObdobjeAdmin)
admin.site.register(Dogodek, DogodekAdmin)
admin.site.register(Ocena, OcenaAdmin)
admin.site.register(ZakljucenaOcena, ZakljucenaOcenaAdmin)
| bancek/egradebook | src/apps/infosys/admin.py | Python | gpl-3.0 | 6,165 | 0.005515 |
from django.conf.urls import include, url
from . import views
urlpatterns = [
    url(r'^register/$', views.register, name="register"),
# url('^logout', views.logout_view,name="logout"),
# url('^login', views.logout_view,name="login"),
    url(r'^password_change/$', 'django.contrib.auth.views.password_change',
        {'template_name': 'profiles/change-password.html'}),
    url(r'^password_change/done',
        'django.contrib.auth.views.password_change_done',
        {'template_name': 'profiles/password_change_done.html'}),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout',
{'next_page': '/'}),
url(r'^', include('django.contrib.auth.urls')),
url(r'^myprofile/$', views.myprofile, name='myprofile'),
url(r'^users/$', views.user_profilelist, name='user_profilelist'),
url(r'^editprofile/$', views.profile_edit, name='profile_edit'),
url(r'^(?P<id>\d+)$', views.user_profiles, name='user_profiles'),
]
| RachellCalhoun/craftsite | accounts/urls.py | Python | gpl-3.0 | 1,059 | 0.00661 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from vilya.libs.template import st
from vilya.models.feed import get_user_inbox, get_public_feed
MAX_ACT_COUNT = 100
_q_exports = ['actions', 'public_timeline']
def _q_index(request):
return st('/m/feed.html', **locals())
def public_timeline(request):
is_public = True
return st('/m/feed.html', **locals())
def actions(request):
since_id = request.get_form_var('since_id', '')
is_public = request.get_form_var('is_public', '')
user = request.user
all_actions = []
if is_public == 'true':
all_actions = get_public_feed().get_actions(0, MAX_ACT_COUNT)
elif user:
all_actions = get_user_inbox(user.username).get_actions(
0, MAX_ACT_COUNT)
if since_id:
actions = []
for action in all_actions:
if action.get('uid') == since_id:
break
actions.append(action)
else:
actions = all_actions
return st('/m/actions.html', **locals())
| douban/code | vilya/views/m.py | Python | bsd-3-clause | 1,035 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
trigger.contrib.docommand
~~~~~~~~~~~~~~~~~~~~~~~~~
This package provides facilities for running commands on devices using the CLI.
"""
__author__ = 'Jathan McCollum, Mike Biancianello'
__maintainer__ = 'Jathan McCollum'
__email__ = 'jathan@gmail.com'
__copyright__ = 'Copyright 2012-2013, AOL Inc.; 2013 Salesforce.com'
__version__ = '3.2.1'
# Imports
import os
import re
import socket
from twisted.python import log
from trigger.conf import settings
from trigger.cmds import Commando
from xml.etree.cElementTree import ElementTree, Element, SubElement
import xml.etree.cElementTree as ET
# Exports
__all__ = ['DoCommandBase', 'CommandRunner', 'ConfigLoader', 'xml_print', 'core']
from . import core
from core import *
__all__.extend(core.__all__)
# Classes
class DoCommandBase(Commando):
"""
Base class for docommand action classes.
"""
description = 'Insert description here.'
def errback(self, failure, device):
failure = super(DoCommandBase, self).errback(failure, device)
print '%s - Error: %s' % (device, failure.value)
return failure
def from_base(self, results, device, commands=None):
"""Call store_results without calling map_results"""
log.msg('Received %r from %s' % (results, device))
self.store_results(device, results)
# TODO: Right now if you are loading commands from files, this will ultimately
# fail with a ReactorNotRestartable error because the core.main() function is
# calling each action class separately. We need to account for this. See
# https://gist.github.com/jathanism/4543974 for a possible solution.
class CommandRunner(DoCommandBase):
"""
Run commands on network devices.
Usage::
n = CommandRunner(devices=['dev1', dev2'], files=['file1', 'file2'])
n.run()
This will execute all commands inside of each file ('file1','file2')
onto all listed devices ('dev1, 'dev2').
:param devices:
List of device names. Each hostname must have a match in NetDevices.
:param files:
List of files named after the FQDN of each device.
"""
description = 'Run commands on network devices.'
def __init__(self, files=None, commands=None, debug=False, timeout=30, **kwargs):
"""
:param files:
List of fully-qualified paths to command files
:param commands:
List of commands to execute
:param debug:
Whether to display debug information
:param timeout:
Timeout in seconds
"""
if files is None:
files = []
if commands is None:
commands = []
self.commands = commands
self.data = {}
self.files = files
self.debug = debug
self.__loadCmdsFromFiles()
if 'kwargs' in locals():
kwargs['timeout'] = timeout
else:
kwargs = dict(timeout=timeout)
super(CommandRunner, self).__init__(**kwargs)
def __loadCmdsFromFiles(self, skip_comments=True):
"""
Reads in file contents and adds to self.commands list.
This is done to prevent having to read the list of cmds multiple times.
"""
for fname in self.files:
with open(fname, 'r') as fr:
lines = fr.readlines()
if skip_comments:
lines = [line for line in lines if not line.startswith('#')]
for cmd in lines:
cmd = cmd.strip()
self.commands.append(cmd)
def store_results(self, device, results):
"""Define how we're storing results."""
devname = device.nodeName
if self.verbose:
print 'Parsing commands for %s' % devname
if self.debug:
msg = "-->store_results(device=%r, results=%r)" % (devname, results)
print msg
log.msg(msg)
outs = []
for i, out in enumerate(results):
cmd = self.commands[i]
d = {'cmd': cmd, 'out': out, 'dev': device}
outs.append(d)
self.data[devname] = outs
return True
def __children_with_namespace(self, ns):
return lambda elt, tag: elt.findall('./' + ns + tag)
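    # __children_with_namespace(ns) builds a finder: given an element and a bare
    # tag name, it returns the element's direct children in namespace ns, e.g.
    # children = self.__children_with_namespace(ns); children(elt, 'error').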
def from_juniper(self, data, device, commands=None):
        # If we've set force_cli, use from_base() instead
if self.force_cli:
return self.from_base(data, device, commands)
devname = device.nodeName
ns = '{http://xml.juniper.net/xnm/1.1/xnm}'
if self.verbose:
print 'parsing JunOS commands for %s' % devname
if self.debug:
print '-->from_juniper(data=%s, device=%r)' % (data, devname)
cmds = self.commands
outs = []
for i, xml in enumerate(data):
cmd = cmds[i]
outarr = xml_print(xml, iterations=10)
out = '\n'.join(outarr)
d = {'cmd': cmd, 'out': out, 'dev': device}
outs.append(d)
if self.debug:
print '\ndata["%s"]:' % i
ET.dump(xml)
self.data[devname] = outs
return True
class ConfigLoader(Commando):
"""
Load configuration changes on network devices.
Usage::
n = ConfigLoader(devices=['dev1', dev2'], files=['file1', 'file2'])
n.run()
This will load all listed config files ('file1','file2')
onto all listed devices ('dev1, 'dev2').
:param files:
List of files named after the FQDN of each device.
+ Files *must* exist in a local TFTP directory for non-Juniper devices.
+ Files *must* be accessible by device via TFTP for non-Juniper devices.
"""
description = 'Load configuration changes on network devices.'
# These are the only officially supported vendors at this time
vendors = ['a10', 'arista', 'brocade', 'cisco', 'foundry', 'dell',
'juniper']
# TODO: The config commands should be moved into NetDevice object
# (.configure_commands). The save commands are already managed like that,
# but we don't yet have a way to account for Juniper CLI commit command (it
# assumes JunoScript). We need to not be hard-coding these types of things
# all over the code-base.
known_commands = {
'config':{
'a10': 'configure terminal',
'arista': 'configure terminal',
'brocade': 'configure terminal',
'cisco': 'configure terminal',
'dell': 'configure',
'foundry': 'configure terminal',
'juniper': 'configure',
},
'save_config':{
'a10': 'write memory',
'arista': 'write memory',
'brocade': 'write memory',
'cisco': 'write memory',
'dell': 'copy running-config startup-config',
'foundry': 'write memory',
'juniper': 'commit and-quit',
}
}
def __init__(self, files=None, commands=None, debug=False, **kwargs):
"""
:param files:
List of filenames named after the FQDN of each device.
:param commands:
List of commands to execute
:param debug:
Whether to display debug information
"""
if files is None:
files = []
if commands is None:
commands = []
self.data = {}
self.commands = commands
self.files = files
self.debug = debug
super(ConfigLoader, self).__init__(**kwargs)
def to_juniper(self, device=None, commands=None, extra=None):
"""
Configure a Juniper device using JunoScript.
:returns:
list
"""
if self.verbose:
print "generating JunOS commands"
files = self.files
cmds = [Element('lock-configuration')]
for fname in files:
# fname is required to contain the full path
lc = Element('load-configuration', action='replace', format='text')
body = SubElement(lc, 'configuration-text')
if self.debug:
print "fname: " + fname
body.text = file(fname).read()
cmds.append(lc)
#commands = self.commands
if len(commands) > 0:
lc = Element('load-configuration', action='replace', format='text')
body = SubElement(lc, 'configuration-text')
body.text = '\n'.join(commands)
cmds.append(lc)
cmds.append(Element('commit-configuration'))
if self.debug:
for xml in cmds:
ET.dump(xml)
return cmds
def store_results(self, device, results):
"""
Store the results from a commands.
If you'd rather just change the default method for storing results,
overload this. All default parse/generate methods call this.
"""
devname = device.nodeName
if self.verbose:
print 'Parsing commands for %s' % devname
if self.debug:
print '-->store_results(device=%r, results=%r)' % (devname, results)
out = '\n'.join(results)
self.data[devname] = [{'dev': device, 'cmd': 'load-configuration', 'out': out}]
return True
def __children_with_namespace(self, ns):
return lambda elt, tag: elt.findall('./' + ns + tag)
def from_juniper(self, data, device, commands=None):
"""Parse results from a Juniper device."""
devname = device.nodeName
if self.verbose:
print "parsing JunOS commands for %s " % devname
if self.debug:
print '-->from_juniper(data=%s, device=%r)' % (data, devname)
if self.debug:
for xml in data:
ET.dump(xml)
ns = '{http://xml.juniper.net/xnm/1.1/xnm}'
children = self.__children_with_namespace(ns)
confresxml = data[1]
success = 0
error = 0
msg = ''
for res in confresxml.getiterator(ns + 'load-configuration-results'):
for succ in res.getiterator(ns + 'load-success'):
success += 1
msg = "Success!"
for err in res.getiterator(ns + 'error'):
error += 1
msg = "ERROR: "
elin = children(err, 'line-number')[0].text
emes = children(err, 'message')[0].text
ecol = children(err, 'column')[0].text
etok = children(err, 'token')[0].text
                msg = "%s%s in %r\n line: %s, col: %s" % (msg, emes, etok,
                                                          elin, ecol)
if success:
self.data[devname] = [{'dev': device, 'cmd': 'load-configuration', 'out': 'Success'}]
if error:
self.data[devname] = [{'dev': device, 'cmd': 'load-configuration', 'out': msg}]
return True
# Functions
def xml_print(xml, iterations=10):
"""
Display XML in a tree format.
:param xml:
XML object to parse
:param iterations:
Number of iterations to perform
"""
# TODO: Can't find a way to tie this output to the setting of the 'DEBUG'
# without making it an instance method. How!!
ret = []
if xml is None:
print "No Data"
return None
if iterations < 1:
return [str(xml)]
tag = xml.tag
marr = re.match(r"{http.*}", tag)
ns = marr.group(0)
tag = tag.replace(ns, '')
ret.append(tag)
children = list(xml)
if len(children) < 1:
ptxt = tag + " : " + (xml.text or '')
return [ptxt]
for child in children:
ptxts = xml_print(child, iterations - 1)
for t in ptxts:
# Shows elements in a tree format
ret.append(" " + t)
# Show elements in a tag1 -> tag2 -> tag3 -> field:value format
#ret.append(tag+" -> "+t)
return ret
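# Hedged usage sketch (hypothetical namespace and tags, not from this module):
#
#   from xml.etree.cElementTree import fromstring
#   doc = fromstring('<reply xmlns="http://example"><status>ok</status></reply>')
#   print '\n'.join(xml_print(doc))
#
# renders the namespaced tree as indented "tag : text" lines.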
| chepazzo/trigger | trigger/contrib/docommand/__init__.py | Python | bsd-3-clause | 12,051 | 0.001079 |
from django.contrib import admin
from abcast.models import *
from django.contrib.auth.models import User
from genericadmin.admin import GenericAdminModelAdmin, GenericTabularInline
class MembersInline(admin.TabularInline):
model = Station.members.through
class ChannelsInline(admin.TabularInline):
model = Channel
readonly_fields = ('teaser', 'type', 'stream_server', )
exclude = ('description', 'stream_url', 'teaser',)
class StationAdmin(admin.ModelAdmin):
list_display = ('name', 'type', 'website',)
readonly_fields = ('uuid', 'slug', )
inlines = [ChannelsInline, MembersInline,]
class ChannelAdmin(admin.ModelAdmin):
list_display = ('name', 'station', 'type', 'stream_url', 'mount', )
list_filter = ('station', 'type',)
readonly_fields = ('uuid', 'slug', )
class JingleInline(admin.TabularInline):
exclude = ['description', 'slug', 'processed', 'conversion_status']
model = Jingle
class JingleAdmin(admin.ModelAdmin):
list_display = ('name', 'duration', 'set', 'type' )
list_filter = ('type',)
readonly_fields = ('uuid', 'slug', 'folder')
class JingleSetAdmin(admin.ModelAdmin):
#list_display = ('name', 'duration', 'set', 'type' )
#list_filter = ('type',)
readonly_fields = ('uuid', 'slug', )
inlines = [JingleInline, ]
class StreamServerAdmin(admin.ModelAdmin):
list_display = ('name', 'host', 'type' )
list_filter = ('type',)
readonly_fields = ('uuid',)
admin.site.register(Station, StationAdmin)
admin.site.register(Channel, ChannelAdmin)
admin.site.register(Jingle, JingleAdmin)
admin.site.register(JingleSet, JingleSetAdmin)
admin.site.register(StreamServer, StreamServerAdmin)
admin.site.register(StreamFormat)
admin.site.register(Role)
| hzlf/openbroadcast | website/apps/abcast/admin/baseadmin.py | Python | gpl-3.0 | 1,860 | 0.017204 |
from django.contrib import admin
from django.utils.translation import ugettext, ugettext_lazy as _
from ella.positions.models import Position
from ella.utils import timezone
class PositionOptions(admin.ModelAdmin):
def show_title(self, obj):
if not obj.target:
return '-- %s --' % ugettext('empty position')
else:
return u'%s [%s]' % (obj.target.title, ugettext(obj.target_ct.name),)
show_title.short_description = _('Title')
def is_filled(self, obj):
if obj.target:
return True
else:
return False
is_filled.short_description = _('Filled')
is_filled.boolean = True
def is_active(self, obj):
if obj.disabled:
return False
now = timezone.now()
active_from = not obj.active_from or obj.active_from <= now
active_till = not obj.active_till or obj.active_till > now
return active_from and active_till
is_active.short_description = _('Active')
is_active.boolean = True
list_display = ('name', 'category', 'box_type', 'is_active', 'is_filled', 'show_title', 'disabled',)
list_filter = ('category', 'name', 'disabled', 'active_from', 'active_till',)
search_fields = ('box_type', 'text',)
# suggest_fields = {'category': ('tree_path', 'title', 'slug',),}
admin.site.register(Position, PositionOptions)
| WhiskeyMedia/ella | ella/positions/admin.py | Python | bsd-3-clause | 1,381 | 0.003621 |
from loader import *
from extract_keywords import split_line
from collections import defaultdict
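# Each output line written below is a sparse bag-of-words for one paper:
# "keyword_id count keyword_id count ...", suitable as LDA input.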
def main():
keyword_data = load_single_file('keyword_table.csv')
paper_data = load_single_file('Paper.csv')
with open('lda_input.txt', 'w', encoding='utf-8') as write_file:
for paper_id in paper_data.keys():
paper = paper_data[paper_id]
title = paper['title']
keyword = paper['keyword']
word_list = split_line(title)
word_list += split_line(keyword)
counter = defaultdict(int)
for word in word_list:
if word in keyword_data.keys():
unique_id = keyword_data[word]['unique']
counter[unique_id] += 1
line = ''
for key in sorted(counter.keys()):
count = counter[key]
line += '{} {} '.format(key, count)
line = line.strip()
if len(line) == 0:
continue
line += '\n'
write_file.write(line)
if __name__ == '__main__':
    main()
| leeopop/2015-CS570-Project | prepare_lda.py | Python | mit | 897 | 0.031215 |
# Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from __future__ import print_function
import sys
from m5.util import warn
# fix the global frequency
def fixGlobalFrequency():
import _m5.core
_m5.core.fixClockFrequency()
def setGlobalFrequency(ticksPerSecond):
from m5.util import convert
import _m5.core
if isinstance(ticksPerSecond, (int, long)):
tps = ticksPerSecond
elif isinstance(ticksPerSecond, float):
tps = ticksPerSecond
elif isinstance(ticksPerSecond, str):
tps = round(convert.anyToFrequency(ticksPerSecond))
else:
raise TypeError, \
"wrong type '%s' for ticksPerSecond" % type(ticksPerSecond)
_m5.core.setClockFrequency(int(tps))
# how big does a rounding error need to be before we warn about it?
frequency_tolerance = 0.001 # 0.1%
def fromSeconds(value):
import _m5.core
if not isinstance(value, float):
raise TypeError, "can't convert '%s' to type tick" % type(value)
# once someone needs to convert to seconds, the global frequency
# had better be fixed
if not _m5.core.clockFrequencyFixed():
raise AttributeError, \
"In order to do conversions, the global frequency must be fixed"
if value == 0:
return 0
# convert the value from time to ticks
value *= _m5.core.getClockFrequency()
int_value = int(round(value))
err = (value - int_value) / value
if err > frequency_tolerance:
warn("rounding error > tolerance\n %f rounded to %d", value,
int_value)
return int_value
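# Illustrative sketch (hypothetical values): with the global frequency fixed at
# 1 THz, one nanosecond converts to 1000 ticks:
#
#   setGlobalFrequency('1THz')
#   fixGlobalFrequency()
#   fromSeconds(1e-9)  # -> 1000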
__all__ = [ 'setGlobalFrequency', 'fixGlobalFrequency', 'fromSeconds',
'frequency_tolerance' ]
| vineodd/PIMSim | GEM5Simulation/gem5/src/python/m5/ticks.py | Python | gpl-3.0 | 3,221 | 0.003415 |
#!/usr/bin/env python
## Program: VMTK
## Module: $RCSfile: vmtkfebiowytet10.py,v $
## Language: Python
## Date: $Date: 2016/08/19 09:49:59 $
## Version: $Revision: 1.6 $
## Copyright (c) Jingfeng Jiang, Yu Wang. All rights reserved.
## See LICENCE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
import vtk
import vtkvmtk
import sys
import pypes
vmtkfebiowytet10 = 'vmtkfebiowrite'
class vmtkfebiowrite(pypes.pypeScript):
def __init__(self):
pypes.pypeScript.__init__(self)
self.Surface = None
self.SetScriptName('vmtkfebiowrite')
        self.SetScriptDoc('writes the input mesh to an FEBio .feb file using quadratic (tet10) elements')
self.SetInputMembers([
['Surface','i','vtkUnstructuredGrid',1,'','the mesh surface','vmtkmeshreader']
])
self.SetOutputMembers([])
# ['dSurface','o','vtkUnstructuredGrid',1,'','the output surface','vmtkmeshwriter']
def Execute(self):
if self.Surface == None:
self.PrintError('Error: No Surface.')
        self.PrintLog('Writing FEBio file.')
surfaceProjection = vtkvmtk.vtkvmtkFEBioWritertet10()
surfaceProjection.SetInput(self.Surface)
# surfaceProjection.SetBoundaryDataArrayName('CellEntityIds')
surfaceProjection.SetVolumeDataArrayName('VolId') # type ID: Volume VolId
surfaceProjection.SetFileName('output.feb')
surfaceProjection.Write()
surfaceProjection.Update()
if __name__=='__main__':
main = pypes.pypeMain()
main.Arguments = sys.argv
main.Execute()
| jjiang-mtu/virtual-breast-project | dynamic_SWE/write_FEBio_format_quadratic/vmtkfebiowytet10.py | Python | gpl-2.0 | 1,844 | 0.016269 |
from vsg.rules import previous_line
from vsg import token
lTokens = []
lTokens.append(token.if_statement.if_keyword)
class rule_031(previous_line):
'''
This rule checks for blank lines or comments above the **if** keyword.
    In the case of nested **if** statements, the rule will be enforced on the first **if**.
|configuring_previous_line_rules_link|
The default style is :code:`no_code`.
**Violation**
.. code-block:: vhdl
C <= '1';
if (A = '1') then
B <= '0';
end if;
-- This is a comment
if (A = '1') then
B <= '0';
end if;
**Fix**
.. code-block:: vhdl
C <= '1';
if (A = '1') then
B <= '0';
end if;
-- This is a comment
if (A = '1') then
B <= '0';
end if;
'''
def __init__(self):
previous_line.__init__(self, 'if', '031', lTokens)
self.lHierarchyLimits = [0]
self.style = 'no_code'
| jeremiah-c-leary/vhdl-style-guide | vsg/rules/if_statement/rule_031.py | Python | gpl-3.0 | 988 | 0.001012 |
# from http://stackoverflow.com/questions/4581789/how-do-i-get-user-ip-address-in-django
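# Usage sketch (hypothetical view, not part of this module):
#
#   def my_view(request):
#       client_ip = get_client_ip(request)
#       ...
#
# Note that X-Forwarded-For can be spoofed unless it is set by a trusted proxy.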
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
| mattcaldwell/autopylot | autopylot/django/__init__.py | Python | mit | 324 | 0.003086 |
from django.core.files.uploadedfile import InMemoryUploadedFile
import re
import six
from django import forms
from django.forms.util import flatatt
from django.forms.widgets import FileInput
from django.template import Context
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
# Django 1.4 compatibility
from oscar.core.compat import format_html
class ImageInput(FileInput):
"""
    Widget providing an input element for file uploads based on the
Django ``FileInput`` element. It hides the actual browser-specific
input element and shows the available image for images that have
been previously uploaded. Selecting the image will open the file
dialog and allow for selecting a new or replacing image file.
"""
template_name = 'partials/image_input_widget.html'
attrs = {'accept': 'image/*'}
def render(self, name, value, attrs=None):
"""
Render the ``input`` field based on the defined ``template_name``. The
image URL is take from *value* and is provided to the template as
``image_url`` context variable relative to ``MEDIA_URL``. Further
attributes for the ``input`` element are provide in ``input_attrs`` and
contain parameters specified in *attrs* and *name*.
If *value* contains no valid image URL an empty string will be provided
in the context.
"""
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if not value or isinstance(value, InMemoryUploadedFile):
# can't display images that aren't stored
image_url = ''
else:
image_url = final_attrs['value'] = force_text(
self._format_value(value))
return render_to_string(self.template_name, Context({
'input_attrs': flatatt(final_attrs),
'image_url': image_url,
'image_id': "%s-image" % final_attrs['id'],
}))
class WYSIWYGTextArea(forms.Textarea):
def __init__(self, *args, **kwargs):
kwargs.setdefault('attrs', {})
kwargs['attrs'].setdefault('class', '')
kwargs['attrs']['class'] += ' wysiwyg'
super(WYSIWYGTextArea, self).__init__(*args, **kwargs)
def datetime_format_to_js_date_format(format):
"""
Convert a Python datetime format to a date format suitable for use with JS
date pickers
"""
converted = format
replacements = {
'%Y': 'yy',
'%m': 'mm',
'%d': 'dd',
'%H:%M': '',
}
for search, replace in six.iteritems(replacements):
converted = converted.replace(search, replace)
return converted.strip()
def datetime_format_to_js_time_format(format):
"""
Convert a Python datetime format to a time format suitable for use with JS
date pickers
"""
converted = format
replacements = {
'%Y': '',
'%m': '',
'%d': '',
'%H': 'HH',
'%M': 'mm',
}
for search, replace in six.iteritems(replacements):
converted = converted.replace(search, replace)
converted = re.sub('[-/][^%]', '', converted)
return converted.strip()
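# Hedged examples (assuming the common '%Y-%m-%d %H:%M' locale format):
#
#   datetime_format_to_js_date_format('%Y-%m-%d %H:%M')  # -> 'yy-mm-dd'
#   datetime_format_to_js_time_format('%Y-%m-%d %H:%M')  # -> 'HH:mm'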
def add_js_formats(widget):
"""
Set data attributes for date and time format on a widget
"""
attrs = {
'data-dateFormat': datetime_format_to_js_date_format(
widget.format),
'data-timeFormat': datetime_format_to_js_time_format(
widget.format)
}
widget.attrs.update(attrs)
class DatePickerInput(forms.DateInput):
"""
DatePicker input that uses the jQuery UI datepicker. Data attributes are
used to pass the date format to the JS
"""
def __init__(self, *args, **kwargs):
super(DatePickerInput, self).__init__(*args, **kwargs)
add_js_formats(self)
class DateTimePickerInput(forms.DateTimeInput):
# Build a widget which uses the locale datetime format but without seconds.
# We also use data attributes to pass these formats to the JS datepicker.
def __init__(self, *args, **kwargs):
include_seconds = kwargs.pop('include_seconds', False)
super(DateTimePickerInput, self).__init__(*args, **kwargs)
if not include_seconds:
self.format = re.sub(':?%S', '', self.format)
add_js_formats(self)
class AdvancedSelect(forms.Select):
"""
Customised Select widget that allows a list of disabled values to be passed
to the constructor. Django's default Select widget doesn't allow this so
we have to override the render_option method and add a section that checks
for whether the widget is disabled.
"""
def __init__(self, attrs=None, choices=(), disabled_values=()):
self.disabled_values = set(force_text(v) for v in disabled_values)
super(AdvancedSelect, self).__init__(attrs, choices)
def render_option(self, selected_choices, option_value, option_label):
option_value = force_text(option_value)
if option_value in self.disabled_values:
selected_html = mark_safe(' disabled="disabled"')
elif option_value in selected_choices:
selected_html = mark_safe(' selected="selected"')
if not self.allow_multiple_selected:
# Only allow for a single selection.
selected_choices.remove(option_value)
else:
selected_html = ''
return format_html(u'<option value="{0}"{1}>{2}</option>',
option_value,
selected_html,
force_text(option_label))
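# Illustrative usage (editor's sketch, not in the original module). A widget
# built like this would render the "beta" option with a disabled attribute
# instead of allowing it to be selected:
#
#     choices = [("alpha", "Alpha"), ("beta", "Beta")]
#     widget = AdvancedSelect(choices=choices, disabled_values=["beta"])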
| MrReN/django-oscar | oscar/forms/widgets.py | Python | bsd-3-clause | 5,740 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import create_payment_profile
import make_transaction
import edit_payment_profile
import delete_payment_profile
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ryepdx/account_payment_cim_authdotnet | wizard/__init__.py | Python | agpl-3.0 | 1,222 | 0 |
"""
Django admin dashboard configuration.
"""
from config_models.admin import ConfigurationModelAdmin, KeyedConfigurationModelAdmin
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from common.djangoapps.xblock_django.models import XBlockConfiguration, XBlockStudioConfiguration, XBlockStudioConfigurationFlag # lint-amnesty, pylint: disable=line-too-long
class XBlockConfigurationAdmin(KeyedConfigurationModelAdmin):
"""
Admin for XBlockConfiguration.
"""
fieldsets = (
('XBlock Name', {
'fields': ('name',)
}),
('Enable/Disable XBlock', {
'description': _('To disable the XBlock and prevent rendering in the LMS, leave "Enabled" deselected; '
'for clarity, update XBlockStudioConfiguration support state accordingly.'),
'fields': ('enabled',)
}),
('Deprecate XBlock', {
'description': _("Only XBlocks listed in a course's Advanced Module List can be flagged as deprecated. "
"Remember to update XBlockStudioConfiguration support state accordingly, as deprecated "
"does not impact whether or not new XBlock instances can be created in Studio."),
'fields': ('deprecated',)
}),
)
class XBlockStudioConfigurationAdmin(KeyedConfigurationModelAdmin):
"""
Admin for XBlockStudioConfiguration.
"""
fieldsets = (
('', {
'fields': ('name', 'template')
}),
('Enable Studio Authoring', {
'description': _(
'XBlock/template combinations that are disabled cannot be edited in Studio, regardless of support '
'level. Remember to also check if all instances of the XBlock are disabled in XBlockConfiguration.'
),
'fields': ('enabled',)
}),
('Support Level', {
'description': _(
"Enabled XBlock/template combinations with full or provisional support can always be created "
"in Studio. Unsupported XBlock/template combinations require course author opt-in."
),
'fields': ('support_level',)
}),
)
admin.site.register(XBlockConfiguration, XBlockConfigurationAdmin)
admin.site.register(XBlockStudioConfiguration, XBlockStudioConfigurationAdmin)
admin.site.register(XBlockStudioConfigurationFlag, ConfigurationModelAdmin)
| eduNEXT/edunext-platform | common/djangoapps/xblock_django/admin.py | Python | agpl-3.0 | 2,485 | 0.004427 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2018 The OctoPrint Project - Released under terms of the AGPLv3 License"
import octoprint.plugin
from flask_babel import gettext
import requests
import hashlib
import logging
try:
# noinspection PyCompatibility
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
# noinspection PyCompatibility
import concurrent.futures
from octoprint.util import RepeatedTimer, monotonic_time
from octoprint.util.version import get_octoprint_version_string
from octoprint.events import Events
TRACKING_URL = "https://tracking.octoprint.org/track/{id}/{event}/"
# noinspection PyMissingConstructor
class TrackingPlugin(octoprint.plugin.SettingsPlugin,
octoprint.plugin.EnvironmentDetectionPlugin,
octoprint.plugin.StartupPlugin,
octoprint.plugin.ShutdownPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.WizardPlugin,
octoprint.plugin.EventHandlerPlugin):
def __init__(self):
self._environment = None
self._throttle_state = None
self._helpers_get_throttle_state = None
self._printer_connection_parameters = None
self._url = None
self._ping_worker = None
self._pong_worker = None
self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
self._record_next_firmware_info = False
self._startup_time = monotonic_time()
def initialize(self):
self._init_id()
##~~ SettingsPlugin
def get_settings_defaults(self):
return dict(enabled=None,
unique_id=None,
server=TRACKING_URL,
ping=15*60,
pong=24*60*60,
events=dict(pong=True,
startup=True,
printjob=True,
commerror=True,
plugin=True,
update=True,
printer=True,
printer_safety_check=True,
throttled=True,
slicing=True))
def get_settings_restricted_paths(self):
return dict(admin=[["enabled"], ["unique_id"], ["events"]],
never=[["server"], ["ping"]])
def on_settings_save(self, data):
enabled = self._settings.get(["enabled"])
octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
if enabled is None and self._settings.get(["enabled"]):
# tracking was just enabled, let's start up tracking
self._start_tracking()
##~~ EnvironmentDetectionPlugin
def on_environment_detected(self, environment, *args, **kwargs):
self._environment = environment
##~~ StartupPlugin
def on_after_startup(self):
self._start_tracking()
##~~ ShutdownPlugin
def on_shutdown(self):
if not self._settings.get_boolean(["enabled"]):
return
self._track_shutdown()
##~~ EventHandlerPlugin
# noinspection PyUnresolvedReferences
def on_event(self, event, payload):
if not self._settings.get_boolean(["enabled"]):
return
if event in (Events.PRINT_STARTED, Events.PRINT_DONE, Events.PRINT_FAILED, Events.PRINT_CANCELLED):
self._track_printjob_event(event, payload)
elif event in (Events.ERROR,):
self._track_commerror_event(event, payload)
elif event in (Events.CONNECTED,):
self._printer_connection_parameters = dict(port=payload["port"],
baudrate=payload["baudrate"])
self._record_next_firmware_info = True
elif event in (Events.FIRMWARE_DATA,) and self._record_next_firmware_info:
self._record_next_firmware_info = False
self._track_printer_event(event, payload)
elif event in (Events.SLICING_STARTED,):
self._track_slicing_event(event, payload)
elif hasattr(Events, "PLUGIN_PLUGINMANAGER_INSTALL_PLUGIN") and \
event in (Events.PLUGIN_PLUGINMANAGER_INSTALL_PLUGIN, Events.PLUGIN_PLUGINMANAGER_UNINSTALL_PLUGIN,
Events.PLUGIN_PLUGINMANAGER_ENABLE_PLUGIN, Events.PLUGIN_PLUGINMANAGER_DISABLE_PLUGIN):
self._track_plugin_event(event, payload)
elif hasattr(Events, "PLUGIN_SOFTWAREUPDATE_UPDATE_SUCCEEDED") and \
event in (Events.PLUGIN_SOFTWAREUPDATE_UPDATE_SUCCEEDED, Events.PLUGIN_SOFTWAREUPDATE_UPDATE_FAILED):
self._track_update_event(event, payload)
elif hasattr(Events, "PLUGIN_PI_SUPPORT_THROTTLE_STATE") and event in (Events.PLUGIN_PI_SUPPORT_THROTTLE_STATE,):
self._throttle_state = payload
self._track_throttle_event(event, payload)
elif hasattr(Events, "PLUGIN_PRINTER_SAFETY_CHECK_WARNING") and event in (Events.PLUGIN_PRINTER_SAFETY_CHECK_WARNING,):
self._track_printer_safety_event(event, payload)
##~~ TemplatePlugin
def get_template_configs(self):
return [
dict(type="settings", name=gettext("Anonymous Usage Tracking"), template="tracking_settings.jinja2", custom_bindings=False),
dict(type="wizard", name=gettext("Anonymous Usage Tracking"), template="tracking_wizard.jinja2", custom_bindings=True, mandatory=True)
]
##~~ AssetPlugin
def get_assets(self):
return dict(js=["js/usage.js"])
##~~ WizardPlugin
def is_wizard_required(self):
return self._settings.get(["enabled"]) is None
##~~ helpers
def _init_id(self):
if not self._settings.get(["unique_id"]):
import uuid
self._settings.set(["unique_id"], str(uuid.uuid4()))
self._settings.save()
def _start_tracking(self):
if not self._settings.get_boolean(["enabled"]):
return
if self._ping_worker is None:
ping_interval = self._settings.get_int(["ping"])
if ping_interval:
self._ping_worker = RepeatedTimer(ping_interval, self._track_ping, run_first=True)
self._ping_worker.start()
if self._pong_worker is None:
            pong_interval = self._settings.get_int(["pong"])
if pong_interval:
self._pong_worker = RepeatedTimer(pong_interval, self._track_pong, run_first=True)
self._pong_worker.start()
if self._helpers_get_throttle_state is None:
# cautiously look for the get_throttled helper from pi_support
pi_helper = self._plugin_manager.get_helpers("pi_support", "get_throttled")
if pi_helper and 'get_throttled' in pi_helper:
self._helpers_get_throttle_state = pi_helper['get_throttled']
# now that we have everything set up, phone home.
self._track_startup()
def _track_ping(self):
if not self._settings.get_boolean(["enabled"]):
return
uptime = int(monotonic_time() - self._startup_time)
self._track("ping", octoprint_uptime=uptime)
def _track_pong(self):
if not self._settings.get_boolean(["events", "pong"]):
return
plugins = self._plugin_manager.enabled_plugins
plugins_thirdparty = [plugin for plugin in plugins.values() if not plugin.bundled]
payload = dict(plugins=",".join(map(lambda x: "{}:{}".format(x.key.lower(),
x.version.lower() if x.version else "?"),
plugins_thirdparty)))
self._track("pong", body=True, **payload)
def _track_startup(self):
if not self._settings.get_boolean(["events", "startup"]):
return
payload = dict(version=get_octoprint_version_string(),
os=self._environment["os"]["id"],
python=self._environment["python"]["version"],
pip=self._environment["python"]["pip"],
cores=self._environment["hardware"]["cores"],
freq=self._environment["hardware"]["freq"],
ram=self._environment["hardware"]["ram"])
if "plugins" in self._environment and "pi_support" in self._environment["plugins"]:
payload["pi_model"] = self._environment["plugins"]["pi_support"]["model"]
if "octopi_version" in self._environment["plugins"]["pi_support"]:
payload["octopi_version"] = self._environment["plugins"]["pi_support"]["octopi_version"]
self._track("startup", **payload)
def _track_shutdown(self):
if not self._settings.get_boolean(["enabled"]):
return
if not self._settings.get_boolean(["events", "startup"]):
return
self._track("shutdown")
def _track_plugin_event(self, event, payload):
if not self._settings.get_boolean(["events", "plugin"]):
return
if event.endswith("_install_plugin"):
self._track("install_plugin", plugin=payload.get("id"), plugin_version=payload.get("version"))
elif event.endswith("_uninstall_plugin"):
self._track("uninstall_plugin", plugin=payload.get("id"), plugin_version=payload.get("version"))
elif event.endswith("_enable_plugin"):
self._track("enable_plugin", plugin=payload.get("id"), plugin_version=payload.get("version"))
elif event.endswith("_disable_plugin"):
self._track("disable_plugin", plugin=payload.get("id"), plugin_version=payload.get("version"))
def _track_update_event(self, event, payload):
if not self._settings.get_boolean(["events", "update"]):
return
if event.endswith("_update_succeeded"):
self._track("update_successful", target=payload.get("target"), from_version=payload.get("from_version"), to_version=payload.get("to_version"))
elif event.endswith("_update_failed"):
self._track("update_failed", target=payload.get("target"), from_version=payload.get("from_version"), to_version=payload.get("to_version"))
def _track_throttle_event(self, event, payload):
if not self._settings.get_boolean(["events", "throttled"]):
return
args = dict(throttled_now=payload["current_issue"],
throttled_past=payload["past_issue"],
throttled_mask=payload["raw_value"],
throttled_voltage_now=payload["current_undervoltage"],
throttled_voltage_past=payload["past_undervoltage"],
throttled_overheat_now=payload["current_overheat"],
throttled_overheat_past=payload["past_overheat"])
if payload["current_issue"]:
track_event = "system_throttled"
else:
track_event = "system_unthrottled"
if track_event is not None:
self._track(track_event, **args)
def _track_commerror_event(self, event, payload):
if not self._settings.get_boolean(["events", "commerror"]):
return
if not "reason" in payload or not "error" in payload:
return
track_event = "commerror_{}".format(payload["reason"])
args = dict(commerror_text=payload["error"])
if callable(self._helpers_get_throttle_state):
try:
throttle_state = self._helpers_get_throttle_state(run_now=True)
if throttle_state and (throttle_state.get("current_issue", False) or throttle_state.get("past_issue", False)):
args["throttled_now"] = throttle_state["current_issue"]
args["throttled_past"] = throttle_state["past_issue"]
args["throttled_mask"] = throttle_state["raw_value"]
except Exception:
# ignored
pass
self._track(track_event, **args)
def _track_printjob_event(self, event, payload):
if not self._settings.get_boolean(["events", "printjob"]):
return
unique_id = self._settings.get(["unique_id"])
if not unique_id:
return
sha = hashlib.sha1()
sha.update(payload.get("path").encode("utf-8"))
sha.update(unique_id.encode("utf-8"))
track_event = None
args = dict(origin=payload.get("origin"), file=sha.hexdigest())
if event == Events.PRINT_STARTED:
track_event = "print_started"
elif event == Events.PRINT_DONE:
try:
elapsed = int(payload.get("time", 0))
if elapsed:
args["elapsed"] = elapsed
except (ValueError, TypeError):
pass
track_event = "print_done"
elif event == Events.PRINT_FAILED:
try:
elapsed = int(payload.get("time", 0))
if elapsed:
args["elapsed"] = elapsed
except (ValueError, TypeError):
pass
args["reason"] = payload.get("reason", "unknown")
if "error" in payload and self._settings.get_boolean(["events", "commerror"]):
args["commerror_text"] = payload["error"]
track_event = "print_failed"
elif event == Events.PRINT_CANCELLED:
try:
elapsed = int(payload.get("time", 0))
if elapsed:
args["elapsed"] = elapsed
except (ValueError, TypeError):
pass
track_event = "print_cancelled"
if callable(self._helpers_get_throttle_state):
try:
throttle_state = self._helpers_get_throttle_state(run_now=True)
if throttle_state and (throttle_state.get("current_issue", False) or throttle_state.get("past_issue", False)):
args["throttled_now"] = throttle_state["current_issue"]
args["throttled_past"] = throttle_state["past_issue"]
args["throttled_mask"] = throttle_state["raw_value"]
except Exception:
# ignored
pass
if track_event is not None:
self._track(track_event, **args)
def _track_printer_event(self, event, payload):
if not self._settings.get_boolean(["events", "printer"]):
return
if event in (Events.FIRMWARE_DATA,):
args = dict(firmware_name=payload["name"])
if self._printer_connection_parameters:
args["printer_port"] = self._printer_connection_parameters["port"]
args["printer_baudrate"] = self._printer_connection_parameters["baudrate"]
self._track("printer_connected", **args)
def _track_printer_safety_event(self, event, payload):
if not self._settings.get_boolean(["events", "printer_safety_check"]):
return
self._track("printer_safety_warning",
printer_safety_warning_type=payload.get("warning_type", "unknown"),
printer_safety_check_name=payload.get("check_name", "unknown"))
def _track_slicing_event(self, event, payload):
if not self._settings.get_boolean(["events", "slicing"]):
return
self._track("slicing_started",
slicer=payload.get(b"slicer", "unknown"))
def _track(self, event, **kwargs):
if not self._settings.get_boolean(["enabled"]):
return
self._executor.submit(self._do_track, event, **kwargs)
def _do_track(self, event, body=False, **kwargs):
if not self._connectivity_checker.online:
return
if not self._settings.get_boolean(["enabled"]):
return
unique_id = self._settings.get(["unique_id"])
if not unique_id:
return
server = self._settings.get(["server"])
url = server.format(id=unique_id, event=event)
# Don't print the URL or UUID! That would expose the UUID in forums/tickets
# if pasted. It's okay for the user to know their uuid, but it shouldn't be shared.
headers = {"User-Agent": "OctoPrint/{}".format(get_octoprint_version_string())}
try:
params = urlencode(kwargs, doseq=True).replace("+", "%20")
if body:
requests.post(url,
data=params,
timeout=3.1,
headers=headers)
else:
requests.get(url,
params=params,
timeout=3.1,
headers=headers)
self._logger.info("Sent tracking event {}, payload: {!r}".format(event, kwargs))
        except Exception:
            if self._logger.isEnabledFor(logging.DEBUG):
                self._logger.exception("Error while sending event to anonymous usage tracking")
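# Editor's note (illustrative, inferred from TRACKING_URL and _do_track above):
# a call such as self._track("startup", version="1.2.3") should issue
#     GET https://tracking.octoprint.org/track/<unique_id>/startup/?version=1.2.3
# while a body=True event such as "pong" POSTs the same parameters instead.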
__plugin_name__ = "Anonymous Usage Tracking"
__plugin_description__ = "Anonymous version and usage tracking, see homepage for details on what gets tracked"
__plugin_url__ = "https://tracking.octoprint.org"
__plugin_author__ = "Gina Häußge"
__plugin_implementation__ = TrackingPlugin()
| foosel/OctoPrint | src/octoprint/plugins/tracking/__init__.py | Python | agpl-3.0 | 15,412 | 0.026022 |
# Copyright 2019 Rackspace, US Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import resource
class Flavor(resource.Resource):
resource_key = 'flavor'
resources_key = 'flavors'
base_path = '/lbaas/flavors'
# capabilities
allow_create = True
allow_fetch = True
allow_commit = True
allow_delete = True
allow_list = True
_query_mapping = resource.QueryParameters(
'id', 'name', 'description', 'flavor_profile_id', is_enabled='enabled'
)
# Properties
#: The ID of the flavor.
id = resource.Body('id')
#: The name of the flavor.
name = resource.Body('name')
#: The flavor description.
description = resource.Body('description')
#: The associated flavor profile ID
flavor_profile_id = resource.Body('flavor_profile_id')
#: Whether the flavor is enabled for use or not.
is_enabled = resource.Body('enabled')
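# Illustrative usage (editor's sketch, assuming an authenticated openstacksdk
# Connection named `conn`; not part of the original module):
#
#     for flavor in conn.load_balancer.flavors(is_enabled=True):
#         print(flavor.name, flavor.flavor_profile_id)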
| ctrlaltdel/neutrinator | vendor/openstack/load_balancer/v2/flavor.py | Python | gpl-3.0 | 1,423 | 0 |
# ############################################################################
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
# ############################################################################
from base.ddd.utils import business_validator
from education_group.ddd.business_types import *
from education_group.ddd.domain.exception import CannotCopyMiniTrainingDueToEndDate
class CheckMiniTrainingEndDateValidator(business_validator.BusinessValidator):
def __init__(self, mini_training: 'MiniTraining'):
super().__init__()
self.mini_training = mini_training
def validate(self, *args, **kwargs):
if self.mini_training.end_year and self.mini_training.year >= self.mini_training.end_year:
raise CannotCopyMiniTrainingDueToEndDate(mini_training=self.mini_training)
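# Illustrative usage (editor's sketch): given a hypothetical `mini_training`
# whose year has reached its end_year,
#     CheckMiniTrainingEndDateValidator(mini_training).validate()
# is expected to raise CannotCopyMiniTrainingDueToEndDate.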
| uclouvain/OSIS-Louvain | education_group/ddd/validators/_copy_check_mini_training_end_date.py | Python | agpl-3.0 | 1,855 | 0.002157 |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
def get_version():
    base_dir = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(base_dir, '..', 'VERSION'), 'r') as f:
        return f.read().strip()
# -- Project information -----------------------------------------------------
project = 'SciKit GStat'
copyright = '2021, Mirko Mälicke'
author = 'Mirko Mälicke'
# The short X.Y version
# version = '0.3.2'
# The full version, including alpha/beta/rc tags
release = get_version()
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.napoleon',
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'IPython.sphinxext.ipython_console_highlighting',
'IPython.sphinxext.ipython_directive',
'sphinx_gallery.gen_gallery',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.ipynb']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'pydata_sphinx_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'github_url': 'https://github.com/mmaelicke/scikit-gstat',
}
html_context = {
'github_user': 'mmaelicke',
'github_repo': 'scikit-gstat',
'github_version': 'master',
'doc_path': 'docs'
}
html_short_title = 'SciKit-GStat'
"""
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html'
]
}
"""
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'SciKitGStatdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'SciKitGStat.tex', 'SciKit GStat Documentation',
'Mirko Mälicke', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'scikitgstat', 'SciKit GStat Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'SciKitGStat', 'SciKit GStat Documentation',
author, 'SciKitGStat', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Intersphinx mapping -----------------------------------------------------
intersphinx_mapping = {
'python': ('https://docs.python.org/3.6', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'numpy': ('https://docs.scipy.org/doc/numpy', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
'gstools': ('https://geostat-framework.readthedocs.io/projects/gstools/en/latest/', None),
'sklearn': ('http://scikit-learn.org/stable', None),
}
from plotly.io._sg_scraper import plotly_sg_scraper
image_scrapers = ('matplotlib', plotly_sg_scraper,)
import plotly.io as pio
pio.renderers.default = 'sphinx_gallery'
import sphinx_gallery
sphinx_gallery_conf = {
'examples_dirs': './tutorials',
'gallery_dirs': 'auto_examples',
'backreferences_dir': 'gen_modules/backreferences',
'doc_module': ('skgstat', 'skgstat'),
'image_scrapers': image_scrapers,
'filename_pattern': '/tutorial',
}
| mmaelicke/scikit-gstat | docs/conf.py | Python | mit | 7,252 | 0.000552 |
#!/usr/bin/env python3.5
# Arun Debray, 24 Dec. 2015
# Given a group order, classifies finite abelian groups of that order.
# ./finite_abelian_groups.py [-tpi] number
# -t formats the output in TeX (as opposed to in the terminal)
# -p chooses the primary components decomposition (default)
# -i chooses the invariant factors decomposition
import argparse
import collections
import functools
import itertools
import math
# Handles command-line arguments. See usage, above.
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description='Classifies finite abelian groups of a given order.')
parser.add_argument('-t', action='store_true', help='formats output in LaTeX')
parser.add_argument('-p', action='store_true', help='chooses primary components decomposition (default)')
parser.add_argument('-i', action='store_true', help='chooses invariant factors decomposition')
parser.add_argument('order', metavar='n', type=int, help='group order')
return parser.parse_args()
# Determines the prime factors of a number. No, this isn't the best algorithm, but
# it's good enough. Returns them as a Counter object (a dict of prime -> power), in which
# all values will be strictly positive.
def prime_factors(n: int) -> collections.Counter:
for i in range(2, 1 + math.ceil(math.sqrt(n))):
# By doing this in order, we guarantee that this only happens when i is prime (2 comes before
# any other even number, etc.)
if n % i == 0:
if n == i:
return collections.Counter({n: 1})
else:
to_return = prime_factors(n // i)
to_return[i] += 1
return to_return
# if n is prime
return collections.Counter({n: 1})
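# Example (editor's note): prime_factors(360) should return
# Counter({2: 3, 3: 2, 5: 1}), i.e. 360 = 2^3 * 3^2 * 5.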
# A helper function for the partitions function, below. Returns the partitions of n using integers
# less than or equal to m. However, within this program it makes more sense to do this multiplicatively:
# n is represented as p^n, and we are partitioning as p^n = p^{n_1} x p^{n_2} x ... x p^{n_k}.
@functools.lru_cache(maxsize=None) # Wraps the function in a cache, making memoization simpler
def _part_lt(p: int, n: int, m: int) -> list:
if n == 0:
return [[]]
if n == 1:
return [[p]]
if m > n:
return _part_lt(p, n, n)
# Now, we recurse: the first entry in the partition can be any j in {1, ..., m}, and the rest is a
# partition in _part_lt(n-j, j).
to_return = []
for j in range(1, m+1):
to_return += [part + [p**j] for part in _part_lt(p, n-j, j)]
return to_return
# Returns the partitions of n as pth powers, i.e. the ways of writing p^n = p^{a_1} x ... x p^{a_m}
# such that each a_i is a positive integer and a_i >= a_{i+1} for each i. This is the algorithmic meat
# of each decomposition, though some thought must go into piecing the partitions for different primes
# together. Of course, this function calls the helper function, above.
def partitions(p: int, n: int) -> list:
return _part_lt(p, n, n)
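# Example (editor's note): partitions(2, 3) returns
# [[2, 2, 2], [2, 4], [8]] -- every way to write 2^3 as a product of powers
# of 2 (each inner list multiplies to 8).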
# Flattens one level of a list, turning [[1, 2, 3], [4, 5]] into [1, 2, 3, 4, 5].
def flatten(xs: list) -> list:
return [item for sublist in xs for item in sublist]
# Given the prime factors, returns a list of all abelian groups of the given order in primary-
# factors format.
def primary_factor_decomp(factors: collections.Counter) -> list:
decomps_at_primes = [partitions(p, factors[p]) for p in factors]
return [flatten(choice) for choice in itertools.product(*decomps_at_primes)]
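# Example (editor's note): for order 12 (factors {2: 2, 3: 1}) this yields
# [[2, 2, 3], [4, 3]], i.e. Z/2 x Z/2 x Z/3 and Z/4 x Z/3.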
# Uses the partitions in a different way to make a list of all abelian groups of a given order in
# the invariant-factors decomposition.
def invariant_factor_decomp(factors: collections.Counter) -> list:
decomps_at_primes = [partitions(p, factors[p]) for p in factors]
return [(functools.reduce(lambda x,y: x*y, inv_fact)
for inv_fact in itertools.zip_longest(*choice, fillvalue=1))
for choice in itertools.product(*decomps_at_primes)]
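# Example (editor's note): for order 12 the same partitions combine into the
# invariant factors (6, 2) -> Z/6 x Z/2 and (12,) -> Z/12.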
# Returns "there are n abelian groups" or "there is one abelian group" depending on the value of n.
def format_plurals(n: int) -> str:
if n == 1:
return 'There is one abelian group'
else:
return 'There are %d abelian groups' % n
# Formats and prints the output.
def output(groups: list, order: int, as_TeX: bool):
if as_TeX:
print('\\documentclass{amsart}')
print('\\newcommand{\\Z}{\\mathbb Z}')
print('\\title{Abelian Groups of Order %d}' % order)
print('\\begin{document}')
print('\\maketitle')
print('%s of order %d.' % (format_plurals(len(groups)), order))
print('\\begin{gather*}')
print('\\\\\n'.join(['\\oplus'.join(['\\Z/%d' % component for component in group]) for group in groups]))
print('\\end{gather*}')
print('\\end{document}')
else:
print('%s of order %d.' % (format_plurals(len(groups)), order))
for group in groups:
print('⊕ '.join('ℤ/%d' % component for component in group))
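# Example session (editor's sketch; glyph spacing approximate):
#   $ ./finite_abelian_groups.py 8
#   There are 3 abelian groups of order 8.
#   ℤ/2 ⊕ ℤ/2 ⊕ ℤ/2
#   ℤ/2 ⊕ ℤ/4
#   ℤ/8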
def main():
arginfo = parse_args()
groups = None
factors = prime_factors(arginfo.order)
if arginfo.i:
groups = invariant_factor_decomp(factors)
else:
groups = primary_factor_decomp(factors)
output(groups, arginfo.order, arginfo.t)
if __name__ == '__main__':
main()
| adebray/enumerating_abelian_groups | finite_abelian_groups.py | Python | mit | 5,033 | 0.020481 |
"""Tests for plugin.py."""
import ckanext.dictionary.plugin as plugin
def test_plugin():
pass | cmuphillycapstone/ckanext-dictionary | ckanext/dictionary/tests/test_plugin.py | Python | agpl-3.0 | 98 | 0.020408 |
#!/usr/bin/env python3
# What directories are needed to run an app?
# Use strace to find out. Potentially useful for setting up chroots.
# Usage: dirsneeded.py cmd [arg arg ...]
'''
TODO:
> o for anything where access attempt is made in chroot and fails,
> at least by default, only consider it something to possibly be added
> to chroot if that something exists outside the chroot
> o for all opens and the like, track if it's opened for read, or
> write/append, or both, and furthermore also track if it's only
> read, or only written / appended to
> o track directories accessed, and if merely accessed, or if also
> read. Likewise, divide and conquer, do any tests fail if read
> access is removed, likewise to then have x removed, or directory
> removed.
'''
import subprocess
from pathlib import Path
from collections import defaultdict
import shlex
import sys
def strace_cmd(cmdargs):
"""Run cmdargs (a list containing a command and all arguments)
under strace, and output a list of files and directories opened.
Returns a dict of lists of fileinfo dicts.
{ "dirpath", [ ("filename": "/tmp/foo", "mode": "O_RDONLY", etc... ] }
"""
'''Some sample strace out lines:
openat(AT_FDCWD, "/etc/ld.so.preload", O_RDONLY|O_CLOEXEC) = 3
execve("/usr/bin/ls", ["ls", "/tmp/foo"], 0x7ffe05c05678 /* 50 vars */) = 0
access("/etc/ld.so.preload", R_OK) = 0
statfs("/sys/fs/selinux", 0x7ffdf3e99420) = -1 ENOENT (No such file or directorystat("/tmp/moncycle", {st_mode=S_IFREG|0664, st_size=183, ...}) = 0
lstat("/tmp/moncycle", {st_mode=S_IFREG|0664, st_size=183, ...}) = 0
'''
cmdargs = ["strace", "-e", "trace=file"] + cmdargs
straceout = subprocess.run(cmdargs, # stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE).stderr.decode()
dirlist = defaultdict(dict)
for line in straceout.split('\n'):
# For now, only parse the openat lines.
if line.startswith("openat("):
# An strace open line looks like:
# openat(AT_FDCWD, "/lib/x86_64-linux-gnu/libc.so.6", O_RDONLY|O_CLOEXEC) = 3
# openat(AT_FDCWD, "/tmp/foo", O_RDONLY|O_CLOEXEC) = -1 ENOENT (No such file or directory)
# Use shlex since the filename is quoted and may include spaces.
parts = shlex.split(line.strip())
# but they all end with commas, so strip those.
for i, p in enumerate(parts):
if p.endswith(','):
parts[i] = p[:-1]
fileinfo = {}
fileinfo["filename"] = parts[1]
fileinfo["syscall"] = parts[0]
fileinfo["mode"] = parts[2]
if fileinfo["mode"][-1] == ')':
# Remove the close-paren
fileinfo["mode"] = fileinfo["mode"][:-1]
# Seems like I've seen additional args after the mode,
# but now that I look, I can't find any examples.
# Skip forward to the = to get the return value.
# Really only need to search from parts[3] on,
# but that messes up i.
for i, part in enumerate(parts):
if part == "=":
fileinfo["retval"] = ' '.join(parts[i+1:])
break
else:
fileinfo = None
if fileinfo:
# We have a filename. Find the file's directory
path = Path(fileinfo["filename"]).resolve()
if path.is_dir():
dirname = str(path)
fileinfo["type"] = "d"
else:
dirname = str(path.parent)
dirlist[dirname][fileinfo["filename"]] = fileinfo
return dirlist
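# Editor's sketch of the returned structure, assuming something like
# `dirsneeded.py cat /etc/hostname` (keys abbreviated, values illustrative):
#
#     {
#         "/etc": {
#             "/etc/hostname": {
#                 "filename": "/etc/hostname",
#                 "syscall": "openat(AT_FDCWD",  # parser keeps the open paren
#                 "mode": "O_RDONLY",
#                 "retval": "3",
#             },
#         },
#     }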
def fileinfo_to_str(fileinfo):
s = fileinfo["filename"]
if "type" in fileinfo and fileinfo["type"] == "d":
s += " (DIRECTORY)"
if "mode" in fileinfo:
s += " (" + fileinfo["mode"] + ")"
if "retval" in fileinfo:
s += " -> " + fileinfo["retval"]
return s
if __name__ == '__main__':
dirlist = strace_cmd(sys.argv[1:])
for d in dirlist:
print(d)
files = sorted(dirlist[d].keys())
for f in files:
print(" ", fileinfo_to_str(dirlist[d][f]))
| akkana/scripts | dirsneeded.py | Python | gpl-2.0 | 4,269 | 0.001171 |
from direction import Direction, Pivot
XMovement = {
Direction.left: -1,
Direction.up: 0,
Direction.right: 1,
Direction.down: 0,
Direction.up_left: -1,
Direction.up_right: 1,
Direction.down_left: -1,
Direction.down_right: 1
}
YMovement = {
Direction.left: 0,
Direction.up: -1,
Direction.right: 0,
Direction.down: 1,
Direction.up_left: -1,
Direction.up_right: -1,
Direction.down_left: 1,
Direction.down_right: 1
}
NewlineDirection = {
Direction.left: Direction.up,
Direction.up: Direction.right,
Direction.right: Direction.down,
Direction.down: Direction.left,
Direction.up_left: Direction.up_right,
Direction.up_right: Direction.down_right,
Direction.down_left: Direction.up_left,
Direction.down_right: Direction.down_left
}
NextDirection = {
Direction.left: Direction.up_left,
Direction.up: Direction.up_right,
Direction.right: Direction.down_right,
Direction.down: Direction.down_left,
Direction.up_left: Direction.up,
Direction.up_right: Direction.right,
Direction.down_left: Direction.left,
Direction.down_right: Direction.down
}
DirectionCharacters = {
Direction.left: "-",
Direction.up: "|",
Direction.right: "-",
Direction.down: "|",
Direction.up_left: "\\",
Direction.up_right: "/",
Direction.down_left: "/",
Direction.down_right: "\\"
}
PivotLookup = {
Pivot.left: {
Direction.left: Direction.down_left,
Direction.up: Direction.up_left,
Direction.right: Direction.up_right,
Direction.down: Direction.down_right,
Direction.up_left: Direction.left,
Direction.up_right: Direction.up,
Direction.down_left: Direction.down,
Direction.down_right: Direction.right
},
Pivot.right: {
Direction.left: Direction.up_left,
Direction.up: Direction.up_right,
Direction.right: Direction.down_right,
Direction.down: Direction.down_left,
Direction.up_left: Direction.up,
Direction.up_right: Direction.right,
Direction.down_left: Direction.left,
Direction.down_right: Direction.down
}
}
DirectionFromXYSigns = {
-1: {-1: Direction.up_left, 0: Direction.left, 1: Direction.down_left},
0: {-1: Direction.up, 0: Direction.right, 1: Direction.down},
1: {-1: Direction.up_right, 0: Direction.right, 1: Direction.down_right}
}
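# Example (editor's note): pivoting left while heading right, then reading the
# movement deltas for the resulting direction:
#
#     PivotLookup[Pivot.left][Direction.right]   # -> Direction.up_right
#     XMovement[Direction.up_right]              # -> 1
#     YMovement[Direction.up_right]              # -> -1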
| somebody1234/Charcoal | directiondictionaries.py | Python | mit | 2,433 | 0 |
import os.path
import json
# Editor's note: CommandError was referenced below but never defined or
# imported; a minimal definition is added so the module is self-contained.
class CommandError(Exception):
    """Raised when the site configuration is missing or malformed."""
class Tag:
def __init__(self, tag, title, icon):
self.tag = tag
self.title = title
self.icon = icon
class SiteConfig:
def __init__(self, site_dir):
self.site_dir = site_dir
config_file_name = os.path.join(self.site_dir, "config.js")
if not os.path.exists(config_file_name):
            raise CommandError("No site config file exists: " + config_file_name)
site_config = {}
with open(config_file_name, "r", encoding="utf-8") as f:
site_config = json.load(f)
self.source_dir = site_config["source_dir"]
self.destination_dir = site_config["destination_dir"]
self.template = site_config["template"]
self.blog_name = site_config["blog_name"]
self.blog_description = site_config["blog_description"]
self.blog_author = site_config["blog_author"]
self.root_url = site_config["root_url"]
self.relative_index = site_config["relative_index"]
self.navigation_menu = site_config["navigation_menu"]
self.twitter_handle = site_config["twitter_handle"]
self.allowed_tags = {}
tags = site_config["allowed_tags"]
for t in tags:
self.allowed_tags[t["tag"]] = Tag(t["tag"], t["title"], t["icon"])
def is_tag_allowed(self, tag):
return tag in self.allowed_tags
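# Editor's sketch of the expected config.js shape (key names taken from the
# reads above; all values are illustrative, not from the original project):
#
#     {
#         "source_dir": "content",
#         "destination_dir": "public",
#         "template": "default.html",
#         "blog_name": "Example Blog",
#         "blog_description": "An example",
#         "blog_author": "Jane Doe",
#         "root_url": "https://example.com",
#         "relative_index": "index.html",
#         "navigation_menu": [],
#         "twitter_handle": "example",
#         "allowed_tags": [{"tag": "python", "title": "Python", "icon": "py.png"}]
#     }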
| andrewstephens75/gensite | gensite/siteconfig.py | Python | mit | 1,317 | 0.012908 |
"""
Test APIs.
"""
import json
import pytest
from mock import patch
from coursera import api
from coursera.test.utils import slurp_fixture
@pytest.fixture
def course():
course = api.CourseraOnDemand(session=None, course_id='0')
return course
@patch('coursera.api.get_page_json')
def test_ondemand_programming_supplement_no_instructions(get_page_json, course):
no_instructions = slurp_fixture('json/supplement-programming-no-instructions.json')
get_page_json.return_value = json.loads(no_instructions)
output = course.extract_links_from_programming('0')
assert {} == output
@patch('coursera.api.get_page_json')
def test_ondemand_programming_supplement_empty_instructions(get_page_json, course):
empty_instructions = slurp_fixture('json/supplement-programming-empty-instructions.json')
get_page_json.return_value = json.loads(empty_instructions)
output = course.extract_links_from_programming('0')
# Make sure that SOME html content has been extracted, but remove
# it immeditely because it's a hassle to properly prepare test input
# for it. FIXME later.
assert 'html' in output
del output['html']
assert {} == output
@patch('coursera.api.get_page_json')
def test_ondemand_programming_supplement_one_asset(get_page_json, course):
one_asset_tag = slurp_fixture('json/supplement-programming-one-asset.json')
one_asset_url = slurp_fixture('json/asset-urls-one.json')
asset_json = json.loads(one_asset_url)
get_page_json.side_effect = [json.loads(one_asset_tag),
json.loads(one_asset_url)]
expected_output = {'pdf': [(asset_json['elements'][0]['url'],
'statement-pca')]}
output = course.extract_links_from_programming('0')
# Make sure that SOME html content has been extracted, but remove
# it immeditely because it's a hassle to properly prepare test input
# for it. FIXME later.
assert 'html' in output
del output['html']
assert expected_output == output
@patch('coursera.api.get_page_json')
def test_ondemand_programming_supplement_three_assets(get_page_json, course):
three_assets_tag = slurp_fixture('json/supplement-programming-three-assets.json')
three_assets_url = slurp_fixture('json/asset-urls-three.json')
get_page_json.side_effect = [json.loads(three_assets_tag),
json.loads(three_assets_url)]
expected_output = json.loads(slurp_fixture('json/supplement-three-assets-output.json'))
output = course.extract_links_from_programming('0')
output = json.loads(json.dumps(output))
# Make sure that SOME html content has been extracted, but remove
# it immeditely because it's a hassle to properly prepare test input
# for it. FIXME later.
assert 'html' in output
del output['html']
assert expected_output == output
@patch('coursera.api.get_page_json')
def test_extract_links_from_lecture_assets_typename_asset(get_page_json, course):
open_course_assets_reply = slurp_fixture('json/supplement-open-course-assets-reply.json')
api_assets_v1_reply = slurp_fixture('json/supplement-api-assets-v1-reply.json')
get_page_json.side_effect = [json.loads(open_course_assets_reply),
json.loads(api_assets_v1_reply)]
expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-output.json'))
assets = ['giAxucdaEeWJTQ5WTi8YJQ']
output = course._extract_links_from_lecture_assets(assets)
output = json.loads(json.dumps(output))
assert expected_output == output
@patch('coursera.api.get_page_json')
def test_extract_links_from_lecture_assets_typname_url_and_asset(get_page_json, course):
"""
This test makes sure that _extract_links_from_lecture_assets grabs url
links both from typename == 'asset' and == 'url'.
"""
get_page_json.side_effect = [
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-1.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-2.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-3.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-4.json')),
json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-5.json')),
]
expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-url-asset-output.json'))
assets = ['Yry0spSKEeW8oA5fR3afVQ',
'kMQyUZSLEeWj-hLVp2Pm8w',
'xkAloZmJEeWjYA4jOOgP8Q']
output = course._extract_links_from_lecture_assets(assets)
output = json.loads(json.dumps(output))
assert expected_output == output
| iemejia/coursera-dl | coursera/test/test_api.py | Python | lgpl-3.0 | 4,808 | 0.003536 |
#-------------------------------------------------------------------------
# The Azure Batch Apps Python Client
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#--------------------------------------------------------------------------
"""Unit tests for Pool and PoolSpecifier"""
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
from unittest import mock
except ImportError:
import mock
from batchapps.pool import (
Pool,
PoolSpecifier)
from batchapps.api import (
BatchAppsApi,
Response)
from batchapps.exceptions import RestCallException
# pylint: disable=W0212
class TestPool(unittest.TestCase):
"""Unit tests for Pool"""
def test_pool_create(self):
"""Test Pool object"""
api = mock.create_autospec(BatchAppsApi)
pool = Pool(api)
self.assertIsNone(pool.id)
self.assertIsNone(pool.created)
self.assertEqual(pool.target_size, 0)
pool_spec = {
'id': 'abc',
'creationTime': '',
'targetDedicated': '5',
'state': 'active',
'communication': True
}
pool = Pool(api, **pool_spec)
self.assertEqual(pool.id, 'abc')
self.assertEqual(pool.created, '')
self.assertEqual(pool.target_size, 5)
self.assertEqual(pool.communication, True)
def test_pool_delete(self):
"""Test delete"""
api = mock.create_autospec(BatchAppsApi)
api.delete_pool.return_value = mock.create_autospec(Response)
api.delete_pool.return_value.success = True
pool = Pool(api)
pool.delete()
api.delete_pool.assert_called_with(None)
api.delete_pool.return_value.success = False
api.delete_pool.return_value.result = RestCallException(None, "Test", None)
with self.assertRaises(RestCallException):
pool.delete()
@mock.patch.object(Pool, 'update')
def test_pool_resize(self, mock_update):
"""Test resize"""
api = mock.create_autospec(BatchAppsApi)
api.resize_pool.return_value = mock.create_autospec(Response)
api.resize_pool.return_value.success = True
pool = Pool(api)
pool.resize(5)
api.resize_pool.assert_called_with(None, 5)
mock_update.assert_called_with()
with self.assertRaises(ValueError):
pool.resize("test")
api.resize_pool.return_value.success = False
api.resize_pool.return_value.result = RestCallException(None, "Test", None)
mock_update.called = False
with self.assertRaises(RestCallException):
pool.resize(1)
self.assertFalse(mock_update.called)
def test_pool_update(self):
"""Test delete"""
api = mock.create_autospec(BatchAppsApi)
pool = Pool(api)
api.get_pool.return_value = mock.create_autospec(Response)
api.get_pool.return_value.success = True
api.get_pool.return_value.result = {
'targetDedicated':'5',
'currentDedicated':'4',
'state':'active',
'allocationState':'test',
}
self.assertEqual(pool.target_size, 0)
self.assertEqual(pool.current_size, 0)
self.assertEqual(pool.state, None)
self.assertEqual(pool.allocation_state, None)
self.assertEqual(pool.resize_error, '')
pool.update()
api.get_pool.assert_called_with(pool_id=None)
self.assertEqual(pool.target_size, 5)
self.assertEqual(pool.current_size, 4)
self.assertEqual(pool.state, 'active')
self.assertEqual(pool.allocation_state, 'test')
self.assertEqual(pool.resize_error, '')
api.get_pool.return_value.success = False
api.get_pool.return_value.result = RestCallException(None, "Test", None)
with self.assertRaises(RestCallException):
pool.update()
class TestPoolSpecifier(unittest.TestCase):
"""Unit tests for PoolSpecifier"""
def test_poolspecifier_create(self):
"""Test PoolSpecifier object"""
api = mock.create_autospec(BatchAppsApi)
pool = PoolSpecifier(api)
self.assertEqual(pool.target_size, 0)
self.assertEqual(pool.max_tasks, 1)
self.assertEqual(pool.communication, False)
self.assertEqual(pool.certificates, [])
pool = PoolSpecifier(api, target_size=5, max_tasks=2, communication=True)
self.assertEqual(pool.target_size, 5)
self.assertEqual(pool.max_tasks, 2)
self.assertEqual(pool.communication, True)
self.assertEqual(pool.certificates, [])
def test_poolspecifier_start(self):
"""Test start"""
api = mock.create_autospec(BatchAppsApi)
api.add_pool.return_value.success = True
api.add_pool.return_value.result = {
'poolId':'abc', 'link':{'href':'test.com'}}
pool = PoolSpecifier(api)
new_pool = pool.start()
self.assertEqual(new_pool, {'id':'abc', 'link':'test.com'})
api.add_pool.assert_called_with(0, 1, False, [])
api.add_pool.return_value.success = False
api.add_pool.return_value.result = RestCallException(None, "Test", None)
with self.assertRaises(RestCallException):
pool.start()
def test_poolspecifier_add_cert(self):
api = mock.create_autospec(BatchAppsApi)
pool = PoolSpecifier(api)
pool.add_cert("test_thumb")
self.assertEqual(pool.certificates, [{
'thumbprint':'test_thumb',
'thumbprintAlgorithm':'SHA1',
'storeLocation':'CurrentUser',
'storeName':'My'}])
pool.add_cert("test_thumb", store_location="test", store_name=None)
self.assertEqual(pool.certificates, [{
'thumbprint':'test_thumb',
'thumbprintAlgorithm':'SHA1',
'storeLocation':'CurrentUser',
'storeName':'My'},{
'thumbprint':'test_thumb',
'thumbprintAlgorithm':'SHA1',
'storeLocation':'test',
'storeName':'None'}])
pool.id = None
pool.certificates = [0,1,2,3,4,5,6,7,8,9]
pool.add_cert("new_cert")
self.assertEqual(pool.certificates, [0,1,2,3,4,5,6,7,8,9])
if __name__ == '__main__':
unittest.main() | Azure/azure-batch-apps-python | batchapps/test/unittest_pool.py | Python | mit | 7,440 | 0.007124 |
"""Numeric integration of data coming from a source sensor over time."""
from decimal import Decimal, DecimalException
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TIME_DAYS,
TIME_HOURS,
TIME_MINUTES,
TIME_SECONDS,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.restore_state import RestoreEntity
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_SOURCE_ID = "source"
CONF_SOURCE_SENSOR = "source"
CONF_ROUND_DIGITS = "round"
CONF_UNIT_PREFIX = "unit_prefix"
CONF_UNIT_TIME = "unit_time"
CONF_UNIT_OF_MEASUREMENT = "unit"
CONF_METHOD = "method"
TRAPEZOIDAL_METHOD = "trapezoidal"
LEFT_METHOD = "left"
RIGHT_METHOD = "right"
INTEGRATION_METHOD = [TRAPEZOIDAL_METHOD, LEFT_METHOD, RIGHT_METHOD]
# SI Metric prefixes
UNIT_PREFIXES = {None: 1, "k": 10 ** 3, "M": 10 ** 6, "G": 10 ** 9, "T": 10 ** 12}
# SI Time prefixes
UNIT_TIME = {
TIME_SECONDS: 1,
TIME_MINUTES: 60,
TIME_HOURS: 60 * 60,
TIME_DAYS: 24 * 60 * 60,
}
ICON = "mdi:chart-histogram"
DEFAULT_ROUND = 3
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_SOURCE_SENSOR): cv.entity_id,
vol.Optional(CONF_ROUND_DIGITS, default=DEFAULT_ROUND): vol.Coerce(int),
vol.Optional(CONF_UNIT_PREFIX, default=None): vol.In(UNIT_PREFIXES),
vol.Optional(CONF_UNIT_TIME, default=TIME_HOURS): vol.In(UNIT_TIME),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_METHOD, default=TRAPEZOIDAL_METHOD): vol.In(
INTEGRATION_METHOD
),
}
)
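# Illustrative YAML configuration (editor's sketch; the entity id is
# hypothetical) matching the schema above:
#
#   sensor:
#     - platform: integration
#       source: sensor.current_power
#       name: energy_spent
#       unit_prefix: k
#       unit_time: h
#       round: 2
#       method: trapezoidal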
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the integration sensor."""
integral = IntegrationSensor(
config[CONF_SOURCE_SENSOR],
config.get(CONF_NAME),
config[CONF_ROUND_DIGITS],
config[CONF_UNIT_PREFIX],
config[CONF_UNIT_TIME],
config.get(CONF_UNIT_OF_MEASUREMENT),
config[CONF_METHOD],
)
async_add_entities([integral])
class IntegrationSensor(RestoreEntity):
"""Representation of an integration sensor."""
def __init__(
self,
source_entity,
name,
round_digits,
unit_prefix,
unit_time,
unit_of_measurement,
integration_method,
):
"""Initialize the integration sensor."""
self._sensor_source_id = source_entity
self._round_digits = round_digits
self._state = 0
self._method = integration_method
self._name = name if name is not None else f"{source_entity} integral"
if unit_of_measurement is None:
self._unit_template = (
f"{'' if unit_prefix is None else unit_prefix}{{}}{unit_time}"
)
# we postpone the definition of unit_of_measurement to later
self._unit_of_measurement = None
else:
self._unit_of_measurement = unit_of_measurement
self._unit_prefix = UNIT_PREFIXES[unit_prefix]
self._unit_time = UNIT_TIME[unit_time]
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state:
try:
self._state = Decimal(state.state)
            except (ValueError, DecimalException) as err:
_LOGGER.warning("Could not restore last state: %s", err)
@callback
def calc_integration(event):
"""Handle the sensor state changes."""
old_state = event.data.get("old_state")
new_state = event.data.get("new_state")
if (
old_state is None
or old_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]
or new_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
return
if self._unit_of_measurement is None:
unit = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
self._unit_of_measurement = self._unit_template.format(
"" if unit is None else unit
)
try:
# integration as the Riemann integral of previous measures.
area = 0
elapsed_time = (
new_state.last_updated - old_state.last_updated
).total_seconds()
if self._method == TRAPEZOIDAL_METHOD:
area = (
(Decimal(new_state.state) + Decimal(old_state.state))
* Decimal(elapsed_time)
/ 2
)
elif self._method == LEFT_METHOD:
area = Decimal(old_state.state) * Decimal(elapsed_time)
elif self._method == RIGHT_METHOD:
area = Decimal(new_state.state) * Decimal(elapsed_time)
integral = area / (self._unit_prefix * self._unit_time)
assert isinstance(integral, Decimal)
except ValueError as err:
_LOGGER.warning("While calculating integration: %s", err)
except DecimalException as err:
_LOGGER.warning(
"Invalid state (%s > %s): %s", old_state.state, new_state.state, err
)
except AssertionError as err:
_LOGGER.error("Could not calculate integral: %s", err)
else:
self._state += integral
self.async_write_ha_state()
async_track_state_change_event(
self.hass, [self._sensor_source_id], calc_integration
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return round(self._state, self._round_digits)
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
state_attr = {ATTR_SOURCE_ID: self._sensor_source_id}
return state_attr
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON
| titilambert/home-assistant | homeassistant/components/integration/sensor.py | Python | apache-2.0 | 6,827 | 0.000586 |
"""
Test that we work properly with classes with the trivial_abi attribute
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestTrivialABI(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipUnlessSupportedTypeAttribute("trivial_abi")
@expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr37995")
@expectedFailureAll(archs=["aarch64"], oslist=["linux"],
bugnumber="llvm.org/pr44161")
def test_call_trivial(self):
"""Test that we can print a variable & call a function with a trivial ABI class."""
self.build()
self.main_source_file = lldb.SBFileSpec("main.cpp")
self.expr_test(True)
@skipUnlessSupportedTypeAttribute("trivial_abi")
# fixed for SysV-x86_64 ABI, but not Windows-x86_64
@expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr36870")
@expectedFailureAll(archs=["aarch64"], oslist=["linux"],
bugnumber="llvm.org/pr44161")
@expectedFailureAll(archs=["arm64", "arm64e"], bugnumber="<rdar://problem/57844240>")
def test_call_nontrivial(self):
"""Test that we can print a variable & call a function on the same class w/o the trivial ABI marker."""
self.build()
self.main_source_file = lldb.SBFileSpec("main.cpp")
self.expr_test(False)
def check_value(self, test_var, ivar_value):
self.assertTrue(test_var.GetError().Success(), "Invalid valobj: %s"%(test_var.GetError().GetCString()))
ivar = test_var.GetChildMemberWithName("ivar")
        self.assertTrue(ivar.GetError().Success(), "Failed to fetch ivar")
self.assertEqual(ivar_value, ivar.GetValueAsSigned(), "Got the right value for ivar")
def check_frame(self, thread):
frame = thread.frames[0]
inVal_var = frame.FindVariable("inVal")
self.check_value(inVal_var, 10)
options = lldb.SBExpressionOptions()
inVal_expr = frame.EvaluateExpression("inVal", options)
self.check_value(inVal_expr, 10)
thread.StepOut()
outVal_ret = thread.GetStopReturnValue()
self.check_value(outVal_ret, 30)
def expr_test(self, trivial):
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(self,
"Set a breakpoint here", self.main_source_file)
# Stop in a function that takes a trivial value, and try both frame var & expr to get its value:
if trivial:
self.check_frame(thread)
return
# Now continue to the same thing without the trivial_abi and see if we get that right:
threads = lldbutil.continue_to_breakpoint(process, bkpt)
self.assertEqual(len(threads), 1, "Hit my breakpoint the second time.")
self.check_frame(threads[0])
| endlessm/chromium-browser | third_party/llvm/lldb/test/API/lang/cpp/trivial_abi/TestTrivialABI.py | Python | bsd-3-clause | 2,923 | 0.004105 |
#!/usr/bin/env python
# tests the standard library of math functions
import unittest
import math
import cmath
import string
import commands
import types
import testbase
import absyn
import codegen
import fractparser
import fractlexer
import fsymbol
import translate
class Test(testbase.TestBase):
def setUp(self):
self.fakeNode = absyn.Empty(0)
self.codegen = codegen.T(fsymbol.T())
self.parser = fractparser.parser
def compileAndRun(self,c_code):
cFileName = self.codegen.writeToTempFile(c_code,".cpp")
oFileName = self.codegen.writeToTempFile("")
#print c_code
cmd = "g++ -Wall %s -o %s -Ic -lm" % (cFileName, oFileName)
#print cmd
(status,output) = commands.getstatusoutput(cmd)
self.assertEqual(status,0,"C error:\n%s\nProgram:\n%s\n" % \
( output,c_code))
#print "status: %s\noutput:\n%s" % (status, output)
cmd = oFileName
(status,output) = commands.getstatusoutput(cmd)
self.assertEqual(status,0, "Runtime error:\n" + output)
print "status: %s\noutput:\n%s" % (status, output)
return output
def makeC(self,user_preamble="", user_postamble=""):
# construct a C stub for testing
preamble = '''
#include <stdio.h>
#include <math.h>
#include "cmap.cpp"
typedef enum
{
INT = 0,
FLOAT = 1,
GRADIENT = 2
} e_paramtype;
struct s_param
{
e_paramtype t;
int intval;
double doubleval;
void *gradient;
};
typedef struct {
struct s_param *p;
void *arena;
} pf_fake;
int main(){
struct s_param params[20];
int i = 0;
ListColorMap *pMap = new ListColorMap();
pMap->init(2);
pMap->set(0,0.0,255,0,0,0);
pMap->set(1,1.0,0,255,0,0);
for(i = 0; i < 20; ++i) {
params[i].t = FLOAT;
params[i].intval = 773;
params[i].doubleval = 0.0;
params[i].gradient = pMap;
};
pf_fake t__f;
t__f.p = params;
pf_fake *t__pfo = &t__f;
double pixel_re = 0.0, pixel_im = 0.0;
double t__h_zwpixel_re = 0.0, t__h_zwpixel_im = 0.0;
double t__h_color_re = 0.0;
double t__h_color_i = 0.0;
double t__h_color_j = 0.0;
double t__h_color_k = 0.0;
double inputs[] = {
0, 0,
0, 1,
1, 0,
1, 1,
3, 2,
1,-0.0,
0,-1,
-3,2,
-2,-2,
-1,0
};
        for(int i__ = 0; i__ < sizeof(inputs)/sizeof(double); i__ += 2)
{
'''
codegen_symbols = self.codegen.output_symbols(self.codegen,{})
decls = string.join(map(lambda x: x.format(), codegen_symbols),"\n")
str_output = string.join(map(lambda x : x.format(), self.codegen.out),"\n")
postamble = "}\nreturn 0;}\n"
return string.join([preamble,decls,"\n",
user_preamble,str_output,"\n",
user_postamble,postamble],"")
def inspect_bool(self,name):
return "printf(\"%s = %%d\\n\", f%s);" % (name,name)
def inspect_float(self,name):
return "printf(\"%s = %%g\\n\", f%s);" % (name,name)
def inspect_int(self,name):
return "printf(\"%s = %%d\\n\", f%s);" % (name,name)
def inspect_complex(self,name,prefix="f"):
return "printf(\"%s = (%%g,%%g)\\n\", %s%s_re, %s%s_im);" % \
(name,prefix,name,prefix,name)
def inspect_hyper(self,name,prefix="f"):
return ("printf(\"%s = (%%g,%%g,%%g,%%g)\\n\"," +
"%s%s_re, %s%s_i, %s%s_j, %s%s_k);") % \
(name,prefix,name,prefix,name,prefix,name,prefix,name)
def inspect_color(self,name,prefix="f"):
return self.inspect_hyper(name, prefix)
def inspect_colors(self,namelist):
return "".join(map(lambda x : self.inspect_color(x), namelist))
def predict(self,f,arg1=0,arg2=1):
# compare our compiler results to Python stdlib
try:
x = "%.6g" % f(arg1)
except ZeroDivisionError:
x = "inf"
try:
y = "%.6g" % f(arg2)
except ZeroDivisionError:
y = "inf"
return "(%s,%s)" % (x,y)
def cpredict(self,f,arg=(1+0j)):
try:
z = f(arg)
return "(%.6g,%.6g)" % (z.real,z.imag)
except OverflowError:
return "(inf,inf)"
except ZeroDivisionError:
return "(nan,nan)"
def make_test(self,myfunc,pyfunc,val,n):
codefrag = "ct_%s%d = %s((%d,%d))" % (myfunc, n, myfunc, val.real, val.imag)
lookat = "ct_%s%d" % (myfunc, n)
result = self.cpredict(pyfunc,val)
return [ codefrag, lookat, result]
def manufacture_tests(self,myfunc,pyfunc):
vals = [ 0+0j, 0+1j, 1+0j, 1+1j, 3+2j, 1-0j, 0-1j, -3+2j, -2-2j, -1+0j ]
return map(lambda (x,y) : self.make_test(myfunc,pyfunc,x,y), \
zip(vals,range(1,len(vals))))
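    # For example (computed here, not from the original file),
    # manufacture_tests("sin", cmath.sin) yields entries such as
    # ["ct_sin2 = sin((0,1))", "ct_sin2", "(0,1.1752)"] -- a formula fragment,
    # the variable to inspect and the expected printed value.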
def cotantests(self):
def mycotan(z):
return cmath.cos(z)/cmath.sin(z)
tests = self.manufacture_tests("cotan",mycotan)
# CONSIDER: comes out as -0,1.31304 in python, but +0 in C++ and gf4d
# think Python's probably in error, but not 100% sure
tests[6][2] = "(0,1.31304)"
return tests
def logtests(self):
tests = self.manufacture_tests("log",cmath.log)
tests[0][2] = "(-inf,0)" # log(0+0j) is overflow in python
return tests
def asintests(self):
tests = self.manufacture_tests("asin",cmath.asin)
# asin(x+0j) = (?,-0) in python, which is wrong
tests[0][2] = "(0,0)"
tests[2][2] = tests[5][2] = "(1.5708,0)"
return tests
def acostests(self):
# work around buggy python acos
tests = self.manufacture_tests("acos",cmath.acos)
tests[0][2] = "(1.5708,0)"
tests[2][2] = tests[5][2] = "(0,0)"
return tests
def atantests(self):
tests = self.manufacture_tests("atan",cmath.atan)
tests[1][2] = "(nan,nan)"
tests[6][2] = "(nan,-inf)" # not really sure who's right on this
return tests
def atanhtests(self):
tests = self.manufacture_tests("atanh",cmath.atanh)
tests[2][2] = tests[5][2] = "(inf,0)" # Python overflows the whole number
return tests
def test_stdlib(self):
'''This is the slowest test, due to how much compilation it does.
Calls standard functions with a variety
of values, checking that they produce the right answers'''
# additions to python math stdlib
def myfcotan(x):
return math.cos(x)/math.sin(x)
def myfcotanh(x):
return math.cosh(x)/math.sinh(x)
def mycotanh(z):
return cmath.cosh(z)/cmath.sinh(z)
def myasinh(z):
return cmath.log(z + cmath.sqrt(z*z+1))
def myacosh(z):
return cmath.log(z + cmath.sqrt(z-1) * cmath.sqrt(z+1))
def myctrunc(z):
return complex(int(z.real),int(z.imag))
def mycfloor(z):
return complex(math.floor(z.real),math.floor(z.imag))
def mycround(z):
return complex(int(z.real+0.5),int(z.imag+0.5))
def mycceil(z):
x = complex(math.ceil(z.real),math.ceil(z.imag))
return x
def mycosxx(z):
cosz = cmath.cos(z)
return complex(cosz.real, -cosz.imag)
def myczero(z):
return complex(0,0)
tests = []
# # code to run, var to inspect, result
# [ "fm = (3.0 % 2.0, 3.1 % 1.5)","fm","(1,0.1)"],
# [ "cj = conj(y)", "cj", "(1,-2)"],
# [ "fl = flip(y)", "fl", "(2,1)"],
# [ "ri = (imag(y),real(y))","ri", "(2,1)"],
# [ "m = |y|","m","(5,0)"],
# [ "t = (4,2) * (2,-1)", "t", "(10,0)"],
# [ "d1 = y/(1,0)","d1","(1,2)"],
# [ "d2 = y/y","d2","(1,0)"],
# [ "d3 = (4,2)/y","d3","(1.6,-1.2)"],
# [ "d4 = (2,1)/2","d4","(1,0.5)"],
# [ "recip1 = recip((4,0))/recip(4)", "recip1", "(1,0)"],
# [ "i = ident(y)","i","(1,2)"],
# [ "a = (abs(4),abs(-4))","a","(4,4)"],
# [ "a2 = abs((4,-4))","a2","(4,4)"],
# [ "cab = (cabs((0,0)), cabs((3,4)))", "cab", "(0,5)"],
# [ "sq = (sqrt(4),sqrt(2))", "sq", self.predict(math.sqrt,4,2)],
# [ "l = (log(1),log(3))", "l", self.predict(math.log,1,3)],
# [ "ex = (exp(1),exp(2))","ex", self.predict(math.exp,1,2)],
# [ "p = (2^2,9^0.5)","p", "(4,3)"],
# [ "pow1 = (1,0)^2","pow1", "(1,0)"],
# [ "pow2 = (-2,-3)^7.5","pow2","(-13320.5,6986.17)"],
# [ "pow3 = (-2,-3)^(1.5,-3.1)","pow3","(0.00507248,-0.00681128)"],
# [ "pow4 = (0,0)^(1.5,-3.1)","pow4","(0,0)"],
# [ "manh1 = (manhattanish(2.0,-1.0),manhattanish(0.1,-0.1))",
# "manh1", "(1,0)"],
# [ "manh2 = (manhattan(2.0,-1.5),manhattan(-2,1.7))",
# "manh2", "(3.5,3.7)"],
# [ "manh3 = (manhattanish2(2.0,-1.0),manhattanish2(0.1,-0.1))",
# "manh3", "(25,0.0004)"],
# [ "mx2 = (max2(2,-3),max2(-3,0))", "mx2", "(9,9)"],
# [ "mn2 = (min2(-1,-2),min2(7,4))", "mn2", "(1,16)"],
# [ "r2 = (real2(3,1),real2(-2.5,2))","r2","(9,6.25)"],
# [ "i2 = (imag2(3,2),imag2(2,-0))", "i2", "(4,0)"],
# [ "ftrunc1 = (trunc(0.5), trunc(0.4))", "ftrunc1", "(0,0)"],
# [ "ftrunc2 = (trunc(-0.5), trunc(-0.4))", "ftrunc2", "(0,0)"],
# [ "frnd1 = (round(0.5), round(0.4))", "frnd1", "(1,0)"],
# [ "frnd2 = (round(-0.5), round(-0.4))", "frnd2", "(0,0)"],
# [ "fceil1 = (ceil(0.5), ceil(0.4))", "fceil1", "(1,1)"],
# [ "fceil2 = (ceil(-0.5), ceil(-0.4))", "fceil2", "(0,0)"],
# [ "ffloor1 = (floor(0.5), floor(0.4))", "ffloor1", "(0,0)"],
# [ "ffloor2 = (floor(-0.5), floor(-0.4))", "ffloor2", "(-1,-1)"],
# [ "fzero = (zero(77),zero(-41.2))", "fzero", "(0,0)"],
#
# # trig functions
# [ "t_sin = (sin(0),sin(1))","t_sin", self.predict(math.sin)],
# [ "t_cos = (cos(0),cos(1))","t_cos", self.predict(math.cos)],
# [ "t_tan = (tan(0),tan(1))","t_tan", self.predict(math.tan)],
# [ "t_cotan = (cotan(0),cotan(1))","t_cotan", self.predict(myfcotan)],
# [ "t_sinh = (sinh(0),sinh(1))","t_sinh", self.predict(math.sinh)],
# [ "t_cosh = (cosh(0),cosh(1))","t_cosh", self.predict(math.cosh)],
# [ "t_tanh = (tanh(0),tanh(1))","t_tanh", self.predict(math.tanh)],
# [ "t_cotanh = (cotanh(0),cotanh(1))","t_cotanh",
# self.predict(myfcotanh)],
#
# # inverse trig functions
# [ "t_asin = (asin(0),asin(1))","t_asin", self.predict(math.asin)],
# [ "t_acos = (acos(0),acos(1))","t_acos", self.predict(math.acos)],
# [ "t_atan = (atan(0),atan(1))","t_atan", self.predict(math.atan)],
# [ "t_atan2 = (atan2((1,1)),atan2((-1,-1)))",
# "t_atan2", "(0.785398,-2.35619)"],
# # these aren't in python stdlib, need to hard-code results
# [ "t_asinh = (asinh(0),asinh(1))","t_asinh", "(0,0.881374)" ],
# [ "t_acosh = (acosh(10),acosh(1))","t_acosh", "(2.99322,0)" ],
# [ "t_atanh = (atanh(0),atanh(0.5))","t_atanh", "(0,0.549306)" ],
#]
#tests += self.manufacture_tests("sin",cmath.sin)
#tests += self.manufacture_tests("cos",cmath.cos)
#tests += self.manufacture_tests("cosxx", mycosxx)
#tests += self.manufacture_tests("tan",cmath.tan)
#tests += self.manufacture_tests("sinh",cmath.sinh)
#tests += self.manufacture_tests("cosh",cmath.cosh)
#tests += self.manufacture_tests("tanh",cmath.tanh)
#tests += self.manufacture_tests("exp",cmath.exp)
#tests += self.manufacture_tests("sqrt",cmath.sqrt)
#tests += self.manufacture_tests("round",mycround)
#tests += self.manufacture_tests("ceil",mycceil)
#tests += self.manufacture_tests("floor",mycfloor)
#tests += self.manufacture_tests("trunc",myctrunc)
#tests += self.manufacture_tests("zero",myczero)
#tests += self.cotantests()
#tests += self.manufacture_tests("cotanh",mycotanh)
#tests += self.logtests()
#
#tests += self.asintests()
#tests += self.acostests()
#tests += self.atantests()
#tests += self.manufacture_tests("asinh",myasinh)
tests += self.manufacture_tests("acosh",myacosh)
#tests += self.atanhtests()
# construct a formula calculating all of the above,
# run it and compare results with expected values
src = 't_c6{\ninit: y = (1,2)\n' + \
string.join(map(lambda x : x[0], tests),"\n") + "\n}"
check = string.join(map(lambda x :self.inspect_complex(x[1]),tests),"\n")
exp = map(lambda x : "%s = %s" % (x[1],x[2]), tests)
self.assertCSays(src,"init",check,exp)
def assertCSays(self,source,section,check,result,dump=None):
asm = self.sourceToAsm(source,section,dump)
postamble = "t__end_f%s:\n%s\n" % (section,check)
c_code = self.makeC("", postamble)
output = self.compileAndRun(c_code)
if isinstance(result,types.ListType):
outputs = string.split(output,"\n")
for (exp,res) in zip(result,outputs):
self.assertEqual(exp,res)
else:
self.assertEqual(output,result)
def translate(self,s,options={}):
fractlexer.lexer.lineno = 1
pt = self.parser.parse(s)
#print pt.pretty()
t = translate.T(pt.children[0],options)
#print t.pretty()
self.assertNoErrors(t)
self.codegen = codegen.T(t.symbols,options)
return t
def translatecf(self,s,name,options={}):
fractlexer.lexer.lineno = 1
pt = self.parser.parse(s)
#print pt.pretty()
t = translate.ColorFunc(pt.children[0],name,options)
#print t.pretty()
self.assertNoErrors(t)
return t
def sourceToAsm(self,s,section,options={}):
t = self.translate(s,options)
self.codegen.generate_all_code(t.canon_sections[section])
if options.get("dumpAsm") == 1:
self.printAsm()
return self.codegen.out
def suite():
return unittest.makeSuite(Test,'test')
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| ericchill/gnofract4d | fract4d/test_stdlib.py | Python | bsd-3-clause | 14,992 | 0.014141 |
import nltk
import json
import sys
sys.path.append("../../")
import parser
from entity import Word
class ModelRewriter:
rewriteRules = None
rewriteRuleFileName = "model.txt"
@staticmethod
def loadModel():
inputFile = open("model.txt")
modelJsonString = inputFile.read()
inputFile.close()
modelMap = json.loads(modelJsonString)
ModelRewriter.rewriteRules = modelMap
return modelMap
def __init__(self):
if ModelRewriter.rewriteRules is None:
ModelRewriter.rewriteRules = ModelRewriter.loadModel()
    # This is the only method the user needs to invoke.
@staticmethod
def generateQuestions(inputSentence):
print inputSentence
sentencePOS = ModelRewriter.getPOSList(inputSentence)
nearestModels = ModelRewriter.getNearestModel(sentencePOS)
questions = []
for model in nearestModels:
tempQuestionList = ModelRewriter.generateQuestionFromModel(model, inputSentence)
questions += tempQuestionList
return questions
@staticmethod
def parseSentence(sentence):
questionWordMap = {}
text = nltk.word_tokenize(sentence)
posTag = nltk.pos_tag(text)
for i in xrange(len(text)):
word = Word()
word.index = i
word.pos = posTag[i][1]
questionWordMap[text[i]] = word
questionWordMap["WHOLE-SENTENCE-LIST"] = text
return questionWordMap
@staticmethod
def getNearestModel(sentencePOSList):
'''
return the nearest model
'''
nearestModelList = []
modelList = ModelRewriter.rewriteRules["template"]
for model in modelList:
posList = model["pos"]
if ModelRewriter.comparePOSList(sentencePOSList, posList):
nearestModelList.append(model)
return nearestModelList
@staticmethod
def comparePOSList(templateModelPOSList, newModelPOSList):
if len(templateModelPOSList) != len(newModelPOSList):
return False
else:
print templateModelPOSList
print newModelPOSList
for i in xrange(len(templateModelPOSList)):
tempTemplate = unicode(templateModelPOSList[i])
tempNew = unicode(newModelPOSList[i])
if tempTemplate != tempNew:
return False
return True
@staticmethod
def getPOSList(sentence):
tokenList = nltk.word_tokenize(sentence)
posList = nltk.pos_tag(tokenList)
resultList = []
for temp in posList:
resultList.append(temp[1])
return resultList
@staticmethod
def generateQuestionFromModel(model, inputSentence):
sentenceToken = nltk.word_tokenize(inputSentence)
questions = []
if model.has_key("Easy"):
questionList = model["Easy"]
for questionMap in questionList:
question = ModelRewriter.generateSingleQuestion(questionMap, sentenceToken)
if question is not None:
questions.append(question) #merge two lists
elif model.has_key["Medium"]:
pass
elif model.has_key["Hard"]:
pass
elif model.has_key["Ghost"]:
pass
return questions
@staticmethod
def generateSingleQuestion(modelMap, sentenceToken):
question = modelMap["question"]
indexList = modelMap["index"]
questionToken = nltk.word_tokenize(question.strip())
questionString = ""
indexList = indexList.strip().split()
for i in xrange(len(indexList)):
if indexList[i] == "-":
questionString += questionToken[i]
else:
questionString += sentenceToken[int(indexList[i].strip())]
questionString += " "
return questionString.strip()
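    # Worked example (assumed model entry): with sentenceToken
    # ["Chelsea", "lost", "at", "home"], question "Who did what ?" and
    # index "- 1 2 3", the method returns "Who lost at home".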
if __name__ == "__main__":
print ModelRewriter.getPOSList("He received two yellow cards as Chelsea lost at White Hart Lane for the first time since 1987.")
| cuijiaxing/nlp | rewriter/rules/rewrite_rule/generator.py | Python | gpl-2.0 | 4,155 | 0.004091 |
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
'''
Gauges Package contains
G{packagetree }
'''
| UManPychron/pychron | pychron/hardware/gauges/mks/__init__.py | Python | apache-2.0 | 788 | 0.001269 |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_vip6
short_description: Configure virtual IP for IPv6 in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall feature and vip6 category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
version_added: 2.9
firewall_vip6:
description:
- Configure virtual IP for IPv6.
default: null
type: dict
suboptions:
arp_reply:
description:
- Enable to respond to ARP requests for this virtual IP address. Enabled by default.
type: str
choices:
- disable
- enable
color:
description:
- Color of icon on the GUI.
type: int
comment:
description:
- Comment.
type: str
extip:
description:
- IP address or address range on the external interface that you want to map to an address or address range on the destination network.
type: str
extport:
description:
- Incoming port number range that you want to map to a port number range on the destination network.
type: str
http_cookie_age:
description:
                - Time in minutes that client web browsers should keep a cookie. Default is 60 minutes. 0 = no time limit.
type: int
http_cookie_domain:
description:
- Domain that HTTP cookie persistence should apply to.
type: str
http_cookie_domain_from_host:
description:
- Enable/disable use of HTTP cookie domain from host field in HTTP.
type: str
choices:
- disable
- enable
http_cookie_generation:
description:
- Generation of HTTP cookie to be accepted. Changing invalidates all existing cookies.
type: int
http_cookie_path:
description:
- Limit HTTP cookie persistence to the specified path.
type: str
http_cookie_share:
description:
- Control sharing of cookies across virtual servers. same-ip means a cookie from one virtual server can be used by another. Disable stops
cookie sharing.
type: str
choices:
- disable
- same-ip
http_ip_header:
description:
                - For HTTP multiplexing, enable to add the original client IP address in the X-Forwarded-For HTTP header.
type: str
choices:
- enable
- disable
http_ip_header_name:
description:
- For HTTP multiplexing, enter a custom HTTPS header name. The original client IP address is added to this header. If empty,
X-Forwarded-For is used.
type: str
http_multiplex:
description:
- Enable/disable HTTP multiplexing.
type: str
choices:
- enable
- disable
https_cookie_secure:
description:
- Enable/disable verification that inserted HTTPS cookies are secure.
type: str
choices:
- disable
- enable
id:
description:
- Custom defined ID.
type: int
ldb_method:
description:
- Method used to distribute sessions to real servers.
type: str
choices:
- static
- round-robin
- weighted
- least-session
- least-rtt
- first-alive
- http-host
mappedip:
description:
- Mapped IP address range in the format startIP-endIP.
type: str
mappedport:
description:
- Port number range on the destination network to which the external port number range is mapped.
type: str
max_embryonic_connections:
description:
- Maximum number of incomplete connections.
type: int
monitor:
description:
- Name of the health check monitor to use when polling to determine a virtual server's connectivity status.
type: list
suboptions:
name:
description:
- Health monitor name. Source firewall.ldb-monitor.name.
required: true
type: str
name:
description:
- Virtual ip6 name.
required: true
type: str
outlook_web_access:
description:
- Enable to add the Front-End-Https header for Microsoft Outlook Web Access.
type: str
choices:
- disable
- enable
persistence:
description:
- Configure how to make sure that clients connect to the same server every time they make a request that is part of the same session.
type: str
choices:
- none
- http-cookie
- ssl-session-id
portforward:
description:
- Enable port forwarding.
type: str
choices:
- disable
- enable
protocol:
description:
- Protocol to use when forwarding packets.
type: str
choices:
- tcp
- udp
- sctp
realservers:
description:
- Select the real servers that this server load balancing VIP will distribute traffic to.
type: list
suboptions:
client_ip:
description:
- Only clients in this IP range can connect to this real server.
type: str
healthcheck:
description:
- Enable to check the responsiveness of the real server before forwarding traffic.
type: str
choices:
- disable
- enable
- vip
holddown_interval:
description:
- Time in seconds that the health check monitor continues to monitor an unresponsive server that should be active.
type: int
http_host:
description:
- HTTP server domain name in HTTP header.
type: str
id:
description:
- Real server ID.
required: true
type: int
ip:
description:
- IPv6 address of the real server.
type: str
max_connections:
description:
                            - Max number of active connections that can be directed to the real server. When reached, sessions are sent to other real servers.
type: int
monitor:
description:
- Name of the health check monitor to use when polling to determine a virtual server's connectivity status. Source firewall
.ldb-monitor.name.
type: str
port:
description:
- Port for communicating with the real server. Required if port forwarding is enabled.
type: int
status:
description:
- Set the status of the real server to active so that it can accept traffic, or on standby or disabled so no traffic is sent.
type: str
choices:
- active
- standby
- disable
weight:
description:
- Weight of the real server. If weighted load balancing is enabled, the server with the highest weight gets more connections.
type: int
server_type:
description:
- Protocol to be load balanced by the virtual server (also called the server load balance virtual IP).
type: str
choices:
- http
- https
- imaps
- pop3s
- smtps
- ssl
- tcp
- udp
- ip
src_filter:
description:
- "Source IP6 filter (x:x:x:x:x:x:x:x/x). Separate addresses with spaces."
type: list
suboptions:
range:
description:
- Source-filter range.
required: true
type: str
ssl_algorithm:
description:
- Permitted encryption algorithms for SSL sessions according to encryption strength.
type: str
choices:
- high
- medium
- low
- custom
ssl_certificate:
description:
- The name of the SSL certificate to use for SSL acceleration. Source vpn.certificate.local.name.
type: str
ssl_cipher_suites:
description:
- SSL/TLS cipher suites acceptable from a client, ordered by priority.
type: list
suboptions:
cipher:
description:
- Cipher suite name.
type: str
choices:
- TLS-RSA-WITH-3DES-EDE-CBC-SHA
- TLS-DHE-RSA-WITH-DES-CBC-SHA
- TLS-DHE-DSS-WITH-DES-CBC-SHA
priority:
description:
- SSL/TLS cipher suites priority.
required: true
type: int
versions:
description:
- SSL/TLS versions that the cipher suite can be used with.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
ssl_client_fallback:
description:
- Enable/disable support for preventing Downgrade Attacks on client connections (RFC 7507).
type: str
choices:
- disable
- enable
ssl_client_renegotiation:
description:
- Allow, deny, or require secure renegotiation of client sessions to comply with RFC 5746.
type: str
choices:
- allow
- deny
- secure
ssl_client_session_state_max:
description:
- Maximum number of client to FortiGate SSL session states to keep.
type: int
ssl_client_session_state_timeout:
description:
- Number of minutes to keep client to FortiGate SSL session state.
type: int
ssl_client_session_state_type:
description:
- How to expire SSL sessions for the segment of the SSL connection between the client and the FortiGate.
type: str
choices:
- disable
- time
- count
- both
ssl_dh_bits:
description:
- Number of bits to use in the Diffie-Hellman exchange for RSA encryption of SSL sessions.
type: str
choices:
- 768
- 1024
- 1536
- 2048
- 3072
- 4096
ssl_hpkp:
description:
- Enable/disable including HPKP header in response.
type: str
choices:
- disable
- enable
- report-only
ssl_hpkp_age:
description:
- Number of minutes the web browser should keep HPKP.
type: int
ssl_hpkp_backup:
description:
- Certificate to generate backup HPKP pin from. Source vpn.certificate.local.name vpn.certificate.ca.name.
type: str
ssl_hpkp_include_subdomains:
description:
- Indicate that HPKP header applies to all subdomains.
type: str
choices:
- disable
- enable
ssl_hpkp_primary:
description:
- Certificate to generate primary HPKP pin from. Source vpn.certificate.local.name vpn.certificate.ca.name.
type: str
ssl_hpkp_report_uri:
description:
- URL to report HPKP violations to.
type: str
ssl_hsts:
description:
- Enable/disable including HSTS header in response.
type: str
choices:
- disable
- enable
ssl_hsts_age:
description:
- Number of seconds the client should honour the HSTS setting.
type: int
ssl_hsts_include_subdomains:
description:
- Indicate that HSTS header applies to all subdomains.
type: str
choices:
- disable
- enable
ssl_http_location_conversion:
description:
- Enable to replace HTTP with HTTPS in the reply's Location HTTP header field.
type: str
choices:
- enable
- disable
ssl_http_match_host:
description:
- Enable/disable HTTP host matching for location conversion.
type: str
choices:
- enable
- disable
ssl_max_version:
description:
- Highest SSL/TLS version acceptable from a client.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
ssl_min_version:
description:
- Lowest SSL/TLS version acceptable from a client.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
ssl_mode:
description:
- Apply SSL offloading between the client and the FortiGate (half) or from the client to the FortiGate and from the FortiGate to the
server (full).
type: str
choices:
- half
- full
ssl_pfs:
description:
- Select the cipher suites that can be used for SSL perfect forward secrecy (PFS). Applies to both client and server sessions.
type: str
choices:
- require
- deny
- allow
ssl_send_empty_frags:
description:
- Enable/disable sending empty fragments to avoid CBC IV attacks (SSL 3.0 & TLS 1.0 only). May need to be disabled for compatibility with
older systems.
type: str
choices:
- enable
- disable
ssl_server_algorithm:
description:
- Permitted encryption algorithms for the server side of SSL full mode sessions according to encryption strength.
type: str
choices:
- high
- medium
- low
- custom
- client
ssl_server_cipher_suites:
description:
- SSL/TLS cipher suites to offer to a server, ordered by priority.
type: list
suboptions:
cipher:
description:
- Cipher suite name.
type: str
choices:
- TLS-RSA-WITH-3DES-EDE-CBC-SHA
- TLS-DHE-RSA-WITH-DES-CBC-SHA
- TLS-DHE-DSS-WITH-DES-CBC-SHA
priority:
description:
- SSL/TLS cipher suites priority.
required: true
type: int
versions:
description:
- SSL/TLS versions that the cipher suite can be used with.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
ssl_server_max_version:
description:
- Highest SSL/TLS version acceptable from a server. Use the client setting by default.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
- client
ssl_server_min_version:
description:
- Lowest SSL/TLS version acceptable from a server. Use the client setting by default.
type: str
choices:
- ssl-3.0
- tls-1.0
- tls-1.1
- tls-1.2
- client
ssl_server_session_state_max:
description:
- Maximum number of FortiGate to Server SSL session states to keep.
type: int
ssl_server_session_state_timeout:
description:
- Number of minutes to keep FortiGate to Server SSL session state.
type: int
ssl_server_session_state_type:
description:
- How to expire SSL sessions for the segment of the SSL connection between the server and the FortiGate.
type: str
choices:
- disable
- time
- count
- both
type:
description:
- Configure a static NAT or server load balance VIP.
type: str
choices:
- static-nat
- server-load-balance
uuid:
description:
- Universally Unique Identifier (UUID; automatically assigned but can be manually reset).
type: str
weblogic_server:
description:
- Enable to add an HTTP header to indicate SSL offloading for a WebLogic server.
type: str
choices:
- disable
- enable
websphere_server:
description:
- Enable to add an HTTP header to indicate SSL offloading for a WebSphere server.
type: str
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure virtual IP for IPv6.
fortios_firewall_vip6:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_vip6:
arp_reply: "disable"
color: "4"
comment: "Comment."
extip: "<your_own_value>"
extport: "<your_own_value>"
http_cookie_age: "8"
http_cookie_domain: "<your_own_value>"
http_cookie_domain_from_host: "disable"
http_cookie_generation: "11"
http_cookie_path: "<your_own_value>"
http_cookie_share: "disable"
http_ip_header: "enable"
http_ip_header_name: "<your_own_value>"
http_multiplex: "enable"
https_cookie_secure: "disable"
id: "18"
ldb_method: "static"
mappedip: "<your_own_value>"
mappedport: "<your_own_value>"
max_embryonic_connections: "22"
monitor:
-
name: "default_name_24 (source firewall.ldb-monitor.name)"
name: "default_name_25"
outlook_web_access: "disable"
persistence: "none"
portforward: "disable"
protocol: "tcp"
realservers:
-
client_ip: "<your_own_value>"
healthcheck: "disable"
holddown_interval: "33"
http_host: "myhostname"
id: "35"
ip: "<your_own_value>"
max_connections: "37"
monitor: "<your_own_value> (source firewall.ldb-monitor.name)"
port: "39"
status: "active"
weight: "41"
server_type: "http"
src_filter:
-
range: "<your_own_value>"
ssl_algorithm: "high"
ssl_certificate: "<your_own_value> (source vpn.certificate.local.name)"
ssl_cipher_suites:
-
cipher: "TLS-RSA-WITH-3DES-EDE-CBC-SHA"
priority: "49"
versions: "ssl-3.0"
ssl_client_fallback: "disable"
ssl_client_renegotiation: "allow"
ssl_client_session_state_max: "53"
ssl_client_session_state_timeout: "54"
ssl_client_session_state_type: "disable"
ssl_dh_bits: "768"
ssl_hpkp: "disable"
ssl_hpkp_age: "58"
ssl_hpkp_backup: "<your_own_value> (source vpn.certificate.local.name vpn.certificate.ca.name)"
ssl_hpkp_include_subdomains: "disable"
ssl_hpkp_primary: "<your_own_value> (source vpn.certificate.local.name vpn.certificate.ca.name)"
ssl_hpkp_report_uri: "<your_own_value>"
ssl_hsts: "disable"
ssl_hsts_age: "64"
ssl_hsts_include_subdomains: "disable"
ssl_http_location_conversion: "enable"
ssl_http_match_host: "enable"
ssl_max_version: "ssl-3.0"
ssl_min_version: "ssl-3.0"
ssl_mode: "half"
ssl_pfs: "require"
ssl_send_empty_frags: "enable"
ssl_server_algorithm: "high"
ssl_server_cipher_suites:
-
cipher: "TLS-RSA-WITH-3DES-EDE-CBC-SHA"
priority: "76"
versions: "ssl-3.0"
ssl_server_max_version: "ssl-3.0"
ssl_server_min_version: "ssl-3.0"
ssl_server_session_state_max: "80"
ssl_server_session_state_timeout: "81"
ssl_server_session_state_type: "disable"
type: "static-nat"
uuid: "<your_own_value>"
weblogic_server: "disable"
websphere_server: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_vip6_data(json):
option_list = ['arp_reply', 'color', 'comment',
'extip', 'extport', 'http_cookie_age',
'http_cookie_domain', 'http_cookie_domain_from_host', 'http_cookie_generation',
'http_cookie_path', 'http_cookie_share', 'http_ip_header',
'http_ip_header_name', 'http_multiplex', 'https_cookie_secure',
'id', 'ldb_method', 'mappedip',
'mappedport', 'max_embryonic_connections', 'monitor',
'name', 'outlook_web_access', 'persistence',
'portforward', 'protocol', 'realservers',
'server_type', 'src_filter', 'ssl_algorithm',
'ssl_certificate', 'ssl_cipher_suites', 'ssl_client_fallback',
'ssl_client_renegotiation', 'ssl_client_session_state_max', 'ssl_client_session_state_timeout',
'ssl_client_session_state_type', 'ssl_dh_bits', 'ssl_hpkp',
'ssl_hpkp_age', 'ssl_hpkp_backup', 'ssl_hpkp_include_subdomains',
'ssl_hpkp_primary', 'ssl_hpkp_report_uri', 'ssl_hsts',
'ssl_hsts_age', 'ssl_hsts_include_subdomains', 'ssl_http_location_conversion',
'ssl_http_match_host', 'ssl_max_version', 'ssl_min_version',
'ssl_mode', 'ssl_pfs', 'ssl_send_empty_frags',
'ssl_server_algorithm', 'ssl_server_cipher_suites', 'ssl_server_max_version',
'ssl_server_min_version', 'ssl_server_session_state_max', 'ssl_server_session_state_timeout',
'ssl_server_session_state_type', 'type', 'uuid',
'weblogic_server', 'websphere_server']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
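# Sketch of the filtering (assumed input): {'name': 'vip1', 'comment': None,
# 'unknown_key': 1} is reduced to {'name': 'vip1'} -- None values and keys
# outside option_list are dropped.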
def underscore_to_hyphen(data):
if isinstance(data, list):
        # Rebinding the loop variable would not update the list, so write the
        # converted element back by index.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
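# For instance (assumed input), {'http_cookie_age': 8, 'src_filter':
# [{'range': '2001:db8::/64'}]} becomes {'http-cookie-age': 8,
# 'src-filter': [{'range': '2001:db8::/64'}]} -- keys are converted
# recursively through nested dicts and lists.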
def firewall_vip6(data, fos):
vdom = data['vdom']
state = data['state']
firewall_vip6_data = data['firewall_vip6']
filtered_data = underscore_to_hyphen(filter_firewall_vip6_data(firewall_vip6_data))
if state == "present":
return fos.set('firewall',
'vip6',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'vip6',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
    return status['status'] == "success" or \
        (status['http_method'] == "DELETE" and status['http_status'] == 404)
def fortios_firewall(data, fos):
if data['firewall_vip6']:
resp = firewall_vip6(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"firewall_vip6": {
"required": False, "type": "dict", "default": None,
"options": {
"arp_reply": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"color": {"required": False, "type": "int"},
"comment": {"required": False, "type": "str"},
"extip": {"required": False, "type": "str"},
"extport": {"required": False, "type": "str"},
"http_cookie_age": {"required": False, "type": "int"},
"http_cookie_domain": {"required": False, "type": "str"},
"http_cookie_domain_from_host": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"http_cookie_generation": {"required": False, "type": "int"},
"http_cookie_path": {"required": False, "type": "str"},
"http_cookie_share": {"required": False, "type": "str",
"choices": ["disable", "same-ip"]},
"http_ip_header": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"http_ip_header_name": {"required": False, "type": "str"},
"http_multiplex": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"https_cookie_secure": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"id": {"required": False, "type": "int"},
"ldb_method": {"required": False, "type": "str",
"choices": ["static", "round-robin", "weighted",
"least-session", "least-rtt", "first-alive",
"http-host"]},
"mappedip": {"required": False, "type": "str"},
"mappedport": {"required": False, "type": "str"},
"max_embryonic_connections": {"required": False, "type": "int"},
"monitor": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"name": {"required": True, "type": "str"},
"outlook_web_access": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"persistence": {"required": False, "type": "str",
"choices": ["none", "http-cookie", "ssl-session-id"]},
"portforward": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"protocol": {"required": False, "type": "str",
"choices": ["tcp", "udp", "sctp"]},
"realservers": {"required": False, "type": "list",
"options": {
"client_ip": {"required": False, "type": "str"},
"healthcheck": {"required": False, "type": "str",
"choices": ["disable", "enable", "vip"]},
"holddown_interval": {"required": False, "type": "int"},
"http_host": {"required": False, "type": "str"},
"id": {"required": True, "type": "int"},
"ip": {"required": False, "type": "str"},
"max_connections": {"required": False, "type": "int"},
"monitor": {"required": False, "type": "str"},
"port": {"required": False, "type": "int"},
"status": {"required": False, "type": "str",
"choices": ["active", "standby", "disable"]},
"weight": {"required": False, "type": "int"}
}},
"server_type": {"required": False, "type": "str",
"choices": ["http", "https", "imaps",
"pop3s", "smtps", "ssl",
"tcp", "udp", "ip"]},
"src_filter": {"required": False, "type": "list",
"options": {
"range": {"required": True, "type": "str"}
}},
"ssl_algorithm": {"required": False, "type": "str",
"choices": ["high", "medium", "low",
"custom"]},
"ssl_certificate": {"required": False, "type": "str"},
"ssl_cipher_suites": {"required": False, "type": "list",
"options": {
"cipher": {"required": False, "type": "str",
"choices": ["TLS-RSA-WITH-3DES-EDE-CBC-SHA", "TLS-DHE-RSA-WITH-DES-CBC-SHA",
"TLS-DHE-DSS-WITH-DES-CBC-SHA"]},
"priority": {"required": True, "type": "int"},
"versions": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2"]}
}},
"ssl_client_fallback": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"ssl_client_renegotiation": {"required": False, "type": "str",
"choices": ["allow", "deny", "secure"]},
"ssl_client_session_state_max": {"required": False, "type": "int"},
"ssl_client_session_state_timeout": {"required": False, "type": "int"},
"ssl_client_session_state_type": {"required": False, "type": "str",
"choices": ["disable", "time", "count",
"both"]},
"ssl_dh_bits": {"required": False, "type": "str",
"choices": ["768", "1024", "1536",
"2048", "3072", "4096"]},
"ssl_hpkp": {"required": False, "type": "str",
"choices": ["disable", "enable", "report-only"]},
"ssl_hpkp_age": {"required": False, "type": "int"},
"ssl_hpkp_backup": {"required": False, "type": "str"},
"ssl_hpkp_include_subdomains": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"ssl_hpkp_primary": {"required": False, "type": "str"},
"ssl_hpkp_report_uri": {"required": False, "type": "str"},
"ssl_hsts": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"ssl_hsts_age": {"required": False, "type": "int"},
"ssl_hsts_include_subdomains": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"ssl_http_location_conversion": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssl_http_match_host": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssl_max_version": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2"]},
"ssl_min_version": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2"]},
"ssl_mode": {"required": False, "type": "str",
"choices": ["half", "full"]},
"ssl_pfs": {"required": False, "type": "str",
"choices": ["require", "deny", "allow"]},
"ssl_send_empty_frags": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssl_server_algorithm": {"required": False, "type": "str",
"choices": ["high", "medium", "low",
"custom", "client"]},
"ssl_server_cipher_suites": {"required": False, "type": "list",
"options": {
"cipher": {"required": False, "type": "str",
"choices": ["TLS-RSA-WITH-3DES-EDE-CBC-SHA", "TLS-DHE-RSA-WITH-DES-CBC-SHA",
"TLS-DHE-DSS-WITH-DES-CBC-SHA"]},
"priority": {"required": True, "type": "int"},
"versions": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2"]}
}},
"ssl_server_max_version": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2", "client"]},
"ssl_server_min_version": {"required": False, "type": "str",
"choices": ["ssl-3.0", "tls-1.0", "tls-1.1",
"tls-1.2", "client"]},
"ssl_server_session_state_max": {"required": False, "type": "int"},
"ssl_server_session_state_timeout": {"required": False, "type": "int"},
"ssl_server_session_state_type": {"required": False, "type": "str",
"choices": ["disable", "time", "count",
"both"]},
"type": {"required": False, "type": "str",
"choices": ["static-nat", "server-load-balance"]},
"uuid": {"required": False, "type": "str"},
"weblogic_server": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"websphere_server": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| amenonsen/ansible | lib/ansible/modules/network/fortios/fortios_firewall_vip6.py | Python | gpl-3.0 | 46,189 | 0.002728 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-11-21 18:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('Community', '0033_auto_20171112_1742'),
]
operations = [
migrations.CreateModel(
name='SongSuggestions',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('suggestions', models.TextField(help_text="Please list links to songs, we can't play it with just a name")),
('community', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Community.CommunityInst')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterField(
model_name='communityextraratings',
name='overall_rating',
field=models.PositiveIntegerField(choices=[(1, '1'), (2, '2'), (3, 'e'), (4, '4'), (5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10')], default=5),
),
migrations.AlterField(
model_name='communitygameratings',
name='game_rating',
field=models.PositiveIntegerField(choices=[(1, '1'), (2, '2'), (3, 'e'), (4, '4'), (5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10')], default=5),
),
]
| ByrdOfAFeather/AlphaTrion | Community/migrations/0034_auto_20171121_1316.py | Python | mit | 1,619 | 0.003706 |
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from neutron.db import l3_db
from neutron.extensions import vpnaas
from neutron import manager
from neutron.plugins.common import constants
class VpnReferenceValidator(object):
"""Baseline validation routines for VPN resources."""
IP_MIN_MTU = {4: 68, 6: 1280}
@property
def l3_plugin(self):
try:
return self._l3_plugin
except AttributeError:
self._l3_plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
return self._l3_plugin
@property
def core_plugin(self):
try:
return self._core_plugin
except AttributeError:
self._core_plugin = manager.NeutronManager.get_plugin()
return self._core_plugin
def _check_dpd(self, ipsec_sitecon):
"""Ensure that DPD timeout is greater than DPD interval."""
if ipsec_sitecon['dpd_timeout'] <= ipsec_sitecon['dpd_interval']:
raise vpnaas.IPsecSiteConnectionDpdIntervalValueError(
attr='dpd_timeout')
def _check_mtu(self, context, mtu, ip_version):
if mtu < VpnReferenceValidator.IP_MIN_MTU[ip_version]:
raise vpnaas.IPsecSiteConnectionMtuError(mtu=mtu,
version=ip_version)
def assign_sensible_ipsec_sitecon_defaults(self, ipsec_sitecon,
prev_conn=None):
"""Provide defaults for optional items, if missing.
Flatten the nested DPD information, and set default values for
any missing information. For connection updates, the previous
values will be used as defaults for any missing items.
"""
if not prev_conn:
prev_conn = {'dpd_action': 'hold',
'dpd_interval': 30,
'dpd_timeout': 120}
dpd = ipsec_sitecon.get('dpd', {})
ipsec_sitecon['dpd_action'] = dpd.get('action',
prev_conn['dpd_action'])
ipsec_sitecon['dpd_interval'] = dpd.get('interval',
prev_conn['dpd_interval'])
ipsec_sitecon['dpd_timeout'] = dpd.get('timeout',
prev_conn['dpd_timeout'])
def validate_ipsec_site_connection(self, context, ipsec_sitecon,
ip_version):
"""Reference implementation of validation for IPSec connection."""
self._check_dpd(ipsec_sitecon)
mtu = ipsec_sitecon.get('mtu')
if mtu:
self._check_mtu(context, mtu, ip_version)
def _check_router(self, context, router_id):
router = self.l3_plugin.get_router(context, router_id)
if not router.get(l3_db.EXTERNAL_GW_INFO):
raise vpnaas.RouterIsNotExternal(router_id=router_id)
def _check_subnet_id(self, context, router_id, subnet_id):
ports = self.core_plugin.get_ports(
context,
filters={
'fixed_ips': {'subnet_id': [subnet_id]},
'device_id': [router_id]})
if not ports:
raise vpnaas.SubnetIsNotConnectedToRouter(
subnet_id=subnet_id,
router_id=router_id)
def validate_vpnservice(self, context, vpnservice):
self._check_router(context, vpnservice['router_id'])
self._check_subnet_id(context, vpnservice['router_id'],
vpnservice['subnet_id'])
def validate_vpnservice_ngfw(self, context, vpnservice):
try:
            if 'description' in vpnservice:
description = json.loads(vpnservice["description"])
else:
return
except ValueError:
raise vpnaas.DescriptionInvalid(description=vpnservice["description"])
else:
tenant_router_id = description.get("tenant_router_id", None)
if not tenant_router_id:
raise vpnaas.TenantRouterIdMustBeSet()
subnets = description.get("subnets", [])
if not subnets:
raise vpnaas.SubnetMustBeSet()
for subnet in subnets:
self._check_subnet_id(context, tenant_router_id,
subnet) | nash-x/hws | neutron/db/vpn/vpn_validator.py | Python | apache-2.0 | 4,980 | 0.000602 |
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
SPELL=u'láogōng'
CN=u'劳宫'
NAME=u'laogong21'
CHANNEL='pericardium'
CHANNEL_FULLNAME='PericardiumChannelofHand-Jueyin'
SEQ='PC8'
if __name__ == '__main__':
pass
| sinotradition/meridian | meridian/acupoints/laogong21.py | Python | apache-2.0 | 241 | 0.034043 |
#!/usr/bin/env python
'''
Oct 10, 2017: Pasi Korhonen, The University of Melbourne
Simplifies system calls, logs and pipe interaction.
'''
import sys, os, time #, ConfigParser
import shlex, subprocess, errno
from threading import Timer
###############################################################################
class Base:
    '''Small helper class that wraps shell execution, logging and pipe I/O.
    '''
###########################################################################
def __init__(self, logHandle = subprocess.PIPE):
        '''Initialises the file name, file handle and log target.
        '''
self.fname = None
self.handle = None
self.log = logHandle
###########################################################################
def ropen(self, fname):
''' Allow one to read data either from pipe or file
'''
self.handle = None
self.fname = fname
if fname == '-':
self.handle = sys.stdin.readlines()
else:
self.handle = open(fname, 'r')
return self.handle
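    # Usage sketch (file name assumed): ropen("-") reads all lines from stdin,
    # while ropen("data.txt") returns an open file handle.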
###########################################################################
def rclose(self):
        ''' Closes the file handle unless the input was read from the stdin pipe.
'''
if self.fname != '-': self.handle.close()
###########################################################################
def createDir(self, mydir):
'''Creates a directory for the assembly if one does not exist yet.
'''
try:
os.makedirs(mydir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
###########################################################################
def isNumber(self, myStr):
'''
'''
retVal = True
try:
float(myStr)
except ValueError:
retVal = False
return retVal
###########################################################################
def logTime(self, myStr = ""):
'''
'''
if myStr != "": myStr = myStr + ':'
rt = time.localtime()
self.log.write("\n------------------------------------------------------------\n")
self.log.write("%s %d,%d,%d %d:%d:%d\n" %(myStr, rt.tm_year, rt.tm_mon, rt.tm_mday, rt.tm_hour, rt.tm_min, rt.tm_sec))
self.log.write("------------------------------------------------------------\n\n")
###########################################################################
def setLogHandle(self, handle):
        ''' The log handle should always be set because a full pipe buffer can stall processing.
'''
self.log = handle
###########################################################################
def closeLogHandle(self):
        ''' Closes the log handle.
'''
self.log.close()
###########################################################################
def logger(self, myStr):
''' Writes a message to the log file
'''
self.log.write("## %s\n" %myStr)
###########################################################################
def shell(self, myStr, doPrint = True, myStdout = False, ignoreFailure = False, log = True):
'''Runs given command in a shell and waits for the command to finish.
'''
if log == True:
self.log.write("# %s\n" %myStr)
if doPrint == True:
print("# " + myStr, file=sys.stderr) # is printed as comment line which is easy to remove
if myStdout == True:
p = subprocess.Popen(myStr, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
else:
p = subprocess.Popen(myStr, stdout=self.log, stderr=subprocess.STDOUT, shell=True)
retVal = p.wait()
if retVal != 0 and ignoreFailure == False:
if log == True:
self.logger("# FAILED (%d): %s" %(retVal, myStr))
print("# FAILED (%d): %s" %(retVal, myStr), file = sys.stderr)
sys.exit(retVal)
return p
###########################################################################
def _killProc(self, proc, timeout):
'''
'''
timeout["value"] = True
proc.kill()
###########################################################################
def run(self, cmd, timeoutSec = None, doPrint = True, myStdout = True, ignoreFailure = False, log = True):
        ''' Runs given command in a subprocess and waits for the command to finish.
            Retries up to 3 times on failure, including timeouts.
'''
retryCnt = 0
while retryCnt < 3:
if log == True:
self.log.write("# %s\n" %cmd)
if doPrint == True:
print("# " + cmd, file = sys.stderr) # is printed as comment line which is easy to remove
if myStdout == True:
proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
proc = subprocess.Popen(shlex.split(cmd), stdout=self.log, stderr=subprocess.PIPE)
if timeoutSec != None:
timeout = {"value": False}
timer = Timer(timeoutSec, self._killProc, [proc, timeout])
timer.start()
stdout, stderr = proc.communicate()
if timeoutSec != None:
timer.cancel()
if (proc.returncode > 1 or proc.returncode < 0) and ignoreFailure == False:
retryCnt += 1
if retryCnt >= 3: # Tries three times
self.logger("## FAILED(%d): %s. Three failures. Exiting ..." %(proc.returncode, cmd))
print("## FAILED(%d): %s. Three failures. Exiting ..." %(proc.returncode, cmd), file = sys.stderr)
sys.exit(proc.returncode)
if log == True:
self.logger("## FAILED(%d): %s. Retrying ..." %(proc.returncode, cmd))
print("## FAILED(%d): %s. Retrying ..." %(proc.returncode, cmd), file = sys.stderr)
time.sleep(120) # Wait 2 minutes before the next try
else:
break
return proc
'''
###########################################################################
def readSection(self, config, section, sep=None):
#''Reads a section from config parser and returns it a list of item rows
#''
mylist = []
try:
lines = config.options(section)
lines = sorted(lines)
for line in lines:
items = config.get(section, line).split()
if sep != None: items = config.get(section, line).split(sep)
try:
if items[0][0] != '#': # Comment line
mylist.append(items)
except IndexError:
pass
except ConfigParser.NoSectionError:
print("# WARNING: Base::readSection: section '%s' not found ..." %section)
return mylist
'''
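###############################################################################
if __name__ == '__main__':
    # Illustrative usage sketch only (not part of the original module).
    # Assumes a POSIX shell; the log is written to stderr instead of a file.
    base = Base(logHandle=sys.stderr)
    base.logTime("demo")
    proc = base.shell("echo hello", myStdout=True)
    print(proc.stdout.read().decode().strip())  # -> hello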
| vetscience/Tools | Utils/base.py | Python | bsd-3-clause | 7,040 | 0.014773 |
#!/usr/bin/python2.7
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.api import datastore_errors
from model import *
from photo import create_photo, PhotoError
from utils import *
from detect_spam import SpamDetector
import extend
import reveal
import subscribe
from django.utils.translation import ugettext as _
from urlparse import urlparse
# TODO(jessien): Clean up duplicate code here and in create.py.
# https://github.com/google/personfinder/issues/157
# how many days left before we warn about imminent expiration.
# Make this at least 1.
EXPIRY_WARNING_THRESHOLD = 7
class Handler(BaseHandler):
def get(self):
# Check the request parameters.
if not self.params.id:
return self.error(404, _('No person id was specified.'))
try:
person = Person.get(self.repo, self.params.id)
# TODO(ichikawa) Consider removing this "except" clause.
# I don't think ValueError is thrown here.
except ValueError:
return self.error(404,
_("This person's entry does not exist or has been deleted."))
if not person:
return self.error(404,
_("This person's entry does not exist or has been deleted."))
standalone = self.request.get('standalone')
# Render the page.
enable_notes_url = self.get_url('/enable_notes', id=self.params.id)
self.render('add_note.html',
person=person,
standalone=standalone,
enable_notes_url=enable_notes_url)
def post(self):
"""Post a note in person's record view page"""
if not self.params.text:
return self.error(
200, _('Message is required. Please go back and try again.'))
if not self.params.author_name:
return self.error(
200, _('Your name is required in the "About you" section. '
'Please go back and try again.'))
if (self.params.status == 'is_note_author' and
not self.params.author_made_contact):
return self.error(
200, _('Please check that you have been in contact with '
'the person after the disaster, or change the '
'"Status of this person" field.'))
if (self.params.status == 'believed_dead' and
not self.config.allow_believed_dead_via_ui):
return self.error(
200, _('Not authorized to post notes with the status '
'"believed_dead".'))
person = Person.get(self.repo, self.params.id)
if person.notes_disabled:
return self.error(
200, _('The author has disabled status updates '
'on this record.'))
# If a photo was uploaded, create and store a new Photo entry and get
# the URL where it's served; otherwise, use the note_photo_url provided.
photo, photo_url = (None, self.params.note_photo_url)
if self.params.note_photo is not None:
try:
photo, photo_url = create_photo(self.params.note_photo, self)
except PhotoError, e:
return self.error(400, e.message)
photo.put()
spam_detector = SpamDetector(self.config.bad_words)
spam_score = spam_detector.estimate_spam_score(self.params.text)
if (spam_score > 0):
note = NoteWithBadWords.create_original(
self.repo,
entry_date=get_utcnow(),
person_record_id=self.params.id,
author_name=self.params.author_name,
author_email=self.params.author_email,
author_phone=self.params.author_phone,
source_date=get_utcnow(),
author_made_contact=bool(self.params.author_made_contact),
status=self.params.status,
email_of_found_person=self.params.email_of_found_person,
phone_of_found_person=self.params.phone_of_found_person,
last_known_location=self.params.last_known_location,
text=self.params.text,
photo=photo,
photo_url=photo_url,
spam_score=spam_score,
confirmed=False)
# Write the new NoteWithBadWords to the datastore
note.put_new()
# When the note is detected as spam, we do not update person record
# or log action. We ask the note author for confirmation first.
return self.redirect('/post_flagged_note', id=note.get_record_id(),
author_email=note.author_email,
repo=self.repo)
else:
note = Note.create_original(
self.repo,
entry_date=get_utcnow(),
person_record_id=self.params.id,
author_name=self.params.author_name,
author_email=self.params.author_email,
author_phone=self.params.author_phone,
source_date=get_utcnow(),
author_made_contact=bool(self.params.author_made_contact),
status=self.params.status,
email_of_found_person=self.params.email_of_found_person,
phone_of_found_person=self.params.phone_of_found_person,
last_known_location=self.params.last_known_location,
text=self.params.text,
photo=photo,
photo_url=photo_url)
# Write the new regular Note to the datastore
note.put_new()
# Specially log 'believed_dead'.
if note.status == 'believed_dead':
UserActionLog.put_new(
'mark_dead', note, person.primary_full_name,
self.request.remote_addr)
# Specially log a switch to an alive status.
if (note.status in ['believed_alive', 'is_note_author'] and
person.latest_status not in ['believed_alive', 'is_note_author']):
UserActionLog.put_new('mark_alive', note, person.primary_full_name)
# Update the Person based on the Note.
if person:
person.update_from_note(note)
# Send notification to all people
# who subscribed to updates on this person
subscribe.send_notifications(self, person, [note])
# write the updated person record to datastore
db.put(person)
# If user wants to subscribe to updates, redirect to the subscribe page
if self.params.subscribe:
return self.redirect('/subscribe',
id=person.record_id,
subscribe_email=self.params.author_email,
context='add_note')
# Redirect to view page so the browser's back button works properly.
self.redirect('/view', id=self.params.id, query=self.params.query)
| AwesomeTurtle/personfinder | app/add_note.py | Python | apache-2.0 | 7,586 | 0.000791 |
import copy
import confu.schema
import vaping
import vaping.config
import vaping.io
from vaping.plugins import PluginConfigSchema
try:
import vodka
import vodka.data
except ImportError:
pass
try:
import graphsrv
import graphsrv.group
except ImportError:
graphsrv = None
def probe_to_graphsrv(probe):
"""
takes a probe instance and generates
a graphsrv data group for it using the
probe's config
"""
config = probe.config
# manual group set up via `group` config key
if "group" in config:
source, group = config["group"].split(".")
group_field = config.get("group_field", "host")
group_value = config[group_field]
graphsrv.group.add(
source, group, {group_value: {group_field: group_value}}, **config
)
return
# automatic group setup for fping
for group_name, group_config in list(probe.groups.items()):
if "hosts" not in group_config:
continue
r = {}
for host in group_config.get("hosts"):
if isinstance(host, dict):
r[host["host"]] = host
else:
r[host] = {"host": host}
graphsrv.group.add(probe.name, group_name, r, **group_config)
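# Example (hypothetical) probe config exercising the manual `group` path in
# probe_to_graphsrv() above. The keys mirror what the code reads; the values
# are illustrative only:
#
#     probes:
#       - name: latency
#         type: fping
#         group: fping.dc1      # parsed as "<source>.<group>"
#         group_field: host
#         host: 10.0.0.1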
class VodkaSchema(PluginConfigSchema):
"""
    Define the config schema for the vodka plugin, including its defaults.
"""
data = confu.schema.List(item=vaping.config.MixedDict())
apps = confu.schema.Dict(item=vaping.config.MixedDict())
plugins = confu.schema.List(item=vaping.config.MixedDict())
@vaping.plugin.register("vodka")
class VodkaPlugin(vaping.plugins.EmitBase):
"""
Plugin that emits to vodka data
"""
# starting vodka automatically when vaping is spinning
# up all the plugins causes some inconsistent behaviour
# in daemon mode, so we allow it to lazy start for now
#
# TODO: might need to revisit later
lazy_start = True
# Define config schema
ConfigSchema = VodkaSchema
def init(self):
self._is_started = False
def start(self):
if self._is_started:
return
# deep copy vodka plugin config and prepare to pass
# to vodka as it's own copy with type and name keys
# removed
vodka_config = copy.deepcopy(self.config)
if "name" in vodka_config:
del vodka_config["name"]
if "type" in vodka_config:
del vodka_config["type"]
self._is_started = True
vodka.run(vodka_config, self.vaping.config)
if graphsrv:
# if graphsrv is installed proceed to generate
# target configurations for it from probe config
for node in self.vaping.config.get("probes", []):
probe = vaping.plugin.get_probe(node, self.vaping)
probe_to_graphsrv(probe)
def emit(self, message):
if not self._is_started:
self.start()
vodka.data.handle(
message.get("type"), message, data_id=message.get("source"), caller=self
)
| 20c/vaping | src/vaping/plugins/vodka.py | Python | apache-2.0 | 3,075 | 0.000325 |
# urllib3/util.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from base64 import b64encode
from collections import namedtuple
from socket import error as SocketError
from hashlib import md5, sha1
from binascii import hexlify, unhexlify
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
try: # Test for SSL features
SSLContext = None
HAS_SNI = False
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
from .packages import six
from .exceptions import LocationParseError, SSLError
class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])):
"""
Datastructure for representing an HTTP URL. Used as a return value for
:func:`parse_url`.
"""
    __slots__ = ()
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None):
return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment)
@property
def hostname(self):
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self):
"""Absolute path including the query string."""
uri = self.path or '/'
if self.query is not None:
uri += '?' + self.query
return uri
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
If not found, then the first part is the full input string.
Example: ::
>>> split_first('foo/bar?baz', '?/=')
('foo', 'bar?baz', '/')
>>> split_first('foo/bar?baz', '123')
('foo/bar?baz', '', None)
Scales linearly with number of delims. Not ideal for large number of delims.
"""
min_idx = None
min_delim = None
for d in delims:
idx = s.find(d)
if idx < 0:
continue
if min_idx is None or idx < min_idx:
min_idx = idx
min_delim = d
if min_idx is None or min_idx < 0:
return s, '', None
return s[:min_idx], s[min_idx+1:], min_delim
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
Partly backwards-compatible with :mod:`urlparse`.
Example: ::
>>> parse_url('http://google.com/mail/')
Url(scheme='http', host='google.com', port=None, path='/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
# While this code has overlap with stdlib's urlparse, it is much
# simplified for our needs and less annoying.
    # Additionally, this implementation does silly things to be optimal
# on CPython.
scheme = None
auth = None
host = None
port = None
path = None
fragment = None
query = None
# Scheme
if '://' in url:
scheme, url = url.split('://', 1)
# Find the earliest Authority Terminator
# (http://tools.ietf.org/html/rfc3986#section-3.2)
url, path_, delim = split_first(url, ['/', '?', '#'])
if delim:
# Reassemble the path
path = delim + path_
# Auth
if '@' in url:
auth, url = url.split('@', 1)
# IPv6
if url and url[0] == '[':
host, url = url[1:].split(']', 1)
# Port
if ':' in url:
_host, port = url.split(':', 1)
if not host:
host = _host
if not port.isdigit():
raise LocationParseError("Failed to parse: %s" % url)
port = int(port)
elif not host and url:
host = url
if not path:
return Url(scheme, auth, host, port, path, query, fragment)
# Fragment
if '#' in path:
path, fragment = path.split('#', 1)
# Query
if '?' in path:
path, query = path.split('?', 1)
return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
"""
p = parse_url(url)
return p.scheme or 'http', p.hostname, p.port
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
basic_auth=None):
"""
Shortcuts for generating request headers.
:param keep_alive:
If ``True``, adds 'connection: keep-alive' header.
:param accept_encoding:
Can be a boolean, list, or string.
``True`` translates to 'gzip,deflate'.
List will get joined by comma.
String will be used as provided.
:param user_agent:
String representing the user-agent you want, such as
"python-urllib3/0.6"
:param basic_auth:
Colon-separated username:password string for 'authorization: basic ...'
auth header.
Example: ::
>>> make_headers(keep_alive=True, user_agent="Batman/1.0")
{'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
>>> make_headers(accept_encoding=True)
{'accept-encoding': 'gzip,deflate'}
"""
headers = {}
if accept_encoding:
if isinstance(accept_encoding, str):
pass
elif isinstance(accept_encoding, list):
accept_encoding = ','.join(accept_encoding)
else:
accept_encoding = 'gzip,deflate'
headers['accept-encoding'] = accept_encoding
if user_agent:
headers['user-agent'] = user_agent
if keep_alive:
headers['connection'] = 'keep-alive'
if basic_auth:
headers['authorization'] = 'Basic ' + \
b64encode(six.b(basic_auth)).decode('utf-8')
return headers
def is_connection_dropped(conn): # Platform-specific
"""
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`httplib.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
sock = getattr(conn, 'sock', False)
if not sock: # Platform-specific: AppEngine
return False
if not poll:
if not select: # Platform-specific: AppEngine
return False
try:
return select([sock], [], [], 0.0)[0]
except SocketError:
return True
# This version is better on platforms that support it.
p = poll()
p.register(sock, POLLIN)
for (fno, ev) in p.poll(0.0):
if fno == sock.fileno():
# Either data is buffered (bad), or the connection is dropped.
return True
def resolve_cert_reqs(candidate):
"""
Resolves the argument to a numeric constant, which can be passed to
the wrap_socket function/method from the ssl module.
Defaults to :data:`ssl.CERT_NONE`.
If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbreviation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
If it's neither `None` nor a string we assume it is already the numeric
constant which can directly be passed to wrap_socket.
"""
if candidate is None:
return CERT_NONE
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'CERT_' + candidate)
return res
return candidate
def resolve_ssl_version(candidate):
"""
like resolve_cert_reqs
"""
if candidate is None:
return PROTOCOL_SSLv23
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'PROTOCOL_' + candidate)
return res
return candidate
def assert_fingerprint(cert, fingerprint):
"""
Checks if given fingerprint matches the supplied certificate.
:param cert:
Certificate as bytes object.
:param fingerprint:
Fingerprint as string of hexdigits, can be interspersed by colons.
"""
# Maps the length of a digest to a possible hash function producing
# this digest.
hashfunc_map = {
16: md5,
20: sha1
}
fingerprint = fingerprint.replace(':', '').lower()
digest_length, rest = divmod(len(fingerprint), 2)
if rest or digest_length not in hashfunc_map:
raise SSLError('Fingerprint is of invalid length.')
# We need encode() here for py32; works on py2 and p33.
fingerprint_bytes = unhexlify(fingerprint.encode())
hashfunc = hashfunc_map[digest_length]
cert_digest = hashfunc(cert).digest()
if not cert_digest == fingerprint_bytes:
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
.format(hexlify(fingerprint_bytes),
hexlify(cert_digest)))
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
if hasattr(obj, 'fp'):
# Object is a container for another file-like object that gets released
# on exhaustion (e.g. HTTPResponse)
return obj.fp is None
return obj.closed
if SSLContext is not None: # Python 3.2+
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
"""
All arguments except `server_hostname` have the same meaning as for
:func:`ssl.wrap_socket`
:param server_hostname:
Hostname of the expected certificate
"""
context = SSLContext(ssl_version)
context.verify_mode = cert_reqs
if ca_certs:
try:
context.load_verify_locations(ca_certs)
# Py32 raises IOError
# Py33 raises FileNotFoundError
except Exception as e: # Reraise as SSLError
raise SSLError(e)
if certfile:
# FIXME: This block needs a test.
context.load_cert_chain(certfile, keyfile)
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
return context.wrap_socket(sock, server_hostname=server_hostname)
return context.wrap_socket(sock)
else: # Python 3.1 and earlier
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
ca_certs=ca_certs, cert_reqs=cert_reqs,
ssl_version=ssl_version)
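if __name__ == '__main__':
    # Minimal self-check sketch (illustrative, not part of the library):
    # exercises the pure parsing helpers, which need no network or SSL.
    print(parse_url('http://google.com/mail/'))
    print(split_first('foo/bar?baz', '?/='))  # -> ('foo', 'bar?baz', '/')
    print(make_headers(keep_alive=True, accept_encoding=True))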
| mattesno1/Sick-Beard | lib/requests/packages/urllib3/util.py | Python | gpl-3.0 | 11,326 | 0.000618 |
#------------------------------------------------------------------------------
# bind_sdo.py (Section 4.4)
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# Copyright 2017, 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
from __future__ import print_function
import cx_Oracle
import db_config
con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn)
cur = con.cursor()
# Create table
cur.execute("""begin
execute immediate 'drop table testgeometry';
exception when others then
if sqlcode <> -942 then
raise;
end if;
end;""")
cur.execute("""create table testgeometry (
id number(9) not null,
geometry MDSYS.SDO_GEOMETRY not null)""")
# Create and populate Oracle objects
typeObj = con.gettype("MDSYS.SDO_GEOMETRY")
elementInfoTypeObj = con.gettype("MDSYS.SDO_ELEM_INFO_ARRAY")
ordinateTypeObj = con.gettype("MDSYS.SDO_ORDINATE_ARRAY")
obj = typeObj.newobject()
obj.SDO_GTYPE = 2003
obj.SDO_ELEM_INFO = elementInfoTypeObj.newobject()
obj.SDO_ELEM_INFO.extend([1, 1003, 3])
obj.SDO_ORDINATES = ordinateTypeObj.newobject()
obj.SDO_ORDINATES.extend([1, 1, 5, 7])
pointTypeObj = con.gettype("MDSYS.SDO_POINT_TYPE")
obj.SDO_POINT = pointTypeObj.newobject()
obj.SDO_POINT.X = 1
obj.SDO_POINT.Y = 2
obj.SDO_POINT.Z = 3
print("Created object", obj)
# Add a new row
print("Adding row to table...")
cur.execute("insert into testgeometry values (1, :objbv)", objbv = obj)
print("Row added!")
# Define a function to dump the contents of an Oracle object
def dumpobject(obj, prefix = " "):
if obj.type.iscollection:
print(prefix, "[")
for value in obj.aslist():
if isinstance(value, cx_Oracle.Object):
dumpobject(value, prefix + " ")
else:
print(prefix + " ", repr(value))
print(prefix, "]")
else:
print(prefix, "{")
for attr in obj.type.attributes:
value = getattr(obj, attr.name)
if isinstance(value, cx_Oracle.Object):
print(prefix + " " + attr.name + " :")
dumpobject(value, prefix + " ")
else:
print(prefix + " " + attr.name + " :", repr(value))
print(prefix, "}")
# Query the row
print("Querying row just inserted...")
cur.execute("select id, geometry from testgeometry")
for (id, obj) in cur:
print("Id: ", id)
dumpobject(obj)
| kawamon/hue | desktop/core/ext-py/cx_Oracle-6.4.1/samples/tutorial/solutions/bind_sdo.py | Python | apache-2.0 | 2,694 | 0.003712 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common values and methods for TensorFlow Debugger."""
import collections
import json
GRPC_URL_PREFIX = "grpc://"
# A key for a Session.run() call.
RunKey = collections.namedtuple("RunKey", ["feed_names", "fetch_names"])
def get_graph_element_name(elem):
"""Obtain the name or string representation of a graph element.
If the graph element has the attribute "name", return name. Otherwise, return
a __str__ representation of the graph element. Certain graph elements, such as
`SparseTensor`s, do not have the attribute "name".
Args:
elem: The graph element in question.
Returns:
If the attribute 'name' is available, return the name. Otherwise, return
  str(elem).
"""
return elem.name if hasattr(elem, "name") else str(elem)
def get_flattened_names(feeds_or_fetches):
"""Get a flattened list of the names in run() call feeds or fetches.
Args:
    feeds_or_fetches: Feeds or fetches of the `Session.run()` call. It may be
a Tensor, an Operation or a Variable. It may also be nested lists, tuples
or dicts. See doc of `Session.run()` for more details.
Returns:
(list of str) A flattened list of fetch names from `feeds_or_fetches`.
"""
lines = []
if isinstance(feeds_or_fetches, (list, tuple)):
for item in feeds_or_fetches:
lines.extend(get_flattened_names(item))
elif isinstance(feeds_or_fetches, dict):
for key in feeds_or_fetches:
lines.extend(get_flattened_names(feeds_or_fetches[key]))
else:
# This ought to be a Tensor, an Operation or a Variable, for which the name
# attribute should be available. (Bottom-out condition of the recursion.)
lines.append(get_graph_element_name(feeds_or_fetches))
return lines
def get_run_key(feed_dict, fetches):
"""Summarize the names of feeds and fetches as a RunKey JSON string.
Args:
feed_dict: The feed_dict given to the `Session.run()` call.
fetches: The fetches from the `Session.run()` call.
Returns:
    A JSON array consisting of two items. The first item is a flattened
    array of the names of the feeds. The second item is a flattened array of
    the names of the fetches.
"""
return json.dumps(RunKey(get_flattened_names(feed_dict),
get_flattened_names(fetches)))
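if __name__ == "__main__":
  # Illustrative sketch only (not part of the original module): plain strings
  # have no `name` attribute, so they fall through to str() and the run key
  # serializes as a two-element JSON array.
  print(get_run_key({"x:0": [1.0]}, ["y:0", "z:0"]))  # [["x:0"], ["y:0", "z:0"]]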
| tensorflow/tensorflow | tensorflow/python/debug/lib/common.py | Python | apache-2.0 | 2,967 | 0.004382 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from gtp import gtp
from Tkinter import *
from toolbox import *
from toolbox import _
def get_full_sequence_threaded(worker,current_color,deepness):
sequence=get_full_sequence(worker,current_color,deepness)
threading.current_thread().sequence=sequence
def get_full_sequence(worker,current_color,deepness):
try:
sequence=""
undos=0
for d in range(deepness):
if current_color.lower()=="b":
answer=worker.play_black()
current_color="w"
else:
answer=worker.play_white()
current_color="b"
sequence+=answer+" "
if answer=='RESIGN':
break
if answer=='PASS':
undos+=1
break
undos+=1
es=worker.get_gnugo_estimate_score()
for u in range(undos):
worker.undo()
return [sequence.strip(),es]
except Exception, e:
return e
class GnuGoAnalysis():
def run_analysis(self,current_move):
one_move=go_to_move(self.move_zero,current_move)
player_color=guess_color_to_play(self.move_zero,current_move)
gnugo=self.gnugo
log()
log("==============")
log("move",str(current_move))
final_score=gnugo.get_gnugo_estimate_score()
#linelog(final_score)
es=final_score.split()[0]
if es[0]=="B":
lbs="B%+d"%(-1*float(final_score.split()[3][:-1]))
ubs="B%+d"%(-1*float(final_score.split()[5][:-1]))
else:
ubs="W%+d"%(float(final_score.split()[3][:-1]))
lbs="W%+d"%(float(final_score.split()[5][:-1]))
node_set(one_move,"ES",es)
node_set(one_move,"UBS",ubs)
node_set(one_move,"LBS",lbs)
if player_color in ('w',"W"):
log("gnugo plays white")
top_moves=gnugo.gnugo_top_moves_white()
answer=gnugo.play_white()
else:
log("gnugo plays black")
top_moves=gnugo.gnugo_top_moves_black()
answer=gnugo.play_black()
log("====","Gnugo answer:",answer)
node_set(one_move,"CBM",answer)
log("==== Gnugo top moves")
for one_top_move in top_moves:
log("\t",one_top_move)
log()
top_moves=top_moves[:min(self.nb_variations,self.maxvariations)]
if (answer not in ["PASS","RESIGN"]):
gnugo.undo()
while len(top_moves)>0:
all_threads=[]
for worker in self.workers:
worker.need_undo=False
if len(top_moves)>0:
one_top_move=top_moves.pop(0)
if player_color in ('w',"W"):
worker.place_white(one_top_move)
one_thread=threading.Thread(target=get_full_sequence_threaded,args=(worker,'b',self.deepness))
else:
worker.place_black(one_top_move)
one_thread=threading.Thread(target=get_full_sequence_threaded,args=(worker,'w',self.deepness))
worker.need_undo=True
one_thread.one_top_move=one_top_move
one_thread.start()
all_threads.append(one_thread)
for one_thread in all_threads:
one_thread.join()
for worker in self.workers:
if worker.need_undo:
worker.undo()
for one_thread in all_threads:
if type(one_thread.sequence)!=type(["list"]):
raise GRPException(_("GnuGo thread failed:")+"\n"+str(one_thread.sequence))
one_sequence=one_thread.one_top_move+" "+one_thread.sequence[0]
es=one_thread.sequence[1]
one_sequence=one_sequence.strip()
log(">>>>>>",one_sequence)
previous_move=one_move.parent
current_color=player_color
first_move=True
for one_deep_move in one_sequence.split(' '):
if one_deep_move not in ['RESIGN','PASS']:
i,j=gtp2ij(one_deep_move)
new_child=previous_move.new_child()
node_set(new_child,current_color,(i,j))
if first_move:
first_move=False
node_set(new_child,"ES",es)
previous_move=new_child
if current_color in ('w','W'):
current_color='b'
else:
current_color='w'
else:
gnugo.undo()
#one_move.add_comment_text(additional_comments)
log("Creating the influence map")
black_influence=gnugo.get_gnugo_initial_influence_black()
black_territories_points=[]
black_influence_points=[]
white_influence=gnugo.get_gnugo_initial_influence_white()
white_territories_points=[]
white_influence_points=[]
for i in range(self.size):
for j in range(self.size):
if black_influence[i][j]==-3:
black_territories_points.append([i,j])
if white_influence[i][j]==3:
white_territories_points.append([i,j])
if black_influence[i][j]==-2:
black_influence_points.append([i,j])
if white_influence[i][j]==2:
white_influence_points.append([i,j])
if black_influence_points!=[]:
node_set(one_move,"IBM",black_influence_points) #IBM: influence black map
if black_territories_points!=[]:
node_set(one_move,"TBM",black_territories_points) #TBM: territories black map
if white_influence_points!=[]:
node_set(one_move,"IWM",white_influence_points) #IWM: influence white map
if white_territories_points!=[]:
node_set(one_move,"TWM",white_territories_points) #TWM: territories white map
return answer #returning the best move, necessary for live analysis
    def play(self,gtp_color,gtp_move): #GnuGo needs to redefine this method to apply it to all its workers
if gtp_color=='w':
self.bot.place_white(gtp_move)
for worker in self.workers:
worker.place_white(gtp_move)
else:
self.bot.place_black(gtp_move)
for worker in self.workers:
worker.place_black(gtp_move)
def undo(self):
self.bot.undo()
for worker in self.workers:
worker.undo()
def terminate_bot(self):
log("killing gnugo")
self.gnugo.close()
log("killing gnugo workers")
for w in self.workers:
w.close()
def initialize_bot(self):
self.nb_variations=4
try:
self.nb_variations=int(self.profile["variations"])
except:
pass
#grp_config.set("GnuGo", "variations",self.nb_variations)"""
self.deepness=4
try:
self.deepness=int(self.profile["deepness"])
except:
pass
#grp_config.set("GnuGo", "deepness",self.deepness)"""
gnugo=gnugo_starting_procedure(self.g,self.profile)
self.nb_workers=self.nb_variations
log("Starting all GnuGo workers")
self.workers=[]
for w in range(self.nb_workers):
log("\t Starting worker",w+1)
gnugo_worker=gnugo_starting_procedure(self.g,self.profile)
self.workers.append(gnugo_worker)
log("All workers ready")
self.gnugo=gnugo
self.time_per_move=0
return gnugo
def gnugo_starting_procedure(sgf_g,profile,silentfail=False):
return bot_starting_procedure("GnuGo","GNU Go",GnuGo_gtp,sgf_g,profile,silentfail)
class RunAnalysis(GnuGoAnalysis,RunAnalysisBase):
def __init__(self,parent,filename,move_range,intervals,variation,komi,profile="slow",existing_variations="remove_everything"):
RunAnalysisBase.__init__(self,parent,filename,move_range,intervals,variation,komi,profile,existing_variations)
class LiveAnalysis(GnuGoAnalysis,LiveAnalysisBase):
def __init__(self,g,filename,profile="slow"):
LiveAnalysisBase.__init__(self,g,filename,profile)
class GnuGo_gtp(gtp):
def get_gnugo_initial_influence_black(self):
self.write("initial_influence black influence_regions")
one_line=self.readline()
one_line=one_line.split("= ")[1].strip().replace(" "," ")
lines=[one_line]
for i in range(self.size-1):
one_line=self.readline().strip().replace(" "," ")
lines.append(one_line)
influence=[]
for i in range(self.size):
influence=[[int(s) for s in lines[i].split(" ")]]+influence
return influence
def get_gnugo_initial_influence_white(self):
self.write("initial_influence white influence_regions")
one_line=self.readline()
one_line=one_line.split("= ")[1].strip().replace(" "," ")
lines=[one_line]
for i in range(self.size-1):
one_line=self.readline().strip().replace(" "," ")
lines.append(one_line)
influence=[]
for i in range(self.size):
influence=[[int(s) for s in lines[i].split(" ")]]+influence
return influence
def quick_evaluation(self,color):
return variation_data_formating["ES"]%self.get_gnugo_estimate_score()
def get_gnugo_estimate_score(self):
self.write("estimate_score")
answer=self.readline().strip()
try:
return answer[2:]
except:
raise GRPException("GRPException in get_gnugo_estimate_score()")
def gnugo_top_moves_black(self):
self.write("top_moves_black")
answer=self.readline()[:-1]
try:
answer=answer.split(" ")[1:-1]
except:
raise GRPException("GRPException in get_gnugo_top_moves_black()")
answers_list=[]
for value in answer:
try:
float(value)
except:
answers_list.append(value)
return answers_list
def gnugo_top_moves_white(self):
self.write("top_moves_white")
answer=self.readline()[:-1]
try:
answer=answer.split(" ")[1:-1]
except:
raise GRPException("GRPException in get_gnugo_top_moves_white()")
answers_list=[]
for value in answer:
try:
float(value)
except:
answers_list.append(value)
return answers_list
def get_gnugo_experimental_score(self,color):
self.write("experimental_score "+color)
answer=self.readline().strip()
return answer[2:]
class GnuGoSettings(BotProfiles):
def __init__(self,parent,bot="GnuGo"):
Frame.__init__(self,parent)
self.parent=parent
self.bot=bot
self.profiles=get_bot_profiles(bot,False)
profiles_frame=self
self.listbox = Listbox(profiles_frame)
self.listbox.grid(column=10,row=10,rowspan=10)
self.update_listbox()
row=10
Label(profiles_frame,text=_("Profile")).grid(row=row,column=11,sticky=W)
self.profile = StringVar()
Entry(profiles_frame, textvariable=self.profile, width=30).grid(row=row,column=12)
row+=1
Label(profiles_frame,text=_("Command")).grid(row=row,column=11,sticky=W)
self.command = StringVar()
Entry(profiles_frame, textvariable=self.command, width=30).grid(row=row,column=12)
row+=1
Label(profiles_frame,text=_("Parameters")).grid(row=row,column=11,sticky=W)
self.parameters = StringVar()
Entry(profiles_frame, textvariable=self.parameters, width=30).grid(row=row,column=12)
row+=1
Label(profiles_frame,text=_("Maximum number of variations")).grid(row=row,column=11,sticky=W)
self.variations = StringVar()
Entry(profiles_frame, textvariable=self.variations, width=30).grid(row=row,column=12)
row+=1
Label(profiles_frame,text=_("Deepness for each variation")).grid(row=row,column=11,sticky=W)
self.deepness = StringVar()
Entry(profiles_frame, textvariable=self.deepness, width=30).grid(row=row,column=12)
row+=10
buttons_frame=Frame(profiles_frame)
buttons_frame.grid(row=row,column=10,sticky=W,columnspan=3)
Button(buttons_frame, text=_("Add profile"),command=self.add_profile).grid(row=row,column=1,sticky=W)
Button(buttons_frame, text=_("Modify profile"),command=self.modify_profile).grid(row=row,column=2,sticky=W)
Button(buttons_frame, text=_("Delete profile"),command=self.delete_profile).grid(row=row,column=3,sticky=W)
Button(buttons_frame, text=_("Test"),command=lambda: self.parent.parent.test(self.bot_gtp,self.command,self.parameters)).grid(row=row,column=4,sticky=W)
self.listbox.bind("<Button-1>", lambda e: self.after(100,self.change_selection))
self.index=-1
self.bot_gtp=GnuGo_gtp
def clear_selection(self):
self.index=-1
self.profile.set("")
self.command.set("")
self.parameters.set("")
self.variations.set("")
self.deepness.set("")
def change_selection(self):
try:
index=int(self.listbox.curselection()[0])
self.index=index
except:
log("No selection")
self.clear_selection()
return
data=self.profiles[index]
self.profile.set(data["profile"])
self.command.set(data["command"])
self.parameters.set(data["parameters"])
self.variations.set(data["variations"])
self.deepness.set(data["deepness"])
def add_profile(self):
profiles=self.profiles
if self.profile.get()=="":
return
data={"bot":self.bot}
data["profile"]=self.profile.get()
data["command"]=self.command.get()
data["parameters"]=self.parameters.get()
data["variations"]=self.variations.get()
data["deepness"]=self.deepness.get()
self.empty_profiles()
profiles.append(data)
self.create_profiles()
self.clear_selection()
def modify_profile(self):
profiles=self.profiles
if self.profile.get()=="":
return
if self.index<0:
log("No selection")
return
index=self.index
profiles[index]["profile"]=self.profile.get()
profiles[index]["command"]=self.command.get()
profiles[index]["parameters"]=self.parameters.get()
profiles[index]["variations"]=self.variations.get()
profiles[index]["deepness"]=self.deepness.get()
self.empty_profiles()
self.create_profiles()
self.clear_selection()
class GnuGoOpenMove(BotOpenMove):
def __init__(self,sgf_g,profile):
BotOpenMove.__init__(self,sgf_g,profile)
self.name='Gnugo'
self.my_starting_procedure=gnugo_starting_procedure
GnuGo={}
GnuGo['name']="GnuGo"
GnuGo['gtp_name']="GNU Go"
GnuGo['analysis']=GnuGoAnalysis
GnuGo['openmove']=GnuGoOpenMove
GnuGo['settings']=GnuGoSettings
GnuGo['gtp']=GnuGo_gtp
GnuGo['liveanalysis']=LiveAnalysis
GnuGo['runanalysis']=RunAnalysis
GnuGo['starting']=gnugo_starting_procedure
if __name__ == "__main__":
main(GnuGo)
| pnprog/goreviewpartner | gnugo_analysis.py | Python | gpl-3.0 | 13,115 | 0.060465 |
import signal
import subprocess
import sys
from pathlib import Path
from unittest import TestCase, skipIf
from django.test import tag
from inloop.testrunner.runner import DockerTestRunner, collect_files
BASE_DIR = Path(__file__).resolve().parent
DATA_DIR = str(BASE_DIR.joinpath("data"))
class CollectorTest(TestCase):
def test_subdirs_and_large_files_are_not_collected(self):
contents, ignored_names = collect_files(DATA_DIR, filesize_limit=300)
self.assertEqual(contents.keys(), {"empty1.txt", "README.md"})
self.assertEqual(ignored_names, {"larger_than_300_bytes.txt"})
def test_subdirs_are_not_collected(self):
contents, ignored_names = collect_files(DATA_DIR, filesize_limit=1000)
self.assertEqual(contents.keys(), {"empty1.txt", "README.md", "larger_than_300_bytes.txt"})
self.assertFalse(ignored_names)
def test_collected_contents_are_correct(self):
contents, _ = collect_files(DATA_DIR, filesize_limit=300)
self.assertEqual(contents["empty1.txt"], "")
self.assertEqual(contents["README.md"], "This is a test harness for collect_files().\n")
@tag("slow", "needs-docker")
class DockerTestRunnerIntegrationTest(TestCase):
"""
    Each of the following tests uses a *real* Docker container; there is
no monkey patching (aka mocking) involved.
The Docker image required for the tests uses a simple trick to allow
submitting arbitrary test commands to the container using the task_name
parameter of DockerTestRunner.check_task(). This makes it really
easy to simulate the behaviors of a real tester image.
"""
OPTIONS = {
"image": "inloop-integration-test",
"timeout": 1.5,
}
def setUp(self):
self.runner = DockerTestRunner(self.OPTIONS)
def test_selftest(self):
"""Test if our test image works."""
rc = subprocess.call(["docker", "run", "--rm", self.OPTIONS["image"], "exit 42"])
self.assertEqual(42, rc)
def test_outputs(self):
"""Test if we receive stdout, stderr and exit code."""
result = self.runner.check_task("echo -n OUT; echo -n ERR >&2; exit 42", DATA_DIR)
self.assertEqual(result.rc, 42)
self.assertEqual(result.stdout, "OUT")
self.assertEqual(result.stderr, "ERR")
self.assertGreaterEqual(result.duration, 0.0)
@skipIf(sys.platform == "darwin", reason="Docker Desktop issues")
def test_kill_on_timeout(self):
"""Test if the container gets killed after the timeout."""
result = self.runner.check_task("sleep 10", DATA_DIR)
self.assertEqual(result.rc, signal.SIGKILL)
self.assertGreaterEqual(result.duration, 0.0)
self.assertLess(result.duration, 10.0)
@skipIf(sys.platform == "darwin", reason="Docker Desktop issues")
def test_output_on_timeout(self):
"""Test if we receive output even if a timeout happens."""
result = self.runner.check_task("echo -n OUT; echo -n ERR >&2; sleep 10", DATA_DIR)
self.assertEqual(result.rc, signal.SIGKILL)
self.assertEqual(result.stdout, "OUT")
self.assertEqual(result.stderr, "ERR")
def test_inbound_mountpoint(self):
"""Test if the input mount point works correctly."""
result = self.runner.check_task("cat /checker/input/README.md", DATA_DIR)
self.assertEqual("This is a test harness for collect_files().\n", result.stdout)
self.assertEqual(result.rc, 0)
def test_scratch_area(self):
"""Test that we can write the scratch area."""
result = self.runner.check_task("touch /checker/scratch/test_file", DATA_DIR)
self.assertEqual(result.rc, 0)
def test_inbound_mountpoint_ro(self):
"""Test if the input is mounted read-only."""
result = self.runner.check_task("touch /checker/input/test_file", DATA_DIR)
self.assertNotEqual(result.rc, 0)
def test_storage_exists(self):
"""Test if the storage directory exists."""
result = self.runner.check_task("test -d /checker/output/storage", DATA_DIR)
self.assertEqual(result.rc, 0)
def test_output_filedict(self):
"""Test if we can create a file which appears in the files dictionary."""
result = self.runner.check_task("echo -n FOO >/checker/output/storage/bar", DATA_DIR)
self.assertEqual(result.rc, 0)
self.assertEqual("FOO", result.files["bar"])
def test_container_unprivileged(self):
"""Test if we execute commands as unprivileged user."""
result = self.runner.check_task("id -un", DATA_DIR)
self.assertEqual(result.rc, 0)
self.assertEqual(result.stdout.strip(), "nobody")
def test_maximum_file_size(self):
"""Test limits of the scratch file system."""
result = self.runner.check_task(
"dd if=/dev/zero of=/checker/scratch/largefile bs=1M count=100", DATA_DIR
)
self.assertNotEqual(result.rc, 0)
def test_scratch_mount_options(self):
"""Verify if the tmpfs is mounted correctly."""
result = self.runner.check_task("mount | grep 'tmpfs on /checker/scratch'", DATA_DIR)
# the default size=32m is expanded to kilobytes
self.assertIn("size=32768k", result.stdout)
class DockerTestRunnerTest(TestCase):
def setUp(self):
self.runner = DockerTestRunner(
{
"image": "image-not-used",
"output_limit": 10,
}
)
def test_constructor_requires_configkey(self):
with self.assertRaises(ValueError):
DockerTestRunner({})
# TEST 1: good utf-8 sequence
def test_clean_stream_with_short_valid_utf8(self):
sample_stream = "abcöüä".encode()
cleaned = self.runner.clean_stream(sample_stream)
self.assertEqual(cleaned, "abcöüä")
# TEST 2: bogus utf-8 sequence
def test_clean_stream_with_short_invalid_utf8(self):
sample_stream = "abcöüä".encode()
# cut off the right half of the utf8 char at the end ('ä'), making it invalid
cleaned = self.runner.clean_stream(sample_stream[:-1])
self.assertEqual(len(cleaned), 6)
self.assertIn("abcöü", cleaned)
# TEST 3: good utf-8 sequence, too long
def test_clean_stream_with_too_long_valid_utf8(self):
sample_stream = ("a" * 11).encode()
cleaned = self.runner.clean_stream(sample_stream)
self.assertNotIn("a" * 11, cleaned)
self.assertIn("a" * 10, cleaned)
self.assertIn("output truncated", cleaned)
# TEST 4: too long utf-8 sequence, utf-8 composite at cut position
def test_clean_stream_with_utf8_composite_at_cut_position(self):
sample_stream = "".join(["a", "ä" * 5]).encode()
cleaned = self.runner.clean_stream(sample_stream)
self.assertNotIn("ä" * 5, cleaned)
self.assertIn("aääää", cleaned)
self.assertIn("output truncated", cleaned)
| st-tu-dresden/inloop | tests/testrunner/tests.py | Python | gpl-3.0 | 6,974 | 0.002156 |
# Copyright (C) 2018 Red Hat, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import io
import fixtures
import mock
from nova import conf
from nova.tests import fixtures as nova_fixtures
from nova.tests.fixtures import libvirt as fakelibvirt
from nova.tests.functional import integrated_helpers
CONF = conf.CONF
class ServersTestBase(integrated_helpers._IntegratedTestBase):
"""A libvirt-specific variant of the integrated test base."""
ADDITIONAL_FILTERS = []
def setUp(self):
self.flags(instances_path=self.useFixture(fixtures.TempDir()).path)
self.computes = {}
self.compute_rp_uuids = {}
super(ServersTestBase, self).setUp()
self.useFixture(nova_fixtures.LibvirtImageBackendFixture())
self.useFixture(nova_fixtures.LibvirtFixture())
self.useFixture(nova_fixtures.OSBrickFixture())
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.LibvirtDriver._create_image',
return_value=(False, False)))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.LibvirtDriver._get_local_gb_info',
return_value={'total': 128, 'used': 44, 'free': 84}))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.driver.libvirt_utils.is_valid_hostname',
return_value=True))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.driver.libvirt_utils.file_open',
side_effect=lambda *a, **k: io.BytesIO(b'')))
self.useFixture(fixtures.MockPatch(
'nova.privsep.utils.supports_direct_io',
return_value=True))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.host.Host.get_online_cpus',
return_value=set(range(16))))
# Mock the 'get_connection' function, as we're going to need to provide
# custom capabilities for each test
_p = mock.patch('nova.virt.libvirt.host.Host.get_connection')
self.mock_conn = _p.start()
self.addCleanup(_p.stop)
def _setup_compute_service(self):
# NOTE(stephenfin): We don't start the compute service here as we wish
# to configure the host capabilities first. We instead start the
# service in the test
self.flags(compute_driver='libvirt.LibvirtDriver')
def _setup_scheduler_service(self):
enabled_filters = CONF.filter_scheduler.enabled_filters
enabled_filters += self.ADDITIONAL_FILTERS
self.flags(enabled_filters=enabled_filters, group='filter_scheduler')
return self.start_service('scheduler')
def _get_connection(
self, host_info=None, pci_info=None, mdev_info=None, vdpa_info=None,
libvirt_version=None, qemu_version=None, hostname=None,
):
if not host_info:
host_info = fakelibvirt.HostInfo(
cpu_nodes=2, cpu_sockets=1, cpu_cores=2, cpu_threads=2)
# sanity check
self.assertGreater(16, host_info.cpus,
"Host.get_online_cpus is only accounting for 16 CPUs but you're "
"requesting %d; change the mock or your test" % host_info.cpus)
libvirt_version = libvirt_version or fakelibvirt.FAKE_LIBVIRT_VERSION
qemu_version = qemu_version or fakelibvirt.FAKE_QEMU_VERSION
fake_connection = fakelibvirt.Connection(
'qemu:///system',
version=libvirt_version,
hv_version=qemu_version,
host_info=host_info,
pci_info=pci_info,
mdev_info=mdev_info,
vdpa_info=vdpa_info,
hostname=hostname)
return fake_connection
def start_compute(
self, hostname='compute1', host_info=None, pci_info=None,
mdev_info=None, vdpa_info=None, libvirt_version=None,
qemu_version=None,
):
"""Start a compute service.
The started service will be saved in self.computes, keyed by hostname.
:param hostname: A hostname.
:param host_info: A fakelibvirt.HostInfo object for the host. Defaults
to a HostInfo with 2 NUMA nodes, 2 cores per node, 2 threads per
core, and 16GB of RAM.
        :returns: The hostname of the created service, which can be used to
            look up the created service and the UUID of the associated
            resource provider.
"""
def _start_compute(hostname, host_info):
fake_connection = self._get_connection(
host_info, pci_info, mdev_info, vdpa_info, libvirt_version,
qemu_version, hostname,
)
# This is fun. Firstly we need to do a global'ish mock so we can
# actually start the service.
with mock.patch('nova.virt.libvirt.host.Host.get_connection',
return_value=fake_connection):
compute = self.start_service('compute', host=hostname)
# Once that's done, we need to tweak the compute "service" to
# make sure it returns unique objects. We do this inside the
# mock context to avoid a small window between the end of the
# context and the tweaking where get_connection would revert to
# being an autospec mock.
compute.driver._host.get_connection = lambda: fake_connection
return compute
# ensure we haven't already registered services with these hostnames
self.assertNotIn(hostname, self.computes)
self.assertNotIn(hostname, self.compute_rp_uuids)
self.computes[hostname] = _start_compute(hostname, host_info)
self.compute_rp_uuids[hostname] = self.placement.get(
'/resource_providers?name=%s' % hostname).body[
'resource_providers'][0]['uuid']
return hostname
class LibvirtMigrationMixin(object):
"""A simple mixin to facilliate successful libvirt live migrations
Requires that the test class set self.server for the specific test instnace
and self.{src,dest} to indicate the direction of the migration. For any
scenarios more complex than this they should override _migrate_stub with
their own implementation.
"""
def setUp(self):
super().setUp()
self.useFixture(fixtures.MonkeyPatch(
'nova.tests.fixtures.libvirt.Domain.migrateToURI3',
self._migrate_stub))
self.migrate_stub_ran = False
def _migrate_stub(self, domain, destination, params, flags):
self.dest.driver._host.get_connection().createXML(
params['destination_xml'],
'fake-createXML-doesnt-care-about-flags')
conn = self.src.driver._host.get_connection()
dom = conn.lookupByUUIDString(self.server['id'])
dom.complete_job()
self.migrate_stub_ran = True
class LibvirtNeutronFixture(nova_fixtures.NeutronFixture):
"""A custom variant of the stock neutron fixture with more networks.
    There are four networks available: two L2 networks (one flat and one
    VLAN), one L3 network (VXLAN), and one private VLAN network used for
    direct (SR-IOV) ports.
"""
network_1 = {
'id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6',
'status': 'ACTIVE',
'subnets': [],
'name': 'physical-network-foo',
'admin_state_up': True,
'tenant_id': nova_fixtures.NeutronFixture.tenant_id,
'provider:physical_network': 'foo',
'provider:network_type': 'flat',
'provider:segmentation_id': None,
}
network_2 = network_1.copy()
network_2.update({
'id': 'a252b8cd-2d99-4e82-9a97-ec1217c496f5',
'name': 'physical-network-bar',
'provider:physical_network': 'bar',
'provider:network_type': 'vlan',
'provider:segmentation_id': 123,
})
network_3 = network_1.copy()
network_3.update({
'id': '877a79cc-295b-4b80-9606-092bf132931e',
'name': 'tunneled-network',
'provider:physical_network': None,
'provider:network_type': 'vxlan',
'provider:segmentation_id': 69,
})
network_4 = network_1.copy()
network_4.update({
'id': '1b70879f-fd00-411e-8ea9-143e7820e61d',
'name': 'private-network',
'shared': False,
'provider:physical_network': 'physnet4',
"provider:network_type": "vlan",
'provider:segmentation_id': 42,
})
subnet_1 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_1.update({
'name': 'physical-subnet-foo',
})
subnet_2 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_2.update({
'id': 'b4c13749-c002-47ed-bf42-8b1d44fa9ff2',
'name': 'physical-subnet-bar',
'network_id': network_2['id'],
})
subnet_3 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_3.update({
'id': '4dacb20b-917f-4275-aa75-825894553442',
'name': 'tunneled-subnet',
'network_id': network_3['id'],
})
subnet_4 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_4.update({
'id': '7cb343ec-6637-494c-89a1-8890eab7788e',
'name': 'physical-subnet-bar',
'network_id': network_4['id'],
})
network_1['subnets'] = [subnet_1]
network_2['subnets'] = [subnet_2]
network_3['subnets'] = [subnet_3]
network_4['subnets'] = [subnet_4]
network_1_port_2 = {
'id': 'f32582b5-8694-4be8-9a52-c5732f601c9d',
'network_id': network_1['id'],
'status': 'ACTIVE',
'mac_address': '71:ce:c7:8b:cd:dc',
'fixed_ips': [
{
'ip_address': '192.168.1.10',
'subnet_id': subnet_1['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_1_port_3 = {
'id': '9c7580a0-8b01-41f3-ba07-a114709a4b74',
'network_id': network_1['id'],
'status': 'ACTIVE',
'mac_address': '71:ce:c7:2b:cd:dc',
'fixed_ips': [
{
'ip_address': '192.168.1.11',
'subnet_id': subnet_1['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_2_port_1 = {
'id': '67d36444-6353-40f5-9e92-59346cf0dfda',
'network_id': network_2['id'],
'status': 'ACTIVE',
'mac_address': 'd2:0b:fd:d7:89:9b',
'fixed_ips': [
{
'ip_address': '192.168.1.6',
'subnet_id': subnet_2['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_3_port_1 = {
'id': '4bfa1dc4-4354-4840-b0b4-f06196fa1344',
'network_id': network_3['id'],
'status': 'ACTIVE',
'mac_address': 'd2:0b:fd:99:89:9b',
'fixed_ips': [
{
'ip_address': '192.168.2.6',
'subnet_id': subnet_3['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_4_port_1 = {
'id': 'b4cd0b93-2ac8-40a7-9fa4-2cd680ccdf3e',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ee',
'fixed_ips': [
{
'ip_address': '192.168.4.6',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
network_4_port_2 = {
'id': '4a0e3b05-4704-4adb-bfb1-f31f0e4d1bdc',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ef',
'fixed_ips': [
{
'ip_address': '192.168.4.7',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
network_4_port_3 = {
'id': 'fb2de1a1-d096-41be-9dbe-43066da64804',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ff',
'fixed_ips': [
{
'ip_address': '192.168.4.8',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
def __init__(self, test):
super(LibvirtNeutronFixture, self).__init__(test)
self._networks = {
self.network_1['id']: self.network_1,
self.network_2['id']: self.network_2,
self.network_3['id']: self.network_3,
self.network_4['id']: self.network_4,
}
self._net1_ports = [self.network_1_port_2, self.network_1_port_3]
def create_port(self, body=None):
network_id = body['port']['network_id']
assert network_id in self._networks, ('Network %s not in fixture' %
network_id)
if network_id == self.network_1['id']:
port = self._net1_ports.pop(0)
elif network_id == self.network_2['id']:
port = self.network_2_port_1
elif network_id == self.network_3['id']:
port = self.network_3_port_1
elif network_id == self.network_4['id']:
port = self.network_4_port_1
# this copy is here to avoid modifying class variables like
# network_2_port_1 below at the update call
port = copy.deepcopy(port)
port.update(body['port'])
# the tenant ID is normally extracted from credentials in the request
# and is not present in the body
if 'tenant_id' not in port:
port['tenant_id'] = nova_fixtures.NeutronFixture.tenant_id
# similarly, these attributes are set by neutron itself
port['admin_state_up'] = True
self._ports[port['id']] = port
# this copy is here as nova sometimes modifies the returned port
# locally and we want to avoid that nova modifies the fixture internals
return {'port': copy.deepcopy(port)}
| mahak/nova | nova/tests/functional/libvirt/base.py | Python | apache-2.0 | 14,789 | 0.000135 |
"""OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens
"""
__author__ = "W van Engen"
__date__ = "01/11/12"
__copyright__ = "(C) 2011 FOM / Nikhef"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "wvengen+oauth2@nikhef.nl"
__revision__ = "$Id$"
from base64 import b64decode
from ndg.oauth.server.lib.authenticate.authenticator_interface import AuthenticatorInterface
from ndg.oauth.server.lib.oauth.oauth_exception import OauthException
class PasswordAuthenticator(AuthenticatorInterface):
"""
Authenticator implementation that checks for a client/resource id+secret
combination, either in the HTTP Authorization header, or in the request
parameters, according to the OAuth 2 RFC, section 2.3.1
@todo implement protection against brute force attacks (MUST)
"""
def __init__(self, typ, register):
super(PasswordAuthenticator, self).__init__(typ)
self._register = register
def authenticate(self, request):
"""
Checks for id/secret pair in Authorization header, or else
POSTed request parameters.
@type request: webob.Request
@param request: HTTP request object
@rtype: str
@return: id of authenticated client/resource
Raise OauthException if authentication fails.
"""
cid = secret = None
if 'Authorization' in request.headers and request.headers['Authorization'].startswith('Basic'):
cid, secret = b64decode(request.headers['Authorization'][6:]).split(':',1)
elif 'client_id' in request.POST and 'client_secret' in request.POST:
cid = request.POST['client_id']
secret = request.POST['client_secret']
if not cid or not secret:
raise OauthException('invalid_%s' % self.typ,
'No %s password authentication supplied' % self.typ)
for authorization in self._register.register.itervalues():
if authorization.id == cid and authorization.secret == secret:
return authorization.id
raise OauthException('invalid_%s' % self.typ,
'%s access denied: %s' % (cid, self.typ))
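
# Illustrative usage (a sketch; the register object and credentials are
# hypothetical): the authenticator accepts either an HTTP Basic Authorization
# header or POSTed client_id/client_secret parameters.
#
#     import webob
#     authenticator = PasswordAuthenticator('client', client_register)
#     req = webob.Request.blank('/token', POST={'client_id': 'myclient',
#                                               'client_secret': 's3cret'})
#     client_id = authenticator.authenticate(req)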
| cedadev/ndg_oauth | ndg_oauth_server/ndg/oauth/server/lib/authenticate/password_authenticator.py | Python | bsd-3-clause | 2,193 | 0.006384 |
import sys
from setuptools import setup, find_packages
package_name = "sqlalchemy_dict"
py_version = sys.version_info[:2]
def read_version(module_name):
from re import match, S
from os.path import join, dirname
f = open(join(dirname(__file__), module_name, "__init__.py"))
return match(r".*__version__ = (\"|')(.*?)('|\")", f.read(), S).group(2)
dependencies = ["sqlalchemy"]
if py_version < (3, 5):
dependencies.append("typing")
setup(
name=package_name,
version=read_version(package_name),
author="Mahdi Ghane.g",
description=(
"sqlalchemy extension for interacting models with python dictionary."
),
long_description=open("README.rst").read(),
url="https://github.com/meyt/sqlalchemy-dict",
packages=find_packages(),
install_requires=dependencies,
license="MIT License",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
],
)
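# Typical local install for development (standard setuptools workflow, not
# specific to this package):
#
#     pip install -e .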
| meyt/sqlalchemy-dict | setup.py | Python | mit | 1,268 | 0 |
#!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
# Author: ChenLiang
# Execute system commands
import os
os.system("ls -al")
os.mkdir("pwd")
read = os.popen("df -hT").read()
# View the system path (sys.path)
import sys
print(sys.path)
# Tab completion at the command line
# For MAC:
import sys
import readline
import rlcompleter
if sys.platform == 'darwin' and sys.version_info[0] == 2:
    readline.parse_and_bind("bind ^I rl_complete")  # libedit readline on mac with python2
else:
    readline.parse_and_bind("tab: complete")  # linux and python3 on mac
# Note: if the code above does not work on a Mac, try the code below
# https://docs.python.org/2/library/rlcompleter.html
try:
import readline
except ImportError:
print("Module readline not available.")
else:
import rlcompleter
readline.parse_and_bind("tab: complete")
# For Linux:
#!/usr/bin/env python
# python startup file
import sys
import readline
import rlcompleter
import atexit
import os
# tab completion
readline.parse_and_bind('tab: complete')
# history file
histfile = os.path.join(os.environ['HOME'], '.pythonhistory')
try:
readline.read_history_file(histfile)
except IOError:
pass
atexit.register(readline.write_history_file, histfile)
del os, histfile, readline, rlcompleter
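# To have Python load the snippet above automatically in interactive sessions,
# point the PYTHONSTARTUP environment variable at this file, e.g. in ~/.bashrc
# (the path is only an example):
#
#     export PYTHONSTARTUP=~/.pythonstartup.py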
# Note: put your own modules under /usr/lib/python2.7/site-packages/ | smartczm/python-learn | Old-day01-10/s13-day1/system.py | Python | gpl-2.0 | 1,313 | 0.012573
from distutils import spawn
import mock
import pytest
import requests
from pyepm import config as c
config = c.get_default_config()
has_solc = spawn.find_executable("solc")
solc = pytest.mark.skipif(not has_solc, reason="solc compiler not found")
COW_ADDRESS = '0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826'
def is_hex(s):
try:
int(s, 16)
return True
except ValueError:
return False
def mock_json_response(status_code=200, error=None, result=None):
m = mock.MagicMock(spec=requests.Response)
m.status_code = status_code
base_json_response = {u'jsonrpc': u'2.0', u'id': u'c7c427a5-b6e9-4dbf-b218-a6f9d4f09246'}
json_response = dict(base_json_response)
if result:
json_response[u'result'] = result
elif error:
json_response[u'error'] = error
if status_code >= 400:
m.reason = 'Error Reason'
m.json.return_value = json_response
return m
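
# Illustrative usage (a sketch; the patch target and result value are
# hypothetical): tests can substitute mock_json_response() for a real
# JSON-RPC reply.
#
#     with mock.patch('requests.post',
#                     return_value=mock_json_response(result='0x01')):
#         ...  # code under test calls requests.post() and receives the mock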
| etherex/pyepm | test/helpers.py | Python | mit | 931 | 0.003222 |
"""Testing code for the tupa.features package, unit-testing only."""
import os
from collections import OrderedDict
import pytest
from ucca import textutil
from tupa.action import Actions
from tupa.features.dense_features import DenseFeatureExtractor
from tupa.features.sparse_features import SparseFeatureExtractor
from tupa.model import Model
from tupa.oracle import Oracle
from tupa.states.state import State
from .conftest import passage_files, load_passage, basename
SPARSE = "sparse"
DENSE = "dense"
VOCAB = os.path.join("test_files", "vocab", "en_core_web_lg.csv")
WORD_VECTORS = os.path.join("test_files", "vocab", "wiki.en.vec")
OMITTED = "d"
class FeatureExtractorCreator:
def __init__(self, name, indexed=False, annotated=False, vocab=None, wordvectors=None, omit=None):
self.name = name
self.indexed = indexed
self.annotated = annotated
self.vocab = vocab
self.id = vocab == "-"
self.wordvectors = wordvectors
self.omit = omit
def __str__(self):
return "-".join([self.name] + [attr for attr in ("indexed", "annotated", "vocab", "id", "wordvectors", "omit")
if getattr(self, attr)])
def __call__(self, config):
config.args.vocab = self.vocab
config.args.word_vectors = self.wordvectors
config.args.omit_features = self.omit
return SparseFeatureExtractor(omit_features=self.omit) if self.name == SPARSE else DenseFeatureExtractor(
OrderedDict((p.name, p.create_from_config()) for p in Model(None, config=config).param_defs()),
indexed=self.indexed, node_dropout=0, omit_features=self.omit)
def feature_extractors(*args, **kwargs):
return [FeatureExtractorCreator(SPARSE, *args, **kwargs), FeatureExtractorCreator(DENSE, *args, **kwargs),
FeatureExtractorCreator(DENSE, *args, indexed=True, **kwargs)]
def extract_features(feature_extractor, state, features):
values = feature_extractor.extract_features(state)
if feature_extractor.params:
for key, vs in values.items():
assert len(vs) == feature_extractor.params[key].num, key
features.append(values)
def _test_features(config, feature_extractor_creator, filename, write_features):
feature_extractor = feature_extractor_creator(config)
passage = load_passage(filename, annotate=feature_extractor_creator.annotated)
textutil.annotate(passage, as_array=True, as_extra=False, vocab=config.vocab())
config.set_format(passage.extra.get("format") or "ucca")
oracle = Oracle(passage)
state = State(passage)
actions = Actions()
for key, param in feature_extractor.params.items():
if not param.numeric:
param.dropout = 0
feature_extractor.init_param(key)
features = [feature_extractor.init_features(state)]
while True:
extract_features(feature_extractor, state, features)
action = min(oracle.get_actions(state, actions).values(), key=str)
state.transition(action)
if state.need_label:
extract_features(feature_extractor, state, features)
label, _ = oracle.get_label(state, action)
state.label_node(label)
if state.finished:
break
features = ["%s %s\n" % i for f in features if f for i in (sorted(f.items()) + [("", "")])]
compare_file = os.path.join("test_files", "features", "-".join((basename(filename), str(feature_extractor_creator)))
+ ".txt")
if write_features:
with open(compare_file, "w", encoding="utf-8") as f:
f.writelines(features)
with open(compare_file, encoding="utf-8") as f:
assert f.readlines() == features, compare_file
@pytest.mark.parametrize("feature_extractor_creator",
[f for v in (None, "-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
for f in feature_extractors(vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files(), ids=basename)
def test_features(config, feature_extractor_creator, filename, write_features):
_test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator",
[f for v in ("-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
for f in feature_extractors(annotated=True, vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files("conllu"), ids=basename)
def test_features_conllu(config, feature_extractor_creator, filename, write_features):
_test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator", [f for o in (None, OMITTED)
for f in feature_extractors(omit=o)[:-1]], ids=str)
def test_feature_templates(config, feature_extractor_creator, write_features):
config.set_format("amr")
feature_extractor = feature_extractor_creator(config)
features = ["%s\n" % i for i in feature_extractor.all_features()]
compare_file = os.path.join("test_files", "features", "templates-%s.txt" % str(feature_extractor_creator))
if write_features:
with open(compare_file, "w") as f:
f.writelines(features)
with open(compare_file) as f:
assert f.readlines() == features, compare_file
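
# To run just these tests with pytest (standard invocation; the write_features
# fixture is expected to come from the package's conftest.py):
#
#     pytest tests/test_features.py -k test_feature_templates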
| danielhers/tupa | tests/test_features.py | Python | gpl-3.0 | 5,494 | 0.003276 |
import greengraph
if __name__ == '__main__':
from matplotlib import pyplot as plt
mygraph = greengraph.Greengraph('New York','Chicago')
data = mygraph.green_between(20)
plt.plot(data)
plt.show()
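# Note: per the Greengraph API used above, 'data' is expected to be a
# sequence of green-pixel counts sampled at 20 steps between the two
# cities, so the plot shows greenness along the route.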
| padraic-padraic/MPHYSG001_CW1 | example.py | Python | gpl-2.0 | 216 | 0.00463 |
class Person:
def __init__(self):
self.name = 'jeff'
self.age = 10
p = Person()
print(p.__dict__)
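# Expected output: instance attributes assigned in __init__ live in the
# instance's __dict__, so this prints:
#
#     {'name': 'jeff', 'age': 10}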
| mikeckennedy/python-data-driven-nov9 | playground/classdict.py | Python | gpl-3.0 | 121 | 0.008264 |
"""
Copyright (c) 2017 SONATA-NFV and Paderborn University
ALL RIGHTS RESERVED.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Neither the name of the SONATA-NFV, Paderborn University
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
This work has been performed in the framework of the SONATA project,
funded by the European Commission under Grant number 671517 through
the Horizon 2020 and 5G-PPP programmes. The authors would like to
acknowledge the contributions of their colleagues of the SONATA
partner consortium (www.sonata-nfv.eu).
"""
from flask_restful import Resource
from flask import request, Response
from emuvim.api.openstack.openstack_dummies.base_openstack_dummy import BaseOpenstackDummy
from emuvim.api.openstack.helper import get_host
from datetime import datetime
import neutron_sfc_dummy_api as SFC
import logging
import json
import uuid
import copy
LOG = logging.getLogger("api.openstack.neutron")
class NeutronDummyApi(BaseOpenstackDummy):
def __init__(self, ip, port, compute):
super(NeutronDummyApi, self).__init__(ip, port)
self.compute = compute
self.api.add_resource(NeutronListAPIVersions, "/")
self.api.add_resource(Shutdown, "/shutdown")
self.api.add_resource(NeutronShowAPIv2Details, "/v2.0")
self.api.add_resource(NeutronListNetworks, "/v2.0/networks.json", "/v2.0/networks",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronShowNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronCreateNetwork, "/v2.0/networks.json", "/v2.0/networks",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronUpdateNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronDeleteNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronListSubnets, "/v2.0/subnets.json", "/v2.0/subnets",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronShowSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronCreateSubnet, "/v2.0/subnets.json", "/v2.0/subnets",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronUpdateSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronDeleteSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronListPorts, "/v2.0/ports.json", "/v2.0/ports",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronShowPort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronCreatePort, "/v2.0/ports.json", "/v2.0/ports",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronUpdatePort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronDeletePort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(NeutronAddFloatingIp, "/v2.0/floatingips.json", "/v2.0/floatingips",
resource_class_kwargs={'api': self})
# Service Function Chaining (SFC) API
self.api.add_resource(SFC.PortPairsCreate, "/v2.0/sfc/port_pairs.json", "/v2.0/sfc/port_pairs",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairsUpdate, "/v2.0/sfc/port_pairs/<pair_id>.json",
"/v2.0/sfc/port_pairs/<pair_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairsDelete, "/v2.0/sfc/port_pairs/<pair_id>.json",
"/v2.0/sfc/port_pairs/<pair_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairsList, "/v2.0/sfc/port_pairs.json", "/v2.0/sfc/port_pairs",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairsShow, "/v2.0/sfc/port_pairs/<pair_id>.json",
"/v2.0/sfc/port_pairs/<pair_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairGroupCreate, "/v2.0/sfc/port_pair_groups.json", "/v2.0/sfc/port_pair_groups",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairGroupUpdate, "/v2.0/sfc/port_pair_groups/<group_id>.json",
"/v2.0/sfc/port_pair_groups/<group_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairGroupDelete, "/v2.0/sfc/port_pair_groups/<group_id>.json",
"/v2.0/sfc/port_pair_groups/<group_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairGroupList, "/v2.0/sfc/port_pair_groups.json", "/v2.0/sfc/port_pair_groups",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortPairGroupShow, "/v2.0/sfc/port_pair_groups/<group_id>.json",
"/v2.0/sfc/port_pair_groups/<group_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.FlowClassifierCreate, "/v2.0/sfc/flow_classifiers.json", "/v2.0/sfc/flow_classifiers",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.FlowClassifierUpdate, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json",
"/v2.0/sfc/flow_classifiers/<flow_classifier_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.FlowClassifierDelete, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json",
"/v2.0/sfc/flow_classifiers/<flow_classifier_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.FlowClassifierList, "/v2.0/sfc/flow_classifiers.json", "/v2.0/sfc/flow_classifiers",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.FlowClassifierShow, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json",
"/v2.0/sfc/flow_classifiers/<flow_classifier_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortChainCreate, "/v2.0/sfc/port_chains.json", "/v2.0/sfc/port_chains",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortChainUpdate, "/v2.0/sfc/port_chains/<chain_id>.json",
"/v2.0/sfc/port_chains/<chain_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortChainDelete, "/v2.0/sfc/port_chains/<chain_id>.json",
"/v2.0/sfc/port_chains/<chain_id>",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortChainList, "/v2.0/sfc/port_chains.json", "/v2.0/sfc/port_chains",
resource_class_kwargs={'api': self})
self.api.add_resource(SFC.PortChainShow, "/v2.0/sfc/port_chains/<chain_id>.json",
"/v2.0/sfc/port_chains/<chain_id>",
resource_class_kwargs={'api': self})
def _start_flask(self):
LOG.info("Starting %s endpoint @ http://%s:%d" % (__name__, self.ip, self.port))
if self.app is not None:
self.app.before_request(self.dump_playbook)
self.app.run(self.ip, self.port, debug=True, use_reloader=False)
class Shutdown(Resource):
def get(self):
LOG.debug(("%s is beeing shut down") % (__name__))
func = request.environ.get('werkzeug.server.shutdown')
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
class NeutronListAPIVersions(Resource):
def get(self):
"""
Lists API versions.
:return: Returns a json with API versions.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: Neutron - List API Versions")
resp = dict()
resp['versions'] = dict()
versions = [{
"status": "CURRENT",
"id": "v2.0",
"links": [
{
"href": request.url_root + '/v2.0',
"rel": "self"
}
]
}]
resp['versions'] = versions
return Response(json.dumps(resp), status=200, mimetype='application/json')
class NeutronShowAPIv2Details(Resource):
def get(self):
"""
Returns API details.
:return: Returns a json with API details.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
resp = dict()
resp['resources'] = dict()
resp['resources'] = [{
"links": [
{
"href": request.url_root + 'v2.0/subnets',
"rel": "self"
}
],
"name": "subnet",
"collection": "subnets"
},
{
"links": [
{
"href": request.url_root + 'v2.0/networks',
"rel": "self"
}
],
"name": "network",
"collection": "networks"
},
{
"links": [
{
"href": request.url_root + 'v2.0/ports',
"rel": "self"
}
],
"name": "ports",
"collection": "ports"
}
]
return Response(json.dumps(resp), status=200, mimetype='application/json')
class NeutronListNetworks(Resource):
def __init__(self, api):
self.api = api
def get(self):
"""
        Lists all networks used in son-emu. If a 'name' or one or more 'id's are specified, only the
        network with that name, or the networks specified via id, will be listed.
:return: Returns a json response, starting with 'networks' as root node.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
try:
if request.args.get('name'):
tmp_network = NeutronShowNetwork(self.api)
return tmp_network.get_network(request.args.get('name'), True)
id_list = request.args.getlist('id')
if len(id_list) == 1:
tmp_network = NeutronShowNetwork(self.api)
return tmp_network.get_network(request.args.get('id'), True)
network_list = list()
network_dict = dict()
if len(id_list) == 0:
for net in self.api.compute.nets.values():
tmp_network_dict = net.create_network_dict()
if tmp_network_dict not in network_list:
network_list.append(tmp_network_dict)
else:
for net in self.api.compute.nets.values():
if net.id in id_list:
tmp_network_dict = net.create_network_dict()
if tmp_network_dict not in network_list:
network_list.append(tmp_network_dict)
network_dict["networks"] = network_list
return Response(json.dumps(network_dict), status=200, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: List networks exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronShowNetwork(Resource):
def __init__(self, api):
self.api = api
def get(self, network_id):
"""
Returns the network, specified via 'network_id'.
:param network_id: The unique ID string of the network.
:type network_id: ``str``
:return: Returns a json response, starting with 'network' as root node and one network description.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
return self.get_network(network_id, False)
def get_network(self, network_name_or_id, as_list):
"""
Returns one network description of the network, specified via 'network_name_or_id'.
:param network_name_or_id: The indicator string, which specifies the requested network.
:type network_name_or_id: ``str``
:param as_list: Determines if the network description should start with the root node 'network' or 'networks'.
:type as_list: ``bool``
:return: Returns a json response, with one network description.
:rtype: :class:`flask.response`
"""
try:
net = self.api.compute.find_network_by_name_or_id(network_name_or_id)
if net is None:
return Response(u'Network not found.\n', status=404, mimetype='application/json')
tmp_network_dict = net.create_network_dict()
tmp_dict = dict()
if as_list:
tmp_dict["networks"] = [tmp_network_dict]
else:
tmp_dict["network"] = tmp_network_dict
return Response(json.dumps(tmp_dict), status=200, mimetype='application/json')
except Exception as ex:
            LOG.exception("Neutron: Show network exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronCreateNetwork(Resource):
def __init__(self, api):
self.api = api
def post(self):
"""
Creates a network with the name, specified within the request under ['network']['name'].
:return: * 400, if the network already exists.
* 500, if any exception occurred while creation.
* 201, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
network_dict = json.loads(request.data)
name = network_dict['network']['name']
net = self.api.compute.find_network_by_name_or_id(name)
if net is not None:
return Response('Network already exists.\n', status=400, mimetype='application/json')
net = self.api.compute.create_network(name)
return Response(json.dumps({"network": net.create_network_dict()}), status=201, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Create network excepiton.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronUpdateNetwork(Resource):
def __init__(self, api):
self.api = api
def put(self, network_id): # TODO currently only the name will be changed
"""
Updates the existing network with the given parameters.
:param network_id: The indicator string, which specifies the requested network.
:type network_id: ``str``
:return: * 404, if the network could not be found.
* 500, if any exception occurred while updating the network.
* 200, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s PUT" % str(self.__class__.__name__))
try:
if network_id in self.api.compute.nets:
net = self.api.compute.nets[network_id]
network_dict = json.loads(request.data)
old_net = copy.copy(net)
if "status" in network_dict["network"]:
net.status = network_dict["network"]["status"]
if "subnets" in network_dict["network"]:
pass # tmp_network_dict["subnets"] = None
if "name" in network_dict["network"] and net.name != network_dict["network"]["name"]:
net.name = network_dict["network"]["name"]
if "admin_state_up" in network_dict["network"]:
pass # tmp_network_dict["admin_state_up"] = True
if "tenant_id" in network_dict["network"]:
pass # tmp_network_dict["tenant_id"] = "c1210485b2424d48804aad5d39c61b8f"
if "shared" in network_dict["network"]:
pass # tmp_network_dict["shared"] = False
return Response(json.dumps(network_dict), status=200, mimetype='application/json')
return Response('Network not found.\n', status=404, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Show networks exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronDeleteNetwork(Resource):
def __init__(self, api):
self.api = api
def delete(self, network_id):
"""
Deletes the specified network and all its subnets.
:param network_id: The indicator string, which specifies the requested network.
:type network_id: ``str``
:return: * 404, if the network or the subnet could not be removed.
* 500, if any exception occurred while deletion.
* 204, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s DELETE" % str(self.__class__.__name__))
try:
if network_id not in self.api.compute.nets:
return Response('Could not find network. (' + network_id + ')\n',
status=404, mimetype='application/json')
net = self.api.compute.nets[network_id]
delete_subnet = NeutronDeleteSubnet(self.api)
resp = delete_subnet.delete(net.subnet_id)
            if '204' not in resp.status and '404' not in resp.status:
return resp
self.api.compute.delete_network(network_id)
return Response('Network ' + str(network_id) + ' deleted.\n', status=204, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Delete network exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronListSubnets(Resource):
def __init__(self, api):
self.api = api
def get(self):
"""
        Lists all subnets used in son-emu. If a 'name' or one or more 'id's are specified, only the
        subnet with that name, or the subnets specified via id, will be listed.
:return: Returns a json response, starting with 'subnets' as root node.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
try:
if request.args.get('name'):
show_subnet = NeutronShowSubnet(self.api)
return show_subnet.get_subnet(request.args.get('name'), True)
id_list = request.args.getlist('id')
if len(id_list) == 1:
show_subnet = NeutronShowSubnet(self.api)
return show_subnet.get_subnet(id_list[0], True)
subnet_list = list()
subnet_dict = dict()
if len(id_list) == 0:
for net in self.api.compute.nets.values():
if net.subnet_id is not None:
tmp_subnet_dict = net.create_subnet_dict()
subnet_list.append(tmp_subnet_dict)
else:
for net in self.api.compute.nets.values():
if net.subnet_id in id_list:
tmp_subnet_dict = net.create_subnet_dict()
subnet_list.append(tmp_subnet_dict)
subnet_dict["subnets"] = subnet_list
return Response(json.dumps(subnet_dict), status=200, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: List subnets exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronShowSubnet(Resource):
def __init__(self, api):
self.api = api
def get(self, subnet_id):
"""
Returns the subnet, specified via 'subnet_id'.
:param subnet_id: The unique ID string of the subnet.
:type subnet_id: ``str``
:return: Returns a json response, starting with 'subnet' as root node and one subnet description.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
return self.get_subnet(subnet_id, False)
def get_subnet(self, subnet_name_or_id, as_list):
"""
Returns one subnet description of the subnet, specified via 'subnet_name_or_id'.
:param subnet_name_or_id: The indicator string, which specifies the requested subnet.
:type subnet_name_or_id: ``str``
:param as_list: Determines if the subnet description should start with the root node 'subnet' or 'subnets'.
:type as_list: ``bool``
:return: Returns a json response, with one subnet description.
:rtype: :class:`flask.response`
"""
try:
for net in self.api.compute.nets.values():
if net.subnet_id == subnet_name_or_id or net.subnet_name == subnet_name_or_id:
tmp_subnet_dict = net.create_subnet_dict()
tmp_dict = dict()
if as_list:
tmp_dict["subnets"] = [tmp_subnet_dict]
else:
tmp_dict["subnet"] = tmp_subnet_dict
return Response(json.dumps(tmp_dict), status=200, mimetype='application/json')
return Response('Subnet not found. (' + subnet_name_or_id + ')\n', status=404, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Show subnet exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronCreateSubnet(Resource):
def __init__(self, api):
self.api = api
def post(self):
"""
Creates a subnet with the name, specified within the request under ['subnet']['name'].
:return: * 400, if the 'CIDR' format is wrong or it does not exist.
* 404, if the network was not found.
* 409, if the corresponding network already has one subnet.
* 500, if any exception occurred while creation and
* 201, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
subnet_dict = json.loads(request.data)
net = self.api.compute.find_network_by_name_or_id(subnet_dict['subnet']['network_id'])
if net is None:
return Response('Could not find network.\n', status=404, mimetype='application/json')
net.subnet_name = subnet_dict["subnet"].get('name', str(net.name) + '-sub')
if net.subnet_id is not None:
return Response('Only one subnet per network is supported\n', status=409, mimetype='application/json')
if "id" in subnet_dict["subnet"]:
net.subnet_id = subnet_dict["subnet"]["id"]
else:
net.subnet_id = str(uuid.uuid4())
import emuvim.api.openstack.ip_handler as IP
net.set_cidr(IP.get_new_cidr(net.subnet_id))
if "tenant_id" in subnet_dict["subnet"]:
pass
if "allocation_pools" in subnet_dict["subnet"]:
pass
if "gateway_ip" in subnet_dict["subnet"]:
net.gateway_ip = subnet_dict["subnet"]["gateway_ip"]
if "ip_version" in subnet_dict["subnet"]:
pass
if "enable_dhcp" in subnet_dict["subnet"]:
pass
return Response(json.dumps({'subnet': net.create_subnet_dict()}), status=201, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Create network excepiton.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronUpdateSubnet(Resource):
def __init__(self, api):
self.api = api
def put(self, subnet_id):
"""
Updates the existing subnet with the given parameters.
:param subnet_id: The indicator string, which specifies the requested subnet.
:type subnet_id: ``str``
        :return: * 404, if the subnet could not be found.
                 * 500, if any exception occurred while updating the subnet.
* 200, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s PUT" % str(self.__class__.__name__))
try:
for net in self.api.compute.nets.values():
if net.subnet_id == subnet_id:
subnet_dict = json.loads(request.data)
if "name" in subnet_dict["subnet"]:
net.subnet_name = subnet_dict["subnet"]["name"]
if "network_id" in subnet_dict["subnet"]:
net.id = subnet_dict["subnet"]["network_id"]
if "tenant_id" in subnet_dict["subnet"]:
pass
if "allocation_pools" in subnet_dict["subnet"]:
pass
if "gateway_ip" in subnet_dict["subnet"]:
net.gateway_ip = subnet_dict["subnet"]["gateway_ip"]
if "ip_version" in subnet_dict["subnet"]:
pass
if "cidr" in subnet_dict["subnet"]:
net.set_cidr(subnet_dict["subnet"]["cidr"])
if "id" in subnet_dict["subnet"]:
net.subnet_id = subnet_dict["subnet"]["id"]
if "enable_dhcp" in subnet_dict["subnet"]:
pass
net.subnet_update_time = str(datetime.now())
tmp_dict = {'subnet': net.create_subnet_dict()}
return Response(json.dumps(tmp_dict), status=200, mimetype='application/json')
            return Response('Subnet not found.\n', status=404, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Show networks exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronDeleteSubnet(Resource):
def __init__(self, api):
self.api = api
def delete(self, subnet_id):
"""
Deletes the specified subnet.
:param subnet_id: The indicator string, which specifies the requested subnet.
:type subnet_id: ``str``
:return: * 404, if the subnet could not be removed.
* 500, if any exception occurred while deletion.
* 204, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s DELETE" % str(self.__class__.__name__))
try:
for net in self.api.compute.nets.values():
if net.subnet_id == subnet_id:
for server in self.api.compute.computeUnits.values():
for port_name in server.port_names:
port = self.api.compute.find_port_by_name_or_id(port_name)
if port.net_name == net.name:
port.ip_address = None
self.api.compute.dc.net.removeLink(
link=None,
node1=self.api.compute.dc.containers[server.name],
node2=self.api.compute.dc.switch)
port.net_name = None
net.delete_subnet()
return Response('Subnet ' + str(subnet_id) + ' deleted.\n',
status=204, mimetype='application/json')
return Response('Could not find subnet.', status=404, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Delete subnet exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronListPorts(Resource):
def __init__(self, api):
self.api = api
def get(self):
"""
        Lists all ports used in son-emu. If a 'name' or one or more 'id's are specified, only the
        port with that name, or the ports specified via id, will be listed.
:return: Returns a json response, starting with 'ports' as root node.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
try:
if request.args.get('name'):
show_port = NeutronShowPort(self.api)
return show_port.get_port(request.args.get('name'), True)
id_list = request.args.getlist('id')
if len(id_list) == 1:
show_port = NeutronShowPort(self.api)
return show_port.get_port(request.args.get('id'), True)
port_list = list()
port_dict = dict()
if len(id_list) == 0:
for port in self.api.compute.ports.values():
tmp_port_dict = port.create_port_dict(self.api.compute)
port_list.append(tmp_port_dict)
else:
for port in self.api.compute.ports.values():
if port.id in id_list:
tmp_port_dict = port.create_port_dict(self.api.compute)
port_list.append(tmp_port_dict)
port_dict["ports"] = port_list
return Response(json.dumps(port_dict), status=200, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: List ports exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronShowPort(Resource):
def __init__(self, api):
self.api = api
def get(self, port_id):
"""
Returns the port, specified via 'port_id'.
        :param port_id: The unique ID string of the port.
        :type port_id: ``str``
        :return: Returns a json response, starting with 'port' as root node and one port description.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
return self.get_port(port_id, False)
def get_port(self, port_name_or_id, as_list):
"""
        Returns one port description of the port, specified via 'port_name_or_id'.
:param port_name_or_id: The indicator string, which specifies the requested port.
:type port_name_or_id: ``str``
:param as_list: Determines if the port description should start with the root node 'port' or 'ports'.
:type as_list: ``bool``
:return: Returns a json response, with one port description.
:rtype: :class:`flask.response`
"""
try:
port = self.api.compute.find_port_by_name_or_id(port_name_or_id)
if port is None:
return Response('Port not found. (' + port_name_or_id + ')\n', status=404, mimetype='application/json')
tmp_port_dict = port.create_port_dict(self.api.compute)
tmp_dict = dict()
if as_list:
tmp_dict["ports"] = [tmp_port_dict]
else:
tmp_dict["port"] = tmp_port_dict
return Response(json.dumps(tmp_dict), status=200, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Show port exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronCreatePort(Resource):
def __init__(self, api):
self.api = api
def post(self):
"""
Creates a port with the name, specified within the request under ['port']['name'].
:return: * 404, if the network could not be found.
* 500, if any exception occurred while creation and
* 201, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
port_dict = json.loads(request.data)
net_id = port_dict['port']['network_id']
if net_id not in self.api.compute.nets:
return Response('Could not find network.\n', status=404, mimetype='application/json')
net = self.api.compute.nets[net_id]
if 'name' in port_dict['port']:
name = port_dict['port']['name']
else:
num_ports = len(self.api.compute.ports)
name = "port:cp%s:man:%s" % (num_ports, str(uuid.uuid4()))
if self.api.compute.find_port_by_name_or_id(name):
return Response("Port with name %s already exists.\n" % name, status=500, mimetype='application/json')
port = self.api.compute.create_port(name)
port.net_name = net.name
port.ip_address = net.get_new_ip_address(name)
if "admin_state_up" in port_dict["port"]:
pass
if "device_id" in port_dict["port"]:
pass
if "device_owner" in port_dict["port"]:
pass
if "fixed_ips" in port_dict["port"]:
pass
if "mac_address" in port_dict["port"]:
port.mac_address = port_dict["port"]["mac_address"]
if "status" in port_dict["port"]:
pass
if "tenant_id" in port_dict["port"]:
pass
# add the port to a stack if the specified network is a stack network
for stack in self.api.compute.stacks.values():
for net in stack.nets.values():
if net.id == net_id:
stack.ports[name] = port
return Response(json.dumps({'port': port.create_port_dict(self.api.compute)}), status=201,
mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Show port exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronUpdatePort(Resource):
def __init__(self, api):
self.api = api
def put(self, port_id):
"""
Updates the existing port with the given parameters.
        :param port_id: The indicator string, which specifies the requested port.
        :type port_id: ``str``
        :return: * 404, if the port could not be found.
                 * 500, if any exception occurred while updating the port.
* 200, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s PUT" % str(self.__class__.__name__))
try:
port_dict = json.loads(request.data)
port = self.api.compute.find_port_by_name_or_id(port_id)
if port is None:
return Response("Port with id %s does not exists.\n" % port_id, status=404, mimetype='application/json')
old_port = copy.copy(port)
stack = None
for s in self.api.compute.stacks.values():
                for p in s.ports.values():
                    if p.id == port_id:
                        stack = s
if "admin_state_up" in port_dict["port"]:
pass
if "device_id" in port_dict["port"]:
pass
if "device_owner" in port_dict["port"]:
pass
if "fixed_ips" in port_dict["port"]:
pass
if "id" in port_dict["port"]:
port.id = port_dict["port"]["id"]
if "mac_address" in port_dict["port"]:
port.mac_address = port_dict["port"]["mac_address"]
if "name" in port_dict["port"] and port_dict["port"]["name"] != port.name:
port.set_name(port_dict["port"]["name"])
if stack is not None:
if port.net_name in stack.nets:
stack.nets[port.net_name].update_port_name_for_ip_address(port.ip_address, port.name)
stack.ports[port.name] = stack.ports[old_port.name]
del stack.ports[old_port.name]
if "network_id" in port_dict["port"]:
pass
if "status" in port_dict["port"]:
pass
if "tenant_id" in port_dict["port"]:
pass
return Response(json.dumps({'port': port.create_port_dict(self.api.compute)}), status=200,
mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Update port exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronDeletePort(Resource):
def __init__(self, api):
self.api = api
def delete(self, port_id):
"""
Deletes the specified port.
:param port_id: The indicator string, which specifies the requested port.
:type port_id: ``str``
:return: * 404, if the port could not be found.
* 500, if any exception occurred while deletion.
* 204, if everything worked out.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s DELETE" % str(self.__class__.__name__))
try:
port = self.api.compute.find_port_by_name_or_id(port_id)
if port is None:
return Response("Port with id %s does not exists.\n" % port_id, status=404)
stack = None
for s in self.api.compute.stacks.values():
for p in s.ports.values():
if p.id == port_id:
stack = s
if stack is not None:
if port.net_name in stack.nets:
stack.nets[port.net_name].withdraw_ip_address(port.ip_address)
for server in stack.servers.values():
try:
server.port_names.remove(port.name)
except ValueError:
pass
# delete the port
self.api.compute.delete_port(port.id)
return Response('Port ' + port_id + ' deleted.\n', status=204, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Delete port exception.")
return Response(ex.message, status=500, mimetype='application/json')
class NeutronAddFloatingIp(Resource):
def __init__(self, api):
self.api = api
def get(self):
"""
        A quick and dirty fake for the OSM integration. Returns a list of
        floating IPs. It has nothing to do with the setup inside the emulator,
        but it's enough to make the OSM driver happy.
@PG Sandman: Feel free to improve this and let it do something meaningful.
"""
resp = dict()
resp["floatingips"] = list()
        # create a list of floating IP definitions and return it
for i in range(100, 110):
ip = dict()
ip["router_id"] = "router_id"
ip["description"] = "hardcoded in api"
ip["created_at"] = "router_id"
ip["updated_at"] = "router_id"
ip["revision_number"] = 1
ip["tenant_id"] = "tenant_id"
ip["project_id"] = "project_id"
ip["floating_network_id"] = str(i)
ip["status"] = "ACTIVE"
ip["id"] = str(i)
ip["port_id"] = "port_id"
ip["floating_ip_address"] = "172.0.0.%d" % i
ip["fixed_ip_address"] = "10.0.0.%d" % i
resp["floatingips"].append(ip)
return Response(json.dumps(resp), status=200, mimetype='application/json')
def post(self):
"""
Adds a floating IP to neutron.
:return: Returns a floating network description.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
# Fiddle with floating_network !
req = json.loads(request.data)
network_id = req["floatingip"]["floating_network_id"]
net = self.api.compute.find_network_by_name_or_id(network_id)
if net != self.api.manage.floating_network:
return Response("You have to specify the existing floating network\n",
status=400, mimetype='application/json')
port_id = req["floatingip"].get("port_id", None)
port = self.api.compute.find_port_by_name_or_id(port_id)
if port is not None:
if port.net_name != self.api.manage.floating_network.name:
return Response("You have to specify a port in the floating network\n",
status=400, mimetype='application/json')
if port.floating_ip is not None:
return Response("We allow only one floating ip per port\n", status=400, mimetype='application/json')
else:
num_ports = len(self.api.compute.ports)
name = "port:cp%s:fl:%s" % (num_ports, str(uuid.uuid4()))
port = self.api.compute.create_port(name)
port.net_name = net.name
port.ip_address = net.get_new_ip_address(name)
port.floating_ip = port.ip_address
response = dict()
resp = response["floatingip"] = dict()
resp["floating_network_id"] = net.id
resp["status"] = "ACTIVE"
resp["id"] = net.id
resp["port_id"] = port.id
resp["floating_ip_address"] = port.floating_ip
resp["fixed_ip_address"] = port.floating_ip
return Response(json.dumps(response), status=200, mimetype='application/json')
except Exception as ex:
LOG.exception("Neutron: Create FloatingIP exception %s.", ex)
return Response(ex.message, status=500, mimetype='application/json')
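
# Illustrative client call (a sketch; the host and port depend on how
# NeutronDummyApi was instantiated -- 9696 is only Neutron's conventional
# default, not something this module enforces):
#
#     import requests
#     r = requests.get('http://127.0.0.1:9696/v2.0/networks')
#     networks = r.json()['networks']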
| stevenvanrossem/son-emu | src/emuvim/api/openstack/openstack_dummies/neutron_dummy_api.py | Python | apache-2.0 | 44,105 | 0.002993 |
'''
Functions for working with DESI mocks and fiberassignment
TODO (maybe):
This contains hardcoded hacks, especially wrt priorities and
interpretation of object types
'''
from __future__ import print_function, division
import sys, os
import numpy as np
from astropy.table import Table, Column
from fiberassign import io
from desitarget import desi_mask
import desitarget
import desispec.brick
def rdzipn2targets(infile):
"""Read rdzipn infile and return target and truth tables
"""
ra, dec, z, itype, priority, numobs = io.read_rdzipn(infile)
n = len(ra)
#- Martin's itype is 1 to n, while Bob's fiberassign is 0 to n-1
itype -= 1
    assert np.all(itype >= 0)
#- rdzipn has float32 ra, dec, but it should be float64
ra = ra.astype('float64') % 360 #- enforce 0 <= ra < 360
dec = dec.astype('float64')
#- Hardcoded in rdzipn format
# 0 : 'QSO', #- QSO-LyA
# 1 : 'QSO', #- QSO-Tracer
# 2 : 'LRG', #- LRG
# 3 : 'ELG', #- ELG
# 4 : 'STAR', #- QSO-Fake
# 5 : 'UNKNOWN', #- LRG-Fake
# 6 : 'STAR', #- StdStar
# 7 : 'SKY', #- Sky
qso_lya = (itype==0)
qso_tracer = (itype==1)
qso_fake = (itype==4)
qso = qso_lya | qso_tracer | qso_fake
lrg_real = (itype==2)
lrg_fake = (itype==5)
lrg = lrg_real | lrg_fake
elg = (itype==3)
std = (itype==6)
sky = (itype==7)
if not np.any(std):
print("WARNING: no standard stars found")
if not np.any(sky):
print("WARNING: no sky locations found")
if not np.any(~(std | sky)):
print("WARNING: no science targets found")
#- Create a DESI_TARGET mask
desi_target = np.zeros(n, dtype='i8')
desi_target[qso] |= desi_mask.QSO
desi_target[elg] |= desi_mask.ELG
desi_target[lrg] |= desi_mask.LRG
desi_target[sky] |= desi_mask.SKY
desi_target[std] |= desi_mask.STD_FSTAR
bgs_target = np.zeros(n, dtype='i8') #- TODO
mws_target = np.zeros(n, dtype='i8') #- TODO
#- True type
truetype = np.zeros(n, dtype='S10')
assert np.all(truetype == '')
truetype[qso_lya | qso_tracer] = 'QSO'
truetype[qso_fake] = 'STAR'
truetype[elg] = 'GALAXY'
truetype[lrg_real] = 'GALAXY'
truetype[lrg_fake] = 'UNKNOWN'
truetype[std] = 'STAR'
truetype[sky] = 'SKY'
assert np.all(truetype != '')
#- Misc other
targetid = np.random.randint(2**62, size=n)
### brickname = np.zeros(n, dtype='S8')
brickname = desispec.brick.brickname(ra, dec)
subpriority = np.random.uniform(0, 1, size=n)
targets = Table()
targets['TARGETID'] = targetid
targets['BRICKNAME'] = brickname
targets['RA'] = ra
targets['DEC'] = dec
targets['DESI_TARGET'] = desi_target
targets['BGS_TARGET'] = bgs_target
targets['MWS_TARGET'] = mws_target
targets['SUBPRIORITY'] = subpriority
truth = Table()
truth['TARGETID'] = targetid
truth['BRICKNAME'] = brickname
truth['RA'] = ra
truth['DEC'] = dec
truth['TRUEZ'] = z
truth['TRUETYPE'] = truetype
truth['CATEGORY'] = itype
return targets, truth
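
# Example usage (a sketch; the input and output paths are hypothetical):
#
#     targets, truth = rdzipn2targets('objects_ss_sf0.rdzipn')
#     targets.write('mock_targets.fits', format='fits')
#     truth.write('mock_truth.fits', format='fits')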
| desihub/fiberassign | old/py/mock.py | Python | bsd-3-clause | 3,161 | 0.008858 |
"""
Python script "setup.py"
by Matthew Garcia, PhD student
Dept. of Forest and Wildlife Ecology
University of Wisconsin - Madison
matt.e.garcia@gmail.com
Copyright (C) 2015-2016 by Matthew Garcia
Licensed Gnu GPL v3; see 'LICENSE_GnuGPLv3.txt' for complete terms
Send questions, bug reports, any related requests to matt.e.garcia@gmail.com
See also 'README.md', 'DISCLAIMER.txt', 'CITATION.txt', 'ACKNOWLEDGEMENTS.txt'
Treat others as you would be treated. Pay it forward. Valar dohaeris.
PURPOSE: Verifies sample data, scripts, modules, documents, auxiliary files.
Verifies availability of python dependencies used by various scripts.
Uncompresses certain large example data files
Builds directory structure for script output products.
DEPENDENCIES: all software package source dependencies are polled here
USAGE: '$ python setup.py'
"""
import os
import sys
import glob
def message(char_string):
"""
prints a string to the terminal and flushes the buffer
"""
print char_string
sys.stdout.flush()
return
txt_files = ['ACKNOWLEDGEMENTS.txt', 'CITATION.txt', 'DISCLAIMER.txt',
'LICENSE_GnuGPLv3.txt']
md_files = ['README.md']
main_dirs = ['data', 'docs', 'htcondor', 'source', 'tools']
#
scripts = ['process_NCEI_00.py', 'process_NCEI_01.py',
'process_NCEI_02a.py', 'process_NCEI_02b.py',
'process_NCEI_03_chill_d.py', 'process_NCEI_03_chill_dd.py',
'process_NCEI_03_grow_dd.py', 'process_NCEI_03_grow_dd_base0.py',
'process_NCEI_03_prcp_03d.py', 'process_NCEI_03_prcp_07d.py',
'process_NCEI_03_prcp_120d.py', 'process_NCEI_03_prcp_15d.py',
'process_NCEI_03_prcp_180d.py', 'process_NCEI_03_prcp_30d.py',
'process_NCEI_03_prcp_365d.py', 'process_NCEI_03_prcp_60d.py',
'process_NCEI_03_prcp_90d.py', 'process_NCEI_03_prcp_90d_nd0.py',
'process_NCEI_03_prcp_90d_nd10.py',
'process_NCEI_03_prcp_90d_nd25.py',
'process_NCEI_03_preprocess.py', 'process_NCEI_03_tavg_03d.py',
'process_NCEI_03_tavg_07d.py', 'process_NCEI_03_tavg_15d.py',
'process_NCEI_03_tavg_30d.py', 'process_NCEI_03_tavg_60d.py',
'process_NCEI_03_tavg_90d.py', 'process_NCEI_03_tavg_frz.py',
'process_NCEI_03_tmax_03d.py', 'process_NCEI_03_tmax_07d.py',
'process_NCEI_03_tmax_15d.py', 'process_NCEI_03_tmax_30d.py',
'process_NCEI_03_tmax_60d.py', 'process_NCEI_03_tmax_90d.py',
'process_NCEI_03_tmax_frz.py', 'process_NCEI_03_tmin_03d.py',
'process_NCEI_03_tmin_07d.py', 'process_NCEI_03_tmin_15d.py',
'process_NCEI_03_tmin_30d.py', 'process_NCEI_03_tmin_60d.py',
'process_NCEI_03_tmin_90d.py', 'process_NCEI_03_tmin_frz.py',
'process_NCEI_03_vpd_03d.py', 'process_NCEI_03_vpd_07d.py',
'process_NCEI_03_vpd_15d.py', 'process_NCEI_03_vpd_30d.py',
'process_NCEI_03_vpd_60d.py', 'process_NCEI_03_vpd_90d.py',
'process_NCEI_04a.py', 'process_NCEI_04b.py', 'process_NCEI_05.py',
'process_NCEI_06.py', 'process_NCEI_07.py', 'process_NCEI_08.py',
'process_NCEI_09.py', 'process_NCEI_10.py', 'process_NCEI_11.py',
'process_NCEI_12.py', 'process_NCEI_13.py', 'process_NCEI_14.py',
'process_NCEI_15.py']
#
modules = ['Date_Convert.py', 'Interpolation.py', 'Plots.py',
'process_NCEI_03_aux.py', 'Read_Header_Files.py', 'Stats.py',
'Teleconnections.py', 'UTM_Geo_Convert.py']
#
htcondor = ['process_NCEI_00.sh', 'process_NCEI_00.sub',
'process_NCEI_01.sh', 'process_NCEI_01.sub',
'process_NCEI_02a.sh', 'process_NCEI_02a.sub',
'process_NCEI_02b.sh', 'process_NCEI_02b.sub',
'process_NCEI_02b_dag.sub', 'process_NCEI_03_chill_d.sh',
'process_NCEI_03_chill_dd.sh', 'process_NCEI_03_dag_gen.py',
'process_NCEI_03_generic.sub', 'process_NCEI_03_grow_dd.sh',
'process_NCEI_03_grow_dd_base0.sh', 'process_NCEI_03_prcp_03d.sh',
'process_NCEI_03_prcp_07d.sh', 'process_NCEI_03_prcp_120d.sh',
'process_NCEI_03_prcp_15d.sh', 'process_NCEI_03_prcp_180d.sh',
'process_NCEI_03_prcp_30d.sh', 'process_NCEI_03_prcp_365d.sh',
'process_NCEI_03_prcp_60d.sh', 'process_NCEI_03_prcp_90d.sh',
'process_NCEI_03_prcp_90d_nd0.sh',
'process_NCEI_03_prcp_90d_nd10.sh',
'process_NCEI_03_prcp_90d_nd25.sh',
'process_NCEI_03_preprocess.sh', 'process_NCEI_03_tavg_03d.sh',
'process_NCEI_03_tavg_07d.sh', 'process_NCEI_03_tavg_15d.sh',
'process_NCEI_03_tavg_30d.sh', 'process_NCEI_03_tavg_60d.sh',
'process_NCEI_03_tavg_90d.sh', 'process_NCEI_03_tavg_frz.sh',
'process_NCEI_03_tmax_03d.sh', 'process_NCEI_03_tmax_07d.sh',
'process_NCEI_03_tmax_15d.sh', 'process_NCEI_03_tmax_30d.sh',
'process_NCEI_03_tmax_60d.sh', 'process_NCEI_03_tmax_90d.sh',
'process_NCEI_03_tmax_frz.sh', 'process_NCEI_03_tmin_03d.sh',
'process_NCEI_03_tmin_07d.sh', 'process_NCEI_03_tmin_15d.sh',
'process_NCEI_03_tmin_30d.sh', 'process_NCEI_03_tmin_60d.sh',
'process_NCEI_03_tmin_90d.sh', 'process_NCEI_03_tmin_frz.sh',
'process_NCEI_03_vpd_03d.sh', 'process_NCEI_03_vpd_07d.sh',
'process_NCEI_03_vpd_15d.sh', 'process_NCEI_03_vpd_30d.sh',
'process_NCEI_03_vpd_60d.sh', 'process_NCEI_03_vpd_90d.sh',
'process_NCEI_04a.sh', 'process_NCEI_04a.sub',
'process_NCEI_04b.sh', 'process_NCEI_04b.sub',
'process_NCEI_05.sh', 'process_NCEI_05.sub',
'process_NCEI_06.sh', 'process_NCEI_06.sub',
'process_NCEI_07.sh', 'process_NCEI_07.sub',
'process_NCEI_08.sh', 'process_NCEI_08.sub',
'process_NCEI_09.sh', 'process_NCEI_09.sub']
#
dependencies = ['os', 'sys', 'datetime', 'glob', 'numpy', 'pandas', 'h5py',
'matplotlib', 'matplotlib.pyplot', 'gdal', 'osgeo.osr',
'scipy.interpolate', 'scipy.ndimage', 'scipy.stats',
'mpl_toolkits', 'mpl_toolkits.basemap', 'pickle']
#
gz_data_files = ['EPA_L4_Ecoregions_WLS_UTM15N.bil.gz',
'NCEI_WLS_19830101-20151031.csv.gz',
'NLCD_2011_WLS_UTM15N.bil.gz']
#
data_files = ['EPA_L4_Ecoregions_WLS_polygonIDs.txt',
'EPA_L4_Ecoregions_WLS_UTM15N.bil',
'EPA_L4_Ecoregions_WLS_UTM15N.hdr',
'NCEI_WLS_19830101-20151031.csv',
'NCEP_CPC_AO_indices.csv',
'NCEP_CPC_ENSO_indices.csv',
'NCEP_CPC_NAO_indices.csv',
'NCEP_CPC_PNA_indices.csv',
'NLCD_2011_WLS_UTM15N.bil',
'NLCD_2011_WLS_UTM15N.hdr',
'NOAA_ESRL_AMO_indices.csv',
'NOAA_ESRL_PDO_indices.csv',
'NSIDC_MIFL_Superior_Ice.csv',
'Query_locations_dates_sample.csv']
#
doc_files = ['How_to_get_NCEI_GHCND_data.txt',
'NCEI_GHCND_documentation.pdf']
#
tools = ['query_NCEI_grids.py', 'orientation_maps.py']
#
add_dirs = ['analyses', 'grids', 'images']
#
analyses_dirs = ['annual_maps', 'cluster_maps', 'ecoregion_maps',
'figures', 'summary_maps']
#
os.system('rm .DS_Store')
os.system('rm */.DS_Store')
os.system('rm ._*')
os.system('rm */._*')
#
message('checking for auxiliary files that should accompany this software')
txts_present = glob.glob('*.txt')
mds_present = glob.glob('*.md')
absent = 0
for txt in txt_files:
if txt in txts_present:
message('- found auxiliary file \'%s\' as expected' % txt)
else:
message('- auxiliary file \'%s\' is absent' % txt)
absent += 1
for md in md_files:
if md in mds_present:
message('- found auxiliary file \'%s\' as expected' % md)
else:
message('- auxiliary file \'%s\' is absent' % md)
absent += 1
if absent > 0:
message('- you don\'t need them to run things, but you do need them to \
understand things')
message('- you should probably download this package again from scratch')
message('- exiting setup procedure')
sys.exit(1)
message(' ')
#
message('checking for top-level directories that should already exist')
dirs_present = [d.replace('/', '') for d in glob.glob('*/')]
absent = 0
for dirname in main_dirs:
if dirname in dirs_present:
message('- found main directory \'%s\' as expected' % dirname)
else:
message('- main directory \'%s\' is absent' % dirname)
absent += 1
if absent > 0:
message('- you should download this package again from scratch')
message('- exiting setup procedure')
sys.exit(1)
message(' ')
#
message('checking for main scripts and modules that comprise this software')
src_present = glob.glob('source/*')
absent = 0
for srcfile in scripts:
srcfile = 'source/%s' % srcfile
if srcfile in src_present:
message('- found script \'%s\' as expected' % srcfile)
else:
message('- script \'%s\' is absent' % srcfile)
absent += 1
for srcfile in modules:
srcfile = 'source/%s' % srcfile
if srcfile in src_present:
message('- found module \'%s\' as expected' % srcfile)
else:
message('- module \'%s\' is absent' % srcfile)
absent += 1
if absent > 0:
message('- you should download this package again from scratch')
message('- exiting setup procedure')
sys.exit(1)
message(' ')
#
message('checking for script-based tools that accompany this software')
src_present = glob.glob('tools/*')
absent = 0
for srcfile in tools:
srcfile = 'tools/%s' % srcfile
if srcfile in src_present:
message('- found script \'%s\' as expected' % srcfile)
else:
message('- script \'%s\' is absent' % srcfile)
absent += 1
if absent > 0:
message('- if you need these tools, you should download this package \
again from scratch')
message(' ')
#
message('checking for HTCondor example files that accompany this software')
src_present = glob.glob('htcondor/*')
absent = 0
for srcfile in htcondor:
srcfile = 'htcondor/%s' % srcfile
if srcfile in src_present:
message('- found htcondor file \'%s\' as expected' % srcfile)
else:
message('- htcondor file \'%s\' is absent' % srcfile)
absent += 1
if absent > 0:
message('- if you need these files, you should download this package \
again from scratch')
message(' ')
#
message('checking for essential python package dependencies for this software')
err = 0
#
try:
import os
message('- python dependency \'os\' is available')
except ImportError:
message('- essential python dependency \'os\' is not available')
err += 1
#
try:
import sys
message('- python dependency \'sys\' is available')
except ImportError:
message('- essential python dependency \'sys\' is not available')
err += 1
#
try:
import datetime
message('- python dependency \'datetime\' is available')
except ImportError:
message('- essential python dependency \'datetime\' is not available')
err += 1
#
try:
import glob
message('- python dependency \'glob\' is available')
except ImportError:
message('- essential python dependency \'glob\' is not available')
err += 1
#
try:
import pickle
message('- python dependency \'pickle\' is available')
except ImportError:
message('- essential python dependency \'pickle\' is not available')
err += 1
#
try:
import numpy
message('- python dependency \'numpy\' is available')
except ImportError:
message('- essential python dependency \'numpy\' is not available')
err += 1
#
try:
import pandas
message('- python dependency \'pandas\' is available')
except ImportError:
message('- essential python dependency \'pandas\' is not available')
err += 1
#
try:
import h5py
message('- python dependency \'h5py\' is available')
except ImportError:
message('- essential python dependency \'h5py\' is not available')
err += 1
#
try:
import gdal
message('- python dependency \'gdal\' is available')
except ImportError:
message('- essential python dependency \'gdal\' is not available')
err += 1
#
try:
import osgeo.osr
message('- python dependency \'osgeo.osr\' is available')
except ImportError:
message('- essential python dependency \'osgeo.osr\' is not available')
err += 1
#
try:
import scipy.interpolate
message('- python dependency \'scipy.interpolate\' is available')
except ImportError:
    message('- essential python dependency \'scipy.interpolate\' is not '
            'available')
err += 1
#
try:
import scipy.ndimage
message('- python dependency \'scipy.ndimage\' is available')
except ImportError:
message('- essential python dependency \'scipy.ndimage\' is not available')
err += 1
#
try:
import scipy.stats
message('- python dependency \'scipy.stats\' is available')
except ImportError:
message('- essential python dependency \'scipy.stats\' is not available')
err += 1
#
try:
import matplotlib
message('- python dependency \'matplotlib\' is available')
except ImportError:
message('- essential python dependency \'matplotlib\' is not available')
err += 1
#
try:
import matplotlib.pyplot
message('- python dependency \'matplotlib.pyplot\' is available')
except ImportError:
    message('- essential python dependency \'matplotlib.pyplot\' is not '
            'available')
err += 1
#
try:
import mpl_toolkits
message('- python dependency \'mpl_toolkits\' is available')
except ImportError:
message('- essential python dependency \'mpl_toolkits\' is not available')
err += 1
#
try:
import mpl_toolkits.basemap
message('- python dependency \'mpl_toolkits.basemap\' is available')
except ImportError:
    message('- essential python dependency \'mpl_toolkits.basemap\' is not '
            'available')
err += 1
#
if err > 0:
    message('- you need to install one or more additional python packages '
            'for this software to work')
    message('- all of these packages are available via Anaconda (\'conda\') '
            'and/or PyPI (\'pip\') repositories')
message('- exiting setup procedure')
sys.exit(1)
message(' ')
#
message('checking for example data files that should accompany this software')
gz_data_present = glob.glob('data/*.gz')
absent = 0
for gz_dfile in gz_data_files:
gz_dfile_path = 'data/%s' % gz_dfile
if gz_dfile_path in gz_data_present:
message('- found compressed data file \'%s\' as expected' % gz_dfile)
message('-- uncompressing \'%s\'' % gz_dfile)
        # each os.system() call runs in its own shell, so a separate
        # 'cd data' would not persist; give gunzip the full path instead
        os.system('gunzip data/%s' % gz_dfile)
else:
message('- compressed example data file \'%s\' is absent' % gz_dfile)
absent += 1
if absent > 0:
    message('- you don\'t need these if you have your own data in the right '
            'formats')
    message('- if you need the examples, you can find them on GitHub at')
message(' https://github.com/megarcia/WxCD')
#
data_present = glob.glob('data/*')
absent = 0
for dfile in data_files:
dfile_path = 'data/%s' % dfile
if dfile_path in data_present:
message('- found data file \'%s\' as expected' % dfile)
else:
message('- example data file \'%s\' is absent' % dfile)
absent += 1
if absent > 0:
    message('- you don\'t need these if you have your own data in the right '
            'formats')
    message('- if you need the examples, you can find them on GitHub at')
message(' https://github.com/megarcia/WxCD')
message(' ')
#
message('checking for data documentation files that should accompany this '
        'software')
docs_present = glob.glob('docs/*')
absent = 0
for dfile in doc_files:
dfile = 'docs/%s' % dfile
if dfile in docs_present:
message('- found documentation file \'%s\' as expected' % dfile)
else:
message('- data documentation file \'%s\' is absent' % dfile)
absent += 1
if absent > 0:
message('- you don\'t need these if you have your own documentation')
    message('- if you need the examples, you can find them on GitHub at')
message(' https://github.com/megarcia/GT16_JGRA')
message(' ')
#
message('creating top-level and sub-directories that will be used for '
        'process output')
for dirname in add_dirs:
os.system('mkdir %s' % dirname)
message('- made top-level directory \'%s\' ' % dirname)
for dirname in analyses_dirs:
os.system('mkdir analyses/%s' % dirname)
message('- made sub-directory \'analyses/%s\' ' % dirname)
message(' ')
#
message('copying source scripts and modules to top-level directory')
os.system('cp source/*.py .')
message('archiving original scripts and modules to \'source_orig\' directory')
os.system('mv source source_orig')
#
message('copying tools to top-level directory')
os.system('cp tools/*.py .')
message('archiving original tools scripts to \'tools_orig\' directory')
os.system('mv tools tools_orig')
message(' ')
#
message('all set!')
message(' ')
#
message('if you plan to use the HTCondor example files, you\'ll need to '
        'move or copy them to')
message(' your top-level directory')
message(' ')
#
message('make sure to read the \'README.md\' file before you get started '
        'on the scripts')
message(' ')
#
message('if you need help getting your own dataset of GHCND weather '
        'observations, there is')
message(' a how-to document in the \'docs\' directory')
message(' ')
#
message('please send questions, bug reports, any other requests to '
        'matt.e.garcia@gmail.com')
message(' (and include a helpfully descriptive subject line, if you could)')
message('or submit them through the Issues tab at the GitHub repository '
        'for this package')
message(' ')
#
sys.exit(0)
| megarcia/WxCD | setup.py | Python | gpl-3.0 | 18,007 | 0 |
from test_support import *
do_flow(opt=["-u", "indefinite_bounded.adb"])
| ptroja/spark2014 | testsuite/gnatprove/tests/NB19-026__flow_formal_vectors/test.py | Python | gpl-3.0 | 73 | 0 |
import os
from unittest import TestCase
from unittest.mock import patch
from bs4 import BeautifulSoup
from RatS.criticker.criticker_ratings_inserter import CritickerRatingsInserter
TESTDATA_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "assets")
)
class CritickerRatingsInserterTest(TestCase):
def setUp(self):
if not os.path.exists(os.path.join(TESTDATA_PATH, "exports")):
os.makedirs(os.path.join(TESTDATA_PATH, "exports"))
self.movie = dict()
self.movie["title"] = "Fight Club"
self.movie["year"] = 1999
self.movie["imdb"] = dict()
self.movie["imdb"]["id"] = "tt0137523"
self.movie["imdb"]["url"] = "https://www.imdb.com/title/tt0137523"
self.movie["imdb"]["my_rating"] = 9
self.movie["tmdb"] = dict()
self.movie["tmdb"]["id"] = "550"
self.movie["tmdb"]["url"] = "https://www.themoviedb.org/movie/550"
with open(
os.path.join(TESTDATA_PATH, "criticker", "search_result.html"),
encoding="UTF-8",
) as search_results:
self.search_results = search_results.read()
with open(
os.path.join(TESTDATA_PATH, "criticker", "search_result_tile.html"),
encoding="UTF-8",
) as result_tile:
self.search_result_tile_list = [result_tile.read()]
with open(
os.path.join(TESTDATA_PATH, "criticker", "movie_details_page.html"),
encoding="UTF-8",
) as movie_details_page:
self.movie_details_page = movie_details_page.read()
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_init(self, browser_mock, base_init_mock):
CritickerRatingsInserter(None)
self.assertTrue(base_init_mock.called)
@patch("RatS.base.base_ratings_inserter.RatingsInserter._print_progress_bar")
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_insert(
self,
browser_mock,
base_init_mock,
site_mock,
overview_page_mock, # pylint: disable=too-many-arguments
eq_check_mock,
progress_print_mock,
):
overview_page_mock.return_value = self.search_result_tile_list
eq_check_mock.return_value = True
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.args = False
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
inserter.insert([self.movie], "IMDB")
self.assertTrue(base_init_mock.called)
self.assertTrue(progress_print_mock.called)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_external_link_compare_imdb_success(
self, browser_mock, base_init_mock, site_mock
):
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
result = inserter._compare_external_links(
self.movie_details_page, self.movie, "imdb.com", "imdb"
) # pylint: disable=protected-access
self.assertTrue(result)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_external_link_compare_imdb_fail(
self, browser_mock, base_init_mock, site_mock
):
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
movie2 = dict()
movie2["title"] = "Arrival"
movie2["year"] = 2006
movie2["imdb"] = dict()
movie2["imdb"]["id"] = "tt2543164"
movie2["imdb"]["url"] = "https://www.imdb.com/title/tt2543164"
movie2["imdb"]["my_rating"] = 7
result = inserter._compare_external_links(
self.movie_details_page, movie2, "imdb.com", "imdb"
) # pylint: disable=protected-access
self.assertFalse(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_success_by_imdb(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.search_results
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
result = inserter._find_movie(self.movie) # pylint: disable=protected-access
self.assertTrue(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_success_by_year(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.movie_details_page
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
movie2 = dict()
movie2["title"] = "Fight Club"
movie2["year"] = 1999
search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser")
result = inserter._is_requested_movie(
movie2, search_result
) # pylint: disable=protected-access
self.assertTrue(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_fail_by_year(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.movie_details_page
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
movie2 = dict()
movie2["title"] = "Fight Club"
movie2["year"] = 1998
search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser")
result = inserter._is_requested_movie(
movie2, search_result
) # pylint: disable=protected-access
self.assertFalse(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_fail(
self,
browser_mock,
base_init_mock,
site_mock,
compare_mock,
tiles_mock,
equality_mock,
): # pylint: disable=too-many-arguments
site_mock.browser = browser_mock
browser_mock.page_source = self.search_results
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = False
tiles_mock.return_value = self.search_result_tile_list
equality_mock.return_value = False
movie2 = dict()
movie2["title"] = "The Matrix"
movie2["year"] = 1995
movie2["imdb"] = dict()
movie2["imdb"]["id"] = "tt0137523"
movie2["imdb"]["url"] = "https://www.imdb.com/title/tt0137523"
movie2["imdb"]["my_rating"] = 9
result = inserter._find_movie(movie2) # pylint: disable=protected-access
self.assertFalse(result)
| StegSchreck/RatS | tests/unit/criticker/test_criticker_ratings_inserter.py | Python | agpl-3.0 | 9,321 | 0.001609 |
from openerp.modules.registry import RegistryManager
from openerp.netsvc import Service
from osv import fields, osv
from osv.orm import MetaModel
from reimport import reimport
class module(osv.osv):
_inherit = "ir.module.module"
def button_reload(self, cr, uid, ids, context=None):
for module_record in self.browse(cr, uid, ids, context=context):
#Remove any report parsers registered for this module.
module_path = 'addons/' + module_record.name
for service_name, service in Service._services.items():
template = getattr(service, 'tmpl', '')
                if isinstance(template, str):
if template.startswith(module_path):
Service.remove(service_name)
#Remove any model classes registered for this module
MetaModel.module_to_models[module_record.name] = []
#Reload all Python modules from the OpenERP module's directory.
modulename = 'openerp.addons.' + module_record.name
root = __import__(modulename)
module = getattr(root.addons, module_record.name)
reimport(module)
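        # drop and rebuild the registry so the reloaded model classes take effect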
RegistryManager.delete(cr.dbname)
RegistryManager.new(cr.dbname)
return {}
module()
| silvau/Addons_Odoo | module_reload/module.py | Python | gpl-2.0 | 1,349 | 0.008154 |
# -*- Mode: Python; py-indent-offset: 4 -*-
# pygobject - Python bindings for the GObject library
# Copyright (C) 2006-2007 Johan Dahlin
#
# gobject/constants.py: GObject type constants
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
import sys
from . import _gobject
# TYPE_INVALID defined in gobjectmodule.c
TYPE_NONE = _gobject.type_from_name('void')
TYPE_INTERFACE = _gobject.type_from_name('GInterface')
TYPE_CHAR = _gobject.type_from_name('gchar')
TYPE_UCHAR = _gobject.type_from_name('guchar')
TYPE_BOOLEAN = _gobject.type_from_name('gboolean')
TYPE_INT = _gobject.type_from_name('gint')
TYPE_UINT = _gobject.type_from_name('guint')
TYPE_LONG = _gobject.type_from_name('glong')
TYPE_ULONG = _gobject.type_from_name('gulong')
TYPE_INT64 = _gobject.type_from_name('gint64')
TYPE_UINT64 = _gobject.type_from_name('guint64')
TYPE_ENUM = _gobject.type_from_name('GEnum')
TYPE_FLAGS = _gobject.type_from_name('GFlags')
TYPE_FLOAT = _gobject.type_from_name('gfloat')
TYPE_DOUBLE = _gobject.type_from_name('gdouble')
TYPE_STRING = _gobject.type_from_name('gchararray')
TYPE_POINTER = _gobject.type_from_name('gpointer')
TYPE_BOXED = _gobject.type_from_name('GBoxed')
TYPE_PARAM = _gobject.type_from_name('GParam')
TYPE_OBJECT = _gobject.type_from_name('GObject')
TYPE_PYOBJECT = _gobject.type_from_name('PyObject')
TYPE_UNICHAR = TYPE_UINT
# do a little dance to maintain API compatibility
# as these were originally defined here, and are
# now defined in gobjectmodule.c
G_MINFLOAT = _gobject.G_MINFLOAT
G_MAXFLOAT = _gobject.G_MAXFLOAT
G_MINDOUBLE = _gobject.G_MINDOUBLE
G_MAXDOUBLE = _gobject.G_MAXDOUBLE
G_MINSHORT = _gobject.G_MINSHORT
G_MAXSHORT = _gobject.G_MAXSHORT
G_MAXUSHORT = _gobject.G_MAXUSHORT
G_MININT = _gobject.G_MININT
G_MAXINT = _gobject.G_MAXINT
G_MAXUINT = _gobject.G_MAXUINT
G_MINLONG = _gobject.G_MINLONG
G_MAXLONG = _gobject.G_MAXLONG
G_MAXULONG = _gobject.G_MAXULONG
G_MININT8 = _gobject.G_MININT8
G_MAXINT8 = _gobject.G_MAXINT8
G_MAXUINT8 = _gobject.G_MAXUINT8
G_MININT16 = _gobject.G_MININT16
G_MAXINT16 = _gobject.G_MAXINT16
G_MAXUINT16 = _gobject.G_MAXUINT16
G_MININT32 = _gobject.G_MININT32
G_MAXINT32 = _gobject.G_MAXINT32
G_MAXUINT32 = _gobject.G_MAXUINT32
G_MININT64 = _gobject.G_MININT64
G_MAXINT64 = _gobject.G_MAXINT64
G_MAXUINT64 = _gobject.G_MAXUINT64
G_MAXSIZE = _gobject.G_MAXSIZE
G_MAXSSIZE = _gobject.G_MAXSSIZE
G_MINOFFSET = _gobject.G_MINOFFSET
G_MAXOFFSET = _gobject.G_MAXOFFSET
| Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/gi/_gobject/constants.py | Python | gpl-3.0 | 3,114 | 0.000321 |
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from . import delivery_b
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Micronaet/micronaet-mx8 | sale_delivery_partial_B/__init__.py | Python | agpl-3.0 | 1,040 | 0 |
import os
import logging
from werkzeug.exceptions import BadRequest, NotFound
from flask import Blueprint, redirect, send_file, request
from apikit import jsonify, Pager, get_limit, get_offset, request_data
from aleph.core import archive, url_for, db
from aleph.model import Document, Entity, Reference, Collection
from aleph.logic import update_document
from aleph.events import log_event
from aleph.views.cache import enable_cache
from aleph.search.tabular import tabular_query, execute_tabular_query
from aleph.search.util import next_params
from aleph.views.util import get_document, get_tabular, get_page
log = logging.getLogger(__name__)
blueprint = Blueprint('documents_api', __name__)
@blueprint.route('/api/1/documents', methods=['GET'])
def index():
authz = request.authz
collections = request.args.getlist('collection')
collections = authz.collections_intersect(authz.READ, collections)
q = Document.all()
q = q.filter(Document.collection_id.in_(collections))
hashes = request.args.getlist('content_hash')
    if hashes:
q = q.filter(Document.content_hash.in_(hashes))
return jsonify(Pager(q))
@blueprint.route('/api/1/documents/<int:document_id>')
def view(document_id):
doc = get_document(document_id)
enable_cache()
data = doc.to_dict()
log_event(request, document_id=doc.id)
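    # prefer a direct archive URL; fall back to this app's own file endpoint
    # when the storage backend cannot provide one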
data['data_url'] = archive.generate_url(doc.meta)
if data['data_url'] is None:
data['data_url'] = url_for('documents_api.file',
document_id=document_id)
if doc.meta.is_pdf:
data['pdf_url'] = data['data_url']
else:
try:
data['pdf_url'] = archive.generate_url(doc.meta.pdf)
except Exception as ex:
log.info('Could not generate PDF url: %r', ex)
if data.get('pdf_url') is None:
data['pdf_url'] = url_for('documents_api.pdf',
document_id=document_id)
return jsonify(data)
@blueprint.route('/api/1/documents/<int:document_id>', methods=['POST', 'PUT'])
def update(document_id):
document = get_document(document_id, action=request.authz.WRITE)
data = request_data()
document.update(data)
db.session.commit()
log_event(request, document_id=document.id)
update_document(document)
return view(document_id)
@blueprint.route('/api/1/documents/<int:document_id>/references')
def references(document_id):
doc = get_document(document_id)
q = db.session.query(Reference)
q = q.filter(Reference.document_id == doc.id)
q = q.filter(Reference.origin == 'regex')
q = q.join(Entity)
q = q.filter(Entity.state == Entity.STATE_ACTIVE)
q = q.filter(Entity.collection_id.in_(request.authz.collections_read))
q = q.order_by(Reference.weight.desc())
return jsonify(Pager(q, document_id=document_id))
@blueprint.route('/api/1/documents/<int:document_id>/file')
def file(document_id):
document = get_document(document_id)
enable_cache(server_side=True)
log_event(request, document_id=document.id)
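    # redirect to the archive's own URL when one is available; otherwise
    # stream the locally stored copy from disk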
url = archive.generate_url(document.meta)
if url is not None:
return redirect(url)
local_path = archive.load_file(document.meta)
if not os.path.isfile(local_path):
raise NotFound("File does not exist.")
fh = open(local_path, 'rb')
return send_file(fh, as_attachment=True,
attachment_filename=document.meta.file_name,
mimetype=document.meta.mime_type)
@blueprint.route('/api/1/documents/<int:document_id>/pdf')
def pdf(document_id):
document = get_document(document_id)
enable_cache(server_side=True)
log_event(request, document_id=document.id)
if document.type != Document.TYPE_TEXT:
raise BadRequest("PDF is only available for text documents")
pdf = document.meta.pdf
url = archive.generate_url(pdf)
if url is not None:
return redirect(url)
try:
local_path = archive.load_file(pdf)
fh = open(local_path, 'rb')
except Exception as ex:
raise NotFound("Missing PDF file: %r" % ex)
return send_file(fh, mimetype=pdf.mime_type)
@blueprint.route('/api/1/documents/<int:document_id>/pages/<int:number>')
def page(document_id, number):
document, page = get_page(document_id, number)
enable_cache(server_side=True)
return jsonify(page)
@blueprint.route('/api/1/documents/<int:document_id>/tables/<int:table_id>')
def table(document_id, table_id):
document, tabular = get_tabular(document_id, table_id)
enable_cache(vary_user=True)
return jsonify(tabular)
@blueprint.route('/api/1/documents/<int:document_id>/tables/<int:table_id>/rows')
def rows(document_id, table_id):
document, tabular = get_tabular(document_id, table_id)
query = tabular_query(document_id, table_id, request.args)
query['size'] = get_limit(default=100)
query['from'] = get_offset()
result = execute_tabular_query(query)
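    # next_params() returns None once the result set is exhausted, so the
    # 'next' link is only emitted while further pages remain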
params = next_params(request.args, result)
if params is not None:
result['next'] = url_for('documents_api.rows', document_id=document_id,
table_id=table_id, **params)
return jsonify(result)
| smmbllsm/aleph | aleph/views/documents_api.py | Python | mit | 5,217 | 0.000192 |
from pyramid.config import Configurator
from pyramid.renderers import JSONP
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
config = Configurator(settings=settings)
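    # register a JSONP renderer so API responses can be wrapped in a
    # client-supplied 'callback' parameter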
config.add_renderer('jsonp', JSONP(param_name='callback'))
config.include('pyramid_mako')
config.include('pyramid_redis')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('home', '/')
config.add_route('api', '/api')
config.add_route('redirect', '/{shortened}')
config.scan()
return config.make_wsgi_app()
| SevereOverfl0w/Krympa | krympa/__init__.py | Python | mit | 597 | 0.001675 |