text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34)
---|---|---|---|---|---|---|
"""combine URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^combine/', include('core.urls')),
url(r'^admin/', admin.site.urls),
]
| WSULib/combine | combine/urls.py | Python | mit | 818 | 0 |
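The include('core.urls') line in the URLconf above pulls in a separate module from the project's core app. A minimal sketch of what that included URLconf might look like under Django 1.11 follows; the view name `index` is an assumption for illustration and is not part of this repository.

# core/urls.py -- hypothetical module resolved by include('core.urls') above.
# Only the include() mechanism comes from the source; the view name is assumed.
from django.conf.urls import url

from core import views

urlpatterns = [
    # Matched after the '^combine/' prefix handled by the root URLconf.
    url(r'^$', views.index, name='index'),
]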
# -*- coding: UTF-8 -*-
from django.contrib import admin
from django.db import models
from django_markdown.admin import MarkdownModelAdmin, AdminMarkdownWidget
from django_markdown.models import MarkdownField
from happenings.models import Event as HappeningsEvent
from happenings.admin import EventAdmin as HappeningsEventAdmin
from happenings.admin import CancellationInline
from django.utils.translation import ugettext as _
from .models import Event, Person, Registration
class EventAdmin(HappeningsEventAdmin):
fieldsets = (
(None, {
'fields': ('start_date', 'end_date', 'all_day', 'repeat',
'end_repeat', 'title', 'description',
'created_by', 'extra_url', 'gforms_url', 'image',
)
}),
('Location', {
'fields': ('location',)
}),
('Registrations', {
'classes': ('collapse',),
'fields': ( 'registration_requirement', 'max_registrations', 'close_registrations',
'event_cost', 'materials_cost', 'materials_mandatory',
'payment_due', 'hide_join_checkbox',
)
}),
('Category', {
'classes': ('collapse',),
'fields': ('categories',)
}),
('Tag', {
'classes': ('collapse',),
'fields': ('tags',)
}),
('Color', {
'classes': ('collapse',),
'fields': (
('background_color', 'background_color_custom'),
('font_color', 'font_color_custom'),
)
}),
)
formfield_overrides = {
MarkdownField: {'widget': AdminMarkdownWidget},
models.TextField: {'widget': AdminMarkdownWidget},
}
list_display = ('title', 'start_date', 'end_date', 'repeat', 'end_repeat', 'formLink')
list_filter = ['start_date']
search_fields = ['title']
date_hierarchy = 'start_date'
inlines = [CancellationInline]
class RegistrationAdmin(admin.ModelAdmin):
search_fields = ['event__title', 'person__first_name', 'person__last_name', 'person__email']
list_filter = ['state']
list_display = ('person','event', 'state')
# Remove the happenings event admin
admin.site.unregister(HappeningsEvent)
# And use our own
admin.site.register(Event, EventAdmin)
admin.site.register(Person)
admin.site.register(Registration, RegistrationAdmin)
| hacklab-fi/hhlevents | hhlevents/apps/hhlregistrations/admin.py | Python | bsd-3-clause | 2,455 | 0.003666 |
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from mptt.admin import MPTTModelAdmin
from .models import Channel
from .forms import ChannelAdminForm
from opps.core.admin import PublishableAdmin
from opps.core.admin import apply_opps_rules
from opps.core.permissions.admin import AdminViewPermission
from opps.core.utils import get_template_path
import json
@apply_opps_rules('channels')
class ChannelAdmin(PublishableAdmin, MPTTModelAdmin, AdminViewPermission):
prepopulated_fields = {"slug": ("name",)}
list_display = ['name', 'show_channel_path', 'get_parent', 'site',
'date_available', 'homepage', 'order', 'show_in_menu',
'published']
list_filter = ['date_available', 'published', 'site', 'homepage', 'parent',
'show_in_menu']
search_fields = ['name', 'slug', 'long_slug', 'description']
exclude = ('user', 'long_slug')
raw_id_fields = ['parent', 'main_image']
form = ChannelAdminForm
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'parent', 'name', 'slug', 'layout', 'hat',
'description', 'main_image',
'order', ('show_in_menu', 'include_in_main_rss'),
'homepage', 'group', 'paginate_by')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available')}),
)
def get_parent(self, obj):
if obj.parent_id:
long_slug, slug = obj.long_slug.rsplit("/", 1)
return long_slug
get_parent.admin_order_field = "parent"
get_parent.short_description = "Parent"
def show_channel_path(self, obj):
return unicode(obj)
show_channel_path.short_description = _(u'Channel Path')
def save_model(self, request, obj, form, change):
long_slug = u"{0}".format(obj.slug)
if obj.parent:
long_slug = u"{0}/{1}".format(obj.parent.slug, obj.slug)
obj.long_slug = long_slug
super(ChannelAdmin, self).save_model(request, obj, form, change)
def get_form(self, request, obj=None, **kwargs):
form = super(ChannelAdmin, self).get_form(request, obj, **kwargs)
channel_json = []
def _get_template_path(_path):
template = get_template_path(_path)
with open(template) as f:
_jsonData = f.read().replace('\n', '')
return json.loads(_jsonData)
def _get_json_channel(_obj):
return _get_template_path(
u'containers/{0}/channel.json'.format(_obj.long_slug))
def _get_json_channel_recursivelly(_obj):
channel_json = []
try:
channel_json = _get_json_channel(_obj)
except:
_is_root = _obj.is_root_node()
if not _is_root:
channel_json = _get_json_channel_recursivelly(_obj.parent)
elif _is_root:
try:
channel_json = _get_template_path(
u'containers/channel.json')
except:
pass
finally:
return channel_json
channel_json = _get_json_channel_recursivelly(obj)
if u'layout' in channel_json:
layout_list = ['default'] + [l for l in channel_json['layout']]
layout_choices = (
(n, n.title()) for n in layout_list)
form.base_fields['layout'].choices = layout_choices
return form
admin.site.register(Channel, ChannelAdmin)
| jeanmask/opps | opps/channels/admin.py | Python | mit | 3,690 | 0.000813 |
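The get_form override above climbs the channel tree looking for a containers/<long_slug>/channel.json template and, when it finds a 'layout' key, uses it to build the layout field's choices. A small sketch of that choice-building step with an invented JSON payload, assuming the file resolves through get_template_path:

# Illustrative reproduction of the choice-building logic in ChannelAdmin.get_form;
# the layout names "blog" and "gallery" are invented.
import json

channel_json = json.loads('{"layout": ["blog", "gallery"]}')
layout_list = ['default'] + [l for l in channel_json['layout']]
layout_choices = [(n, n.title()) for n in layout_list]
print(layout_choices)
# [('default', 'Default'), ('blog', 'Blog'), ('gallery', 'Gallery')]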
from django import template
from django.template.loader import render_to_string
from django.conf import settings
from ..utils import get_tag_id, set_lazy_tag_data
register = template.Library()
@register.simple_tag
def lazy_tag(tag, *args, **kwargs):
"""
Lazily loads a template tag after the page has loaded. Requires jQuery
(for now).
Usage:
{% load lazy_tags %}
{% lazy_tag 'tag_lib.tag_name' arg1 arg2 kw1='test' kw2='hello' %}
Args:
tag (str): the tag library and tag name separated by a period. For a
template tag named `do_thing` in a tag library named `thing_tags`
            the `tag` argument would be `'thing_tags.do_thing'`.
*args: arguments to be passed to the template tag.
**kwargs: keyword arguments to be passed to the template tag.
"""
tag_id = get_tag_id()
set_lazy_tag_data(tag_id, tag, args, kwargs)
return render_to_string('lazy_tags/lazy_tag.html', {
'tag_id': tag_id,
'STATIC_URL': settings.STATIC_URL,
})
def _render_js(library):
error_message = getattr(settings,
'LAZY_TAGS_ERROR_MESSAGE',
'An error occurred.')
template = 'lazy_tags/lazy_tags_{0}.html'.format(library)
return render_to_string(template, {
'error_message': error_message,
})
@register.simple_tag
def lazy_tags_javascript():
"""Outputs the necessary JavaScript to load tags over AJAX."""
return _render_js('javascript')
@register.simple_tag
def lazy_tags_jquery():
"""Outputs the necessary jQuery to load tags over AJAX."""
return _render_js('jquery')
@register.simple_tag
def lazy_tags_prototype():
"""Outputs the necessary Prototype to load tags over AJAX."""
return _render_js('prototype')
@register.simple_tag
def lazy_tags_js():
"""An alias to the JavaScript library specified in settings."""
library = getattr(settings, 'LAZY_TAGS_AJAX_JS', 'jquery')
return _render_js(library.lower())
| grantmcconnaughey/django-lazy-tags | lazy_tags/templatetags/lazy_tags.py | Python | mit | 2,029 | 0 |
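Putting the tags above together in a template: the page renders immediately, the lazy tag's placeholder is filled over AJAX, and one of the *_js helpers emits the loader script. This sketch follows the usage shown in the lazy_tag docstring; the tag library `blog_tags`, the tag `recent_posts` and its `limit` argument are assumptions for illustration.

{% load lazy_tags %}

{% lazy_tag 'blog_tags.recent_posts' limit=5 %}

{# near the end of the page, once jQuery is available #}
{% lazy_tags_js %}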
# The Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from openquake.hazardlib.source import AreaSource
from openquake.hazardlib.pmf import PMF
from openquake.hazardlib.scalerel import WC1994
from openquake.hazardlib.gsim.boore_atkinson_2008 import BooreAtkinson2008
from openquake.hazardlib.calc import disagg
from openquake.hazardlib.geo import Point, Polygon, NodalPlane
from openquake.hazardlib.mfd import TruncatedGRMFD
from openquake.hazardlib.imt import SA
from openquake.hazardlib.tom import PoissonTOM
from openquake.hazardlib.site import Site
class DisaggTestCase(unittest.TestCase):
def test_areasource(self):
nodalplane = NodalPlane(strike=0.0, dip=90.0, rake=0.0)
src = AreaSource(
source_id='src_1',
name='area source',
tectonic_region_type='Active Shallow Crust',
mfd=TruncatedGRMFD(a_val=3.5, b_val=1.0, min_mag=5.0,
max_mag=6.5, bin_width=0.1),
nodal_plane_distribution=PMF([(1.0, nodalplane)]),
hypocenter_distribution=PMF([(1.0, 5.0)]),
upper_seismogenic_depth=0.0,
lower_seismogenic_depth=10.0,
magnitude_scaling_relationship = WC1994(),
rupture_aspect_ratio=1.0,
polygon=Polygon([Point(-0.5,-0.5), Point(-0.5,0.5),
Point(0.5,0.5), Point(0.5,-0.5)]),
area_discretization=9.0,
rupture_mesh_spacing=1.0
)
site = Site(location=Point(0.0,0.0),
vs30=800.0,
vs30measured=True,
z1pt0=500.0,
z2pt5=2.0)
gsims = {'Active Shallow Crust': BooreAtkinson2008()}
imt = SA(period=0.1,damping=5.0)
iml = 0.2
time_span = 50.0
truncation_level = 3.0
n_epsilons = 3
mag_bin_width = 0.2
# in km
dist_bin_width = 10.0
# in decimal degree
coord_bin_width = 0.2
# compute disaggregation
bin_edges, diss_matrix = disagg.disaggregation_poissonian(
[src], site, imt, iml, gsims, time_span, truncation_level,
n_epsilons, mag_bin_width, dist_bin_width, coord_bin_width
)
mag_bins, dist_bins, lon_bins, lat_bins, eps_bins, trt_bins = bin_edges
numpy.testing.assert_almost_equal(
mag_bins, [5., 5.2, 5.4, 5.6, 5.8, 6., 6.2, 6.4, 6.6]
)
numpy.testing.assert_almost_equal(
dist_bins, [0., 10., 20., 30., 40., 50., 60., 70., 80.]
)
numpy.testing.assert_almost_equal(
lat_bins, [-0.6, -0.4, -0.2, 0., 0.2, 0.4, 0.6]
)
numpy.testing.assert_almost_equal(
lon_bins, [-0.6, -0.4, -0.2, 0., 0.2, 0.4, 0.6]
)
numpy.testing.assert_almost_equal(
eps_bins, [-3., -1., 1., 3.]
)
self.assertEqual(trt_bins, ['Active Shallow Crust'])
expected_matrix = numpy.fromstring("""\
eJztnXlcTdv7x3eSJuVEKSWOg5LSPVEZytm7lESl5Ia4nG6GuF1FdUWGTcpYMpZolEa5hwgN7OIm
lEYNKOeWBlNFyZDqd/q9vq+v8717da99zz5N9vs/S6+1nr3Ws/Y6e33W8ywIoiCVcM+brec1YbSo
fvtn5mYYmsTNHN+wGP7v/591TK2FLWEoO1H1caMJ/Dc1kcupjGMOYWy8PRQU/REWFiS31xqGLsZ2
ii9e+9WfsZAw3S0TeOUlR+7RFvWgn5clIg/vs6AGh2O0JfZf22VvFJ3UaQhDl1W0LgQtoeYdxd9j
PV05eIIW3k+4j4I37lMSnv8EialczZ2Br/9EveoLNSN8uaeJ8uHYefhyJ5G0dT5Mwe3c35GQ7j8N
X8+8s/uhaB18edO8xfa2k/HlKCQr7kYXXr/N864wHm4IqL947M5VDGq+9xZIcI651SB8/2Pqj/UX
jMOXIwr6MoNGAvxHIzM/4zNLYHs4z+oSz2gL7g9cnzFwNcB+ooQnaLY6jxK8HvRjdtpyEvwclR4/
J08SMK9PmGP6gOcN74BFa8YDxuvLb+MzAOM+YCk5rqDyFuCfT94uPs8V3G+7xbkmbm0bvn705Rsl
pBXQbpLYFI13gPIIkzSVJsHtRH6OzvQdTIIfhlfVlrcA7Pl4ycUA9Fzd1fNcOb+dhPdGt1zMTJz+
5tvrx/Q6tDslAO/DZeLQKwgwj56J7b4C8Ct0j/sSxS9CfK7egmYejFwi4bmwe/HrQ0ioJ3bwoFsY
CfUw20xFrgDq4Ry6axADKOcefm2X24fG13XcuGG3+5A93cHZvWT3eRLsnGfhUpUCqqfO0ecaCfUv
LaiVB/kVp0R9HRn2U1BQUFBQUHx30INWx2VpwZDdp2v2u9fDkEX1xNG/zP/6fREuXxpdaQFDzB+M
tjrP6rnvdLVAhuKHn/D2UFD0R4Zr3R+WugSGRJ4u2juN/dWfZ/wSxkEMet7PnV5XltyYAUP175ct
zLP92u6KJQwDlgkMmdB2Xv/Rlpp3FH+PUo495AvQdxB4/nLvscLznya2vrPPbHz97rki6UXG+PLt
lon2BxYA9qslMcm3uoLbmW3XFtg5HV9PUHJeYwRAF6NZGjvdBOgL+ZnPO/+cILx+G5oXFpKFAMYr
eu9qfTVqvvcW2K+DG2yHAvzEwci6aRK+3Fo91FMToJOim8N/ow8RfBzZ0tCaVD0S/CHrED0aoPMS
xTplUPMdEnSrAO0y2w4S7GEf2Jl3fzi+Hva7qT7VgPFyrb0lrg84JwDdXHVbTOb7mXdIR2nSQoB/
ouJxbl6fhLefyX6EaCbSAP18lKNYDtKd3bSdZoB0lkR1mxIieiVt/89aZfjn4vpHnFsmT4K+bLjl
QhlABycK6qCeWScleD3YQ79pEiTouYiVtTdHGTC/LIwbReUA49Li9X6bKGAcy9pyG2UH4PwqeKSx
8TkJ8wVNkRCpIFCPu4mxeAbg76MfZiyrJMGeJT768wjoy2ipwrtUkJ7eW8yvM9/V2IfsOexok3kP
YM+tnKvL6gS3E82wcLf4SMLzcs30FUC64ZszcVqgcwgpFZ7qQP9fftXkOgn20PfboEG9MI50o1V/
HO1D/kPxDxx8JgfS5UmDVmkXTEL9+QkSjAgyzkvsefDam/JPCgqKAUCLMqdNDYYYjsmH3BxgKGCD
W2UC3/5Yi8tcl+B5MITR3NdfIOGc/LdyZWPKe42leHsoKPoj8fAGiyZ7GMpWassp5otndAqoXllh
CkO6unrtkHnP+Xnsa/kVaYB2PdVKtMvn97w9FP0Tp3Q35R8A+g5X8oL9JRLiPv4Kus61QL+FBbnG
Htu1aM7X+tHS+TbxCjA0I27U2myYL74ydqihthRvHalfvXU7QC9jJ10UXQHQrb6ZABns6WMWxB1j
an5+Jl+7wWefOYgD1s1aucK2KhaUr/vn/lxQfM1rxTs26sKbd1r67PB7gPi4cK85bEyI7VL8PeyN
YrEsgJ4SdH67r+tUfHnAtgmH5QA6KeL3a8BlEvSU/SPjxxQBdG2izJh4pkiMBH3ZdWgA4kOCfyqp
M6FnJPyORe+tj0YUATqXquvBHYB5vbT8WpMioD/ZNum61wDjPlDhzhr5+BJAv8DMo6XlxYTXD9yM
m7PSVb69fuz3I5LHATodlqh0bjWR+WVprrcBsH+LXnh/Q3YMCXqT2V2ddAUC9ayZW7CyGqDH+foc
fDWChHlx3My1FKDjE6VpjJcoHfR+u1z3NhcQV464ag12A4wL223hwXOAedrvaa/1ciUQ39cdaKP9
L8tA+kJ33MSedzwF/L3atftBVSTsi24+G5klQmC8ZGWj9PpQfB/KyMs1e9937IHWJe5K+RNgT7K7
9j0y+s1c9vY6QBw0YeLznuwA6LDYPo8YR5Cefj9z+xtQP684rXkQcN6gW5o8ntvHAf4+asveWaTE
FWpnXCYSDxhbUz/tQR/yH4q/pzg4vpCIvxHF+Xb2JzL80Hdic84jEup5bSiS1JfibSkoehL0PkMF
pfx/oND08K7xI953Bm01G8u3gyF0jb6OFN+534DTmSmMOTAUTqsNk5rYc98RhXNMM1QX4e2hoOiP
zI2MLlCzh6FYF6mCUIuv/ky7ZK1RbgZDElEPz/nDPefnOU9PYlMB7ebIxyaWzO95eyj6Ga5Bzluj
WZDneF13LmB/nu3e8qVICPpXd9C0WtqVdWAoKIQZqWvGp0MZpGvFM/DrCJq1eiVDHIayrcPGnyJh
f/6vBDRI6pV3xYF4zP1Thl+Pk/L+tGE4fj1FfVRVrJtZEPPJuI2hU8i3BztYtLFqKAyVNW2WOcHi
q99OBJFu5LX7QTbUSwjtUgjGdW3vk+yZ+HGhBZ5I/gz4PYbZ3bazAegLRKnPVA8JJuF3F2eEy9pA
fRLirWyqtg0jIW4roPS8RxYoDosgaKFhmFYHQNc455paAXhe9pU2QytAuwgd9ZlCRL/o56B5ErGg
eCWkxkGvTlqI/bBp3yEjQP5MZENj5c8A3Q0bkT69BRAPxZ12qaONgF6J/ToOcgTEJbG1d62UIkH/
oudHrTkzmkA9498FVwHiNZCcSgMREvKLYhVPdEVI0NEQy5BP4gDdCouRbXfUwJfTM4fM2QcYF/qT
Y4ExQswn3Gv4Lc52ewnYh7lmWuYMyofZDeiJNyG3iOggK98ahtQD/n6vVo0/gfyW3ZI171EegThE
tKV+tEF739mPQgM5P9kR6H9hg86OKzb4ALDnaHTHIRLixBGbwAqHYUI8t+D8ec1cQNwuOjZPxgQQ
nwu16nqNrCHQ//mMhGE5gL9HbibdIxIX2R0nkh6sKiVQD313SwpIX6bom8Sn6wQUCnG87KLLnMiI
q0WqP3mA3ttEqTBiZADOz1BQfBfEjvkoe5Py/4ECbYiDcxoDhkzulDrnWMAQtne5jV/XPoNr1Pjy
CBY040lc7gsD3r/H7ozzA+SjEBbudUvd8sz57PkPQTqpMX76PW8PBYUgWFnbrnppB0PyxrEt9Xxx
KxwDyysHTGHItfhVygtAHI2w0B3l0XDaBN8u2+ij0fXp+HlHQcEP+uVyWLIs3k/QhWWJGl15rIT1
fn7fWmb8mgVh7Wvj9oh/rT87+XoQrMfz5yrliMN8eXq5RxJ9IzXwdobHpQ5NoQvPzz/qz/dYNhU/
v5D6iuVzlfHrF1cy5aysovDsYZoarL8+AW8PvXU5I3sENd/7HDF1E31535meGl6GF/nvudv5MXIJ
73ubxrw34QeA/oVaOV1QEiSe6Nqr2V9qWFDsxaRXMwRZj2K1mIw6FsTep8deIIj+tWuV7SqePfWs
kNkzSIjbYnN1jQaTcY4rw2fbDv59P8zhpxN/sCDmojrYEvC8tE8ni0sA939x6y7bn/yO9C8koLg4
DaRDTSp/JwbKT0gSaFyrv7wqYL5U6UiFigPaHbUzKwYQx4Rsb7jZSeRey1tbTPcD8u9h9/zC75Cg
N3HdOr/sJqDvoL8PSTsC0G2R04r1UiTEcWBr6otaSPBnROHP8AjAeyz/zcTVNzUB41hpVIYC8kly
tnjMlgHkI+3voAtii+eD7jsz9Z5eRCAfHbbqwqwtBPJVop0Fu84B8hOicpwjBs2C7wthR6QmvCCi
f4VcfbcSpO/0EmizilOkEPO4Eia5QCakEzBej390lyUhThz5bFUeKcT7K9mbT+hKgfLEmjVuVQXd
nxjxoN3uNYH+58zeMhsUv6NvdSeUiI7WHfmiqiWg+Lvu2PLpzQwy2qXoGRiqQz+QoZN2R+vLdSNq
SYjzvXleHiES59sdszKXvGqg/JPiO+WKvfOBPMr/BwxBultcpWGI/eatwpSpMIQFuqhm8L5Dsfqm
tN+6vmM2ZLpqGfP+//XSz1gPnqOrH5PAyDDCtxu7OXfKMeZXOyko+gMfnxx55jEfhoLqrs09wxcv
wzyaVrLUEoY8RX+62iSEOJTuKE44tCjOhNduqtYVjG9fERnM9Niu2/PznaJ/gWS4wcMl8O9h9EuB
ir+i8PyHu3rv7x5yMETPybmjybcPuX947J6maTx7lBwNc/jimCQ2fnHJ4pVbT9a8zOXbN0PWnl6y
m/ddjeqVplwQRC84/kuU2UcWhB67MSqB7xyy9ahtm8ep4/uBOyI1KkaN167D+pWn+O5Hw5j0UB0a
CfZ0R9V7I7oGz56WauNxfOfn2YO/HKscTc33XkfcW8yl7av/IJLiS+dKwlDTUb/G4XzvZ6w5yD95
EM+fQxpH2P4AGK+GlUp3iOSP+iv7Jmac72RBNLHAYUYCxElhuYtDSnj+zJlzvH2hIHGFL4sUXgzm
Pa+mGCtGkHvxypm38jp4z6Wy8MsNQfycuwrec5MFIVctIyP4dY0xv4Smy8BQuJap2Qr+dVxLZPn5
z7z3g5u5/f/kc5s/1X1NAa/8x3P5F4S4f9jXCJfIma0OOBeBbb3mfkaIv+extQUxoqC8eVYXvJsB
+hcWkV3RLgGwR/OAuSGBeCtuQmpCEWD/FvWWnCYKqp8gtBEuyTcIxFmzg1+IyoDiKSaavrUj4/4v
un9aIAn5BrHJ+2PEQHrWw+vX3ADvgfA/CmVeA+Lp2NWGR6yEeJ9mb4GqqYxktQF0jatTT6gByiE/
/SSdF4C/r5IKuk0gfgqt2n3AHlT/log2lIR8jJA9XOkCiuvpDvOLUfqgODiK/wc9PduRBYg/Df8k
eraYhPctUqpxNpuEfKdQ9Qrvba8A4zj4tHk1QE/H3lyazQa9r27LDdHgEvAHL8fEB6C//zx5dHY5
CX4VdcNXlcg9a/a36sLIaJeiZ0h80alKxj2MZJGtoekDuu9vt8bEPDLy0yrb5k/pQ/GtFBQ9irvF
pF1/UP4/UEBbI2KRITCEJkXfydKHIXro724TeL83kDuXpAOHw5BrZ7XnLQMYYtfYOxWScF7xH4m9
+5BZxoKyNQt2mXXpXHcuH0W79hnq0mAd3jrD1ttxspHPTgqK/kCUwqiK0cYwxDG7q+HFd4/JidUx
rX/M482vvfViyaD9TCFBu5w17cGsrn1FlQW5DL44Gi8xuzm8+c6c06o3lUHNLwowiNP0yHWK+Pcw
ZhkdtVVJeO9n9uaRe91U8fUjUQc2hmnAUPhvZScl+O55obfaTk9k4v2cTS9m7JLjW1/+JdyLJibG
8vh60GOpc/W64qpehh2ZwJdfK99npNlFgJ2odmZ9Vtc55oehetfJOKf3F7AkzhITBn4dz18jcqZD
jbfem4R5J4+l5nuPM2aNz6A6Fs5PkMLbgYHS+HHBVnq5K/DGj1taqv4rf7yekqnK4SLB/QfFGCuC
5QV//2PwpWWdvPlAF9CvkOpRxZpS+PlCuB7Dw2sSZAWvB53sd3BwM3686AlvLh0egX/P0B3uF5cr
89r9y7oPXTUrbKnF1zPQYZ+UEFcG6COI6ya5sUTiqgiCPTgmKcV/roAzpm3FQxaErF/1YBh//wf8
Wm2fwZs/tr575PnjC7AnLSP9eeU+l2UTBVnXhlbPSs5iQahXQPNkQXTYWp9powt59j8ZciRIkPxj
nMhNbXEsiHvjrl2iIHlTG1Qm7ijg9c+BNOVkUFzGt3L9mWRCHu+5zJ1H3+Xvn4CT6/MjWVB4UdjP
ufzzly1rOP8uC8rXds00A+WDPZs1U2IA3q+H/rbHtIKIbj5h39YrgLgkdKTIherPgv/e4HKm6+iR
oKegxVNuJ/Wl+7MGKOhWy0FnSMhbiDh9WKpPxrmFF9cDFRpBeTXLnFUA9dNTfMWtQO9hd2tmJRn5
A2XGmdiQEYeVeb3k/mPKn787trcvlwScN+g15j7x1ichPhdt1nF8AYj/paCgoOh33K+pePCBBQVI
xz4a1/W9UbKJkd7O+z7bNszZprPrXpDfj0ydBEOYlmeeJQn5ov+RJP/ArdWAdn0Daz3zeXa5M1vH
df0e2jqHmU5GvnEKih6AFpn20pQJQ4huqMopvu/xj375nD16MIRWXTQS68l8ntOOyd1V582vuIdD
0vnyYCA05LDTJHw5BQU/aIamQrPY1/XivzS7PTWmCW+9QB1tIz7I4+vnwvHlfnJ4e7ifqy/Tafhy
bOZTnWeSAPuJMnRT4X1pQD273FqsB+HXKWxLYLwSBFi/8gvFarr2p4S0rnGV1qB2yoD5blFy6qMU
Cf1A8e+w0nlzuAYw7nVWCyoH48cLHV5LfwAoh+6lumzjCu4/yKuk955igs9fjFMrFi8u+DqCGp1T
3N91P42g/mnaKtokSsJ7qUYcm/ka0M9YwqQECfzzYifC94ZJAfrhqqmjSa3w5nuf5ZC9wQNQvIAK
Tf+ZILpJdzTkTBnVdZ4eHvqY8y33i9E5doHFgHGZd+Dontsk+OEw0/cNXXp3T31P/RMrV5g/fEbC
c5GFf9WB1V268MyfPF7x63F35rVpVbHw82hPnuKYYkB/ordPde07I1qO7ZsGoL6Mnt7RdpqM/UwF
Nel7gDyKhBkqLaZERnxB8LlDUkTiZSj+HXUbExBQHB9RpN59KCcjHiSn9r0WIA4LlV3x5CJgXUAP
NpRJAfK4Qs8XqReSkY+u6eonXVBeRAqK/ohy3LXjZOi5/h2he0qoeUFB0Qv8H5mRW2E=\
""".decode('base64').decode('zip')).reshape((8, 8, 6, 6, 3, 1))
numpy.testing.assert_almost_equal(diss_matrix, expected_matrix)
| ROB-Seismology/oq-hazardlib | openquake/hazardlib/tests/acceptance/disagg_test.py | Python | agpl-3.0 | 10,965 | 0.00073 |
#! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
#
# Copyright (c) 2014 Siddharth Santurkar
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# NOTE: Run this script with the Python3 interpreter if the python3 compatibility
# of the ns-3 unit test runner needs to be tested.
# The following options of waf are being tested for portability by this script.
# To see the options supported by this script, run with the -h option on the command line
#
# build : executes the build (pre: configure, post: clean)
# check : run the equivalent of the old ns-3 unit tests using test.py
# clean : cleans the project
# configure: configures the project (pre: None, post: distclean)
# dist : makes a tarball for redistributing the sources (pre:none )
# distcheck: checks if the project compiles (tarball from 'dist') (pre: dist, post: rm -rf ns-3*.tar.bz2)
# docs : build all the documentation: doxygen, manual, tutorial, models (pre: configure; post: distclean)
# doxygen : do a full build, generate the introspected doxygen and then the doxygen
# install : installs the targets on the system (pre: configure, post: uninstall )
# list : lists the targets to execute (pre: configure)
# shell : run a shell with an environment suitably modified to run locally built programs (pre:configure)
# sphinx : build the Sphinx documentation: manual, tutorial, models
# step : executes tasks in a step-by-step fashion, for debugging (pre: configure)
# uninstall: removes the targets installed (pre: install, post: uninstall)
# *update : updates the plugins from the *waflib/extras* directory
from __future__ import print_function
from TestBase import TestBaseClass
import sys
def replace(pre, post, main_cmd_list):
if pre:
pre = pre + ' && '
else:
pre = ''
if post:
post = ' && ' + post
else:
post = ''
return [ pre + main_cmd + post for main_cmd in main_cmd_list ]
def main(argv):
"""
Prepares test cases and executes
"""
runner = TestBaseClass(argv[1:], "Test suite for the ns-3 Waf build system", 'waf')
in_cmds = runner.override_cmds()
if in_cmds:
cmds = in_cmds.split(',')
else:
cmds = ['basic', 'build', 'configure', 'step', 'clean', 'dist', 'list']
config_test_cases = [
"--enable-gcov",
"--enable-sudo",
"--enable-sudo",
"--enable-tests",
"--disable-tests",
"--enable-examples",
"--disable-examples",
"--doxygen-no-build",
"--enable-static",
"--enable-mpi",
"--enable-rpath",
"--enable-modules=build/utils/test-runner.cc.1.o",
"--boost-static",
"--boost-mt",
"--boost-linkage_autodetect",
"--boost-python=33",
"--disable-gtk",
"--int64x64=cairo",
"--disable-pthread",
"--force-planetlab",
"--nopyc",
"--nopyo",
"--disable-python",
"--apiscan=all",
"--with-python=/usr/bin/python2.7",
"--no32bit-scan",
"-o test_out && rm -rf test_out",
"--out=test_out && rm -rf test_out",
"-t test_top && rm -rf test_top",
"--top=test_top && rm -rf test_top",
"--download",
"--check-c-compiler=gc",
"--check-cxx-compiler=g++",
]
basic_test_cases = [
"--version",
"-h",
"--help",
]
build_test_cases = [
"-j10",
"--jobs=10",
"-d optimized",
"-d debug",
"-d release",
"--build-profile optimized",
"--build-profile debug",
"--build-profile release",
"-p",
"--progress",
]
step_test_cases = [
"--files=\"*/main.c,*/test/main.o\"",
]
install_test_cases = [
"-f",
"--force",
"--prefix=./test-prefix && rm -rf ./test-prefix",
"--exec-prefix=.",
"--bindir=./test-prefix/bin --sbindir=./test-prefix/sbin --libexecdir=./test-prefix/libexec --sysconfdir=./test-prefix/etc --sharedstatedir=./test-prefix/com --localstatedir=./test-prefix/var --libdir=./test-prefix/lib --includedir=./test-prefix/include --oldincludedir=./test-prefix/usr/include --datarootdir=./test-prefix/share --datadir=./test-prefix/share_root --infodir=./test-prefix/info --localedir=./test-prefix/locale --mandir=./test-prefix/man --docdir=./test-prefix/doc/package --htmldir=./test-prefix/doc --dvidir=./test-prefix/doc --pdfdir=./test-prefix/doc --psdir=./test-prefix/doc && rm -rf ./test-prefix",
]
common_test_cases = [
"",
"-k",
"--keep",
"-v",
"--verbose",
"--nocache",
"--zones=task_gen",
"--zones=deps",
"--zones=tasks",
"--no-task-lines",
]
test_case_mappings = {
'basic' : basic_test_cases,
'configure' : config_test_cases,
'build' : build_test_cases,
'step' : step_test_cases,
'install' : install_test_cases,
}
waf_string = sys.executable + ' waf'
cmd_execute_list = []
for cmd in cmds:
if cmd == 'basic':
cmd_list = []
else:
cmd_list = ['%s %s %s' % (waf_string, cmd, option) for option in common_test_cases ]
if cmd in test_case_mappings:
cmd_list += ['%s %s %s' % (waf_string, cmd, option) for option in test_case_mappings[cmd] ]
if cmd == 'basic':
cmd_list.append('%s configure && %s build && %s --run scratch/myfirst' % tuple([waf_string]*3))
cmd_list.append('%s configure && %s build && %s --pyrun scratch/myfirst.py' % tuple([waf_string]*3))
if cmd == 'build':
cmd_list = replace(waf_string+' configure', waf_string+' clean', cmd_list)
cmd_list.append('%s configure --enable-gcov && %s build --lcov-report && %s clean' % tuple([waf_string]*3))
elif cmd == 'configure':
cmd_list = replace(None, waf_string+' distclean', cmd_list)
elif cmd == 'distcheck':
cmd_list = replace(waf_string+' dist', 'rm -rf ns-3*.tar.bz2', cmd_list)
elif cmd == 'docs':
cmd_list = replace(waf_string+' configure', waf_string+' distclean', cmd_list)
elif cmd == 'install':
cmd_list = replace(waf_string+' configure', waf_string+' uninstall', cmd_list)
elif cmd == 'list':
cmd_list = replace(waf_string+' configure', waf_string +' distclean', cmd_list)
elif cmd == 'shell':
cmd_list = replace(waf_string+' configure', waf_string+' distclean', cmd_list)
elif cmd == 'step':
cmd_list = replace(waf_string+' configure', waf_string+' distclean', cmd_list)
elif cmd == 'uninstall':
cmd_list = replace(waf_string+' install', None, cmd_list)
cmd_execute_list += cmd_list
return runner.runtests(cmd_execute_list)
if __name__ == '__main__':
sys.exit(main(sys.argv)) | bijaydev/Implementation-of-Explicit-congestion-notification-ECN-in-TCP-over-wireless-network-in-ns-3 | utils/tests/test-waf.py | Python | gpl-2.0 | 7,623 | 0.005903 |
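The replace() helper above simply brackets each main command with optional pre/post commands joined by '&&'. A worked example of its output, assuming waf_string is 'python waf':

# Illustrative call to replace() as defined in the script above.
cmds = replace('python waf configure', 'python waf clean', ['python waf build -j10'])
print(cmds)
# ['python waf configure && python waf build -j10 && python waf clean']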
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`."""
import warnings
from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator
warnings.warn(
"This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`.",
DeprecationWarning, stacklevel=2
)
class BigQueryToCloudStorageOperator(BigQueryToGCSOperator):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`.""",
DeprecationWarning, stacklevel=2
)
super().__init__(*args, **kwargs)
| wileeam/airflow | airflow/contrib/operators/bigquery_to_gcs.py | Python | apache-2.0 | 1,678 | 0.00298 |
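For DAG code that still imports from the deprecated airflow.contrib path, migration is a one-line import change; the shim above keeps the old name working but emits a DeprecationWarning. A sketch of the preferred import, with illustrative task parameters that are not taken from this module:

# Preferred import path; the task_id, table and bucket values below are assumptions.
from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator

export_task = BigQueryToGCSOperator(
    task_id='export_table',
    source_project_dataset_table='my_project.my_dataset.my_table',
    destination_cloud_storage_uris=['gs://my-bucket/export-*.csv'],
)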
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities to warm-start TF.Learn Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_ops
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import saver
from tensorflow.python.util.tf_export import tf_export
@tf_export("train.VocabInfo", "estimator.VocabInfo")
class VocabInfo(
collections.namedtuple("VocabInfo", [
"new_vocab",
"new_vocab_size",
"num_oov_buckets",
"old_vocab",
"old_vocab_size",
"backup_initializer",
])):
"""Vocabulary information for warm-starting.
See @{tf.estimator.WarmStartSettings$WarmStartSettings} for examples of using
VocabInfo to warm-start.
Attributes:
new_vocab: [Required] A path to the new vocabulary file (used with the
model to be trained).
new_vocab_size: [Required] An integer indicating how many entries of the new
      vocabulary will be used in training.
num_oov_buckets: [Required] An integer indicating how many OOV buckets are
associated with the vocabulary.
old_vocab: [Required] A path to the old vocabulary file (used with the
checkpoint to be warm-started from).
old_vocab_size: [Optional] An integer indicating how many entries of the old
vocabulary were used in the creation of the checkpoint. If not provided,
the entire old vocabulary will be used.
backup_initializer: [Optional] A variable initializer used for variables
corresponding to new vocabulary entries and OOV. If not provided, these
entries will be zero-initialized.
"""
def __new__(cls,
new_vocab,
new_vocab_size,
num_oov_buckets,
old_vocab,
old_vocab_size=-1,
backup_initializer=None):
return super(VocabInfo, cls).__new__(
cls,
new_vocab,
new_vocab_size,
num_oov_buckets,
old_vocab,
old_vocab_size,
backup_initializer,
)
def _is_variable(x):
return (isinstance(x, variables_lib.Variable) or
isinstance(x, resource_variable_ops.ResourceVariable))
def _infer_var_name(var):
"""Returns name of the `var`.
Args:
var: A list. The list can contain either of the following:
(i) A single `Variable`
(ii) A single `ResourceVariable`
(iii) Multiple `Variable` objects which must be slices of the same larger
variable.
(iv) A single `PartitionedVariable`
Returns:
Name of the `var`
"""
name_to_var_dict = saver.BaseSaverBuilder.OpListToDict(var)
if len(name_to_var_dict) > 1:
raise TypeError("`var` = %s passed as arg violates the constraints. "
"name_to_var_dict = %s" % (var, name_to_var_dict))
return list(name_to_var_dict.keys())[0]
def _warm_start_var(var, prev_ckpt, prev_tensor_name=None):
"""Warm-starts given variable from `prev_tensor_name` tensor in `prev_ckpt`.
Args:
var: Current graph's variable that needs to be warm-started (initialized).
Can be either of the following:
(i) `Variable`
(ii) `ResourceVariable`
(iii) list of `Variable`: The list must contain slices of the same larger
variable.
(iv) `PartitionedVariable`
prev_ckpt: A string specifying the directory with checkpoint file(s) or path
to checkpoint. The given checkpoint must have tensor with name
`prev_tensor_name` (if not None) or tensor with name same as given `var`.
prev_tensor_name: Name of the tensor to lookup in provided `prev_ckpt`. If
None, we lookup tensor with same name as given `var`.
"""
if _is_variable(var):
current_var_name = _infer_var_name([var])
elif isinstance(var, list) and all(_is_variable(v) for v in var):
current_var_name = _infer_var_name(var)
elif isinstance(var, variables_lib.PartitionedVariable):
current_var_name = _infer_var_name([var])
var = var._get_variable_list() # pylint: disable=protected-access
else:
raise TypeError(
"var MUST be one of the following: a Variable, list of Variable or "
"PartitionedVariable, but is {}".format(type(var)))
if not prev_tensor_name:
# Assume tensor name remains the same.
prev_tensor_name = current_var_name
checkpoint_utils.init_from_checkpoint(prev_ckpt, {prev_tensor_name: var})
# pylint: disable=protected-access
# Accesses protected members of tf.Variable to reset the variable's internal
# state.
def _warm_start_var_with_vocab(var,
current_vocab_path,
current_vocab_size,
prev_ckpt,
prev_vocab_path,
previous_vocab_size=-1,
current_oov_buckets=0,
prev_tensor_name=None,
initializer=None):
"""Warm-starts given variable from `prev_tensor_name` tensor in `prev_ckpt`.
Use this method when the `var` is backed by vocabulary. This method stitches
the given `var` such that values corresponding to individual features in the
vocabulary remain consistent irrespective of changing order of the features
between old and new vocabularies.
Args:
var: Current graph's variable that needs to be warm-started (initialized).
Can be either of the following:
(i) `Variable`
(ii) `ResourceVariable`
(iii) list of `Variable`: The list must contain slices of the same larger
variable.
(iv) `PartitionedVariable`
current_vocab_path: Path to the vocab file used for the given `var`.
current_vocab_size: An `int` specifying the number of entries in the current
vocab.
prev_ckpt: A string specifying the directory with checkpoint file(s) or path
to checkpoint. The given checkpoint must have tensor with name
`prev_tensor_name` (if not None) or tensor with name same as given `var`.
prev_vocab_path: Path to the vocab file used for the tensor in `prev_ckpt`.
previous_vocab_size: If provided, will constrain previous vocab to the first
`previous_vocab_size` entries. -1 means use the entire previous vocab.
current_oov_buckets: An `int` specifying the number of out-of-vocabulary
buckets used for given `var`.
prev_tensor_name: Name of the tensor to lookup in provided `prev_ckpt`. If
None, we lookup tensor with same name as given `var`.
initializer: Variable initializer to be used for missing entries. If None,
missing entries will be zero-initialized.
Raises:
ValueError: If required args are not provided.
"""
if not (current_vocab_path and current_vocab_size and prev_ckpt and
prev_vocab_path):
raise ValueError("Invalid args: Must provide all of [current_vocab_path, "
"current_vocab_size, prev_ckpt, prev_vocab_path}.")
if _is_variable(var):
var = [var]
elif isinstance(var, list) and all(_is_variable(v) for v in var):
var = var
elif isinstance(var, variables_lib.PartitionedVariable):
var = var._get_variable_list()
else:
raise TypeError(
"var MUST be one of the following: a Variable, list of Variable or "
"PartitionedVariable, but is {}".format(type(var)))
if not prev_tensor_name:
# Assume tensor name remains the same.
prev_tensor_name = _infer_var_name(var)
for v in var:
v_shape = v.get_shape().as_list()
slice_info = v._get_save_slice_info()
partition_info = None
if slice_info:
partition_info = variable_scope._PartitionInfo(
full_shape=slice_info.full_shape,
var_offset=slice_info.var_offset)
# TODO(eddz): Support cases where class vocabularies need remapping too.
init = checkpoint_ops._load_and_remap_matrix_initializer(
ckpt_path=checkpoint_utils._get_checkpoint_filename(prev_ckpt),
old_tensor_name=prev_tensor_name,
new_row_vocab_size=current_vocab_size,
new_col_vocab_size=v_shape[1],
old_row_vocab_size=previous_vocab_size,
old_row_vocab_file=prev_vocab_path,
new_row_vocab_file=current_vocab_path,
old_col_vocab_file=None,
new_col_vocab_file=None,
num_row_oov_buckets=current_oov_buckets,
num_col_oov_buckets=0,
initializer=initializer)
new_init_val = ops.convert_to_tensor(
init(shape=v_shape, partition_info=partition_info))
v._initializer_op = state_ops.assign(v, new_init_val)
# pylint: enable=protected-access
@tf_export("train.warm_start")
def warm_start(ckpt_to_initialize_from,
vars_to_warm_start=".*",
var_name_to_vocab_info=None,
var_name_to_prev_var_name=None):
"""Warm-starts a model using the given settings.
If you are using a tf.estimator.Estimator, this will automatically be called
during training.
Args:
ckpt_to_initialize_from: [Required] A string specifying the directory with
checkpoint file(s) or path to checkpoint from which to warm-start the
model parameters.
vars_to_warm_start: [Optional] A regular expression that captures which
variables to warm-start (see tf.get_collection). Defaults to `'.*'`,
which warm-starts all variables. If `None` is explicitly given, only
variables specified in `var_name_to_vocab_info` will be warm-started.
var_name_to_vocab_info: [Optional] Dict of variable names (strings) to
VocabInfo. The variable names should be "full" variables, not the names
of the partitions. If not explicitly provided, the variable is assumed to
have no vocabulary.
var_name_to_prev_var_name: [Optional] Dict of variable names (strings) to
name of the previously-trained variable in `ckpt_to_initialize_from`. If
not explicitly provided, the name of the variable is assumed to be same
between previous checkpoint and current model.
Raises:
ValueError: If the WarmStartSettings contains prev_var_name or VocabInfo
configuration for variable names that are not used. This is to ensure
a stronger check for variable configuration than relying on users to
examine the logs.
"""
if var_name_to_vocab_info is None:
var_name_to_vocab_info = {}
if var_name_to_prev_var_name is None:
var_name_to_prev_var_name = {}
logging.info("Warm-starting from: %s", (ckpt_to_initialize_from,))
# We have to deal with partitioned variables, since get_collection flattens
# out the list.
grouped_variables = {}
# Both vars_to_warm_start = '.*' and
# vars_to_warm_start = None will match everything here.
for v in ops.get_collection(
# TODO(eddz): Allow for different collections here (to support
# warm-starting accumulators).
ops.GraphKeys.TRAINABLE_VARIABLES,
scope=vars_to_warm_start):
if not isinstance(v, list):
var_name = _infer_var_name([v])
else:
var_name = _infer_var_name(v)
grouped_variables.setdefault(var_name, []).append(v)
# Keep track of which var_names in var_name_to_prev_var_name and
# var_name_to_vocab_info have been used. Err on the safer side by throwing an
# exception if any are unused by the end of the loop. It is easy to misname
# a variable during this configuration, in which case without this check, we
# would fail to warm-start silently.
prev_var_name_used = set()
vocab_info_used = set()
for var_name, variable in six.iteritems(grouped_variables):
prev_var_name = var_name_to_prev_var_name.get(var_name)
if prev_var_name:
prev_var_name_used.add(var_name)
vocab_info = var_name_to_vocab_info.get(var_name)
if vocab_info:
vocab_info_used.add(var_name)
logging.info(
"Warm-starting variable: {}; current_vocab: {} current_vocab_size: {}"
" prev_vocab: {} prev_vocab_size: {} current_oov: {} prev_tensor: {}"
" initializer: {}".format(
var_name,
vocab_info.new_vocab,
vocab_info.new_vocab_size,
vocab_info.old_vocab,
(vocab_info.old_vocab_size if vocab_info.old_vocab_size > 0
else "All"),
vocab_info.num_oov_buckets,
prev_var_name or "Unchanged",
vocab_info.backup_initializer or "zero-initialized"))
_warm_start_var_with_vocab(
variable,
current_vocab_path=vocab_info.new_vocab,
current_vocab_size=vocab_info.new_vocab_size,
prev_ckpt=ckpt_to_initialize_from,
prev_vocab_path=vocab_info.old_vocab,
previous_vocab_size=vocab_info.old_vocab_size,
current_oov_buckets=vocab_info.num_oov_buckets,
prev_tensor_name=prev_var_name,
initializer=vocab_info.backup_initializer)
else:
# For the special value of vars_to_warm_start = None,
# we only warm-start variables with explicitly specified vocabularies.
if vars_to_warm_start:
logging.info("Warm-starting variable: {}; prev_var_name: {}".format(
var_name, prev_var_name or "Unchanged"))
# Because we use a default empty list in grouped_variables, single
# unpartitioned variables will be lists here, which we rectify in order
# for init_from_checkpoint logic to work correctly.
if len(variable) == 1:
variable = variable[0]
_warm_start_var(variable, ckpt_to_initialize_from, prev_var_name)
prev_var_name_not_used = set(
var_name_to_prev_var_name.keys()) - prev_var_name_used
vocab_info_not_used = set(var_name_to_vocab_info.keys()) - vocab_info_used
if prev_var_name_not_used:
raise ValueError(
"You provided the following variables in "
"var_name_to_prev_var_name that were not used: "
"{0}. Perhaps you misspelled them? Here is the list of viable "
"variable names: {1}".format(prev_var_name_not_used,
grouped_variables.keys()))
if vocab_info_not_used:
raise ValueError(
"You provided the following variables in "
"var_name_to_vocab_info that were not used: {0}. "
" Perhaps you misspelled them? Here is the list of viable variable "
"names: {1}".format(vocab_info_not_used, grouped_variables.keys()))
| allenlavoie/tensorflow | tensorflow/python/training/warm_starting_util.py | Python | apache-2.0 | 15,397 | 0.004287 |
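A minimal sketch of calling the warm-start API defined above from user code (normally the Estimator calls it during training); the checkpoint path, vocabulary files and the embedding variable name are assumptions:

# Hypothetical warm-start call; paths and the variable name are illustrative only.
import tensorflow as tf

vocab_info = tf.train.VocabInfo(
    new_vocab='/tmp/new_vocab.txt',
    new_vocab_size=10000,
    num_oov_buckets=1,
    old_vocab='/tmp/old_vocab.txt',
    old_vocab_size=8000,
)
tf.train.warm_start(
    ckpt_to_initialize_from='/tmp/previous_model',
    vars_to_warm_start='.*input_layer.*',
    var_name_to_vocab_info={
        'input_layer/words_embedding/embedding_weights': vocab_info,
    },
)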
import fnmatch
import os
import shlex
def autoCompleteList(text, items):
if not text:
completions = items
else:
completions = [item for item in items if item.lower().startswith(text.lower())]
return completions
def containsAny(string, chars):
return True in [char in string for char in chars]
def findRightMostSeparator(text, separators):
max_pos = 0
for separator in separators:
pos = text.rfind(separator)
max_pos = max(pos, max_pos)
return max_pos
def autoCompleteListWithSeparator(text, items, separators=":,@"):
if containsAny(text, separators):
auto_complete_list = autoCompleteList(text, items)
separator_pos = findRightMostSeparator(text, separators)
auto_complete_list = [item[separator_pos + 1:] for item in auto_complete_list]
else:
auto_complete_list = autoCompleteList(text, items)
return auto_complete_list
def createParameterizedHelpFunction(parameters, help_message):
def helpFunction(self):
return parameters, help_message
return helpFunction
def pathify(head, tail):
path = os.path.join(head, tail)
if os.path.isdir(path):
return "%s/" % tail
return tail
def getPossibleFilenameCompletions(text, separators="-"):
head, tail = os.path.split(text.strip())
if head == "": # no head
head = "."
files = os.listdir(head)
separator_pos = 0
if containsAny(tail, separators):
separator_pos = findRightMostSeparator(tail, separators) + 1
return [pathify(head, f)[separator_pos:] for f in files if f.startswith(tail)]
def extractFullArgument(line, endidx):
newstart = line.rfind(" ", 0, endidx)
return line[newstart:endidx].strip()
def matchItems(line, items):
patterns = shlex.split(line)
result_items = set()
for pattern in patterns:
pattern_matches = set()
for item in items:
if fnmatch.fnmatch(item.lower(), pattern.lower()): # case-insensitive matching
pattern_matches.add(item)
if len(pattern_matches) == 0:
print("Error: Name/Pattern '%s' does not match anything." % pattern)
else:
result_items = result_items | pattern_matches
return result_items | iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert_gui/shell/shell_tools.py | Python | gpl-3.0 | 2,275 | 0.004396 |
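A short usage sketch of the completion helpers above; the item names are invented, and matching is case-insensitive:

# Illustrative calls to the helpers defined in shell_tools.py.
items = ["WELL:OP1", "WELL:OP2", "GROUP:G1"]
print(autoCompleteList("we", items))                    # ['WELL:OP1', 'WELL:OP2']
print(autoCompleteListWithSeparator("WELL:OP", items))  # ['OP1', 'OP2']
print(matchItems("well:*", items))                      # set of both WELL items (order not guaranteed)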
# coding=utf-8
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.utils import excutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LI
from ironic.common.i18n import _LW
from ironic.common import states
from ironic.conductor import task_manager
from ironic.openstack.common import log
LOG = log.getLogger(__name__)
@task_manager.require_exclusive_lock
def node_set_boot_device(task, device, persistent=False):
"""Set the boot device for a node.
:param task: a TaskManager instance.
:param device: Boot device. Values are vendor-specific.
:param persistent: Whether to set next-boot, or make the change
permanent. Default: False.
:raises: InvalidParameterValue if the validation of the
ManagementInterface fails.
"""
if getattr(task.driver, 'management', None):
task.driver.management.validate(task)
task.driver.management.set_boot_device(task,
device=device,
persistent=persistent)
@task_manager.require_exclusive_lock
def node_power_action(task, new_state):
"""Change power state or reset for a node.
Perform the requested power action if the transition is required.
:param task: a TaskManager instance containing the node to act on.
:param new_state: Any power state from ironic.common.states. If the
state is 'REBOOT' then a reboot will be attempted, otherwise
the node power state is directly set to 'state'.
:raises: InvalidParameterValue when the wrong state is specified
or the wrong driver info is specified.
:raises: other exceptions by the node's power driver if something
wrong occurred during the power action.
"""
node = task.node
target_state = states.POWER_ON if new_state == states.REBOOT else new_state
if new_state != states.REBOOT:
try:
curr_state = task.driver.power.get_power_state(task)
except Exception as e:
with excutils.save_and_reraise_exception():
node['last_error'] = _(
"Failed to change power state to '%(target)s'. "
"Error: %(error)s") % {'target': new_state, 'error': e}
node['target_power_state'] = states.NOSTATE
node.save()
if curr_state == new_state:
# Neither the ironic service nor the hardware has erred. The
# node is, for some reason, already in the requested state,
# though we don't know why. eg, perhaps the user previously
# requested the node POWER_ON, the network delayed those IPMI
# packets, and they are trying again -- but the node finally
# responds to the first request, and so the second request
# gets to this check and stops.
# This isn't an error, so we'll clear last_error field
# (from previous operation), log a warning, and return.
node['last_error'] = None
node['target_power_state'] = states.NOSTATE
node.save()
LOG.warn(_LW("Not going to change_node_power_state because "
"current state = requested state = '%(state)s'."),
{'state': curr_state})
return
if curr_state == states.ERROR:
# be optimistic and continue action
LOG.warn(_LW("Driver returns ERROR power state for node %s."),
node.uuid)
# Set the target_power_state and clear any last_error, if we're
# starting a new operation. This will expose to other processes
# and clients that work is in progress.
if node['target_power_state'] != target_state:
node['target_power_state'] = target_state
node['last_error'] = None
node.save()
# take power action
try:
if new_state != states.REBOOT:
task.driver.power.set_power_state(task, new_state)
else:
task.driver.power.reboot(task)
except Exception as e:
with excutils.save_and_reraise_exception():
node['last_error'] = _(
"Failed to change power state to '%(target)s'. "
"Error: %(error)s") % {'target': target_state, 'error': e}
else:
# success!
node['power_state'] = target_state
        LOG.info(_LI('Successfully set node %(node)s power state to '
'%(state)s.'),
{'node': node.uuid, 'state': target_state})
finally:
node['target_power_state'] = states.NOSTATE
node.save()
@task_manager.require_exclusive_lock
def cleanup_after_timeout(task):
"""Cleanup deploy task after timeout.
:param task: a TaskManager instance.
"""
node = task.node
node.provision_state = states.DEPLOYFAIL
node.target_provision_state = states.NOSTATE
msg = (_('Timeout reached while waiting for callback for node %s')
% node.uuid)
node.last_error = msg
LOG.error(msg)
node.save()
error_msg = _('Cleanup failed for node %(node)s after deploy timeout: '
' %(error)s')
try:
task.driver.deploy.clean_up(task)
except exception.IronicException as e:
msg = error_msg % {'node': node.uuid, 'error': e}
LOG.error(msg)
node.last_error = msg
node.save()
except Exception as e:
msg = error_msg % {'node': node.uuid, 'error': e}
LOG.error(msg)
node.last_error = _('Deploy timed out, but an unhandled exception was '
'encountered while aborting. More info may be '
'found in the log file.')
node.save()
| froyobin/ironic | ironic/conductor/utils.py | Python | apache-2.0 | 6,324 | 0.000316 |
"""
Data processing for VisualWordLSTM happens here; this creates a class that
acts as a data generator/feed for model training.
"""
from __future__ import print_function
from collections import defaultdict
import cPickle
import h5py
import logging
import numpy as np
np.set_printoptions(threshold='nan')
import os
import sys
import random
# Set up logger
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
logger = logging.getLogger(__name__)
# Strings for beginning, end of sentence, padding
# These get specified indices in word2index
BOS = "<S>" # index 1
EOS = "<E>" # index 2
PAD = "<P>" # index 0
# Dimensionality of image feature vector
IMG_FEATS = 4096
class VisualWordDataGenerator(object):
"""
Creates input arrays for VisualWordLSTM and deals with input dataset in
general. Input dataset must now be in HTF5 format.
Important methods:
random_generator() yields random batches from the training data split
fixed_generator() yields batches in the order it is stored on disk
generation_generator() yields batches with empty word sequences
"""
def __init__(self, args_dict, input_dataset=None):
"""
Initialise data generator: this involves loading the dataset and
generating vocabulary sizes.
If dataset is not given, use flickr8k.h5.
"""
logger.info("Initialising data generator")
self.args = args_dict
# Number of descriptions to return per image.
self.num_sents = args_dict.num_sents # default 5 (for flickr8k)
self.unk = args_dict.unk # default 5
self.run_string = args_dict.run_string
# self.datasets holds 1+ datasets, where additional datasets will
# be used for supertraining the model
self.datasets = []
self.openmode = "r+" if self.args.h5_writeable else "r"
if not input_dataset:
logger.warn("No dataset given, using flickr8k")
self.dataset = h5py.File("flickr8k/dataset.h5", self.openmode)
else:
self.dataset = h5py.File("%s/dataset.h5" % input_dataset, self.openmode)
logger.info("Train/val dataset: %s", input_dataset)
if args_dict.supertrain_datasets is not None:
for path in args_dict.supertrain_datasets:
logger.info("Adding supertrain datasets: %s", path)
self.datasets.append(h5py.File("%s/dataset.h5" % path, "r"))
self.datasets.append(self.dataset)
# hsn doesn't have to be a class variable.
# what happens if self.hsn is false but hsn_size is not zero?
self.use_source = False
if self.args.source_vectors is not None:
self.source_dataset = h5py.File("%s/dataset.h5"
% self.args.source_vectors,
"r")
self.source_encoder = args_dict.source_enc
self.source_type = args_dict.source_type
h5_dataset_keys = self.source_dataset['train']['000000'].keys()
self.h5_dataset_str = next((z for z in h5_dataset_keys if
z.startswith("%s-hidden_feats-%s" % (self.source_type,
self.source_encoder))), None)
#self.h5_dataset_str = "%s-hidden_feats-%s-%d" % (self.source_type,
# self.source_encoder,
# self.source_dim)
assert self.h5_dataset_str is not None
self.hsn_size = len(self.source_dataset['train']['000000']
[self.h5_dataset_str][0])
self.source_dim = self.hsn_size
self.num_hsn = len(self.source_dataset['train']['000000']
[self.h5_dataset_str])
self.use_source = True
logger.info("Reading %d source vectors from %s with %d dims",
self.num_hsn, self.h5_dataset_str, self.hsn_size)
self.use_image = False if self.args.no_image else True
# These variables are filled by extract_vocabulary
self.word2index = dict()
self.index2word = dict()
# This is set to include BOS & EOS padding
self.max_seq_len = 0
# Can check after extract_vocabulary what the actual max seq length is
# (including padding)
self.actual_max_seq_len = 0
# This counts number of descriptions per split
# Ignores test for now (change in extract_vocabulary)
self.split_sizes = {'train': 0, 'val': 0, 'test': 0}
# These are used to speed up the validation process
self._cached_val_input = None
self._cached_val_targets = None
self._cached_references = None
if self.args.use_predicted_tokens and self.args.no_image:
logger.info("Input predicted descriptions")
self.ds_type = 'predicted_description'
else:
logger.info("Input gold descriptions")
self.ds_type = 'descriptions'
def random_generator(self, split):
"""
Generator that produces input/output tuples for a given dataset and split.
Typically used to produce random batches for training a model.
The data is yielded by first shuffling the description indices and
then shuffling the image instances within the split.
"""
# For randomization, we use a independent Random instance.
random_instance = random.Random()
# Make sure that the desired split is actually in the dataset.
assert split in self.dataset
# Get a list of the keys. We will use this list to shuffle and iterate over.
identifiers = self.dataset[split].keys()
# Get the number of descriptions.
first_id = identifiers[0]
num_descriptions = len(self.dataset[split][first_id]['descriptions'])
description_indices = list(range(num_descriptions))
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
j = 0
# Shuffle the description indices.
random_instance.shuffle(description_indices)
while j <= len(identifiers):
# And loop over them.
i = 0
for desc_idx in description_indices:
# For each iteration over the description indices, also shuffle the
# identifiers.
random_instance.shuffle(identifiers)
# And loop over them.
for ident in identifiers:
if i == self.args.batch_size:
targets = self.get_target_descriptions(arrays[0])
yield_data = self.create_yield_dict(arrays, targets,
batch_indices)
#logger.debug(yield_data['img'][0,0,:])
#logger.debug(' '.join([self.index2word[np.argmax(x)] for x in yield_data['text'][0,:,:]]))
#logger.debug(' '.join([self.index2word[np.argmax(x)] for x in yield_data['output'][0,:,:]]))
yield yield_data
i = 0
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
description = self.dataset[split][ident]['descriptions'][desc_idx]
img_feats = self.get_image_features(self.dataset, split, ident)
try:
description_array = self.format_sequence(description.split(),
train=True)
arrays[0][i] = description_array
if self.use_image and self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
if self.args.mrnn:
arrays[2][i, :] = img_feats
else:
arrays[2][i, 0] = img_feats
elif self.use_image:
if self.args.mrnn:
arrays[1][i, :] = img_feats
else:
arrays[1][i, 0] = img_feats
elif self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
batch_indices.append([ident, desc_idx])
i += 1
except AssertionError:
# If the description doesn't share any words with the vocabulary.
pass
if i != 0:
self.resize_arrays(i, arrays)
targets = self.get_target_descriptions(arrays[0])
#logger.info(' '.join([self.index2word[np.argmax(x)] for x in arrays[0][0,:,:]]))
yield_data = self.create_yield_dict(arrays,targets,
batch_indices)
yield yield_data
i = 0
j = 0
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
def fixed_generator(self, split='val'):
"""Generator that returns the instances in a split in the fixed order
defined in the underlying data. Useful for calculating perplexity, etc.
No randomization."""
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
i = 0
j = 0
# Get the number of descriptions.
identifiers = self.dataset[split].keys()
first_id = identifiers[0]
num_descriptions = len(self.dataset[split][first_id]['descriptions'])
description_indices = list(range(num_descriptions))
while j <= len(identifiers):
i = 0
for ident in identifiers:
for desc_idx in description_indices:
if i == self.args.batch_size:
targets = self.get_target_descriptions(arrays[0])
yield_data = self.create_yield_dict(arrays, targets,
batch_indices)
yield yield_data
i = 0
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
description = self.dataset[split][ident]['descriptions'][desc_idx]
img_feats = self.get_image_features(self.dataset, split, ident)
try:
description_array = self.format_sequence(description.split())
arrays[0][i] = description_array
if self.use_image and self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
if self.args.mrnn:
arrays[2][i, :] = img_feats
else:
arrays[2][i, 0] = img_feats
elif self.use_image:
if self.args.mrnn:
arrays[1][i, :] = img_feats
else:
arrays[1][i, 0] = img_feats
elif self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
batch_indices.append([ident, desc_idx])
i += 1
except AssertionError:
# If the description doesn't share any words with the vocabulary.
logger.info('Could not encode %s', description)
pass
if i != 0:
logger.debug("Outside for loop")
self.resize_arrays(i, arrays)
targets = self.get_target_descriptions(arrays[0])
logger.debug(' '.join([self.index2word[np.argmax(x)] for x in
arrays[0][0,:,:] if self.index2word[np.argmax(x)] != "<P>"]))
yield_data = self.create_yield_dict(arrays, targets,
batch_indices)
yield yield_data
i = 0
j = 0
arrays = self.get_batch_arrays(self.args.batch_size)
batch_indices = []
def generation_generator(self, split='val', batch_size=-1, in_callbacks=False):
"""Generator for generating descriptions.
This will only return one array per instance in the data.
No randomization.
batch_size=1 will return minibatches of one.
Use this for beam search decoding.
"""
identifiers = self.dataset[split].keys()
i = 0 # used to control the enumerator
batch_size = self.args.batch_size \
if batch_size == -1 \
else batch_size
arrays = self.get_batch_arrays(batch_size, generation=not in_callbacks)
batch_indices = []
desc_idx = 0
for ident in identifiers:
if i == batch_size:
targets = self.get_target_descriptions(arrays[0])
logger.debug(arrays[0].shape)
logger.debug(' '.join([self.index2word[np.argmax(x)] for x
in arrays[0][0,:,:] if self.index2word[np.argmax(x)]
!= "<P>"]))
yield_data = self.create_yield_dict(arrays,
targets,
batch_indices)
yield yield_data
i = 0
arrays = self.get_batch_arrays(batch_size,
generation=not in_callbacks)
batch_indices = []
description = self.dataset[split][ident]['descriptions'][desc_idx]
img_feats = self.get_image_features(self.dataset, split, ident)
try:
description_array = self.format_sequence(description.split(),
generation=not in_callbacks,
in_callbacks=in_callbacks)
arrays[0][i] = description_array
if self.use_image and self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
if self.args.mrnn:
arrays[2][i, :] = img_feats
else:
arrays[2][i, 0] = img_feats
elif self.use_image:
if self.args.mrnn:
arrays[1][i, :] = img_feats
else:
arrays[1][i, 0] = img_feats
elif self.use_source:
if self.args.peeking_source:
arrays[1][i, :] = \
self.get_source_features(split,
ident)
else:
arrays[1][i, 0] = \
self.get_source_features(split,
ident)
batch_indices.append([ident, desc_idx])
i += 1
except AssertionError:
# If the description doesn't share any words with the vocabulary.
pass
if i != 0:
logger.debug("Outside for loop")
self.resize_arrays(i, arrays)
targets = self.get_target_descriptions(arrays[0])
logger.debug(' '.join([self.index2word[np.argmax(x)] for x in
arrays[0][0,:,:] if self.index2word[np.argmax(x)] != "<P>"]))
yield_data = self.create_yield_dict(arrays,
targets,
batch_indices)
yield yield_data
i = 0
arrays = self.get_batch_arrays(batch_size,
generation=not in_callbacks)
batch_indices = []
def get_batch_arrays(self, batch_size, generation=False):
"""
Get empty arrays for yield_training_batch.
Helper function for {random/fixed/generation}_generator()
"""
t = self.args.generation_timesteps if generation else self.max_seq_len
arrays = []
# dscrp_array at arrays[0]
arrays.append(np.zeros((batch_size,
t,
len(self.word2index))))
if self.use_source: # hsn_array at arrays[1] (if used)
arrays.append(np.zeros((batch_size,
t,
self.hsn_size)))
if self.use_image: # at arrays[2] or arrays[1]
arrays.append(np.zeros((batch_size,
t,
IMG_FEATS)))
return arrays
def create_yield_dict(self, array, targets, indices):
'''
Returns a dictionary object of the array, the targets,
and the image, description indices in the batch.
Helper function for {random,fixed,generation}_generator().
'''
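        # For illustration, with both source and image inputs enabled a yielded
        # batch looks roughly like (shapes assumed from get_batch_arrays()):
        #   [{'text': (batch, t, vocab), 'src': (batch, t, hsn_size),
        #     'img': (batch, t, IMG_FEATS), 'indices': [[ident, desc_idx], ...]},
        #    {'output': targets}]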
if self.use_source and self.use_image:
return [{'text': array[0],
'src': array[1],
'img': array[2],
'indices': indices},
{'output': targets}]
elif self.use_image:
return [{'text': array[0],
'img': array[1],
'indices': indices},
{'output': targets}]
elif self.use_source:
return [{'text': array[0],
'src': array[1],
'indices': indices},
{'output': targets}]
def resize_arrays(self, new_size, arrays):
"""
Resize all the arrays to new_size along dimension 0.
Sometimes we need to initialise a np.zeros() to an arbitrary size
        and then cut it down to our intended new_size.
"""
logger.debug("Resizing batch_size in structures from %d -> %d",
arrays[0].shape[0], new_size)
for i, array in enumerate(arrays):
arrays[i] = np.resize(array, (new_size, array.shape[1],
array.shape[2]))
return arrays
def format_sequence(self, sequence, generation=False, train=False,
in_callbacks=False):
"""
Transforms a list of words (sequence) into input matrix
seq_array of (timesteps, vocab-onehot)
generation == True will return an input matrix of length
self.args.generation_timesteps. The first timestep will
be set to <B>, everything else will be <P>.
The zero default value is equal to padding.
"""
if generation:
timesteps = self.max_seq_len if in_callbacks else self.args.generation_timesteps
seq_array = np.zeros((timesteps,
len(self.word2index)))
seq_array[0, self.word2index[BOS]] = 1 # BOS token at t=0
return seq_array
seq_array = np.zeros((self.max_seq_len, len(self.word2index)))
w_indices = [self.word2index[w] for w in sequence
if w in self.word2index]
if train and self.is_too_long(w_indices):
# We don't process training sequences that are too long
logger.debug("Skipping '%s' because it is too long" % ' '.join([x for x in sequence]))
raise AssertionError
if len(w_indices) > self.actual_max_seq_len:
self.actual_max_seq_len = len(w_indices)
seq_array[0, self.word2index[BOS]] = 1 # BOS token at zero timestep
time = 0
for time, vocab in enumerate(w_indices):
seq_array[time + 1, vocab] += 1
# add EOS token at end of sentence
try:
assert time + 1 == len(w_indices),\
"time %d sequence %s len w_indices %d seq_array %s" % (
time, " ".join([x for x in sequence]), len(w_indices),
seq_array)
except AssertionError:
if len(w_indices) == 0 and time == 0:
# none of the words in this description appeared in the
# vocabulary. this is most likely caused by the --unk
# threshold.
#
# we don't encode this sentence because [BOS, EOS] doesn't
# make sense
logger.debug("Skipping '%s' because none of its words appear in the vocabulary" % ' '.join([x for x in sequence]))
raise AssertionError
seq_array[len(w_indices) + 1, self.word2index[EOS]] += 1
return seq_array
def get_target_descriptions(self, input_array):
"""
Target is always _next_ word, so we move input_array over by -1
timesteps (target at t=1 is input at t=2).
Helper function used by {random,fixed,generation}_generator()
"""
target_array = np.zeros(input_array.shape)
target_array[:, :-1, :] = input_array[:, 1:, :]
return target_array
def get_refs_by_split_as_list(self, split):
"""
Returns a list of lists of gold standard sentences. Useful for
automatic evaluation (BLEU, Meteor, etc.)
Helper function for callbacks.py and generate.py
"""
# Not needed for train.
assert split in ['test', 'val'], "Not possible for split %s" % split
references = []
for data_key in self.dataset[split]:
this_image = []
for descr in self.dataset[split][data_key]['descriptions']:
this_image.append(descr)
references.append(this_image)
return references
def get_source_features(self, split, data_key):
'''
Return the source feature vector from self.source_dataset.
Relies on self.source_encoder,
self.source_dim,
self.source_type.
The type of the returned vector depends on self.args.source_type:
'sum': will add all the vectors into the same vector
'avg': will do 'sum' and then divide by the number of vectors
TODO: support a 'concat' mode for merging the source features
'''
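        # e.g. (illustrative) with two source vectors v1 and v2:
        #   'sum' returns v1 + v2, 'avg' returns (v1 + v2) / 2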
mode = self.args.source_merge
try:
source = self.source_dataset[split][data_key][self.h5_dataset_str]
if mode == 'sum' or mode =='avg':
return_feats = np.zeros(self.source_dim)
for feats in source:
return_feats = np.add(return_feats, feats)
if mode == 'avg':
return_feats = return_feats/len(source)
#elif mode =='concat':
# return_feats = np.zeros(self.source_dim*self.args.num_sents)
# marker = 0
# for feats in source:
# return_feats[marker:marker+len(feats)] = feats
# marker += len(feats)
return return_feats
except KeyError:
# this image -- description pair doesn't have a source-language
# vector. Raise a KeyError so the requester can deal with the
# missing data.
logger.info("Skipping '%s' because it doesn't have a source vector", data_key)
raise KeyError
def get_image_features(self, dataset, split, data_key):
""" Return image features vector for split[data_key]."""
return dataset[split][data_key]['img_feats'][:]
def set_predicted_description(self, split, data_key, sentence):
'''
Set the predicted sentence tokens in the data_key group,
creating the group if necessary, or erasing the current value if
necessary.
'''
if self.openmode != "r+":
# forcefully quit when trying to write to a read-only file
raise RuntimeError("Dataset is read-only, try again with --h5_writable")
dataset_key = 'predicted_description'
try:
predicted_text = self.dataset[split][data_key].create_dataset(dataset_key, (1,), dtype=h5py.special_dtype(vlen=unicode))
except RuntimeError:
# the dataset already exists, erase it and create an empty space
del self.dataset[split][data_key][dataset_key]
predicted_text = self.dataset[split][data_key].create_dataset(dataset_key, (1,), dtype=h5py.special_dtype(vlen=unicode))
predicted_text[0] = " ".join([x for x in sentence])
def set_source_features(self, split, data_key, dataset_key, feats, dims,
desc_idx=0):
'''
Set the source feature vector stored in the dataset_key group,
creating the group if necessary, or erasing the current value if
necessary.
'''
if self.openmode != "r+":
# forcefully quit when trying to write to a read-only file
raise RuntimeError("Dataset is read-only, try again with --h5_writable")
try:
source_data = self.dataset[split][data_key].create_dataset(
dataset_key, ((self.args.num_sents, dims)),
dtype='float32')
except RuntimeError:
# the dataset already exists so we just need to fill in the
# relevant element, given the dataset key
source_data = self.dataset[split][data_key][dataset_key]
source_data[desc_idx] = feats
def set_vocabulary(self, path):
'''
Initialise the vocabulary from a checkpointed model.
TODO: some duplication from extract_vocabulary
'''
self.extract_complete_vocab()
logger.info("Initialising vocabulary from pre-defined model")
try:
v = cPickle.load(open("%s/../vocabulary.pk" % path, "rb"))
except:
v = cPickle.load(open("%s/vocabulary.pk" % path, "rb"))
self.index2word = dict((v, k) for k, v in v.iteritems())
self.word2index = dict((k, v) for k, v in v.iteritems())
longest_sentence = 0
# set the length of the longest sentence
train_longest = self.find_longest_sentence('train')
val_longest = self.find_longest_sentence('val')
self.longest_sentence = max(longest_sentence, train_longest, val_longest)
self.calculate_split_sizes()
self.corpus_statistics()
# self.max_seq_len = longest_sentence + 2
# logger.info("Max seq length %d, setting max_seq_len to %d",
# longest_sentence, self.max_seq_len)
#
# logger.info("Split sizes %s", self.split_sizes)
#
# logger.info("Number of words in vocabulary %d", len(self.word2index))
# #logger.debug("word2index %s", self.word2index.items())
# logger.debug("Number of indices %d", len(self.index2word))
# #logger.debug("index2word: %s", self.index2word.items())
def find_longest_sentence(self, split):
'''
        Calculates the length of the longest sentence in a given split of
        a dataset.
TODO: can we get split_sizes from H5 dataset indices directly?
'''
local_ds_type = "descriptions" if split == 'train' else self.ds_type
longest_sentence = 0
for dataset in self.datasets:
for data_key in dataset[split]:
for description in dataset[split][data_key][local_ds_type][0:self.args.num_sents]:
d = description.split()
if len(d) > longest_sentence:
longest_sentence = len(d)
return longest_sentence
def extract_vocabulary(self):
'''
Collect word frequency counts over the train / val inputs and use
        these to create a model vocabulary. Words that appear no more than
        self.unk times will be ignored.
Also finds longest sentence, since it's already iterating over the
whole dataset. HOWEVER this is the longest sentence *including* UNK
words, which are removed from the data and shouldn't really be
included in max_seq_len.
But max_seq_len/longest_sentence is just supposed to be a safe
upper bound, so we're good (except for some redundant cycles.)
'''
logger.info("Extracting vocabulary")
self.extract_complete_vocab()
longest_sentence = 0
# set the length of the longest sentence
train_longest = self.find_longest_sentence('train')
val_longest = self.find_longest_sentence('val')
self.longest_sentence = max(longest_sentence, train_longest, val_longest)
        # vocabulary is a word:id dict (superseded by/identical to word2index?)
# <S>, <E> are special first indices
vocabulary = {PAD: 0, BOS: 1, EOS: 2}
for v in self.unk_dict:
if self.unk_dict[v] > self.unk:
vocabulary[v] = len(vocabulary)
assert vocabulary[BOS] == 1
assert vocabulary[EOS] == 2
logger.info("Pickling dictionary to checkpoint/%s/vocabulary.pk",
self.run_string)
try:
os.mkdir("checkpoints/%s" % self.run_string)
except OSError:
pass
cPickle.dump(vocabulary,
open("checkpoints/%s/vocabulary.pk"
% self.run_string, "wb"))
self.index2word = dict((v, k) for k, v in vocabulary.iteritems())
self.word2index = vocabulary
self.calculate_split_sizes()
self.corpus_statistics()
def extract_complete_vocab(self):
"""
Extract the complete vocabulary over the training data.
Stores the result in a dictionary of word:count pairs in self.unk_dict
"""
self.unk_dict = defaultdict(int)
for dataset in self.datasets:
for data_key in dataset['train']:
for description in dataset['train'][data_key]['descriptions'][0:self.args.num_sents]:
for token in description.split():
self.unk_dict[token] += 1
def calculate_split_sizes(self):
'''
Calculates the expected number of instances in a data split.
Does not include sentences that cannot be encoded in the vocabulary.
TODO: handle splits for which we don't yet have the test data.
'''
for split in ["train", "val", "test"]:
for dataset in self.datasets:
for data_key in dataset[split]:
for idx, description in enumerate(dataset[split][data_key]['descriptions'][0:self.args.num_sents]):
w_indices = [self.word2index[w] for w in description.split() if w in self.word2index]
if split == "train" and self.is_too_long(w_indices):
logger.debug("Skipping [%s][%s] ('%s') because\
it contains too many words",
data_key, idx, description)
continue
if split == "train":
if len(w_indices) != 0:
self.split_sizes[split] += 1
else:
logger.debug("Skipping [%s][%s] ('%s') because\
none of its words appear in the vocabulary",
data_key, idx, description)
else:
self.split_sizes[split] += 1
def corpus_statistics(self):
"""
Logs some possibly useful information about the dataset.
"""
self.max_seq_len = self.longest_sentence + 2
logger.info("Max seq length %d, setting max_seq_len to %d",
self.longest_sentence, self.max_seq_len)
logger.info("Split sizes %s", self.split_sizes)
logger.info("Number of words %d -> %d", len(self.unk_dict),
len(self.word2index))
actual_len, true_len = self.discard_percentage()
logger.info("Retained / Original Tokens: %d / %d (%.2f pc)",
actual_len, true_len, 100 * float(actual_len)/true_len)
avg_len = self.avg_len()
logger.info("Average train sentence length: %.2f tokens" % avg_len)
def get_vocab_size(self):
"""
Return training data vocabulary size.
"""
return len(self.word2index)
def discard_percentage(self):
'''
        One-off calculation of how many words are thrown out of the training
sequences using the defined UNK threshold.
'''
true_len = 0
actual_len = 0
split = 'train'
for data_key in self.dataset[split]:
for description in self.dataset[split][data_key]['descriptions'][0:self.args.num_sents]:
d = description.split()
true_len += len(d)
unk_d = [self.word2index[w] for w in d if w in self.word2index]
actual_len += len(unk_d)
return (actual_len, true_len)
def avg_len(self):
'''
One-off calculation of the average length of sentences in the training
data before UNKing.
'''
true_len = 0
num_sents = 0.0
split = 'train'
for data_key in self.dataset[split]:
for description in self.dataset[split][data_key][self.ds_type][0:self.args.num_sents]:
d = description.split()
true_len += len(d)
num_sents += 1
return (true_len/num_sents)
def is_too_long(self, sequence):
"""
Determine if a sequence is too long to be included in the training
data. Sentences that are too long (--maximum_length) are not processed
in the training data. The validation and test data are always
        processed, regardless of --maximum_length.
"""
if len(sequence) > self.args.maximum_length:
return True
else:
return False
| elliottd/GroundedTranslation | data_generator.py | Python | bsd-3-clause | 36,237 | 0.003229 |
# Copyright 2017 DT42
#
# This file is part of BerryNet.
#
# BerryNet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BerryNet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BerryNet. If not, see <http://www.gnu.org/licenses/>.
"""Engine service is a bridge between incoming data and inference engine.
"""
import argparse
import logging
from datetime import datetime
from berrynet import logger
from berrynet.comm import payload
from berrynet.dlmodelmgr import DLModelManager
from berrynet.engine.movidius_engine import MovidiusEngine
from berrynet.engine.movidius_engine import MovidiusMobileNetSSDEngine
from berrynet.service import EngineService
from berrynet.utils import draw_bb
from berrynet.utils import generate_class_color
class MovidiusClassificationService(EngineService):
def __init__(self, service_name, engine, comm_config):
super(MovidiusClassificationService, self).__init__(service_name,
engine,
comm_config)
def result_hook(self, generalized_result):
logger.debug('result_hook, annotations: {}'.format(generalized_result['annotations']))
self.comm.send('berrynet/engine/mvclassification/result',
payload.serialize_payload(generalized_result))
class MovidiusMobileNetSSDService(EngineService):
def __init__(self, service_name, engine, comm_config, draw=False):
super(MovidiusMobileNetSSDService, self).__init__(service_name,
engine,
comm_config)
self.draw = draw
def inference(self, pl):
        # Elapsed time in milliseconds; total_seconds() avoids dropping whole seconds.
        duration = lambda t: (datetime.now() - t).total_seconds() * 1000
t = datetime.now()
logger.debug('payload size: {}'.format(len(pl)))
logger.debug('payload type: {}'.format(type(pl)))
jpg_json = payload.deserialize_payload(pl.decode('utf-8'))
jpg_bytes = payload.destringify_jpg(jpg_json['bytes'])
logger.debug('destringify_jpg: {} ms'.format(duration(t)))
t = datetime.now()
bgr_array = payload.jpg2bgr(jpg_bytes)
logger.debug('jpg2bgr: {} ms'.format(duration(t)))
t = datetime.now()
image_data = self.engine.process_input(bgr_array)
output = self.engine.inference(image_data)
model_outputs = self.engine.process_output(output)
logger.debug('Result: {}'.format(model_outputs))
logger.debug('Detection takes {} ms'.format(duration(t)))
classes = self.engine.classes
labels = self.engine.labels
logger.debug('draw = {}'.format(self.draw))
if self.draw is False:
self.result_hook(self.generalize_result(jpg_json, model_outputs))
else:
self.result_hook(
draw_bb(bgr_array,
self.generalize_result(jpg_json, model_outputs),
generate_class_color(class_num=classes),
labels))
def result_hook(self, generalized_result):
logger.debug('result_hook, annotations: {}'.format(generalized_result['annotations']))
self.comm.send('berrynet/engine/mvmobilenetssd/result',
payload.serialize_payload(generalized_result))
def parse_args():
ap = argparse.ArgumentParser()
ap.add_argument('--model',
help='Model file path')
ap.add_argument('--label',
help='Label file path')
ap.add_argument('--model_package',
default='',
help='Model package name')
ap.add_argument('--service_name', required=True,
help='Valid value: Classification, MobileNetSSD')
ap.add_argument('--num_top_predictions', default=5,
help='Display this many predictions')
ap.add_argument('--draw',
action='store_true',
help='Draw bounding boxes on image in result')
ap.add_argument('--debug',
action='store_true',
help='Debug mode toggle')
return vars(ap.parse_args())
def main():
# Test Movidius engine
args = parse_args()
if args['debug']:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
if args['model_package'] != '':
dlmm = DLModelManager()
meta = dlmm.get_model_meta(args['model_package'])
args['model'] = meta['model']
args['label'] = meta['label']
logger.debug('model filepath: ' + args['model'])
logger.debug('label filepath: ' + args['label'])
comm_config = {
'subscribe': {},
'broker': {
'address': 'localhost',
'port': 1883
}
}
if args['service_name'] == 'Classification':
mvng = MovidiusEngine(args['model'], args['label'])
service_functor = MovidiusClassificationService
elif args['service_name'] == 'MobileNetSSD':
mvng = MovidiusMobileNetSSDEngine(args['model'], args['label'])
service_functor = MovidiusMobileNetSSDService
    else:
        logger.critical('Legal service names are Classification, MobileNetSSD')
        # Exit early: continuing would reference undefined engine variables below.
        raise SystemExit(1)
engine_service = service_functor(args['service_name'],
mvng,
comm_config,
draw=args['draw'])
engine_service.run(args)
if __name__ == '__main__':
main()
| DT42/BerryNet | berrynet/service/movidius_service.py | Python | gpl-3.0 | 5,991 | 0.000501 |
import sys
#from functools import partial
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
from ome_globals import *
import ui_histogram_dataselect_page
class HistogramDataSelectPage(QWizardPage, ui_histogram_dataselect_page.Ui_WizardPage):
def __init__(self, model, prev_hist_var=None, parent=None):
super(HistogramDataSelectPage, self).__init__(parent)
self.setupUi(self)
self.model = model
self.prev_hist_var = prev_hist_var
self._populate_combo_box()
def _populate_combo_box(self):
''' populates combo box with numerical variables '''
        variables = self.model.get_variables()
variables.sort(key=lambda var: var.get_label())
default_index = 0
for var in variables:
# store column of var in user data
col = self.model.get_column_assigned_to_variable(var)
self.comboBox.addItem(var.get_label(), userData=QVariant(col))
index_of_item = self.comboBox.count()-1
if self.prev_hist_var == var:
default_index = index_of_item
# set default selection if given
self.comboBox.setCurrentIndex(default_index)
self.completeChanged.emit()
def isComplete(self):
return True
def get_selected_var(self):
idx = self.comboBox.currentIndex()
data = self.comboBox.itemData(idx)
col = data.toInt()[0]
return self.model.get_variable_assigned_to_column(col)
| gdietz/OpenMEE | common_wizard_pages/histogram_dataselect_page.py | Python | gpl-3.0 | 1,558 | 0.009628 |
# coding:utf-8
'''
Author : qbeenslee
Created : 2015/1/20
'''
import json
import datetime
JSON_PARSE_ERROR = u"{'success':0,'data':[],'message':'Json 解析错误'}"
class NeJSONEncoder(json.JSONEncoder):
""" Wrapper class to try calling an object's tojson() method. This allows us to JSONify objects coming from the ORM.
Also handles dates and datetimes. """
def default(self, obj):
        try:
            if hasattr(obj, '__json__'):
                return obj.__json__()
            elif isinstance(obj, datetime.datetime):
                return obj.isoformat()
            elif isinstance(obj, datetime.date):
                return obj.isoformat()
            # Fall through for unsupported types so the base class raises
            # TypeError instead of this method silently returning None.
            return json.JSONEncoder.default(self, obj)
        except AttributeError:
            return json.JSONEncoder.default(self, obj)
def obj2json(obj):
'''
    Convert a custom object to JSON.
:param obj:
:return:
'''
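    # Example (illustrative): obj2json(datetime.datetime(2015, 1, 20)) returns
    # the JSON string '"2015-01-20T00:00:00"'.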
if obj is not None:
try:
return json.dumps(obj, cls=NeJSONEncoder)
except TypeError, ins:
return JSON_PARSE_ERROR
else:
return JSON_PARSE_ERROR
def result_set_parse():
'''
    Parse a result set.
:return:
'''
pass
| qbeenslee/Nepenthes-Server | utils/jsonutil.py | Python | gpl-3.0 | 1,193 | 0.00172 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import models, api
class ir_translation(models.Model):
_inherit = 'ir.translation'
@api.model
def _get_terms_mapping(self, field, records):
if self._context.get('edit_translations'):
self.insert_missing(field, records)
return lambda data: '<span data-oe-translation-id="%(id)s" data-oe-translation-state="%(state)s">%(value)s</span>' % data
return super(ir_translation, self)._get_terms_mapping(field, records)
| minhphung171093/GreenERP | openerp/addons/web_editor/models/ir_translation.py | Python | gpl-3.0 | 578 | 0.00346 |
a = (1, 2)
b = (1, 3, 5, 7, 8, 11)
print a[0]
#b[3] = 3 # error!
x1 = a[0]
y1 = a[1]
x1, y1 = a
b1, b2, b3, b4, b5, b6 = b
print b4
#b1, b2 = b # error!
a = 1, 2, 3
print a
def f():
return 1, 3
a = f()
x, y = f()
x = f()[0]
| amiraliakbari/sharif-mabani-python | by-session/ta-921/j8/tuple1.py | Python | mit | 263 | 0.011407 |
# import argcomplete
# import httplib
# import logging
# import simplejson
# import sys
# import urllib2
# from time import strftime, localtime
# from conpaas.core import https
# from .base import BaseClient
# from .config import config
# from .service import ServiceCmd
# MODES = ['DEMO', 'REAL']
# TASKFARM_MNG_PORT = 8475
# def http_jsonrpc_post(hostname, uri, method, port=TASKFARM_MNG_PORT, params=None):
# """Perform a plain HTTP JSON RPC post (for task farming)"""
# if params is None:
# params = {}
# url = "http://%s:%s%s" % (hostname, port, uri)
# data = simplejson.dumps({'method': method,
# 'params': params,
# 'jsonrpc': '2.0',
# 'id': 1,
# })
# req = urllib2.Request(url, data, {'Content-Type': 'application/json'})
# res = urllib2.urlopen(req).read()
# return res
# def http_file_upload_post(host, uri, port=TASKFARM_MNG_PORT, params=None, files=None):
# """Perform a plain HTTP file upload post (for task farming)"""
# if params is None:
# params = {}
# if files is None:
# files = []
# content_type, body = https.client._encode_multipart_formdata(params, files)
# h = httplib.HTTP(host, port)
# h.putrequest('POST', uri)
# h.putheader('content-type', content_type)
# h.putheader('content-length', str(len(body)))
# h.endheaders()
# h.send(body)
# _errcode, _errmsg, _headers = h.getreply()
# return h.file.read()
# class TaskFarmCmd(ServiceCmd):
# def __init__(self, parser, client):
# self.initial_expected_state = 'RUNNING'
# ServiceCmd.__init__(self, parser, client, "taskfarm", ['node'],
# "TaskFarm service sub-commands help")
# self._add_get_mode()
# self._add_set_mode()
# self._add_upload()
# self._add_select_schedule()
# def call_manager(self, app_id, service_id, method, data=None):
# """TaskFarm peculiarities:
# 1) it works via plain HTTP
# 2) it uses port 8475
# 3) the 'shutdown' method is called 'terminate_workers'
# 4) it accepts only POST requests
# 5) it does not have to be started or stopped
# """
# if data is None:
# data = {}
# if method == "shutdown":
# method = "terminate_workers"
# service = self.client.service_dict(app_id, service_id)
# res = http_jsonrpc_post(service['application']['manager'], '/', method, params=data)
# try:
# data = simplejson.loads(res[1])
# except ValueError:
# data = simplejson.loads(res)
# return data.get('result', data)
# def _add_start(self):
# """
# TaskFarm does not have to be started.
# Overrides ServiceCmd._add_start().
# """
# pass
# def _add_stop(self):
# """
# TaskFarm does not have to be stopped.
# Overrides ServiceCmd._add_stop()
# """
# pass
# def _print_res(self, res):
# resres = res['result']
# if 'error' in resres:
# self.client.error("%s" % resres['error'])
# elif 'message' in resres:
# print "%s" % resres['message']
# else:
# print "%s" % res
# # ======= get_mode
# def _add_get_mode(self):
# subparser = self.add_parser('get_mode', help="get TaskFarm mode")
# subparser.set_defaults(run_cmd=self.get_mode, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# def get_mode(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_string_mode(app_id, service_id)
# print "%s" % mode
# def get_string_mode(self, app_id, service_id):
# res = self.call_manager(app_id, service_id, "get_service_info")
# return res['mode']
# # ======= set_mode
# def _add_set_mode(self):
# subparser = self.add_parser('set_mode', help="set TaskFarm mode")
# subparser.set_defaults(run_cmd=self.set_mode, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('mode', choices=MODES, help="mode")
# def set_mode(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# old_mode = self.get_string_mode(app_id, service_id)
# if old_mode != 'NA':
# res = {'result': {'error': 'ERROR: mode is already set to %s' % old_mode}}
# else:
# res = self.call_manager(app_id, service_id, "set_service_mode", [args.mode])
# self._print_res(res)
# # ========== upload bag of tasks
# def _add_upload(self):
# subparser = self.add_parser('upload_bot', help="upload bag of tasks")
# subparser.set_defaults(run_cmd=self.upload_bag_of_tasks,
# parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('filename',
# help="file containing the bag of tasks")
# subparser.add_argument('location',
# help="XtreemFS location, e.g., 192.168.122.1/uc3")
# def upload_bag_of_tasks(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_string_mode(app_id, service_id)
# if mode == 'NA':
# res = {'result': {'error': 'ERROR: to upload bag of task, first specify run mode.'}}
# else:
# service = self.client.service_dict(app_id, service_id)
# params = {'uriLocation': args.location,
# 'method': 'start_sampling'}
# filecontents = open(args.filename).read()
# res = http_file_upload_post(service['application']['manager'], '/', params=params,
# files=[('botFile', args.filename, filecontents)])
# res = simplejson.loads(res)
# self._print_res(res)
# # ========= select_schedule
# def _add_select_schedule(self):
# subparser = self.add_parser('upload_bot', help="upload bag of tasks")
# subparser.set_defaults(run_cmd=self.select_schedule, parser=subparser)
# subparser.add_argument('app_name_or_id',
# help="Name or identifier of an application")
# subparser.add_argument('serv_name_or_id',
# help="Name or identifier of a service")
# subparser.add_argument('schedule', type=int, help="schedule identifier")
# def _select_schedule(self, args):
# app_id, service_id = self.check_service(args.app_name_or_id, args.serv_name_or_id)
# mode = self.get_mode(app_id, service_id)
# if mode == 'NA':
# return {'result': {'error': 'ERROR: to select a schedule, first specify run mode DEMO or REAL, then upload a bag of tasks '}}
# # check schedule availability
# res = self.call_manager(app_id, service_id, "get_service_info")
# if res['noCompletedTasks'] == 0:
# return {'message': "No schedule available yet: try again later..."}
# if res['state'] != 'RUNNING':
# return {'message': "Busy %s: try again later..." % res['phase']}
# sres = self.call_manager(app_id, service_id, "get_sampling_results")
# sdata = simplejson.loads(sres)
# if 'timestamp' in sdata:
# # Sampling is ready, check if bag is ready, or if we have to choose a schedule
# ts = sdata['timestamp']
# print strftime("Bag sampled on %a %d %b %Y at %H:%M:%S %Z", localtime(ts / 1000))
# if 'schedules' in sdata:
# #sch = sdata['schedules']
# #ss = simplejson.dumps(sch)
# # print "schedules: ", ss
# numscheds = len(sdata['schedules'])
# if numscheds == 0:
# return {'result': {'message': "Bag finished during sampling phase"}}
# if res['noTotalTasks'] == res['noCompletedTasks']:
# return {'result': {'message': "Taskfarm already finished"}}
# # check schedule selection
# if (args.schedule < 1) or (args.schedule > numscheds):
# return {'result': {'error': "ERROR: select schedule in interval [1..%d]" % numscheds}}
# # start execution
# # "{"method":"start_execution","params":["1371729870918","2"],"jsonrpc":"2.0","id":1}"
# res = self.call_manager(app_id, service_id, "start_execution", [ts, args.schedule - 1])
# return {'result': res}
# def select_schedule(self, args):
# res = self._select_schedule(args)
# self._print_res(res)
# def main():
# logger = logging.getLogger(__name__)
# console = logging.StreamHandler()
# formatter = logging.Formatter('%(levelname)s - %(message)s')
# console.setFormatter(formatter)
# logger.addHandler(console)
# cmd_client = BaseClient(logger)
# parser, argv = config('Manage ConPaaS PHP services.', logger)
# _serv_cmd = TaskFarmCmd(parser, cmd_client)
# argcomplete.autocomplete(parser)
# args = parser.parse_args(argv)
# cmd_client.set_config(args.director_url, args.username, args.password,
# args.debug)
# try:
# args.run_cmd(args)
# except:
# e = sys.exc_info()[1]
# sys.stderr.write("ERROR: %s\n" % e)
# sys.exit(1)
# if __name__ == '__main__':
# main()
| ConPaaS-team/conpaas | cps-tools/src/cps_tools/taskfarm.py | Python | bsd-3-clause | 10,346 | 0.002513 |
__author__ = 'emre'
print "hello world" | ekutlu/raspberry-test | test.py | Python | mit | 40 | 0.025 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'webapp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"", include("meeting.urls", namespace = "meeting")),
url(r'^admin/', include(admin.site.urls)),
)
handler404 = "misc.views.handler404"
handler500 = "misc.views.handler500"
| TunedMystic/taskr | webapp/urls.py | Python | gpl-2.0 | 434 | 0.013825 |
#! /usr/bin/env python
import random
import utilities
def read_nodes_from_training(file_name):
"""
Returns a list of all the nodes in the graph
"""
node_set = set()
for nodes in utilities.edges_generator(file_name):
for node in nodes:
node_set.add(node)
return list(node_set)
def random_benchmark(train_file, test_file, submission_file, num_predictions):
"""
Runs the random benchmark.
"""
nodes = read_nodes_from_training(train_file)
test_nodes = utilities.read_nodes_list(test_file)
test_predictions = [[random.choice(nodes) for x in range(num_predictions)]
for node in test_nodes]
utilities.write_submission_file(submission_file,
test_nodes,
test_predictions)
if __name__=="__main__":
random_benchmark("../Data/train.csv",
"../Data/test.csv",
"../Submissions/random_benchmark.csv",
10)
| ameyavilankar/social-network-recommendation | preprocessing/random_benchmark.py | Python | bsd-2-clause | 1,033 | 0.006776 |
import lda
from flask import Flask, abort, request
app = Flask(__name__, static_url_path='')
@app.route('/td/items', methods=['GET'])
def items():
document, status, headers = lda.get_virtual_container(request.environ, 'ce_item_of')
if status != 200:
abort(status)
document, headers = lda.convert_to_requested_format(document, headers, request.environ)
return document, status, headers
@app.route('/td/items', methods=['POST'])
def create_item():
if request.json.get('rdf_type') != "http://example.org/todo#Item":
abort(400)
#TODO: add more validation
document, status, headers = lda.create_document(request.environ, request.json, 'ce_item_of')
if status != 201:
abort(status)
document, headers = lda.convert_to_requested_format(document, headers, request.environ)
return document, status, headers
@app.route('/td/items/<i>', methods=['GET'])
def read_item(i):
document, status, headers = lda.get_document(request.environ)
if status != 200:
abort(status)
document, headers = lda.convert_to_requested_format(document, headers, request.environ)
return document, status, headers
@app.route('/td/items/<i>', methods=['DELETE'])
def delete_item(i):
document, status, headers = lda.delete_document(request.environ)
return "", status, headers
@app.route('/td/items/<i>', methods=['PATCH'])
def change_item(i):
#TODO: add validation
document, status, headers = lda.patch_document(request.environ, request.json)
if status != 200:
abort(status)
document, headers = lda.convert_to_requested_format(document, headers, request.environ)
return document, status, headers
@app.route('/td', methods=['DELETE'])
def delete_all():
document, status, headers = lda.delete_document(request.environ)
return "", status, headers
| ld4apps/lda-examples | todo-flask/src/app.py | Python | apache-2.0 | 1,842 | 0.008143 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DefaultValue(Document):
pass
def on_doctype_update():
"""Create indexes for `tabDefaultValue` on `(parent, defkey)`"""
frappe.db.commit()
frappe.db.add_index(doctype='DefaultValue',
fields=['parent', 'defkey'],
index_name='defaultvalue_parent_defkey_index')
frappe.db.add_index(doctype='DefaultValue',
fields=['parent', 'parenttype'],
index_name='defaultvalue_parent_parenttype_index')
| vjFaLk/frappe | frappe/core/doctype/defaultvalue/defaultvalue.py | Python | mit | 607 | 0.021417 |
import json
import pytest
from common.utils.attack_utils import ScanStatus
from infection_monkey.model import VictimHost
from infection_monkey.telemetry.attack.t1197_telem import T1197Telem
DOMAIN_NAME = "domain-name"
IP = "127.0.0.1"
MACHINE = VictimHost(IP, DOMAIN_NAME)
STATUS = ScanStatus.USED
USAGE_STR = "[Usage info]"
@pytest.fixture
def T1197_telem_test_instance():
return T1197Telem(STATUS, MACHINE, USAGE_STR)
def test_T1197_send(T1197_telem_test_instance, spy_send_telemetry):
T1197_telem_test_instance.send()
expected_data = {
"status": STATUS.value,
"technique": "T1197",
"machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},
"usage": USAGE_STR,
}
expected_data = json.dumps(expected_data, cls=T1197_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
| guardicore/monkey | monkey/tests/unit_tests/infection_monkey/telemetry/attack/test_t1197_telem.py | Python | gpl-3.0 | 917 | 0.001091 |
# -*- coding:utf-8 -*-
from django.shortcuts import render
def index(request):
return render(request, 'djign/index.html') | ghilbut/djign | djign/__init__.py | Python | mit | 128 | 0.023438 |
import json
import math
import sys
import threading
import traceback
import time
import uuid
from pika import BasicProperties
from pika.exceptions import ConnectionClosed, ChannelClosed, IncompatibleProtocolError
from ...helper import log, debug_mode_enabled
from ...model import Message, RemoteSignal
from .exception import NoConnectionError
from .helper import active_connection, fill_in_the_blank, SHARED_DIRECT_EXCHANGE_NAME, SHARED_TOPIC_EXCHANGE_NAME, SHARED_SIGNAL_CONNECTION_LOSS
IMMEDIATE_RETRY_LIMIT = 10
MAX_RETRY_COUNT = 20
MAX_RETRY_DELAY = 30
PING_MESSAGE = 'ping'
class Consumer(threading.Thread):
""" Message consumer
This is used to handle messages on one particular route/queue.
:param str url: the URL to the server
:param str route: the route to observe
:param callable callback: the callback function / callable object
:param list shared_stream: the internal message queue for thread synchronization
:param bool resumable: the flag to indicate whether the consumption is resumable
    :param bool distributed: the flag to indicate whether the messages are distributed evenly across all consumers on the same route
:param dict queue_options: additional queue options
:param dict exchange_options: additional exchange options
:param bool unlimited_retries: the flag to disable limited retry count.
:param callable on_connect: a callback function when the message consumption begins.
:param callable on_disconnect: a callback function when the message consumption is interrupted due to unexpected disconnection.
:param callable on_error: a callback function when the message consumption is interrupted due to exception raised from the main callback function.
:param str controller_id: the associated controller ID
:param bool auto_acknowledge: the flag to determine whether the consumer should auto-acknowledge any delivery (default: ``False``)
:param bool send_sigterm_on_disconnect: the flag to force the consumer to terminate the process cleanly on disconnection (default: ``True``)
    :param float delay_per_message: the delay per message in seconds (negative values are treated as zero; zero means no delay)
:param int max_retries: the maximum total retries the consumer can have
:param int immediate_retry_limit: the maximum immediate retries the consumer can have before it uses the exponential delay
Here is an example for ``on_connect``.
.. code-block:: Python
def on_connect(consumer = None):
...
Here is an example for ``on_disconnect``.
.. code-block:: Python
def on_disconnect(consumer = None):
...
Here is an example for ``on_error``.
.. code-block:: Python
def on_error(exception, consumer = None):
...
"""
def __init__(self, url, route, callback, shared_stream, resumable, distributed, queue_options,
simple_handling, unlimited_retries = False, on_connect = None, on_disconnect = None,
on_error = None, controller_id = None, exchange_options = None, auto_acknowledge = False,
send_sigterm_on_disconnect = True, delay_per_message = 0, max_retries = MAX_RETRY_COUNT,
immediate_retry_limit = IMMEDIATE_RETRY_LIMIT, max_retry_delay = MAX_RETRY_DELAY):
super().__init__(daemon = True)
queue_options = queue_options if queue_options and isinstance(queue_options, dict) else {}
exchange_options = exchange_options if exchange_options and isinstance(exchange_options, dict) else {}
self.url = url
self.route = route
self.callback = callback
self.resumable = resumable
self.distributed = distributed
self.queue_options = queue_options
self.exchange_options = exchange_options
self.simple_handling = simple_handling
self._retry_count = 0
self._shared_stream = shared_stream
self._channel = None
self._queue_name = None
self._paused = False
self._stopped = False
self._controller_id = controller_id
self._consumer_id = str(uuid.uuid4())
self._max_retries = max_retries
self._max_retry_delay = max_retry_delay
self._immediate_retry_limit = immediate_retry_limit if immediate_retry_limit < max_retries else max_retries
self._send_sigterm_on_disconnect = send_sigterm_on_disconnect
self._delay_per_message = (
delay_per_message
if (
delay_per_message
and isinstance(delay_per_message, (int, float))
and delay_per_message > 0
)
else 0
)
self._auto_acknowledge = auto_acknowledge
self._unlimited_retries = unlimited_retries
self._on_connect = on_connect
self._on_disconnect = on_disconnect
self._on_error = on_error
self._recovery_queue_name = 'RECOVERY.{}'.format(self.route)
        assert not self._on_disconnect or callable(self._on_disconnect), 'The on_disconnect handler must be callable.'
@staticmethod
def can_handle_route(routing_key):
""" Check if the consumer can handle the given routing key.
.. note:: the default implementation will handle all routes.
:param str routing_key: the routing key
"""
return True
@property
def queue_name(self):
return self._queue_name
@property
def stopped(self):
return self._stopped
def run(self):
log('debug', '{}: Active'.format(self._debug_route_name()))
while not self._stopped:
try:
self._listen()
except NoConnectionError as e:
self._retry_count += 1
remaining_retries = self._max_retries - self._retry_count
can_immediate_retry = self._retry_count <= self._immediate_retry_limit
wait_time = 0 if can_immediate_retry else math.pow(2, self._retry_count - self._immediate_retry_limit - 1)
# Notify the unexpected disconnection
log('warning', '{}: Unexpected disconnection detected due to {} (retry #{})'.format(self._debug_route_name(), e, self._retry_count))
# Attempt to retry and skip the rest of error handling routine.
if remaining_retries >= 0:
log(
'info',
'{}: Will reconnect to the queue in {}s ({} attempt(s) left)'.format(
self._debug_route_name(),
wait_time,
remaining_retries,
)
)
# Give a pause between each retry if the code already retries immediate too often.
if wait_time:
time.sleep(1)
log('warning', '{}: Reconnecting...'.format(self._debug_route_name()))
continue
elif self._on_disconnect:
log('warning', '{}: {} the maximum retries (retry #{}/{})'.format(self._debug_route_name(), 'Reached' if self._retry_count == self._max_retries else 'Exceeded', self._retry_count, self._max_retries))
self._async_invoke_callback(self._on_disconnect)
log('warning', '{}: Passed the error information occurred to the error handler'.format(self._debug_route_name()))
if self._unlimited_retries:
log('info', '{}: Will re-listen to the queue in 5 second (unlimited retries)'.format(self._debug_route_name()))
time.sleep(5)
log('warning', '{}: Reconnecting...'.format(self._debug_route_name()))
continue
log('warning', '{}: Unexpected connection loss detected ({})'.format(self._debug_route_name(), e))
self._shared_stream.append(SHARED_SIGNAL_CONNECTION_LOSS)
if self._send_sigterm_on_disconnect:
log('error', '{}: Terminated the process on disconnect.'.format(self._debug_route_name()))
sys.exit(1)
log('debug', '{}: Inactive'.format(self._debug_route_name()))
def resume(self):
if self.stopped:
log('debug', '{}: Already stopped (resume)'.format(self._debug_route_name()))
return
log('debug', '{}: Resuming on listening...'.format(self._debug_route_name()))
self._paused = False
def pause(self):
if self.stopped:
log('debug', '{}: Already stopped (pause)'.format(self._debug_route_name()))
return
log('debug', '{}: Temporarily stop listening...'.format(self._debug_route_name()))
self._paused = True
def stop(self):
""" Stop consumption """
log('debug', 'Stopping listening to {}...'.format(self._debug_route_name()))
if self._channel:
self._channel.stop_consuming()
def _async_invoke_callback(self, callable_method, *args, **kwargs):
params = [*args]
params.append(self)
kw_params = dict(
controller_id = self._controller_id,
route = self.route,
queue_name = self._queue_name,
**kwargs
)
async_callback = threading.Thread(
target = callable_method,
args = params,
kwargs = kw_params,
daemon = True,
)
async_callback.start()
def _listen(self):
with active_connection(self.url, self._on_connect, self._on_disconnect, self._on_error) as channel:
self._channel = channel
self._queue_name = (
self._declare_topic_queue(channel)
if self.distributed
else self._declare_shared_queue(channel)
)
self._declare_recovery_queue(channel)
# Declare the callback wrapper for this route.
def callback_wrapper(channel, method_frame, header_frame, body):
time_sequence = [time.time()]
raw_message = body.decode('utf8')
message_id = str(uuid.uuid4())
log('info', '{}: MESSAGE {}: Processing {}...'.format(self._debug_route_name(), message_id, raw_message))
# Process the message
try:
# This is inside the try-catch block to deal with malformed data.
decoded_message = json.loads(raw_message)
remote_signal = None
remote_target = None
if isinstance(decoded_message, dict) and 'remote_signal' in decoded_message:
log('debug', '{}: Received a remote signal'.format(self._debug_route_name()))
remote_signal = decoded_message['remote_signal']
remote_target = decoded_message.get('controller_id', None) or None
if remote_signal != RemoteSignal.PING:
if not remote_target:
log('debug', '{}: Unable to find the remote target.'.format(
self._debug_route_name(),
))
# Acknowledge the message to discard it as it is an invalid remote command.
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Discard to an invalid remote command.'.format(
self._debug_route_name(),
))
return
elif remote_target != self._controller_id:
log('debug', '{}: Ignoring the remote signal (TARGET {}).'.format(
self._debug_route_name(),
remote_target
))
# Not acknowledge the message to requeue it as this remote command
# is not for the current consumer.
channel.basic_nack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Ignored the remote signal (TARGET {}).'.format(
self._debug_route_name(),
remote_target
))
return
time_sequence.append(time.time())
if remote_signal == RemoteSignal.PING:
log('debug', '{}: Detected PING signal'.format(self._debug_route_name()))
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Ready (post-ping)'.format(self._debug_route_name()))
return
if remote_signal == RemoteSignal.RESUME:
log('debug', '{}: Receive RESUME signal'.format(self._debug_route_name()))
self.resume()
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Reactivated'.format(self._debug_route_name()))
return
if remote_signal == RemoteSignal.PAUSE:
log('debug', '{}: Receive PAUSE signal'.format(self._debug_route_name()))
self.pause()
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Standing by...'.format(self._debug_route_name()))
return
if self._paused:
log('info', '{}: On STANDBY'.format(self._debug_route_name()))
if not self._auto_acknowledge:
channel.basic_nack(delivery_tag = method_frame.delivery_tag)
log('debug', '{}: Temporarily block itself for a moment'.format(self._debug_route_name()))
time.sleep(3)
log('debug', '{}: Ready (standby)'.format(self._debug_route_name()))
return
message = (
decoded_message
if self.simple_handling
else Message(
decoded_message,
{
'header': header_frame,
'method': method_frame,
}
)
)
self.callback(message)
except Exception as unexpected_error:
self._handle_error_during_consumption(
message_id,
raw_message,
unexpected_error,
traceback.format_exc(),
'Error detected while processing the message',
)
log('error', '{}: MESSAGE {}: Error detected while processing the message'.format(self._debug_route_name(), message_id))
# Acknowledge the delivery when an error occurs to DEQUEUE the message.
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
log('warning', '{}: MESSAGE {}: Recovered from the unexpected error'.format(self._debug_route_name(), message_id))
# Acknowledge the delivery after the work is done.
try:
if not self._auto_acknowledge:
channel.basic_ack(delivery_tag = method_frame.delivery_tag)
except Exception as unexpected_error:
self._handle_error_during_consumption(
message_id,
raw_message,
unexpected_error,
traceback.format_exc(),
'Error detected while acknowledging the delivery'
)
log('error', '{}: MESSAGE {}: Error detected while acknowledging the message delivery ({})'.format(self._debug_route_name(), message_id, method_frame.delivery_tag))
if self._stopped:
log('warning', '{}: Consumer terminated'.format(self._debug_route_name()))
return
if self._delay_per_message:
log('debug', '{}: Pausing for {:.3f}s (PAUSED)'.format(self._debug_route_name(), self._delay_per_message))
time.sleep(self._delay_per_message)
log('debug', '{}: Back to action (RESUMED)'.format(self._debug_route_name()))
log('debug', '{}: Ready (OK)'.format(self._debug_route_name()))
log('debug', '{}: Listening... (ACK: {})'.format(self._debug_route_name(), 'AUTO' if self._auto_acknowledge else 'MANUAL'))
channel.basic_consume(
callback = callback_wrapper,
queue = self._queue_name,
no_ack = self._auto_acknowledge, # No delivery acknowledgement needed
)
try:
while True:
channel.wait()
if self._retry_count:
self._retry_count = 0
if self._on_connect:
self._async_invoke_callback(self._on_connect)
self._stopped = True
except Exception as e: # ConnectionClosed
                raise NoConnectionError('The connection has been abruptly disconnected.')
log('debug', 'Stopped listening to {}'.format(self._debug_route_name()))
def _handle_error_during_consumption(self, message_id, raw_message, error, execution_trace, summary):
with active_connection(self.url, self._on_connect, self._on_disconnect, self._on_error) as channel:
error_info = {
'type' : type(error).__name__,
'message' : str(error),
'traceback' : execution_trace,
}
log('error', '{}: Unexpected error detected: {type}: {message}\n{traceback}'.format(
self._debug_route_name(),
**error_info,
))
# Store the message that cause error in the recovery queue.
republishing_options = {
'exchange' : '',
'routing_key' : self._recovery_queue_name,
'properties' : BasicProperties(content_type = 'application/json'),
'body' : json.dumps(
{
'controller' : self._controller_id,
'error' : error_info,
'message' : raw_message,
'when' : time.time(),
},
indent = 4,
sort_keys = True,
),
}
channel.basic_publish(**republishing_options)
if self._on_error:
self._async_invoke_callback(self._on_error, error, summary = summary)
if isinstance(error, (ConnectionClosed, ChannelClosed)):
log('error', '{}: Connection Error: {type}: {message}\n{traceback}'.format(
self._debug_route_name(),
**error_info,
))
raise NoConnectionError()
def _debug_route_name(self):
segments = [
'ROUTE {}'.format(self.route),
'CONTROLLER {}'.format(self._controller_id),
]
# if debug_mode_enabled:
# segments.append('CONSUMER {}'.format(self._consumer_id))
if self.route != self._queue_name:
segments.append('QUEUE {}'.format(self._queue_name))
return '/'.join(segments)
def _declare_shared_queue(self, channel):
queue_name = self._declare_queue(
channel,
{
'auto_delete' : not self.resumable,
'durable' : self.resumable,
'queue' : self.route,
}
)
log('info', '[_declare_shared_queue] CONTROLLER {}: Declared a shared queue "{}"'.format(self._controller_id, queue_name))
exchange_options = dict(
exchange = self.exchange_options.get('name', SHARED_DIRECT_EXCHANGE_NAME),
exchange_type = self.exchange_options.get('type', 'direct'),
passive = self.exchange_options.get('passive', False),
durable = self.exchange_options.get('durable', True),
auto_delete = self.exchange_options.get('auto_delete', False),
)
self._bind_queue(channel, queue_name, exchange_options)
return self.route
def _declare_recovery_queue(self, channel):
queue_name = self._declare_queue(
channel,
{
'auto_delete': not self.resumable,
'durable' : self.resumable,
'queue' : self._recovery_queue_name,
}
)
log('info', '[_declare_recovery_queue] CONTROLLER {}: Declared a recovery queue for ROUTE {}'.format(self._controller_id, queue_name))
return self.route
def _declare_topic_queue(self, channel):
# Currently not supporting resumability.
temp_queue_name = self._declare_queue(
channel,
{
'auto_delete': True,
'queue' : '',
}
)
log('info', '[_declare_topic_queue] CONTROLLER {}: Declared a distributed queue "{}"'.format(self._controller_id, temp_queue_name))
exchange_options = dict(
exchange = self.exchange_options.get('name', SHARED_TOPIC_EXCHANGE_NAME),
exchange_type = self.exchange_options.get('type', 'topic'),
passive = self.exchange_options.get('passive', False),
durable = self.exchange_options.get('durable', True),
auto_delete = self.exchange_options.get('auto_delete', False),
)
self._bind_queue(channel, temp_queue_name, exchange_options)
return temp_queue_name
def _declare_queue(self, channel, default_queue_options):
queue_options = fill_in_the_blank(
default_queue_options,
self.queue_options or {}
)
response = channel.queue_declare(**queue_options)
queue_name = response.method.queue
return queue_name
def _bind_queue(self, channel, queue_name, exchange_options):
assert 'exchange' in exchange_options
exchange_name = exchange_options['exchange']
debugging_info = (self._controller_id, queue_name, self.route, exchange_name)
channel.exchange_declare(**exchange_options)
log('info', '[_bind_queue] CONTROLLER {}: Binding a queue "{}" to route {} on exchange {}'.format(*debugging_info))
channel.queue_bind(queue_name, exchange_name, self.route)
log('info', '[_bind_queue] CONTROLLER {}: Bound a queue "{}" to route {} on exchange {}'.format(*debugging_info))
| shiroyuki/vireo | vireo/drivers/amqp/consumer.py | Python | mit | 24,473 | 0.010256 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import math
import re
#from _common import rounding
from openerp import tools
from openerp.osv import osv, fields
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
class product_uom_categ(osv.osv):
_inherit = 'product.uom.categ'
_columns = {
'active': fields.boolean('Active', help="By unchecking the active field you can disable a unit of measure without deleting it."),
}
_defaults = {
'active': True,
}
class product_uom(osv.osv):
_inherit = 'product.uom'
_description = 'Product Unit of Measure'
def name_get(self, cr, uid, ids, context=None):
if isinstance(ids, (list, tuple)) and not len(ids):
return []
if isinstance(ids, (long, int)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','category_id','factor','uom_type'], context=context)
res = []
for record in reads:
name = record['name']
if record['category_id']:
uom_categ = record['category_id']
#print uom_categ
if record['uom_type'] == 'reference':
name = name
elif record['uom_type'] == 'bigger':
name = ('%.0f' % (1/record['factor'])) + ' ' +uom_categ[1] +' / '+name
else:
name = ('%.0f' % (record['factor'])) + ' ' +name+' / '+uom_categ[1]
res.append((record['id'], name))
return res
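    # Rendered display names fold the conversion factor into the label, e.g.
    # (values illustrative): a 'bigger' UoM "Dozen" with factor 1/12 in the
    # "Unit(s)" category shows as "12 Unit(s) / Dozen", a 'smaller' UoM "cm"
    # with factor 100 shows as "100 cm / <category name>", and the reference
    # UoM keeps its plain name.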
def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
if isinstance(ids, (list, tuple)) and not len(ids):
return []
if isinstance(ids, (long, int)):
ids = [ids]
res = {}
for id in ids:
data = self.browse(cr, uid, id)
if data.uom_type == 'reference':
res[id] = 1
elif data.uom_type == 'bigger':
res[id] = int ('%.0f' % (1/data.factor))
else:
res[id] = int('%.0f' % (data.factor))
return res
_columns = {
'factor_name': fields.function(_name_get_fnc, type="integer", string='Factor'),
}
class product_category(osv.osv):
def _get_product_stock(self, cr, uid, ids, name, arg, context=None):
res = {'product_count': False,
'product_onhand':False,
'product_forecast': False,
}
product_obj = self.pool.get('product.product')
for id in ids:
product_ids = product_obj.search(cr, uid, [('categ_id','=',id)])
onhand = 0
forecast = 0
for product in product_obj.browse(cr, uid, product_ids):
onhand += product.qty_available
forecast += product.virtual_available
res[id] = {
'product_count': len(product_ids),
'product_onhand': onhand,
'product_forecast': forecast
}
return res
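    # For every category id this returns a summary dict, e.g. (illustrative):
    #   {7: {'product_count': 3, 'product_onhand': 25.0, 'product_forecast': 19.0}}
    # where on-hand and forecast are the sums of qty_available and
    # virtual_available over the category's products.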
_inherit = 'product.category'
_columns = {
'product_count': fields.function(_get_product_stock, string='Product Count', type='integer', multi='inventory', readonly=True),
'product_onhand': fields.function(_get_product_stock, string='On Hand', type='float', multi='inventory', readonly=True),
'product_forecast': fields.function(_get_product_stock, string='Forecast', type='float', multi='inventory', readonly=True),
}
class product_product(osv.osv):
_inherit = 'product.product'
def _get_last_incoming_shipment_date(self, cr, uid, ids, name, arg, context=None):
res = {}
        dbs = cr.dbname
        # Site-specific stock location ids: the destination location differs
        # between the BS100 database and the others.
        if dbs == 'BS100':
            dest_id = 16
        else:
            dest_id = 17
        for id in ids:
            cr.execute('select create_date::timestamp::date from stock_move where product_id=%s and location_dest_id=%s order by create_date desc limit 1', [id, dest_id])
            row = cr.dictfetchone()
            if row is None:
                res[id] = 'NA'
            else:
                res[id] = row.get('create_date')
return res
_columns = {
'internal_barcode': fields.char('Internal Barcode', size=64),
'last_ship_date': fields.function(_get_last_incoming_shipment_date, string='Last Shipped', type='char', readonly=True),
}
| titasakgm/brc-stock | openerp/addons/ineco_stock/product.py | Python | agpl-3.0 | 5,430 | 0.009024 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0009_auto_20160201_0859'),
]
operations = [
migrations.RenameField(
model_name='mainmenuitem',
old_name='add_subnav',
new_name='allow_subnav',
),
]
| rkhleics/wagtailmenus | wagtailmenus/migrations/0010_auto_20160201_1558.py | Python | mit | 408 | 0 |
# flake8: noqa
"""Settings to be used for running tests."""
from settings import *
INSTALLED_APPS.append('integration_tests')
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
EMAIL_SUBJECT_PREFIX = '[test] '
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
SOUTH_TESTS_MIGRATE = False
| bitmazk/webfaction-django-boilerplate | website/webapps/django/project/settings/test_settings.py | Python | mit | 371 | 0 |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import browsers
from crystal_dashboard.dashboards.crystal.containers import tables
class ContainerBrowser(browsers.ResourceBrowser):
name = "swift"
verbose_name = _("Swift")
navigation_table_class = tables.ContainersTable
content_table_class = tables.ObjectsTable
navigable_item_name = _("Container")
navigation_kwarg_name = "container_name"
content_kwarg_name = "subfolder_path"
has_breadcrumb = True
breadcrumb_url = "horizon:crystal:containers:index"
| Crystal-SDS/dashboard | crystal_dashboard/dashboards/crystal/containers/browsers.py | Python | gpl-3.0 | 1,168 | 0 |
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_vlan
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages VLAN resources and attributes.
description:
- Manages VLAN configurations on NX-OS switches.
author: Jason Edelman (@jedelman8)
options:
vlan_id:
description:
- Single VLAN ID.
required: false
default: null
vlan_range:
description:
- Range of VLANs such as 2-10 or 2,5,10-15, etc.
required: false
default: null
name:
description:
- Name of VLAN.
required: false
default: null
vlan_state:
description:
- Manage the vlan operational state of the VLAN
(equivalent to state {active | suspend} command.
required: false
default: active
choices: ['active','suspend']
admin_state:
description:
- Manage the VLAN administrative state of the VLAN equivalent
to shut/no shut in VLAN config mode.
required: false
default: up
choices: ['up','down']
mapped_vni:
description:
- The Virtual Network Identifier (VNI) ID that is mapped to the
VLAN. Valid values are integer and keyword 'default'.
required: false
default: null
version_added: "2.2"
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: Ensure a range of VLANs are not present on the switch
nxos_vlan:
vlan_range: "2-10,20,50,55-60,100-150"
host: 68.170.147.165
username: cisco
password: cisco
state: absent
transport: nxapi
- name: Ensure VLAN 50 exists with the name WEB and is in the shutdown state
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
admin_state: down
name: WEB
transport: nxapi
username: cisco
password: cisco
- name: Ensure VLAN is NOT on the device
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
state: absent
transport: nxapi
username: cisco
password: cisco
'''
RETURN = '''
proposed_vlans_list:
description: list of VLANs being proposed
returned: when debug enabled
type: list
sample: ["100"]
existing_vlans_list:
description: list of existing VLANs on the switch prior to making changes
returned: when debug enabled
type: list
sample: ["1", "2", "3", "4", "5", "20"]
end_state_vlans_list:
description: list of VLANs after the module is executed
returned: when debug enabled
type: list
sample: ["1", "2", "3", "4", "5", "20", "100"]
proposed:
description: k/v pairs of parameters passed into module (does not include
vlan_id or vlan_range)
returned: when debug enabled
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_state": "suspend", "mapped_vni": "5000"}
existing:
description: k/v pairs of existing vlan or null when using vlan_range
returned: when debug enabled
type: dict
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_id": "20", "vlan_state": "suspend", "mapped_vni": ""}
end_state:
description: k/v pairs of the VLAN after executing module or null
when using vlan_range
returned: when debug enabled
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan", "vlan_id": "20",
"vlan_state": "suspend", "mapped_vni": "5000"}
updates:
description: command string sent to the device
returned: always
type: list
sample: ["vlan 20", "vlan 55", "vn-segment 5000"]
commands:
description: command string sent to the device
returned: always
type: list
sample: ["vlan 20", "vlan 55", "vn-segment 5000"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def vlan_range_to_list(vlans):
result = []
if vlans:
for part in vlans.split(','):
if part == 'none':
break
if '-' in part:
a, b = part.split('-')
a, b = int(a), int(b)
result.extend(range(a, b + 1))
else:
a = int(part)
result.append(a)
return numerical_sort(result)
return result
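# Example (illustrative): vlan_range_to_list('2-4,10') -> ['2', '3', '4', '10'].
# Ranges are expanded inclusively and the result is returned as numerically
# sorted strings via numerical_sort below; the literal 'none' stops expansion.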
def numerical_sort(string_int_list):
"""Sort list of strings (VLAN IDs) that are digits in numerical order.
"""
as_int_list = []
as_str_list = []
for vlan in string_int_list:
as_int_list.append(int(vlan))
as_int_list.sort()
for vlan in as_int_list:
as_str_list.append(str(vlan))
return as_str_list
def build_commands(vlans, state):
commands = []
for vlan in vlans:
if state == 'present':
command = 'vlan {0}'.format(vlan)
commands.append(command)
elif state == 'absent':
command = 'no vlan {0}'.format(vlan)
commands.append(command)
return commands
def get_vlan_config_commands(vlan, vid):
"""Build command list required for VLAN configuration
"""
reverse_value_map = {
"admin_state": {
"down": "shutdown",
"up": "no shutdown"
}
}
if vlan.get('admin_state'):
# apply value map when making change to the admin state
# note: would need to be a loop or more in depth check if
# value map has more than 1 key
vlan = apply_value_map(reverse_value_map, vlan)
VLAN_ARGS = {
'name': 'name {0}',
'vlan_state': 'state {0}',
'admin_state': '{0}',
'mode': 'mode {0}',
'mapped_vni': 'vn-segment {0}'
}
commands = []
for param, value in vlan.items():
if param == 'mapped_vni' and value == 'default':
command = 'no vn-segment'
else:
command = VLAN_ARGS.get(param).format(vlan.get(param))
if command:
commands.append(command)
commands.insert(0, 'vlan ' + vid)
commands.append('exit')
return commands
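# Example (illustrative): for vid '50' and a delta of
#   {'name': 'WEB', 'admin_state': 'down'}
# this yields ['vlan 50', 'name WEB', 'shutdown', 'exit'] (the middle commands
# follow dict iteration order, so their relative order may vary).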
def get_list_of_vlans(module):
body = run_commands(module, ['show vlan | json'])
vlan_list = []
vlan_table = body[0].get('TABLE_vlanbrief')['ROW_vlanbrief']
if isinstance(vlan_table, list):
for vlan in vlan_table:
vlan_list.append(str(vlan['vlanshowbr-vlanid-utf']))
else:
vlan_list.append('1')
return vlan_list
def get_vni(vlanid, module):
flags = str('all | section vlan.{0}'.format(vlanid)).split(' ')
body = get_config(module, flags=flags)
#command = 'show run all | section vlan.{0}'.format(vlanid)
#body = execute_show_command(command, module, command_type='cli_show_ascii')[0]
value = ''
if body:
REGEX = re.compile(r'(?:vn-segment\s)(?P<value>.*)$', re.M)
if 'vn-segment' in body:
value = REGEX.search(body).group('value')
return value
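# get_vni scrapes the running-config section for the VLAN and returns the
# mapped VNI as a string, e.g. (illustrative) a section containing
# "vn-segment 5000" yields '5000'; an empty string means no mapping is set.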
def get_vlan(vlanid, module):
"""Get instance of VLAN as a dictionary
"""
command = 'show vlan id %s | json' % vlanid
body = run_commands(module, [command])
#command = 'show vlan id ' + vlanid
#body = execute_show_command(command, module)
try:
vlan_table = body[0]['TABLE_vlanbriefid']['ROW_vlanbriefid']
except (TypeError, IndexError):
return {}
key_map = {
"vlanshowbr-vlanid-utf": "vlan_id",
"vlanshowbr-vlanname": "name",
"vlanshowbr-vlanstate": "vlan_state",
"vlanshowbr-shutstate": "admin_state"
}
vlan = apply_key_map(key_map, vlan_table)
value_map = {
"admin_state": {
"shutdown": "down",
"noshutdown": "up"
}
}
vlan = apply_value_map(value_map, vlan)
vlan['mapped_vni'] = get_vni(vlanid, module)
return vlan
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
new_dict[new_key] = str(value)
return new_dict
def apply_value_map(value_map, resource):
for key, value in value_map.items():
resource[key] = value[resource.get(key)]
return resource
def main():
argument_spec = dict(
vlan_id=dict(required=False, type='str'),
vlan_range=dict(required=False),
name=dict(required=False),
vlan_state=dict(choices=['active', 'suspend'], required=False),
mapped_vni=dict(required=False, type='str'),
state=dict(choices=['present', 'absent'], default='present',
required=False),
admin_state=dict(choices=['up', 'down'], required=False),
include_defaults=dict(default=False),
config=dict(),
save=dict(type='bool', default=False)
)
    argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=[['vlan_range', 'name'],
['vlan_id', 'vlan_range']],
supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
vlan_range = module.params['vlan_range']
vlan_id = module.params['vlan_id']
name = module.params['name']
vlan_state = module.params['vlan_state']
admin_state = module.params['admin_state']
mapped_vni = module.params['mapped_vni']
state = module.params['state']
changed = False
if vlan_id:
if not vlan_id.isdigit():
module.fail_json(msg='vlan_id must be a valid VLAN ID')
args = dict(name=name, vlan_state=vlan_state,
admin_state=admin_state, mapped_vni=mapped_vni)
proposed = dict((k, v) for k, v in args.items() if v is not None)
proposed_vlans_list = numerical_sort(vlan_range_to_list(
vlan_id or vlan_range))
existing_vlans_list = numerical_sort(get_list_of_vlans(module))
commands = []
existing = {}
if vlan_range:
if state == 'present':
# These are all of the VLANs being proposed that don't
# already exist on the switch
vlans_delta = list(
set(proposed_vlans_list).difference(existing_vlans_list))
commands = build_commands(vlans_delta, state)
elif state == 'absent':
# VLANs that are common between what is being proposed and
# what is on the switch
vlans_common = list(
set(proposed_vlans_list).intersection(existing_vlans_list))
commands = build_commands(vlans_common, state)
else:
existing = get_vlan(vlan_id, module)
if state == 'absent':
if existing:
commands = ['no vlan ' + vlan_id]
elif state == 'present':
if (existing.get('mapped_vni') == '0' and
proposed.get('mapped_vni') == 'default'):
proposed.pop('mapped_vni')
delta = dict(set(
proposed.items()).difference(existing.items()))
if delta or not existing:
commands = get_vlan_config_commands(delta, vlan_id)
end_state = existing
end_state_vlans_list = existing_vlans_list
if commands:
if existing.get('mapped_vni') and state != 'absent':
if (existing.get('mapped_vni') != proposed.get('mapped_vni') and
existing.get('mapped_vni') != '0' and proposed.get('mapped_vni') != 'default'):
commands.insert(1, 'no vn-segment')
if module.check_mode:
module.exit_json(changed=True,
commands=commands)
else:
load_config(module, commands)
changed = True
end_state_vlans_list = numerical_sort(get_list_of_vlans(module))
if 'configure' in commands:
commands.pop(0)
if vlan_id:
end_state = get_vlan(vlan_id, module)
results = {
'commands': commands,
'updates': commands,
'changed': changed,
'warnings': warnings
}
if module._debug:
results.update({
'proposed_vlans_list': proposed_vlans_list,
'existing_vlans_list': existing_vlans_list,
'proposed': proposed,
'existing': existing,
'end_state': end_state,
'end_state_vlans_list': end_state_vlans_list
})
module.exit_json(**results)
if __name__ == '__main__':
main()
| cmelange/ansible | lib/ansible/modules/network/nxos/nxos_vlan.py | Python | gpl-3.0 | 13,912 | 0.001509 |
import optparse
class CLI:
pass
CLI.parser = optparse.OptionParser()
CLI.parser.add_option("-q", "--queries", dest="queries", help="Queries csv file", metavar="FILE")
CLI.parser.add_option("-l", "--logs", dest="logs", help="Path to log files containing directory", metavar="DIR")
CLI.parser.add_option("-r", "--results", dest="results",
help="Path to result files containing directory", metavar="DIR")
CLI.parser.add_option("-g", "--gains", dest="gains",
help="Comma-separated list of gains for different relevance levels, eg. 0,1,10", metavar="LIST")
CLI.parser.add_option("-s", "--serp-len", dest="serp_len",
help="Number of results on a single Search Engine Result Page [default: %default]",
default=10, metavar="N")
CLI.parser.add_option("-c", action="store_true", dest="use_combined_log_parser", help="Use combined log parser")
CLI.parser.add_option("-a", action="store_true", dest="use_alt_log_format", help="Use alternative log format")
CLI.parsedArgs = CLI.parser.parse_args()
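# Typical invocation (script name, paths and gain values below are examples only):
#   python run.py -q queries.csv -l ./logs -r ./results -g 0,1,10 -s 10 -c
# CLI.parsedArgs is the (options, args) tuple returned by optparse's parse_args().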
| fire-uta/iiix-data-parser | cli.py | Python | mit | 1,078 | 0.008349 |
from __future__ import absolute_import
import socket
import types
from collections import defaultdict
from itertools import count
from kombu import Connection, Exchange, Queue, Consumer, Producer
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.five import Empty, Queue as _Queue
from kombu.transport import virtual
from kombu.utils import eventio # patch poll
from kombu.utils.json import dumps, loads
from kombu.tests.case import (
Case, Mock, call, module_exists, skip_if_not_module, patch,
)
class _poll(eventio._select):
def register(self, fd, flags):
if flags & eventio.READ:
self._rfd.add(fd)
def poll(self, timeout):
events = []
for fd in self._rfd:
if fd.data:
events.append((fd.fileno(), eventio.READ))
return events
eventio.poll = _poll
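# The assignment above swaps kombu's poller for a deterministic stub: _poll only
# tracks READ registrations and reports a fd as readable when its fake socket
# has queued data, so the transport tests below never block on a real select().
# It has to be installed before kombu.transport.redis is imported, because that
# module binds the poller at import time.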
from kombu.transport import redis # must import after poller patch
class ResponseError(Exception):
pass
class Client(object):
queues = {}
sets = defaultdict(set)
hashes = defaultdict(dict)
shard_hint = None
def __init__(self, db=None, port=None, connection_pool=None, **kwargs):
self._called = []
self._connection = None
self.bgsave_raises_ResponseError = False
self.connection = self._sconnection(self)
def bgsave(self):
self._called.append('BGSAVE')
if self.bgsave_raises_ResponseError:
raise ResponseError()
def delete(self, key):
self.queues.pop(key, None)
def exists(self, key):
return key in self.queues or key in self.sets
def hset(self, key, k, v):
self.hashes[key][k] = v
def hget(self, key, k):
return self.hashes[key].get(k)
def hdel(self, key, k):
self.hashes[key].pop(k, None)
def sadd(self, key, member, *args):
self.sets[key].add(member)
zadd = sadd
def smembers(self, key):
return self.sets.get(key, set())
def srem(self, key, *args):
self.sets.pop(key, None)
zrem = srem
def llen(self, key):
try:
return self.queues[key].qsize()
except KeyError:
return 0
def lpush(self, key, value):
self.queues[key].put_nowait(value)
def parse_response(self, connection, type, **options):
cmd, queues = self.connection._sock.data.pop()
assert cmd == type
self.connection._sock.data = []
if type == 'BRPOP':
item = self.brpop(queues, 0.001)
if item:
return item
raise Empty()
def brpop(self, keys, timeout=None):
key = keys[0]
try:
item = self.queues[key].get(timeout=timeout)
except Empty:
pass
else:
return key, item
def rpop(self, key):
try:
return self.queues[key].get_nowait()
except KeyError:
pass
def __contains__(self, k):
return k in self._called
def pipeline(self):
return Pipeline(self)
def encode(self, value):
return str(value)
def _new_queue(self, key):
self.queues[key] = _Queue()
class _sconnection(object):
disconnected = False
class _socket(object):
blocking = True
filenos = count(30)
def __init__(self, *args):
self._fileno = next(self.filenos)
self.data = []
def fileno(self):
return self._fileno
def setblocking(self, blocking):
self.blocking = blocking
def __init__(self, client):
self.client = client
self._sock = self._socket()
def disconnect(self):
self.disconnected = True
def send_command(self, cmd, *args):
self._sock.data.append((cmd, args))
def info(self):
return {'foo': 1}
def pubsub(self, *args, **kwargs):
connection = self.connection
class ConnectionPool(object):
def get_connection(self, *args, **kwargs):
return connection
self.connection_pool = ConnectionPool()
return self
class Pipeline(object):
def __init__(self, client):
self.client = client
self.stack = []
def __getattr__(self, key):
if key not in self.__dict__:
def _add(*args, **kwargs):
self.stack.append((getattr(self.client, key), args, kwargs))
return self
return _add
return self.__dict__[key]
def execute(self):
stack = list(self.stack)
self.stack[:] = []
return [fun(*args, **kwargs) for fun, args, kwargs in stack]
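# The fake Pipeline mirrors just enough of redis-py's pipeline interface for the
# channel code under test: attribute access queues (method, args, kwargs) tuples
# on `stack`, and execute() replays them against the fake Client, e.g.
# (illustrative):
#   Pipeline(Client()).sadd('s', 'a').llen('q').execute()  ->  [None, 0]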
class Channel(redis.Channel):
def _get_client(self):
return Client
def _get_pool(self):
return Mock()
def _get_response_error(self):
return ResponseError
def _new_queue(self, queue, **kwargs):
self.client._new_queue(queue)
def pipeline(self):
return Pipeline(Client())
class Transport(redis.Transport):
Channel = Channel
def _get_errors(self):
return ((KeyError,), (IndexError,))
class test_Channel(Case):
@skip_if_not_module('redis')
def setup(self):
self.connection = self.create_connection()
self.channel = self.connection.default_channel
def create_connection(self, **kwargs):
kwargs.setdefault('transport_options', {'fanout_patterns': True})
return Connection(transport=Transport, **kwargs)
def _get_one_delivery_tag(self, n='test_uniq_tag'):
with self.create_connection() as conn1:
chan = conn1.default_channel
chan.exchange_declare(n)
chan.queue_declare(n)
chan.queue_bind(n, n, n)
msg = chan.prepare_message('quick brown fox')
chan.basic_publish(msg, n, n)
q, payload = chan.client.brpop([n])
self.assertEqual(q, n)
self.assertTrue(payload)
pymsg = chan.message_to_python(loads(payload))
return pymsg.delivery_tag
def test_delivery_tag_is_uuid(self):
seen = set()
for i in range(100):
tag = self._get_one_delivery_tag()
self.assertNotIn(tag, seen)
seen.add(tag)
with self.assertRaises(ValueError):
int(tag)
self.assertEqual(len(tag), 36)
def test_disable_ack_emulation(self):
conn = Connection(transport=Transport, transport_options={
'ack_emulation': False,
})
chan = conn.channel()
self.assertFalse(chan.ack_emulation)
self.assertEqual(chan.QoS, virtual.QoS)
def test_redis_info_raises(self):
pool = Mock(name='pool')
pool_at_init = [pool]
client = Mock(name='client')
class XChannel(Channel):
def __init__(self, *args, **kwargs):
self._pool = pool_at_init[0]
super(XChannel, self).__init__(*args, **kwargs)
def _get_client(self):
return lambda *_, **__: client
class XTransport(Transport):
Channel = XChannel
conn = Connection(transport=XTransport)
client.info.side_effect = RuntimeError()
with self.assertRaises(RuntimeError):
conn.channel()
pool.disconnect.assert_called_with()
pool.disconnect.reset_mock()
pool_at_init = [None]
with self.assertRaises(RuntimeError):
conn.channel()
self.assertFalse(pool.disconnect.called)
def test_after_fork(self):
self.channel._pool = None
self.channel._after_fork()
self.channel._pool = Mock(name='pool')
self.channel._after_fork()
self.channel._pool.disconnect.assert_called_with()
def test_next_delivery_tag(self):
self.assertNotEqual(
self.channel._next_delivery_tag(),
self.channel._next_delivery_tag(),
)
def test_do_restore_message(self):
client = Mock(name='client')
pl1 = {'body': 'BODY'}
spl1 = dumps(pl1)
lookup = self.channel._lookup = Mock(name='_lookup')
lookup.return_value = ['george', 'elaine']
self.channel._do_restore_message(
pl1, 'ex', 'rkey', client,
)
client.rpush.assert_has_calls([
call('george', spl1), call('elaine', spl1),
])
pl2 = {'body': 'BODY2', 'headers': {'x-funny': 1}}
headers_after = dict(pl2['headers'], redelivered=True)
spl2 = dumps(dict(pl2, headers=headers_after))
self.channel._do_restore_message(
pl2, 'ex', 'rkey', client,
)
client.rpush.assert_has_calls([
call('george', spl2), call('elaine', spl2),
])
client.rpush.side_effect = KeyError()
with patch('kombu.transport.redis.crit') as crit:
self.channel._do_restore_message(
pl2, 'ex', 'rkey', client,
)
self.assertTrue(crit.called)
def test_restore(self):
message = Mock(name='message')
with patch('kombu.transport.redis.loads') as loads:
loads.return_value = 'M', 'EX', 'RK'
client = self.channel.client = Mock(name='client')
restore = self.channel._do_restore_message = Mock(
name='_do_restore_message',
)
pipe = Mock(name='pipe')
client.pipeline.return_value = pipe
pipe_hget = Mock(name='pipe.hget')
pipe.hget.return_value = pipe_hget
pipe_hget_hdel = Mock(name='pipe.hget.hdel')
pipe_hget.hdel.return_value = pipe_hget_hdel
result = Mock(name='result')
pipe_hget_hdel.execute.return_value = None, None
self.channel._restore(message)
client.pipeline.assert_called_with()
unacked_key = self.channel.unacked_key
self.assertFalse(loads.called)
tag = message.delivery_tag
pipe.hget.assert_called_with(unacked_key, tag)
pipe_hget.hdel.assert_called_with(unacked_key, tag)
pipe_hget_hdel.execute.assert_called_with()
pipe_hget_hdel.execute.return_value = result, None
self.channel._restore(message)
loads.assert_called_with(result)
restore.assert_called_with('M', 'EX', 'RK', client, False)
def test_qos_restore_visible(self):
client = self.channel.client = Mock(name='client')
client.zrevrangebyscore.return_value = [
(1, 10),
(2, 20),
(3, 30),
]
qos = redis.QoS(self.channel)
restore = qos.restore_by_tag = Mock(name='restore_by_tag')
qos._vrestore_count = 1
qos.restore_visible()
self.assertFalse(client.zrevrangebyscore.called)
self.assertEqual(qos._vrestore_count, 2)
qos._vrestore_count = 0
qos.restore_visible()
restore.assert_has_calls([
call(1, client), call(2, client), call(3, client),
])
self.assertEqual(qos._vrestore_count, 1)
qos._vrestore_count = 0
restore.reset_mock()
client.zrevrangebyscore.return_value = []
qos.restore_visible()
self.assertFalse(restore.called)
self.assertEqual(qos._vrestore_count, 1)
qos._vrestore_count = 0
client.setnx.side_effect = redis.MutexHeld()
qos.restore_visible()
def test_basic_consume_when_fanout_queue(self):
self.channel.exchange_declare(exchange='txconfan', type='fanout')
self.channel.queue_declare(queue='txconfanq')
self.channel.queue_bind(queue='txconfanq', exchange='txconfan')
self.assertIn('txconfanq', self.channel._fanout_queues)
self.channel.basic_consume('txconfanq', False, None, 1)
self.assertIn('txconfanq', self.channel.active_fanout_queues)
self.assertEqual(self.channel._fanout_to_queue.get('txconfan'),
'txconfanq')
def test_basic_cancel_unknown_delivery_tag(self):
self.assertIsNone(self.channel.basic_cancel('txaseqwewq'))
def test_subscribe_no_queues(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.clear()
self.channel._subscribe()
self.assertFalse(self.channel.subclient.subscribe.called)
def test_subscribe(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.add('a')
self.channel.active_fanout_queues.add('b')
self.channel._fanout_queues.update(a=('a', ''), b=('b', ''))
self.channel._subscribe()
self.assertTrue(self.channel.subclient.psubscribe.called)
s_args, _ = self.channel.subclient.psubscribe.call_args
self.assertItemsEqual(s_args[0], ['a', 'b'])
self.channel.subclient.connection._sock = None
self.channel._subscribe()
self.channel.subclient.connection.connect.assert_called_with()
def test_handle_unsubscribe_message(self):
s = self.channel.subclient
s.subscribed = True
self.channel._handle_message(s, ['unsubscribe', 'a', 0])
self.assertFalse(s.subscribed)
def test_handle_pmessage_message(self):
self.assertDictEqual(
self.channel._handle_message(
self.channel.subclient,
['pmessage', 'pattern', 'channel', 'data'],
),
{
'type': 'pmessage',
'pattern': 'pattern',
'channel': 'channel',
'data': 'data',
},
)
def test_handle_message(self):
self.assertDictEqual(
self.channel._handle_message(
self.channel.subclient,
['type', 'channel', 'data'],
),
{
'type': 'type',
'pattern': None,
'channel': 'channel',
'data': 'data',
},
)
def test_brpop_start_but_no_queues(self):
self.assertIsNone(self.channel._brpop_start())
def test_receive(self):
s = self.channel.subclient = Mock()
self.channel._fanout_to_queue['a'] = 'b'
s.parse_response.return_value = ['message', 'a',
dumps({'hello': 'world'})]
payload, queue = self.channel._receive()
self.assertDictEqual(payload, {'hello': 'world'})
self.assertEqual(queue, 'b')
def test_receive_raises(self):
self.channel._in_listen = True
s = self.channel.subclient = Mock()
s.parse_response.side_effect = KeyError('foo')
with self.assertRaises(redis.Empty):
self.channel._receive()
self.assertFalse(self.channel._in_listen)
def test_receive_empty(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = None
with self.assertRaises(redis.Empty):
self.channel._receive()
    def test_receive_different_message_type(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = ['message', '/foo/', 0, 'data']
with self.assertRaises(redis.Empty):
self.channel._receive()
def test_brpop_read_raises(self):
c = self.channel.client = Mock()
c.parse_response.side_effect = KeyError('foo')
with self.assertRaises(redis.Empty):
self.channel._brpop_read()
c.connection.disconnect.assert_called_with()
def test_brpop_read_gives_None(self):
c = self.channel.client = Mock()
c.parse_response.return_value = None
with self.assertRaises(redis.Empty):
self.channel._brpop_read()
def test_poll_error(self):
c = self.channel.client = Mock()
c.parse_response = Mock()
self.channel._poll_error('BRPOP')
c.parse_response.assert_called_with(c.connection, 'BRPOP')
c.parse_response.side_effect = KeyError('foo')
with self.assertRaises(KeyError):
self.channel._poll_error('BRPOP')
def test_poll_error_on_type_LISTEN(self):
c = self.channel.subclient = Mock()
c.parse_response = Mock()
self.channel._poll_error('LISTEN')
c.parse_response.assert_called_with()
c.parse_response.side_effect = KeyError('foo')
with self.assertRaises(KeyError):
self.channel._poll_error('LISTEN')
def test_put_fanout(self):
self.channel._in_poll = False
c = self.channel.client = Mock()
body = {'hello': 'world'}
self.channel._put_fanout('exchange', body, '')
c.publish.assert_called_with('exchange', dumps(body))
def test_put_priority(self):
client = self.channel.client = Mock(name='client')
msg1 = {'properties': {'delivery_info': {'priority': 3}}}
self.channel._put('george', msg1)
client.lpush.assert_called_with(
self.channel._q_for_pri('george', 3), dumps(msg1),
)
msg2 = {'properties': {'delivery_info': {'priority': 313}}}
self.channel._put('george', msg2)
client.lpush.assert_called_with(
self.channel._q_for_pri('george', 9), dumps(msg2),
)
msg3 = {'properties': {'delivery_info': {}}}
self.channel._put('george', msg3)
client.lpush.assert_called_with(
self.channel._q_for_pri('george', 0), dumps(msg3),
)
def test_delete(self):
x = self.channel
self.channel._in_poll = False
delete = x.client.delete = Mock()
srem = x.client.srem = Mock()
x._delete('queue', 'exchange', 'routing_key', None)
delete.assert_any_call('queue')
srem.assert_any_call(x.keyprefix_queue % ('exchange',),
x.sep.join(['routing_key', '', 'queue']))
def test_has_queue(self):
self.channel._in_poll = False
exists = self.channel.client.exists = Mock()
exists.return_value = True
self.assertTrue(self.channel._has_queue('foo'))
exists.assert_any_call('foo')
exists.return_value = False
self.assertFalse(self.channel._has_queue('foo'))
def test_close_when_closed(self):
self.channel.closed = True
self.channel.close()
def test_close_deletes_autodelete_fanout_queues(self):
self.channel._fanout_queues = {'foo': ('foo', ''), 'bar': ('bar', '')}
self.channel.auto_delete_queues = ['foo']
self.channel.queue_delete = Mock(name='queue_delete')
self.channel.close()
self.channel.queue_delete.assert_has_calls([call('foo')])
def test_close_client_close_raises(self):
c = self.channel.client = Mock()
c.connection.disconnect.side_effect = self.channel.ResponseError()
self.channel.close()
c.connection.disconnect.assert_called_with()
def test_invalid_database_raises_ValueError(self):
with self.assertRaises(ValueError):
self.channel.connection.client.virtual_host = 'dwqeq'
self.channel._connparams()
@skip_if_not_module('redis')
def test_connparams_allows_slash_in_db(self):
self.channel.connection.client.virtual_host = '/123'
self.assertEqual(self.channel._connparams()['db'], 123)
@skip_if_not_module('redis')
def test_connparams_db_can_be_int(self):
self.channel.connection.client.virtual_host = 124
self.assertEqual(self.channel._connparams()['db'], 124)
def test_new_queue_with_auto_delete(self):
redis.Channel._new_queue(self.channel, 'george', auto_delete=False)
self.assertNotIn('george', self.channel.auto_delete_queues)
redis.Channel._new_queue(self.channel, 'elaine', auto_delete=True)
self.assertIn('elaine', self.channel.auto_delete_queues)
@skip_if_not_module('redis')
def test_connparams_regular_hostname(self):
self.channel.connection.client.hostname = 'george.vandelay.com'
self.assertEqual(
self.channel._connparams()['host'],
'george.vandelay.com',
)
def test_rotate_cycle_ValueError(self):
cycle = self.channel._queue_cycle = ['kramer', 'jerry']
self.channel._rotate_cycle('kramer')
self.assertEqual(cycle, ['jerry', 'kramer'])
self.channel._rotate_cycle('elaine')
@skip_if_not_module('redis')
def test_get_client(self):
import redis as R
KombuRedis = redis.Channel._get_client(self.channel)
self.assertTrue(KombuRedis)
Rv = getattr(R, 'VERSION', None)
try:
R.VERSION = (2, 4, 0)
with self.assertRaises(VersionMismatch):
redis.Channel._get_client(self.channel)
finally:
if Rv is not None:
R.VERSION = Rv
@skip_if_not_module('redis')
def test_get_response_error(self):
from redis.exceptions import ResponseError
self.assertIs(redis.Channel._get_response_error(self.channel),
ResponseError)
def test_avail_client_when_not_in_poll(self):
self.channel._in_poll = False
c = self.channel.client = Mock()
with self.channel.conn_or_acquire() as client:
self.assertIs(client, c)
def test_avail_client_when_in_poll(self):
self.channel._in_poll = True
self.channel._pool = Mock()
cc = self.channel._create_client = Mock()
client = cc.return_value = Mock()
with self.channel.conn_or_acquire():
pass
self.channel.pool.release.assert_called_with(client.connection)
cc.assert_called_with()
def test_register_with_event_loop(self):
transport = self.connection.transport
transport.cycle = Mock(name='cycle')
transport.cycle.fds = {12: 'LISTEN', 13: 'BRPOP'}
conn = Mock(name='conn')
loop = Mock(name='loop')
redis.Transport.register_with_event_loop(transport, conn, loop)
transport.cycle.on_poll_init.assert_called_with(loop.poller)
loop.call_repeatedly.assert_called_with(
10, transport.cycle.maybe_restore_messages,
)
self.assertTrue(loop.on_tick.add.called)
on_poll_start = loop.on_tick.add.call_args[0][0]
on_poll_start()
transport.cycle.on_poll_start.assert_called_with()
loop.add_reader.assert_has_calls([
call(12, transport.on_readable, 12),
call(13, transport.on_readable, 13),
])
def test_transport_on_readable(self):
transport = self.connection.transport
        cycle = transport.cycle = Mock(name='cycle')
cycle.on_readable.return_value = None
redis.Transport.on_readable(transport, 13)
cycle.on_readable.assert_called_with(13)
cycle.on_readable.reset_mock()
queue = Mock(name='queue')
ret = (Mock(name='message'), queue)
cycle.on_readable.return_value = ret
with self.assertRaises(KeyError):
redis.Transport.on_readable(transport, 14)
cb = transport._callbacks[queue] = Mock(name='callback')
redis.Transport.on_readable(transport, 14)
cb.assert_called_with(ret[0])
@skip_if_not_module('redis')
def test_transport_get_errors(self):
self.assertTrue(redis.Transport._get_errors(self.connection.transport))
@skip_if_not_module('redis')
def test_transport_driver_version(self):
self.assertTrue(
redis.Transport.driver_version(self.connection.transport),
)
@skip_if_not_module('redis')
def test_transport_get_errors_when_InvalidData_used(self):
from redis import exceptions
class ID(Exception):
pass
DataError = getattr(exceptions, 'DataError', None)
InvalidData = getattr(exceptions, 'InvalidData', None)
exceptions.InvalidData = ID
exceptions.DataError = None
try:
errors = redis.Transport._get_errors(self.connection.transport)
self.assertTrue(errors)
self.assertIn(ID, errors[1])
finally:
if DataError is not None:
exceptions.DataError = DataError
if InvalidData is not None:
exceptions.InvalidData = InvalidData
def test_empty_queues_key(self):
channel = self.channel
channel._in_poll = False
key = channel.keyprefix_queue % 'celery'
# Everything is fine, there is a list of queues.
channel.client.sadd(key, 'celery\x06\x16\x06\x16celery')
self.assertListEqual(channel.get_table('celery'),
[('celery', '', 'celery')])
# ... then for some reason, the _kombu.binding.celery key gets lost
channel.client.srem(key)
# which raises a channel error so that the consumer/publisher
# can recover by redeclaring the required entities.
with self.assertRaises(InconsistencyError):
self.channel.get_table('celery')
@skip_if_not_module('redis')
def test_socket_connection(self):
with patch('kombu.transport.redis.Channel._create_client'):
with Connection('redis+socket:///tmp/redis.sock') as conn:
connparams = conn.default_channel._connparams()
self.assertTrue(issubclass(
connparams['connection_class'],
redis.redis.UnixDomainSocketConnection,
))
self.assertEqual(connparams['path'], '/tmp/redis.sock')
class test_Redis(Case):
@skip_if_not_module('redis')
def setup(self):
self.connection = Connection(transport=Transport)
self.exchange = Exchange('test_Redis', type='direct')
self.queue = Queue('test_Redis', self.exchange, 'test_Redis')
def teardown(self):
self.connection.close()
def test_publish__get(self):
channel = self.connection.channel()
producer = Producer(channel, self.exchange, routing_key='test_Redis')
self.queue(channel).declare()
producer.publish({'hello': 'world'})
self.assertDictEqual(self.queue(channel).get().payload,
{'hello': 'world'})
self.assertIsNone(self.queue(channel).get())
self.assertIsNone(self.queue(channel).get())
self.assertIsNone(self.queue(channel).get())
def test_publish__consume(self):
connection = Connection(transport=Transport)
channel = connection.channel()
producer = Producer(channel, self.exchange, routing_key='test_Redis')
consumer = Consumer(channel, queues=[self.queue])
producer.publish({'hello2': 'world2'})
_received = []
def callback(message_data, message):
_received.append(message_data)
message.ack()
consumer.register_callback(callback)
consumer.consume()
self.assertIn(channel, channel.connection.cycle._channels)
try:
connection.drain_events(timeout=1)
self.assertTrue(_received)
with self.assertRaises(socket.timeout):
connection.drain_events(timeout=0.01)
finally:
channel.close()
def test_purge(self):
channel = self.connection.channel()
producer = Producer(channel, self.exchange, routing_key='test_Redis')
self.queue(channel).declare()
for i in range(10):
producer.publish({'hello': 'world-%s' % (i,)})
self.assertEqual(channel._size('test_Redis'), 10)
self.assertEqual(self.queue(channel).purge(), 10)
channel.close()
def test_db_values(self):
Connection(virtual_host=1,
transport=Transport).channel()
Connection(virtual_host='1',
transport=Transport).channel()
Connection(virtual_host='/1',
transport=Transport).channel()
with self.assertRaises(Exception):
Connection('redis:///foo').channel()
def test_db_port(self):
c1 = Connection(port=None, transport=Transport).channel()
c1.close()
c2 = Connection(port=9999, transport=Transport).channel()
c2.close()
def test_close_poller_not_active(self):
c = Connection(transport=Transport).channel()
cycle = c.connection.cycle
c.client.connection
c.close()
self.assertNotIn(c, cycle._channels)
def test_close_ResponseError(self):
c = Connection(transport=Transport).channel()
c.client.bgsave_raises_ResponseError = True
c.close()
def test_close_disconnects(self):
c = Connection(transport=Transport).channel()
conn1 = c.client.connection
conn2 = c.subclient.connection
c.close()
self.assertTrue(conn1.disconnected)
self.assertTrue(conn2.disconnected)
def test_get__Empty(self):
channel = self.connection.channel()
with self.assertRaises(Empty):
channel._get('does-not-exist')
channel.close()
def test_get_client(self):
myredis, exceptions = _redis_modules()
@module_exists(myredis, exceptions)
def _do_test():
conn = Connection(transport=Transport)
chan = conn.channel()
self.assertTrue(chan.Client)
self.assertTrue(chan.ResponseError)
self.assertTrue(conn.transport.connection_errors)
self.assertTrue(conn.transport.channel_errors)
_do_test()
def _redis_modules():
class ConnectionError(Exception):
pass
class AuthenticationError(Exception):
pass
class InvalidData(Exception):
pass
class InvalidResponse(Exception):
pass
class ResponseError(Exception):
pass
exceptions = types.ModuleType('redis.exceptions')
exceptions.ConnectionError = ConnectionError
exceptions.AuthenticationError = AuthenticationError
exceptions.InvalidData = InvalidData
exceptions.InvalidResponse = InvalidResponse
exceptions.ResponseError = ResponseError
class Redis(object):
pass
myredis = types.ModuleType('redis')
myredis.exceptions = exceptions
myredis.Redis = Redis
return myredis, exceptions
class test_MultiChannelPoller(Case):
@skip_if_not_module('redis')
def setup(self):
self.Poller = redis.MultiChannelPoller
def test_on_poll_start(self):
p = self.Poller()
p._channels = []
p.on_poll_start()
p._register_BRPOP = Mock(name='_register_BRPOP')
p._register_LISTEN = Mock(name='_register_LISTEN')
chan1 = Mock(name='chan1')
p._channels = [chan1]
chan1.active_queues = []
chan1.active_fanout_queues = []
p.on_poll_start()
chan1.active_queues = ['q1']
chan1.active_fanout_queues = ['q2']
chan1.qos.can_consume.return_value = False
p.on_poll_start()
p._register_LISTEN.assert_called_with(chan1)
self.assertFalse(p._register_BRPOP.called)
chan1.qos.can_consume.return_value = True
p._register_LISTEN.reset_mock()
p.on_poll_start()
p._register_BRPOP.assert_called_with(chan1)
p._register_LISTEN.assert_called_with(chan1)
def test_on_poll_init(self):
p = self.Poller()
chan1 = Mock(name='chan1')
p._channels = []
poller = Mock(name='poller')
p.on_poll_init(poller)
self.assertIs(p.poller, poller)
p._channels = [chan1]
p.on_poll_init(poller)
chan1.qos.restore_visible.assert_called_with(
num=chan1.unacked_restore_limit,
)
def test_handle_event(self):
p = self.Poller()
chan = Mock(name='chan')
p._fd_to_chan[13] = chan, 'BRPOP'
chan.handlers = {'BRPOP': Mock(name='BRPOP')}
chan.qos.can_consume.return_value = False
p.handle_event(13, redis.READ)
self.assertFalse(chan.handlers['BRPOP'].called)
chan.qos.can_consume.return_value = True
p.handle_event(13, redis.READ)
chan.handlers['BRPOP'].assert_called_with()
p.handle_event(13, redis.ERR)
chan._poll_error.assert_called_with('BRPOP')
p.handle_event(13, ~(redis.READ | redis.ERR))
def test_fds(self):
p = self.Poller()
p._fd_to_chan = {1: 2}
self.assertDictEqual(p.fds, p._fd_to_chan)
def test_close_unregisters_fds(self):
p = self.Poller()
poller = p.poller = Mock()
p._chan_to_sock.update({1: 1, 2: 2, 3: 3})
p.close()
self.assertEqual(poller.unregister.call_count, 3)
u_args = poller.unregister.call_args_list
self.assertItemsEqual(u_args, [((1,), {}),
((2,), {}),
((3,), {})])
def test_close_when_unregister_raises_KeyError(self):
p = self.Poller()
p.poller = Mock()
p._chan_to_sock.update({1: 1})
p.poller.unregister.side_effect = KeyError(1)
p.close()
def test_close_resets_state(self):
p = self.Poller()
p.poller = Mock()
p._channels = Mock()
p._fd_to_chan = Mock()
p._chan_to_sock = Mock()
p._chan_to_sock.itervalues.return_value = []
p._chan_to_sock.values.return_value = [] # py3k
p.close()
p._channels.clear.assert_called_with()
p._fd_to_chan.clear.assert_called_with()
p._chan_to_sock.clear.assert_called_with()
def test_register_when_registered_reregisters(self):
p = self.Poller()
p.poller = Mock()
channel, client, type = Mock(), Mock(), Mock()
sock = client.connection._sock = Mock()
sock.fileno.return_value = 10
p._chan_to_sock = {(channel, client, type): 6}
p._register(channel, client, type)
p.poller.unregister.assert_called_with(6)
self.assertTupleEqual(p._fd_to_chan[10], (channel, type))
self.assertEqual(p._chan_to_sock[(channel, client, type)], sock)
p.poller.register.assert_called_with(sock, p.eventflags)
# when client not connected yet
client.connection._sock = None
def after_connected():
client.connection._sock = Mock()
client.connection.connect.side_effect = after_connected
p._register(channel, client, type)
client.connection.connect.assert_called_with()
def test_register_BRPOP(self):
p = self.Poller()
channel = Mock()
channel.client.connection._sock = None
p._register = Mock()
channel._in_poll = False
p._register_BRPOP(channel)
self.assertEqual(channel._brpop_start.call_count, 1)
self.assertEqual(p._register.call_count, 1)
channel.client.connection._sock = Mock()
p._chan_to_sock[(channel, channel.client, 'BRPOP')] = True
channel._in_poll = True
p._register_BRPOP(channel)
self.assertEqual(channel._brpop_start.call_count, 1)
self.assertEqual(p._register.call_count, 1)
def test_register_LISTEN(self):
p = self.Poller()
channel = Mock()
channel.subclient.connection._sock = None
channel._in_listen = False
p._register = Mock()
p._register_LISTEN(channel)
p._register.assert_called_with(channel, channel.subclient, 'LISTEN')
self.assertEqual(p._register.call_count, 1)
self.assertEqual(channel._subscribe.call_count, 1)
channel._in_listen = True
channel.subclient.connection._sock = Mock()
p._register_LISTEN(channel)
self.assertEqual(p._register.call_count, 1)
self.assertEqual(channel._subscribe.call_count, 1)
def create_get(self, events=None, queues=None, fanouts=None):
_pr = [] if events is None else events
_aq = [] if queues is None else queues
_af = [] if fanouts is None else fanouts
p = self.Poller()
p.poller = Mock()
p.poller.poll.return_value = _pr
p._register_BRPOP = Mock()
p._register_LISTEN = Mock()
channel = Mock()
p._channels = [channel]
channel.active_queues = _aq
channel.active_fanout_queues = _af
return p, channel
def test_get_no_actions(self):
p, channel = self.create_get()
with self.assertRaises(redis.Empty):
p.get()
def test_qos_reject(self):
p, channel = self.create_get()
qos = redis.QoS(channel)
qos.ack = Mock(name='Qos.ack')
qos.reject(1234)
qos.ack.assert_called_with(1234)
def test_get_brpop_qos_allow(self):
p, channel = self.create_get(queues=['a_queue'])
channel.qos.can_consume.return_value = True
with self.assertRaises(redis.Empty):
p.get()
p._register_BRPOP.assert_called_with(channel)
def test_get_brpop_qos_disallow(self):
p, channel = self.create_get(queues=['a_queue'])
channel.qos.can_consume.return_value = False
with self.assertRaises(redis.Empty):
p.get()
self.assertFalse(p._register_BRPOP.called)
def test_get_listen(self):
p, channel = self.create_get(fanouts=['f_queue'])
with self.assertRaises(redis.Empty):
p.get()
p._register_LISTEN.assert_called_with(channel)
def test_get_receives_ERR(self):
p, channel = self.create_get(events=[(1, eventio.ERR)])
p._fd_to_chan[1] = (channel, 'BRPOP')
with self.assertRaises(redis.Empty):
p.get()
channel._poll_error.assert_called_with('BRPOP')
def test_get_receives_multiple(self):
p, channel = self.create_get(events=[(1, eventio.ERR),
(1, eventio.ERR)])
p._fd_to_chan[1] = (channel, 'BRPOP')
with self.assertRaises(redis.Empty):
p.get()
channel._poll_error.assert_called_with('BRPOP')
class test_Mutex(Case):
@skip_if_not_module('redis')
def test_mutex(self, lock_id='xxx'):
client = Mock(name='client')
with patch('kombu.transport.redis.uuid') as uuid:
# Won
uuid.return_value = lock_id
client.setnx.return_value = True
pipe = client.pipeline.return_value = Mock(name='pipe')
pipe.get.return_value = lock_id
held = False
with redis.Mutex(client, 'foo1', 100):
held = True
self.assertTrue(held)
client.setnx.assert_called_with('foo1', lock_id)
pipe.get.return_value = 'yyy'
held = False
with redis.Mutex(client, 'foo1', 100):
held = True
self.assertTrue(held)
# Did not win
client.expire.reset_mock()
pipe.get.return_value = lock_id
client.setnx.return_value = False
with self.assertRaises(redis.MutexHeld):
held = False
with redis.Mutex(client, 'foo1', '100'):
held = True
self.assertFalse(held)
client.ttl.return_value = 0
with self.assertRaises(redis.MutexHeld):
held = False
with redis.Mutex(client, 'foo1', '100'):
held = True
self.assertFalse(held)
self.assertTrue(client.expire.called)
# Wins but raises WatchError (and that is ignored)
client.setnx.return_value = True
pipe.watch.side_effect = redis.redis.WatchError()
held = False
with redis.Mutex(client, 'foo1', 100):
held = True
self.assertTrue(held)
| jindongh/kombu | kombu/tests/transport/test_redis.py | Python | bsd-3-clause | 40,008 | 0.000025 |
import csv
import math
from CSHLDAP import CSHLDAP #get this from the wiki
from subprocess import call
your_gender_list = []
with open('match_scores.csv', 'rb') as f:
reader = csv.reader(f)
your_gender_list = list(reader)
print(your_gender_list)
tempList = [0] * len(your_gender_list)
peopleList = [0] * len(your_gender_list)
for x in range(len(your_gender_list)):
tempList[x] = your_gender_list[x][1]
peopleList[x] = your_gender_list[x][0]
tempList[0] = -10
finalPeopleList = []
for x in range(len(peopleList)):
name = peopleList[x].split("/")
nametemp = name[len(name) - 1]
finalPeopleList += [nametemp]
finalPeopleList[0] = ""
print(finalPeopleList)
index = min(range(len(tempList)), key=lambda i: abs(round(float(tempList[i]), 3)-11.5))
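# Pick the row whose match score is numerically closest to 11.5; the threshold
# is a tuning constant carried over from the scoring step (assumed, not derived
# here). tempList[0] was forced to -10 above so the CSV header row can never win
# and float() does not choke on it.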
print(finalPeopleList,tempList)
base_dn = 'ou=Users,dc=csh,dc=rit,dc=edu'
host = 'ldap://ldap.csh.rit.edu'
password = '<put password here>'
print("Prediction: ", finalPeopleList[index] , " Confidence: ", tempList[index])
ldap_con = CSHLDAP("<put user name here>", password)
name = finalPeopleList[index].split(" ")
nameStr = name[0].strip() + " " + name[1].strip()
print(nameStr)
result = ldap_con.search(cn=nameStr)
msgString = "Hello " + nameStr
file = open("names.txt", "a+")
file.write(nameStr +" Confidence: " + tempList[index] + "\n")
file.close()
onFloor = result[0][1]['onfloor']
skills =[]
if('skills' in result[0][1]):
skills = result[0][1]['skills']
if(int(onFloor[0])):
msgString += " you are an on floor member "
else:
msgString += " you are an off floor member "
skillsStr = ""
if(skills != []):
    for x in range(len(skills)):
        if(x == 0):
            skillsStr += skills[x]
        elif(x == len(skills)-1):
            skillsStr += ", and " + skills[x]
        else:
            skillsStr += ", " + skills[x]
msgString += "with skills in " + skillsStr
print(msgString)
call(["pico2wave", "-w", "msg.wav", msgString])
| sherrardTr4129/FaceHarold | getCSVData.py | Python | mit | 2,092 | 0.010516 |
import functools
import itertools
import logging
import re
import urlparse
import warnings
import bson
from django.db.models import Q
from dirtyfields import DirtyFieldsMixin
from django.apps import apps
from django.contrib.contenttypes.fields import GenericRelation
from django.core.paginator import Paginator
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models, transaction, connection
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.utils.functional import cached_property
from keen import scoped_keys
from psycopg2._psycopg import AsIs
from typedmodels.models import TypedModel, TypedModelManager
from include import IncludeManager
from framework import status
from framework.celery_tasks.handlers import enqueue_task
from framework.exceptions import PermissionsError
from framework.sentry import log_exception
from addons.wiki.utils import to_mongo_key
from osf.exceptions import ValidationValueError
from osf.models.contributor import (Contributor, RecentlyAddedContributor,
get_contributor_permissions)
from osf.models.identifiers import Identifier, IdentifierMixin
from osf.models.licenses import NodeLicenseRecord
from osf.models.mixins import (AddonModelMixin, CommentableMixin, Loggable,
NodeLinkMixin, Taggable)
from osf.models.node_relation import NodeRelation
from osf.models.nodelog import NodeLog
from osf.models.sanctions import RegistrationApproval
from osf.models.private_link import PrivateLink
from osf.models.spam import SpamMixin
from osf.models.tag import Tag
from osf.models.user import OSFUser
from osf.models.validators import validate_doi, validate_title
from framework.auth.core import Auth, get_user
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils.requests import DummyRequest, get_request_and_user_id
from osf.utils.workflows import DefaultStates
from website import language, settings
from website.citations.utils import datetime_to_csl
from website.exceptions import (InvalidTagError, NodeStateError,
TagNotFoundError, UserNotAffiliatedError)
from website.project.licenses import set_license
from website.mails import mails
from website.project import signals as project_signals
from website.project import tasks as node_tasks
from website.project.model import NodeUpdateError
from website.identifiers.tasks import update_ezid_metadata_on_change
from website.util import (api_url_for, api_v2_url, get_headers_from_request,
sanitize, web_url_for)
from website.util.permissions import (ADMIN, CREATOR_PERMISSIONS,
DEFAULT_CONTRIBUTOR_PERMISSIONS, READ,
WRITE, expand_permissions,
reduce_permissions)
from .base import BaseModel, Guid, GuidMixin, GuidMixinQuerySet
logger = logging.getLogger(__name__)
class AbstractNodeQuerySet(GuidMixinQuerySet):
def get_roots(self):
return self.filter(id__in=self.exclude(type='osf.collection').exclude(type='osf.quickfilesnode').values_list('root_id', flat=True))
def get_children(self, root, active=False):
# If `root` is a root node, we can use the 'descendants' related name
# rather than doing a recursive query
if root.id == root.root_id:
query = root.descendants.exclude(id=root.id)
if active:
query = query.filter(is_deleted=False)
return query
else:
sql = """
WITH RECURSIVE descendants AS (
SELECT
parent_id,
child_id,
1 AS LEVEL,
ARRAY[parent_id] as pids
FROM %s
%s
WHERE is_node_link IS FALSE AND parent_id = %s %s
UNION ALL
SELECT
d.parent_id,
s.child_id,
d.level + 1,
d.pids || s.parent_id
FROM descendants AS d
JOIN %s AS s
ON d.child_id = s.parent_id
WHERE s.is_node_link IS FALSE AND %s = ANY(pids)
) SELECT array_agg(DISTINCT child_id)
FROM descendants
WHERE parent_id = %s;
"""
with connection.cursor() as cursor:
node_relation_table = AsIs(NodeRelation._meta.db_table)
cursor.execute(sql, [
node_relation_table,
AsIs('LEFT JOIN osf_abstractnode ON {}.child_id = osf_abstractnode.id'.format(node_relation_table) if active else ''),
root.pk,
AsIs('AND osf_abstractnode.is_deleted IS FALSE' if active else ''),
node_relation_table,
root.pk,
root.pk])
row = cursor.fetchone()[0]
if not row:
return AbstractNode.objects.none()
return AbstractNode.objects.filter(id__in=row)
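    # Traversal sketch: when `root` is itself a root node the precomputed
    # `descendants` relation is used directly; otherwise the recursive CTE walks
    # NodeRelation rows parent -> child, skipping node links, and collects every
    # reachable child id. Example call (names illustrative):
    #   AbstractNode.objects.get_children(some_component, active=True)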
def can_view(self, user=None, private_link=None):
qs = self.filter(is_public=True)
if private_link is not None:
if isinstance(private_link, PrivateLink):
private_link = private_link.key
if not isinstance(private_link, basestring):
raise TypeError('"private_link" must be either {} or {}. Got {!r}'.format(str, PrivateLink, private_link))
qs |= self.filter(private_links__is_deleted=False, private_links__key=private_link)
if user is not None:
if isinstance(user, OSFUser):
user = user.pk
if not isinstance(user, int):
raise TypeError('"user" must be either {} or {}. Got {!r}'.format(int, OSFUser, user))
sqs = Contributor.objects.filter(node=models.OuterRef('pk'), user__id=user, read=True)
qs |= self.annotate(can_view=models.Exists(sqs)).filter(can_view=True)
qs |= self.extra(where=['''
"osf_abstractnode".id in (
WITH RECURSIVE implicit_read AS (
SELECT "osf_contributor"."node_id"
FROM "osf_contributor"
WHERE "osf_contributor"."user_id" = %s
AND "osf_contributor"."admin" is TRUE
UNION ALL
SELECT "osf_noderelation"."child_id"
FROM "implicit_read"
LEFT JOIN "osf_noderelation" ON "osf_noderelation"."parent_id" = "implicit_read"."node_id"
WHERE "osf_noderelation"."is_node_link" IS FALSE
) SELECT * FROM implicit_read
)
'''], params=(user, ))
return qs
class AbstractNodeManager(TypedModelManager, IncludeManager):
def get_queryset(self):
qs = AbstractNodeQuerySet(self.model, using=self._db)
# Filter by typedmodels type
return self._filter_by_type(qs)
# AbstractNodeQuerySet methods
def get_roots(self):
return self.get_queryset().get_roots()
def get_children(self, root, active=False):
return self.get_queryset().get_children(root, active=active)
def can_view(self, user=None, private_link=None):
return self.get_queryset().can_view(user=user, private_link=private_link)
class AbstractNode(DirtyFieldsMixin, TypedModel, AddonModelMixin, IdentifierMixin,
NodeLinkMixin, CommentableMixin, SpamMixin,
Taggable, Loggable, GuidMixin, BaseModel):
"""
All things that inherit from AbstractNode will appear in
the same table and will be differentiated by the `type` column.
"""
#: Whether this is a pointer or not
primary = True
settings_type = 'node' # Needed for addons
FIELD_ALIASES = {
# TODO: Find a better way
'_id': 'guids___id',
'nodes': '_nodes',
'contributors': '_contributors',
}
CATEGORY_MAP = {
'analysis': 'Analysis',
'communication': 'Communication',
'data': 'Data',
'hypothesis': 'Hypothesis',
'instrumentation': 'Instrumentation',
'methods and measures': 'Methods and Measures',
'procedure': 'Procedure',
'project': 'Project',
'software': 'Software',
'other': 'Other',
'': 'Uncategorized',
}
# Node fields that trigger an update to Solr on save
SEARCH_UPDATE_FIELDS = {
'title',
'category',
'description',
'is_fork',
'retraction',
'embargo',
'is_public',
'is_deleted',
'wiki_pages_current',
'node_license',
'preprint_file',
}
# Node fields that trigger a check to the spam filter on save
SPAM_CHECK_FIELDS = {
'title',
'description',
'wiki_pages_current',
}
# Fields that are writable by Node.update
WRITABLE_WHITELIST = [
'title',
'description',
'category',
'is_public',
'node_license',
]
# Named constants
PRIVATE = 'private'
PUBLIC = 'public'
    LICENSE_QUERY = re.sub(r'\s+', ' ', '''WITH RECURSIVE ascendants AS (
SELECT
N.node_license_id,
R.parent_id
FROM "{noderelation}" AS R
JOIN "{abstractnode}" AS N ON N.id = R.parent_id
WHERE R.is_node_link IS FALSE
AND R.child_id = %s
UNION ALL
SELECT
N.node_license_id,
R.parent_id
FROM ascendants AS D
JOIN "{noderelation}" AS R ON D.parent_id = R.child_id
JOIN "{abstractnode}" AS N ON N.id = R.parent_id
WHERE R.is_node_link IS FALSE
AND D.node_license_id IS NULL
) SELECT {fields} FROM "{nodelicenserecord}"
WHERE id = (SELECT node_license_id FROM ascendants WHERE node_license_id IS NOT NULL) LIMIT 1;''')
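    # LICENSE_QUERY walks NodeRelation edges upward from a child id and returns the
    # first ancestor NodeLicenseRecord it finds; the `license` property below uses it
    # when this node has no node_license of its own.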
affiliated_institutions = models.ManyToManyField('Institution', related_name='nodes')
category = models.CharField(max_length=255,
choices=CATEGORY_MAP.items(),
blank=True,
default='')
# Dictionary field mapping user id to a list of nodes in node.nodes which the user has subscriptions for
# {<User.id>: [<Node._id>, <Node2._id>, ...] }
# TODO: Can this be a reference instead of data?
child_node_subscriptions = DateTimeAwareJSONField(default=dict, blank=True)
_contributors = models.ManyToManyField(OSFUser,
through=Contributor,
related_name='nodes')
@property
def contributors(self):
# NOTE: _order field is generated by order_with_respect_to = 'node'
return self._contributors.order_by('contributor___order')
creator = models.ForeignKey(OSFUser,
db_index=True,
related_name='nodes_created',
on_delete=models.SET_NULL,
null=True, blank=True)
deleted_date = NonNaiveDateTimeField(null=True, blank=True)
description = models.TextField(blank=True, default='')
file_guid_to_share_uuids = DateTimeAwareJSONField(default=dict, blank=True)
forked_date = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
forked_from = models.ForeignKey('self',
related_name='forks',
on_delete=models.SET_NULL,
null=True, blank=True)
is_fork = models.BooleanField(default=False, db_index=True)
is_public = models.BooleanField(default=False, db_index=True)
is_deleted = models.BooleanField(default=False, db_index=True)
node_license = models.ForeignKey('NodeLicenseRecord', related_name='nodes',
on_delete=models.SET_NULL, null=True, blank=True)
# One of 'public', 'private'
# TODO: Add validator
comment_level = models.CharField(default='public', max_length=10)
root = models.ForeignKey('AbstractNode',
default=None,
related_name='descendants',
on_delete=models.SET_NULL, null=True, blank=True)
_nodes = models.ManyToManyField('AbstractNode',
through=NodeRelation,
through_fields=('parent', 'child'),
related_name='parent_nodes')
class Meta:
base_manager_name = 'objects'
        index_together = (('is_public', 'is_deleted', 'type'),)
objects = AbstractNodeManager()
@cached_property
def parent_node(self):
# TODO: Use .filter when chaining is fixed in django-include
try:
node_rel = next(parent for parent in self._parents.all() if not parent.is_node_link)
except StopIteration:
node_rel = None
if node_rel:
parent = node_rel.parent
if parent:
return parent
return None
@property
def nodes(self):
"""Return queryset of nodes."""
return self.get_nodes()
@property
def node_ids(self):
return list(self._nodes.all().values_list('guids___id', flat=True))
@property
def linked_from(self):
"""Return the nodes that have linked to this node."""
return self.parent_nodes.filter(node_relations__is_node_link=True)
@property
def linked_from_collections(self):
"""Return the collections that have linked to this node."""
return self.linked_from.filter(type='osf.collection')
def get_nodes(self, **kwargs):
"""Return list of children nodes. ``kwargs`` are used to filter against
children. In addition `is_node_link=<bool>` can be passed to filter against
node links.
"""
# Prepend 'child__' to kwargs for filtering
filter_kwargs = {}
if 'is_node_link' in kwargs:
filter_kwargs['is_node_link'] = kwargs.pop('is_node_link')
for key, val in kwargs.items():
filter_kwargs['child__{}'.format(key)] = val
node_relations = (NodeRelation.objects.filter(parent=self, **filter_kwargs)
.select_related('child')
.order_by('_order'))
return [each.child for each in node_relations]
@property
def linked_nodes(self):
child_pks = NodeRelation.objects.filter(
parent=self,
is_node_link=True
).select_related('child').values_list('child', flat=True)
return self._nodes.filter(pk__in=child_pks)
# permissions = Permissions are now on contributors
piwik_site_id = models.IntegerField(null=True, blank=True)
suspended = models.BooleanField(default=False, db_index=True)
# The node (if any) used as a template for this node's creation
template_node = models.ForeignKey('self',
related_name='templated_from',
on_delete=models.SET_NULL,
null=True, blank=True)
title = models.TextField(
validators=[validate_title]
) # this should be a charfield but data from mongo didn't fit in 255
wiki_pages_current = DateTimeAwareJSONField(default=dict, blank=True)
wiki_pages_versions = DateTimeAwareJSONField(default=dict, blank=True)
# Dictionary field mapping node wiki page to sharejs private uuid.
# {<page_name>: <sharejs_id>}
wiki_private_uuids = DateTimeAwareJSONField(default=dict, blank=True)
identifiers = GenericRelation(Identifier, related_query_name='nodes')
# Preprint fields
preprint_file = models.ForeignKey('osf.BaseFileNode',
on_delete=models.SET_NULL,
null=True, blank=True)
preprint_article_doi = models.CharField(max_length=128,
validators=[validate_doi],
null=True, blank=True)
_is_preprint_orphan = models.NullBooleanField(default=False)
_has_abandoned_preprint = models.BooleanField(default=False)
keenio_read_key = models.CharField(max_length=1000, null=True, blank=True)
def __init__(self, *args, **kwargs):
self._parent = kwargs.pop('parent', None)
self._is_templated_clone = False
super(AbstractNode, self).__init__(*args, **kwargs)
def __unicode__(self):
return ('(title={self.title!r}, category={self.category!r}) '
'with guid {self._id!r}').format(self=self)
@property
def is_registration(self):
"""For v1 compat."""
return False
@property
def is_quickfiles(self):
return False
@property
def is_original(self):
return not self.is_registration and not self.is_fork
@property
def is_preprint(self):
# TODO: This is a temporary implementation.
if not self.preprint_file_id or not self.is_public:
return False
if self.preprint_file.node_id == self.id:
return self.has_submitted_preprint
else:
self._is_preprint_orphan = True
return False
@property
def has_submitted_preprint(self):
return self.preprints.exclude(machine_state=DefaultStates.INITIAL.value).exists()
@property
def is_preprint_orphan(self):
"""For v1 compat"""
if (not self.is_preprint) and self._is_preprint_orphan:
return True
if self.preprint_file:
return self.preprint_file.is_deleted
return False
@property
def has_published_preprint(self):
return self.published_preprints_queryset.exists()
@property
def published_preprints_queryset(self):
return self.preprints.filter(is_published=True)
@property
def preprint_url(self):
node_linked_preprint = self.linked_preprint
if node_linked_preprint:
return node_linked_preprint.url
@property
def linked_preprint(self):
if self.is_preprint:
try:
# if multiple preprints per project are supported on the front end this needs to change.
published_preprint = self.published_preprints_queryset.first()
if published_preprint:
return published_preprint
else:
return self.preprints.get_queryset()[0]
except IndexError:
pass
@property
def is_collection(self):
"""For v1 compat"""
return False
@property # TODO Separate out for submodels
def absolute_api_v2_url(self):
if self.is_registration:
path = '/registrations/{}/'.format(self._id)
return api_v2_url(path)
if self.is_collection:
path = '/collections/{}/'.format(self._id)
return api_v2_url(path)
path = '/nodes/{}/'.format(self._id)
return api_v2_url(path)
@property
def absolute_url(self):
if not self.url:
return None
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def deep_url(self):
return '/project/{}/'.format(self._primary_key)
@property
def sanction(self):
"""For v1 compat. Registration has the proper implementation of this property."""
return None
@property
def is_retracted(self):
"""For v1 compat."""
return False
@property
def is_pending_registration(self):
"""For v1 compat."""
return False
@property
def is_pending_retraction(self):
"""For v1 compat."""
return False
@property
def is_pending_embargo(self):
"""For v1 compat."""
return False
@property
def is_embargoed(self):
"""For v1 compat."""
return False
@property
def archiving(self):
"""For v1 compat."""
return False
@property
def embargo_end_date(self):
"""For v1 compat."""
return False
@property
def forked_from_guid(self):
if self.forked_from:
return self.forked_from._id
return None
@property
def linked_nodes_self_url(self):
return self.absolute_api_v2_url + 'relationships/linked_nodes/'
@property
def linked_registrations_self_url(self):
return self.absolute_api_v2_url + 'relationships/linked_registrations/'
@property
def linked_nodes_related_url(self):
return self.absolute_api_v2_url + 'linked_nodes/'
@property
def linked_registrations_related_url(self):
return self.absolute_api_v2_url + 'linked_registrations/'
@property
def institutions_url(self):
return self.absolute_api_v2_url + 'institutions/'
@property
def institutions_relationship_url(self):
return self.absolute_api_v2_url + 'relationships/institutions/'
# For Comment API compatibility
@property
def target_type(self):
"""The object "type" used in the OSF v2 API."""
return 'nodes'
@property
def root_target_page(self):
"""The comment page type associated with Nodes."""
Comment = apps.get_model('osf.Comment')
return Comment.OVERVIEW
def belongs_to_node(self, node_id):
"""Check whether this node matches the specified node."""
return self._id == node_id
@property
def category_display(self):
"""The human-readable representation of this node's category."""
return settings.NODE_CATEGORY_MAP[self.category]
@property
def url(self):
return '/{}/'.format(self._primary_key)
@property
def api_url(self):
if not self.url:
logger.error('Node {0} has a parent that is not a project'.format(self._id))
return None
return '/api/v1{0}'.format(self.deep_url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def nodes_active(self):
return self._nodes.filter(is_deleted=False)
def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs):
return web_url_for(view_name, pid=self._primary_key,
_absolute=_absolute, _guid=_guid, *args, **kwargs)
def api_url_for(self, view_name, _absolute=False, *args, **kwargs):
return api_url_for(view_name, pid=self._primary_key, _absolute=_absolute, *args, **kwargs)
@property
def project_or_component(self):
# The distinction is drawn based on whether something has a parent node, rather than by category
return 'project' if not self.parent_node else 'component'
@property
def templated_list(self):
return self.templated_from.filter(is_deleted=False)
@property
def draft_registrations_active(self):
DraftRegistration = apps.get_model('osf.DraftRegistration')
return DraftRegistration.objects.filter(
models.Q(branched_from=self) &
models.Q(deleted__isnull=True) &
(models.Q(registered_node=None) | models.Q(registered_node__is_deleted=True))
)
@property
def has_active_draft_registrations(self):
return self.draft_registrations_active.exists()
@property
def csl(self): # formats node information into CSL format for citation parsing
"""a dict in CSL-JSON schema
For details on this schema, see:
https://github.com/citation-style-language/schema#csl-json-schema
"""
csl = {
'id': self._id,
'title': sanitize.unescape_entities(self.title),
'author': [
contributor.csl_name(self._id) # method in auth/model.py which parses the names of authors
for contributor in self.visible_contributors
],
'publisher': 'Open Science Framework',
'type': 'webpage',
'URL': self.display_absolute_url,
}
doi = self.get_identifier_value('doi')
if doi:
csl['DOI'] = doi
if self.logs.exists():
csl['issued'] = datetime_to_csl(self.logs.latest().date)
return csl
@classmethod
def bulk_update_search(cls, nodes, index=None):
from website import search
try:
serialize = functools.partial(search.search.update_node, index=index, bulk=True, async=False)
search.search.bulk_update_nodes(serialize, nodes, index=index)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
def update_search(self):
from website import search
try:
search.search.update_node(self, bulk=False, async=True)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
def delete_search_entry(self):
from website import search
try:
search.search.delete_node(self)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
def is_affiliated_with_institution(self, institution):
return self.affiliated_institutions.filter(id=institution.id).exists()
@classmethod
def find_by_institutions(cls, inst, query=None):
return inst.nodes.filter(query) if query else inst.nodes.all()
def _is_embargo_date_valid(self, end_date):
now = timezone.now()
if (end_date - now) >= settings.EMBARGO_END_DATE_MIN:
if (end_date - now) <= settings.EMBARGO_END_DATE_MAX:
return True
return False
def add_affiliated_institution(self, inst, user, save=False, log=True):
if not user.is_affiliated_with_institution(inst):
raise UserNotAffiliatedError('User is not affiliated with {}'.format(inst.name))
if not self.is_affiliated_with_institution(inst):
self.affiliated_institutions.add(inst)
self.update_search()
if log:
NodeLog = apps.get_model('osf.NodeLog')
self.add_log(
action=NodeLog.AFFILIATED_INSTITUTION_ADDED,
params={
'node': self._primary_key,
'institution': {
'id': inst._id,
'name': inst.name
}
},
auth=Auth(user)
)
def remove_affiliated_institution(self, inst, user, save=False, log=True):
if self.is_affiliated_with_institution(inst):
self.affiliated_institutions.remove(inst)
if log:
self.add_log(
action=NodeLog.AFFILIATED_INSTITUTION_REMOVED,
params={
'node': self._primary_key,
'institution': {
'id': inst._id,
'name': inst.name
}
},
auth=Auth(user)
)
if save:
self.save()
self.update_search()
return True
return False
def can_view(self, auth):
if auth and getattr(auth.private_link, 'anonymous', False):
return auth.private_link.nodes.filter(pk=self.pk).exists()
if not auth and not self.is_public:
return False
return (self.is_public or
(auth.user and self.has_permission(auth.user, 'read')) or
auth.private_key in self.private_link_keys_active or
self.is_admin_parent(auth.user))
def can_edit(self, auth=None, user=None):
"""Return if a user is authorized to edit this node.
Must specify one of (`auth`, `user`).
:param Auth auth: Auth object to check
:param User user: User object to check
:returns: Whether user has permission to edit this node.
"""
if not auth and not user:
raise ValueError('Must pass either `auth` or `user`')
if auth and user:
raise ValueError('Cannot pass both `auth` and `user`')
user = user or auth.user
if auth:
is_api_node = auth.api_node == self
else:
is_api_node = False
return (
(user and self.has_permission(user, 'write')) or is_api_node
)
def get_aggregate_logs_query(self, auth):
return (
(
Q(node_id__in=list(Node.objects.get_children(self).can_view(user=auth.user, private_link=auth.private_link).values_list('id', flat=True)) + [self.id])
) & Q(should_hide=False)
)
def get_aggregate_logs_queryset(self, auth):
query = self.get_aggregate_logs_query(auth)
return NodeLog.objects.filter(query).order_by('-date').include(
'node__guids', 'user__guids', 'original_node__guids', limit_includes=10
)
def get_absolute_url(self):
return self.absolute_api_v2_url
def get_permissions(self, user):
if hasattr(self.contributor_set.all(), '_result_cache'):
for contrib in self.contributor_set.all():
if contrib.user_id == user.id:
return get_contributor_permissions(contrib)
try:
contrib = user.contributor_set.get(node=self)
except Contributor.DoesNotExist:
return []
return get_contributor_permissions(contrib)
def get_visible(self, user):
try:
contributor = self.contributor_set.get(user=user)
except Contributor.DoesNotExist:
raise ValueError(u'User {0} not in contributors'.format(user))
return contributor.visible
def has_permission(self, user, permission, check_parent=True):
"""Check whether user has permission.
:param User user: User to test
:param str permission: Required permission
:returns: User has required permission
"""
if not user:
return False
query = {'node': self, permission: True}
has_permission = user.contributor_set.filter(**query).exists()
if not has_permission and permission == 'read' and check_parent:
return self.is_admin_parent(user)
return has_permission
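    # Illustrative usage: ``node.has_permission(user, 'write')`` checks the boolean
    # columns on the Contributor row; 'read' additionally falls back to admin
    # permissions on an ancestor unless ``check_parent=False`` is passed.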
def has_permission_on_children(self, user, permission):
"""Checks if the given user has a given permission on any child nodes
that are not registrations or deleted
"""
if self.has_permission(user, permission):
return True
for node in self.nodes_primary.filter(is_deleted=False):
if node.has_permission_on_children(user, permission):
return True
return False
def is_admin_parent(self, user):
if self.has_permission(user, 'admin', check_parent=False):
return True
parent = self.parent_node
if parent:
return parent.is_admin_parent(user)
return False
def find_readable_descendants(self, auth):
""" Returns a generator of first descendant node(s) readable by <user>
in each descendant branch.
"""
new_branches = []
for node in self.nodes_primary.filter(is_deleted=False):
if node.can_view(auth):
yield node
else:
new_branches.append(node)
for bnode in new_branches:
for node in bnode.find_readable_descendants(auth):
yield node
@property
def parents(self):
if self.parent_node:
return [self.parent_node] + self.parent_node.parents
return []
@property
def admin_contributor_ids(self):
return self._get_admin_contributor_ids(include_self=True)
@property
def parent_admin_contributor_ids(self):
return self._get_admin_contributor_ids()
def _get_admin_contributor_ids(self, include_self=False):
def get_admin_contributor_ids(node):
return Contributor.objects.select_related('user').filter(
node=node,
user__is_active=True,
admin=True
).values_list('user__guids___id', flat=True)
contributor_ids = set(self.contributors.values_list('guids___id', flat=True))
admin_ids = set(get_admin_contributor_ids(self)) if include_self else set()
for parent in self.parents:
admins = get_admin_contributor_ids(parent)
admin_ids.update(set(admins).difference(contributor_ids))
return admin_ids
@property
def admin_contributors(self):
return OSFUser.objects.filter(
guids___id__in=self.admin_contributor_ids
).order_by('family_name')
@property
def parent_admin_contributors(self):
return OSFUser.objects.filter(
guids___id__in=self.parent_admin_contributor_ids
).order_by('family_name')
def set_permissions(self, user, permissions, validate=True, save=False):
# Ensure that user's permissions cannot be lowered if they are the only admin
if isinstance(user, Contributor):
user = user.user
if validate and (reduce_permissions(self.get_permissions(user)) == ADMIN and
reduce_permissions(permissions) != ADMIN):
admin_contribs = Contributor.objects.filter(node=self, admin=True)
if admin_contribs.count() <= 1:
raise NodeStateError('Must have at least one registered admin contributor')
contrib_obj = Contributor.objects.get(node=self, user=user)
for permission_level in [READ, WRITE, ADMIN]:
if permission_level in permissions:
setattr(contrib_obj, permission_level, True)
else:
setattr(contrib_obj, permission_level, False)
contrib_obj.save()
if save:
self.save()
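    # Illustrative usage (``some_user`` is a placeholder):
    #
    #     node.set_permissions(some_user, ['read', 'write'], save=True)
    #
    # Demoting the only admin raises NodeStateError because of the validation above.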
# TODO: Remove save parameter
def add_permission(self, user, permission, save=False):
"""Grant permission to a user.
:param User user: User to grant permission to
:param str permission: Permission to grant
:param bool save: Save changes
:raises: ValueError if user already has permission
"""
contributor = user.contributor_set.get(node=self)
if not getattr(contributor, permission, False):
for perm in expand_permissions(permission):
setattr(contributor, perm, True)
contributor.save()
else:
if getattr(contributor, permission, False):
raise ValueError('User already has permission {0}'.format(permission))
if save:
self.save()
# TODO: Remove save parameter
def remove_permission(self, user, permission, save=False):
"""Revoke permission from a user.
:param User user: User to revoke permission from
:param str permission: Permission to revoke
:param bool save: Save changes
:raises: ValueError if user does not have permission
"""
contributor = user.contributor_set.get(node=self)
if getattr(contributor, permission, False):
for perm in expand_permissions(permission):
setattr(contributor, perm, False)
contributor.save()
else:
raise ValueError('User does not have permission {0}'.format(permission))
if save:
self.save()
@property
def registrations_all(self):
"""For v1 compat."""
return self.registrations.all()
@property
def parent_id(self):
if self.parent_node:
return self.parent_node._id
return None
@property
def license(self):
if self.node_license_id:
return self.node_license
with connection.cursor() as cursor:
cursor.execute(self.LICENSE_QUERY.format(
abstractnode=AbstractNode._meta.db_table,
noderelation=NodeRelation._meta.db_table,
nodelicenserecord=NodeLicenseRecord._meta.db_table,
fields=', '.join('"{}"."{}"'.format(NodeLicenseRecord._meta.db_table, f.column) for f in NodeLicenseRecord._meta.concrete_fields)
), [self.id])
res = cursor.fetchone()
if res:
return NodeLicenseRecord.from_db(self._state.db, None, res)
return None
@property
def visible_contributors(self):
return OSFUser.objects.filter(
contributor__node=self,
contributor__visible=True
).order_by('contributor___order')
# visible_contributor_ids was moved to this property
@property
def visible_contributor_ids(self):
return self.contributor_set.filter(visible=True) \
.order_by('_order') \
.values_list('user__guids___id', flat=True)
@property
def all_tags(self):
"""Return a queryset containing all of this node's tags (incl. system tags)."""
# Tag's default manager only returns non-system tags, so we can't use self.tags
return Tag.all_tags.filter(abstractnode_tagged=self)
@property
def system_tags(self):
"""The system tags associated with this node. This currently returns a list of string
names for the tags, for compatibility with v1. Eventually, we can just return the
QuerySet.
"""
return self.all_tags.filter(system=True).values_list('name', flat=True)
# Override Taggable
def add_tag_log(self, tag, auth):
self.add_log(
action=NodeLog.TAG_ADDED,
params={
'parent_node': self.parent_id,
'node': self._id,
'tag': tag.name
},
auth=auth,
save=False
)
# Override Taggable
def on_tag_added(self, tag):
self.update_search()
def remove_tag(self, tag, auth, save=True):
if not tag:
raise InvalidTagError
elif not self.tags.filter(name=tag).exists():
raise TagNotFoundError
else:
tag_obj = Tag.objects.get(name=tag)
self.tags.remove(tag_obj)
self.add_log(
action=NodeLog.TAG_REMOVED,
params={
'parent_node': self.parent_id,
'node': self._id,
'tag': tag,
},
auth=auth,
save=False,
)
if save:
self.save()
self.update_search()
return True
def is_contributor(self, user):
"""Return whether ``user`` is a contributor on this node."""
return user is not None and Contributor.objects.filter(user=user, node=self).exists()
def set_visible(self, user, visible, log=True, auth=None, save=False):
if not self.is_contributor(user):
raise ValueError(u'User {0} not in contributors'.format(user))
if visible and not Contributor.objects.filter(node=self, user=user, visible=True).exists():
Contributor.objects.filter(node=self, user=user, visible=False).update(visible=True)
elif not visible and Contributor.objects.filter(node=self, user=user, visible=True).exists():
if Contributor.objects.filter(node=self, visible=True).count() == 1:
raise ValueError('Must have at least one visible contributor')
Contributor.objects.filter(node=self, user=user, visible=True).update(visible=False)
else:
return
message = (
NodeLog.MADE_CONTRIBUTOR_VISIBLE
if visible
else NodeLog.MADE_CONTRIBUTOR_INVISIBLE
)
if log:
self.add_log(
message,
params={
'parent': self.parent_id,
'node': self._id,
'contributors': [user._id],
},
auth=auth,
save=False,
)
if save:
self.save()
def add_contributor(self, contributor, permissions=None, visible=True,
send_email='default', auth=None, log=True, save=False):
"""Add a contributor to the project.
:param User contributor: The contributor to be added
:param list permissions: Permissions to grant to the contributor
:param bool visible: Contributor is visible in project dashboard
:param str send_email: Email preference for notifying added contributor
:param Auth auth: All the auth information including user, API key
:param bool log: Add log to self
:param bool save: Save after adding contributor
:returns: Whether contributor was added
"""
MAX_RECENT_LENGTH = 15
# If user is merged into another account, use master account
contrib_to_add = contributor.merged_by if contributor.is_merged else contributor
if contrib_to_add.is_disabled:
raise ValidationValueError('Deactivated users cannot be added as contributors.')
if not self.is_contributor(contrib_to_add):
contributor_obj, created = Contributor.objects.get_or_create(user=contrib_to_add, node=self)
contributor_obj.visible = visible
# Add default contributor permissions
permissions = permissions or DEFAULT_CONTRIBUTOR_PERMISSIONS
for perm in permissions:
setattr(contributor_obj, perm, True)
contributor_obj.save()
# Add contributor to recently added list for user
if auth is not None:
user = auth.user
recently_added_contributor_obj, created = RecentlyAddedContributor.objects.get_or_create(
user=user,
contributor=contrib_to_add
)
recently_added_contributor_obj.date_added = timezone.now()
recently_added_contributor_obj.save()
count = user.recently_added.count()
if count > MAX_RECENT_LENGTH:
difference = count - MAX_RECENT_LENGTH
for each in user.recentlyaddedcontributor_set.order_by('date_added')[:difference]:
each.delete()
if log:
self.add_log(
action=NodeLog.CONTRIB_ADDED,
params={
'project': self.parent_id,
'node': self._primary_key,
'contributors': [contrib_to_add._primary_key],
},
auth=auth,
save=False,
)
if save:
self.save()
if self._id:
project_signals.contributor_added.send(self,
contributor=contributor,
auth=auth, email_template=send_email)
self.update_search()
self.save_node_preprints()
return contrib_to_add, True
        # Permissions must be overridden if they change when a contributor is
        # added to a parent whose child they already belong to.
elif self.is_contributor(contrib_to_add) and permissions is not None:
self.set_permissions(contrib_to_add, permissions)
if save:
self.save()
return False
else:
return False
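    # Illustrative usage (``new_user`` and ``creator`` are placeholders):
    #
    #     node.add_contributor(new_user, permissions=['read', 'write'],
    #                          visible=True, auth=Auth(creator), save=True)
    #
    # Returns ``(user, True)`` on success, or ``False`` if the user was already a
    # contributor (in which case only permissions may be updated).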
def add_contributors(self, contributors, auth=None, log=True, save=False):
"""Add multiple contributors
:param list contributors: A list of dictionaries of the form:
{
'user': <User object>,
'permissions': <Permissions list, e.g. ['read', 'write']>,
'visible': <Boolean indicating whether or not user is a bibliographic contributor>
}
:param auth: All the auth information including user, API key.
:param log: Add log to self
:param save: Save after adding contributor
"""
for contrib in contributors:
self.add_contributor(
contributor=contrib['user'], permissions=contrib['permissions'],
visible=contrib['visible'], auth=auth, log=False, save=False,
)
if log and contributors:
self.add_log(
action=NodeLog.CONTRIB_ADDED,
params={
'project': self.parent_id,
'node': self._primary_key,
'contributors': [
contrib['user']._id
for contrib in contributors
],
},
auth=auth,
save=False,
)
if save:
self.save()
def add_unregistered_contributor(self, fullname, email, auth, send_email='default',
visible=True, permissions=None, save=False, existing_user=None):
"""Add a non-registered contributor to the project.
:param str fullname: The full name of the person.
:param str email: The email address of the person.
:param Auth auth: Auth object for the user adding the contributor.
:param User existing_user: the unregister_contributor if it is already created, otherwise None
:returns: The added contributor
:raises: DuplicateEmailError if user with given email is already in the database.
"""
# Create a new user record if you weren't passed an existing user
contributor = existing_user if existing_user else OSFUser.create_unregistered(fullname=fullname, email=email)
contributor.add_unclaimed_record(node=self, referrer=auth.user,
given_name=fullname, email=email)
try:
contributor.save()
except ValidationError: # User with same email already exists
contributor = get_user(email=email)
# Unregistered users may have multiple unclaimed records, so
# only raise error if user is registered.
if contributor.is_registered or self.is_contributor(contributor):
raise
contributor.add_unclaimed_record(
node=self, referrer=auth.user, given_name=fullname, email=email
)
contributor.save()
self.add_contributor(
contributor, permissions=permissions, auth=auth,
visible=visible, send_email=send_email, log=True, save=False
)
self.save()
return contributor
def add_contributor_registered_or_not(self, auth, user_id=None,
full_name=None, email=None, send_email='false',
permissions=None, bibliographic=True, index=None, save=False):
if user_id:
contributor = OSFUser.load(user_id)
if not contributor:
raise ValueError('User with id {} was not found.'.format(user_id))
if not contributor.is_registered:
raise ValueError(
'Cannot add unconfirmed user {} to node {} by guid. Add an unregistered contributor with fullname and email.'
.format(user_id, self._id)
)
if self.contributor_set.filter(user=contributor).exists():
raise ValidationValueError('{} is already a contributor.'.format(contributor.fullname))
contributor, _ = self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic,
permissions=permissions, send_email=send_email, save=True)
else:
try:
contributor = self.add_unregistered_contributor(
fullname=full_name, email=email, auth=auth,
send_email=send_email, permissions=permissions,
visible=bibliographic, save=True
)
except ValidationError:
contributor = get_user(email=email)
if self.contributor_set.filter(user=contributor).exists():
raise ValidationValueError('{} is already a contributor.'.format(contributor.fullname))
self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic,
send_email=send_email, permissions=permissions, save=True)
auth.user.email_last_sent = timezone.now()
auth.user.save()
if index is not None:
self.move_contributor(contributor=contributor, index=index, auth=auth, save=True)
contributor_obj = self.contributor_set.get(user=contributor)
contributor.permission = get_contributor_permissions(contributor_obj, as_list=False)
contributor.bibliographic = contributor_obj.visible
contributor.node_id = self._id
contributor_order = list(self.get_contributor_order())
contributor.index = contributor_order.index(contributor_obj.pk)
if save:
contributor.save()
return contributor_obj
def callback(self, callback, recursive=False, *args, **kwargs):
"""Invoke callbacks of attached add-ons and collect messages.
:param str callback: Name of callback method to invoke
:param bool recursive: Apply callback recursively over nodes
:return list: List of callback messages
"""
messages = []
for addon in self.get_addons():
method = getattr(addon, callback)
message = method(self, *args, **kwargs)
if message:
messages.append(message)
if recursive:
for child in self._nodes.filter(is_deleted=False):
messages.extend(
child.callback(
callback, recursive, *args, **kwargs
)
)
return messages
def replace_contributor(self, old, new):
try:
contrib_obj = self.contributor_set.get(user=old)
except Contributor.DoesNotExist:
return False
contrib_obj.user = new
contrib_obj.save()
# Remove unclaimed record for the project
if self._id in old.unclaimed_records:
del old.unclaimed_records[self._id]
old.save()
self.save_node_preprints()
return True
def remove_contributor(self, contributor, auth, log=True):
"""Remove a contributor from this node.
:param contributor: User object, the contributor to be removed
:param auth: All the auth information including user, API key.
"""
if isinstance(contributor, Contributor):
contributor = contributor.user
# remove unclaimed record if necessary
if self._primary_key in contributor.unclaimed_records:
del contributor.unclaimed_records[self._primary_key]
contributor.save()
# If user is the only visible contributor, return False
if not self.contributor_set.exclude(user=contributor).filter(visible=True).exists():
return False
# Node must have at least one registered admin user
admin_query = self._get_admin_contributors_query(self._contributors.all()).exclude(user=contributor)
if not admin_query.exists():
return False
contrib_obj = self.contributor_set.get(user=contributor)
contrib_obj.delete()
# After remove callback
for addon in self.get_addons():
message = addon.after_remove_contributor(self, contributor, auth)
if message:
# Because addons can return HTML strings, addons are responsible
# for markupsafe-escaping any messages returned
status.push_status_message(message, kind='info', trust=True)
if log:
self.add_log(
action=NodeLog.CONTRIB_REMOVED,
params={
'project': self.parent_id,
'node': self._id,
'contributors': [contributor._id],
},
auth=auth,
save=False,
)
self.save()
self.update_search()
# send signal to remove this user from project subscriptions
project_signals.contributor_removed.send(self, user=contributor)
self.save_node_preprints()
return True
def remove_contributors(self, contributors, auth=None, log=True, save=False):
results = []
removed = []
for contrib in contributors:
outcome = self.remove_contributor(
contributor=contrib, auth=auth, log=False,
)
results.append(outcome)
removed.append(contrib._id)
if log:
self.add_log(
action=NodeLog.CONTRIB_REMOVED,
params={
'project': self.parent_id,
'node': self._primary_key,
'contributors': removed,
},
auth=auth,
save=False,
)
if save:
self.save()
return all(results)
def move_contributor(self, contributor, auth, index, save=False):
if not self.has_permission(auth.user, ADMIN):
raise PermissionsError('Only admins can modify contributor order')
if isinstance(contributor, OSFUser):
contributor = self.contributor_set.get(user=contributor)
contributor_ids = list(self.get_contributor_order())
old_index = contributor_ids.index(contributor.id)
contributor_ids.insert(index, contributor_ids.pop(old_index))
self.set_contributor_order(contributor_ids)
self.add_log(
action=NodeLog.CONTRIB_REORDERED,
params={
'project': self.parent_id,
'node': self._id,
'contributors': [
contributor.user._id
],
},
auth=auth,
save=False,
)
if save:
self.save()
self.save_node_preprints()
def can_comment(self, auth):
if self.comment_level == 'public':
return auth.logged_in and (
self.is_public or
(auth.user and self.has_permission(auth.user, 'read'))
)
return self.is_contributor(auth.user)
def set_node_license(self, license_detail, auth, save=False):
license_record, license_changed = set_license(self, license_detail, auth)
if license_changed:
self.add_log(
action=NodeLog.CHANGED_LICENSE,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'new_license': license_record.node_license.name
},
auth=auth,
save=False,
)
if save:
self.save()
def set_privacy(self, permissions, auth=None, log=True, save=True, meeting_creation=False, check_addons=True):
"""Set the permissions for this node. Also, based on meeting_creation, queues
an email to user about abilities of public projects.
:param permissions: A string, either 'public' or 'private'
:param auth: All the auth information including user, API key.
:param bool log: Whether to add a NodeLog for the privacy change.
:param bool meeting_creation: Whether this was created due to a meetings email.
        :param bool check_addons: Whether to run add-on callbacks and collect their messages.
"""
if auth and not self.has_permission(auth.user, ADMIN):
raise PermissionsError('Must be an admin to change privacy settings.')
if permissions == 'public' and not self.is_public:
if self.is_spam or (settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE and self.is_spammy):
# TODO: Should say will review within a certain agreed upon time period.
raise NodeStateError('This project has been marked as spam. Please contact the help desk if you think this is in error.')
if self.is_registration:
if self.is_pending_embargo:
raise NodeStateError('A registration with an unapproved embargo cannot be made public.')
elif self.is_pending_registration:
raise NodeStateError('An unapproved registration cannot be made public.')
elif self.is_pending_embargo:
raise NodeStateError('An unapproved embargoed registration cannot be made public.')
elif self.is_embargoed:
# Embargoed registrations can be made public early
self.request_embargo_termination(auth=auth)
return False
self.is_public = True
self.keenio_read_key = self.generate_keenio_read_key()
elif permissions == 'private' and self.is_public:
if self.is_registration and not self.is_pending_embargo:
raise NodeStateError('Public registrations must be withdrawn, not made private.')
else:
self.is_public = False
self.keenio_read_key = ''
else:
return False
        # After set permissions callback
        if check_addons:
            for addon in self.get_addons():
                message = addon.after_set_privacy(self, permissions)
                if message:
                    status.push_status_message(message, kind='info', trust=False)
# Update existing identifiers
if self.get_identifier('doi'):
doi_status = 'unavailable' if permissions == 'private' else 'public'
enqueue_task(update_ezid_metadata_on_change.s(self._id, status=doi_status))
if log:
action = NodeLog.MADE_PUBLIC if permissions == 'public' else NodeLog.MADE_PRIVATE
self.add_log(
action=action,
params={
'project': self.parent_id,
'node': self._primary_key,
},
auth=auth,
save=False,
)
if save:
self.save()
if auth and permissions == 'public':
project_signals.privacy_set_public.send(auth.user, node=self, meeting_creation=meeting_creation)
return True
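    # Illustrative usage (``admin_user`` is a placeholder):
    #
    #     node.set_privacy('public', auth=Auth(admin_user))
    #
    # Making a node public is refused for spammy nodes and unapproved registrations,
    # and fires the ``privacy_set_public`` signal on success.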
def generate_keenio_read_key(self):
return scoped_keys.encrypt(settings.KEEN['public']['master_key'], options={
'filters': [{
'property_name': 'node.id',
'operator': 'eq',
'property_value': str(self._id)
}],
'allowed_operations': ['read']
})
def save_node_preprints(self):
if self.preprint_file:
PreprintService = apps.get_model('osf.PreprintService')
for preprint in PreprintService.objects.filter(node_id=self.id, is_published=True):
preprint.save()
@property
def private_links_active(self):
return self.private_links.filter(is_deleted=False)
@property
def private_link_keys_active(self):
return self.private_links.filter(is_deleted=False).values_list('key', flat=True)
@property
def private_link_keys_deleted(self):
return self.private_links.filter(is_deleted=True).values_list('key', flat=True)
def get_root(self):
sql = """
WITH RECURSIVE ascendants AS (
SELECT
parent_id,
child_id,
1 AS LEVEL,
ARRAY[child_id] as cids
FROM %s
WHERE is_node_link IS FALSE and child_id = %s
UNION ALL
SELECT
S.parent_id,
D.child_id,
D.level + 1,
D.cids || S.child_id
FROM ascendants AS D
JOIN %s AS S
ON D.parent_id = S.child_id
WHERE S.is_node_link IS FALSE
AND %s = ANY(cids)
) SELECT parent_id
FROM ascendants
WHERE child_id = %s
ORDER BY level DESC
LIMIT 1;
"""
with connection.cursor() as cursor:
node_relation_table = AsIs(NodeRelation._meta.db_table)
cursor.execute(sql, [node_relation_table, self.pk, node_relation_table, self.pk, self.pk])
res = cursor.fetchone()
if res:
return AbstractNode.objects.get(pk=res[0])
return self
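    # The recursive CTE above follows parent relations to the topmost ancestor; a
    # node with no parents is its own root. Illustrative usage:
    #
    #     root = component.get_root()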
def find_readable_antecedent(self, auth):
""" Returns first antecendant node readable by <user>.
"""
next_parent = self.parent_node
while next_parent:
if next_parent.can_view(auth):
return next_parent
next_parent = next_parent.parent_node
def copy_contributors_from(self, node):
"""Copies the contibutors from node (including permissions and visibility) into this node."""
contribs = []
for contrib in node.contributor_set.all():
contrib.id = None
contrib.node = self
contribs.append(contrib)
Contributor.objects.bulk_create(contribs)
def register_node(self, schema, auth, data, parent=None):
"""Make a frozen copy of a node.
:param schema: Schema object
:param auth: All the auth information including user, API key.
:param data: Form data
:param parent Node: parent registration of registration to be created
"""
        # NOTE: Admins can register child nodes even if they don't have write access to them
if not self.can_edit(auth=auth) and not self.is_admin_parent(user=auth.user):
raise PermissionsError(
'User {} does not have permission '
'to register this node'.format(auth.user._id)
)
if self.is_collection:
raise NodeStateError('Folders may not be registered')
original = self
# Note: Cloning a node will clone each node wiki page version and add it to
# `registered.wiki_pages_current` and `registered.wiki_pages_versions`.
if original.is_deleted:
raise NodeStateError('Cannot register deleted node.')
registered = original.clone()
registered.recast('osf.registration')
registered.registered_date = timezone.now()
registered.registered_user = auth.user
registered.registered_from = original
if not registered.registered_meta:
registered.registered_meta = {}
registered.registered_meta[schema._id] = data
registered.forked_from = self.forked_from
registered.creator = self.creator
registered.node_license = original.license.copy() if original.license else None
registered.wiki_private_uuids = {}
# Need to save here in order to set many-to-many fields
registered.save()
registered.registered_schema.add(schema)
registered.copy_contributors_from(self)
registered.tags.add(*self.all_tags.values_list('pk', flat=True))
registered.affiliated_institutions.add(*self.affiliated_institutions.values_list('pk', flat=True))
# Clone each log from the original node for this registration.
self.clone_logs(registered)
registered.is_public = False
# Copy unclaimed records to unregistered users for parent
registered.copy_unclaimed_records()
if parent:
node_relation = NodeRelation.objects.get(parent=parent.registered_from, child=original)
NodeRelation.objects.get_or_create(_order=node_relation._order, parent=parent, child=registered)
# After register callback
for addon in original.get_addons():
_, message = addon.after_register(original, registered, auth.user)
if message:
status.push_status_message(message, kind='info', trust=False)
for node_relation in original.node_relations.filter(child__is_deleted=False):
node_contained = node_relation.child
# Register child nodes
if not node_relation.is_node_link:
registered_child = node_contained.register_node( # noqa
schema=schema,
auth=auth,
data=data,
parent=registered,
)
else:
# Copy linked nodes
NodeRelation.objects.get_or_create(
is_node_link=True,
parent=registered,
child=node_contained
)
registered.root = None # Recompute root on save
registered.save()
if settings.ENABLE_ARCHIVER:
registered.refresh_from_db()
project_signals.after_create_registration.send(self, dst=registered, user=auth.user)
return registered
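    # Illustrative usage (``schema``, ``metadata`` and ``admin_user`` are placeholders):
    #
    #     registration = node.register_node(schema, Auth(admin_user), metadata)
    #
    # Child nodes are registered recursively and node links are copied as links onto
    # the new registration.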
def path_above(self, auth):
parents = self.parents
return '/' + '/'.join([p.title if p.can_view(auth) else '-- private project --' for p in reversed(parents)])
# TODO: Deprecate this; it duplicates much of what serialize_project already
# does
def serialize(self, auth=None):
"""Dictionary representation of node that is nested within a NodeLog's
representation.
"""
# TODO: incomplete implementation
return {
'id': str(self._primary_key),
'category': self.category_display,
'node_type': self.project_or_component,
'url': self.url,
# TODO: Titles shouldn't contain escaped HTML in the first place
'title': sanitize.unescape_entities(self.title),
'path': self.path_above(auth),
'api_url': self.api_url,
'is_public': self.is_public,
'is_registration': self.is_registration,
}
def has_node_link_to(self, node):
return self.node_relations.filter(child=node, is_node_link=True).exists()
def _initiate_approval(self, user, notify_initiator_on_complete=False):
end_date = timezone.now() + settings.REGISTRATION_APPROVAL_TIME
self.registration_approval = RegistrationApproval.objects.create(
initiated_by=user,
end_date=end_date,
notify_initiator_on_complete=notify_initiator_on_complete
)
self.save() # Set foreign field reference Node.registration_approval
admins = self.get_admin_contributors_recursive(unique_users=True)
for (admin, node) in admins:
self.registration_approval.add_authorizer(admin, node=node)
self.registration_approval.save() # Save approval's approval_state
return self.registration_approval
def require_approval(self, user, notify_initiator_on_complete=False):
if not self.is_registration:
raise NodeStateError('Only registrations can require registration approval')
if not self.has_permission(user, 'admin'):
raise PermissionsError('Only admins can initiate a registration approval')
approval = self._initiate_approval(user, notify_initiator_on_complete)
self.registered_from.add_log(
action=NodeLog.REGISTRATION_APPROVAL_INITIATED,
params={
'node': self.registered_from._id,
'registration': self._id,
'registration_approval_id': approval._id,
},
auth=Auth(user),
save=True,
)
def get_primary(self, node):
return NodeRelation.objects.filter(parent=self, child=node, is_node_link=False).exists()
# TODO optimize me
def get_descendants_recursive(self, primary_only=False):
query = self.nodes_primary if primary_only else self._nodes
for node in query.all():
yield node
if not primary_only:
primary = self.get_primary(node)
if primary:
for descendant in node.get_descendants_recursive(primary_only=primary_only):
yield descendant
else:
for descendant in node.get_descendants_recursive(primary_only=primary_only):
yield descendant
@property
def nodes_primary(self):
"""For v1 compat."""
child_pks = NodeRelation.objects.filter(
parent=self,
is_node_link=False
).values_list('child', flat=True)
return self._nodes.filter(pk__in=child_pks)
@property
def has_pointers_recursive(self):
"""Recursively checks whether the current node or any of its nodes
contains a pointer.
"""
if self.linked_nodes.exists():
return True
for node in self.nodes_primary:
if node.has_pointers_recursive:
return True
return False
# TODO: Optimize me (e.g. use bulk create)
def fork_node(self, auth, title=None, parent=None):
"""Recursively fork a node.
:param Auth auth: Consolidated authorization
:param str title: Optional text to prepend to forked title
:param Node parent: Sets parent, should only be non-null when recursing
:return: Forked node
"""
Registration = apps.get_model('osf.Registration')
PREFIX = 'Fork of '
user = auth.user
# Non-contributors can't fork private nodes
if not (self.is_public or self.has_permission(user, 'read')):
raise PermissionsError('{0!r} does not have permission to fork node {1!r}'.format(user, self._id))
when = timezone.now()
original = self
if original.is_deleted:
raise NodeStateError('Cannot fork deleted node.')
        # Note: Cloning a node will clone each node wiki page version and add it to
        # `forked.wiki_pages_current` and `forked.wiki_pages_versions`.
forked = original.clone()
if isinstance(forked, Registration):
forked.recast('osf.node')
forked.is_fork = True
forked.forked_date = when
forked.forked_from = original
forked.creator = user
forked.node_license = original.license.copy() if original.license else None
forked.wiki_private_uuids = {}
# Forks default to private status
forked.is_public = False
# Need to save here in order to access m2m fields
forked.save()
if parent:
node_relation = NodeRelation.objects.get(parent=parent.forked_from, child=original)
NodeRelation.objects.get_or_create(_order=node_relation._order, parent=parent, child=forked)
forked.tags.add(*self.all_tags.values_list('pk', flat=True))
for node_relation in original.node_relations.filter(child__is_deleted=False):
node_contained = node_relation.child
# Fork child nodes
if not node_relation.is_node_link:
                try:  # Catch the potential PermissionsError above
                    forked_node = node_contained.fork_node(auth=auth, title='', parent=forked)
                except PermissionsError:
                    # If this exception is thrown, omit the node from the result set
                    forked_node = None
if forked_node is not None:
NodeRelation.objects.get_or_create(
is_node_link=False,
parent=forked,
child=forked_node
)
forked_node.root = None
forked_node.save() # Recompute root on save()
else:
# Copy linked nodes
NodeRelation.objects.get_or_create(
is_node_link=True,
parent=forked,
child=node_contained
)
if title is None:
forked.title = PREFIX + original.title
elif title == '':
forked.title = original.title
else:
forked.title = title
if len(forked.title) > 200:
forked.title = forked.title[:200]
forked.add_contributor(
contributor=user,
permissions=CREATOR_PERMISSIONS,
log=False,
save=False
)
forked.root = None # Recompute root on save
forked.save()
# Need to call this after save for the notifications to be created with the _primary_key
project_signals.contributor_added.send(forked, contributor=user, auth=auth, email_template='false')
forked.add_log(
action=NodeLog.NODE_FORKED,
params={
'parent_node': original.parent_id,
'node': original._primary_key,
'registration': forked._primary_key, # TODO: Remove this in favor of 'fork'
'fork': forked._primary_key,
},
auth=auth,
log_date=when,
save=False,
)
# Clone each log from the original node for this fork.
self.clone_logs(forked)
forked.refresh_from_db()
# After fork callback
for addon in original.get_addons():
addon.after_fork(original, forked, user)
return forked
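    # Illustrative usage: ``fork = node.fork_node(Auth(user))`` creates a private copy
    # titled 'Fork of <original title>'; children the user cannot read are silently
    # omitted (the PermissionsError is swallowed above).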
def clone_logs(self, node, page_size=100):
paginator = Paginator(self.logs.order_by('pk').all(), page_size)
for page_num in paginator.page_range:
page = paginator.page(page_num)
# Instantiate NodeLogs "manually"
# because BaseModel#clone() is too slow for large projects
logs_to_create = [
NodeLog(
_id=bson.ObjectId(),
action=log.action,
date=log.date,
params=log.params,
should_hide=log.should_hide,
foreign_user=log.foreign_user,
# Set foreign keys, not their objects
# to speed things up
node_id=node.pk,
user_id=log.user_id,
original_node_id=log.original_node_id
)
for log in page
]
NodeLog.objects.bulk_create(logs_to_create)
def use_as_template(self, auth, changes=None, top_level=True, parent=None):
"""Create a new project, using an existing project as a template.
        :param Auth auth: Auth object for the user to be assigned as creator
:param changes: A dictionary of changes, keyed by node id, which
override the attributes of the template project or its
children.
        :param bool top_level: Indicates the existence of a parent. TODO: deprecate
:param Node parent: parent template. Should only be passed in during recursion
:return: The `Node` instance created.
"""
Registration = apps.get_model('osf.Registration')
changes = changes or dict()
# build the dict of attributes to change for the new node
try:
attributes = changes[self._id]
# TODO: explicitly define attributes which may be changed.
except (AttributeError, KeyError):
attributes = dict()
if self.is_deleted:
raise NodeStateError('Cannot use deleted node as template.')
# Non-contributors can't template private nodes
if not (self.is_public or self.has_permission(auth.user, 'read')):
raise PermissionsError('{0!r} does not have permission to template node {1!r}'.format(auth.user, self._id))
new = self.clone()
if isinstance(new, Registration):
new.recast('osf.node')
new._is_templated_clone = True # This attribute may be read in post_save handlers
# Clear quasi-foreign fields
new.wiki_pages_current.clear()
new.wiki_pages_versions.clear()
new.wiki_private_uuids.clear()
new.file_guid_to_share_uuids.clear()
# set attributes which may be overridden by `changes`
new.is_public = False
new.description = ''
# apply `changes`
for attr, val in attributes.iteritems():
setattr(new, attr, val)
# set attributes which may NOT be overridden by `changes`
new.creator = auth.user
new.template_node = self
# Need to save in order to access contributors m2m table
new.save(suppress_log=True)
new.add_contributor(contributor=auth.user, permissions=CREATOR_PERMISSIONS, log=False, save=False)
new.is_fork = False
new.node_license = self.license.copy() if self.license else None
# If that title hasn't been changed, apply the default prefix (once)
if (
new.title == self.title and top_level and
language.TEMPLATED_FROM_PREFIX not in new.title
):
new.title = ''.join((language.TEMPLATED_FROM_PREFIX, new.title,))
if len(new.title) > 200:
new.title = new.title[:200]
# Slight hack - created is a read-only field.
new.created = timezone.now()
new.save(suppress_log=True)
# Need to call this after save for the notifications to be created with the _primary_key
project_signals.contributor_added.send(new, contributor=auth.user, auth=auth, email_template='false')
# Log the creation
new.add_log(
NodeLog.CREATED_FROM,
params={
'node': new._primary_key,
'template_node': {
'id': self._primary_key,
'url': self.url,
'title': self.title,
},
},
auth=auth,
log_date=new.created,
save=False,
)
new.save()
if parent:
node_relation = NodeRelation.objects.get(parent=parent.template_node, child=self)
NodeRelation.objects.get_or_create(_order=node_relation._order, parent=parent, child=new)
# deal with the children of the node, if any
for node_relation in self.node_relations.select_related('child').filter(child__is_deleted=False):
node_contained = node_relation.child
# template child nodes
if not node_relation.is_node_link:
try: # Catch the potential PermissionsError above
node_contained.use_as_template(auth, changes, top_level=False, parent=new)
except PermissionsError:
pass
new.root = None
new.save() # Recompute root on save()
return new
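    # Illustrative usage (``changes`` maps node guids to attribute overrides; names
    # are placeholders):
    #
    #     new_project = node.use_as_template(Auth(user), changes={node._id: {'title': 'My copy'}})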
def next_descendants(self, auth, condition=lambda auth, node: True):
"""
        Recursively find the first set of descendants under a given node that meet a given condition
returns a list of [(node, [children]), ...]
"""
ret = []
for node in self._nodes.order_by('created').all():
if condition(auth, node):
# base case
ret.append((node, []))
else:
ret.append((node, node.next_descendants(auth, condition)))
ret = [item for item in ret if item[1] or condition(auth, item[0])] # prune empty branches
return ret
def node_and_primary_descendants(self):
"""Return an iterator for a node and all of its primary (non-pointer) descendants.
:param node Node: target Node
"""
return itertools.chain([self], self.get_descendants_recursive(primary_only=True))
def active_contributors(self, include=lambda n: True):
for contrib in self.contributors.filter(is_active=True):
if include(contrib):
yield contrib
def get_active_contributors_recursive(self, unique_users=False, *args, **kwargs):
"""Yield (admin, node) tuples for this node and
descendant nodes. Excludes contributors on node links and inactive users.
:param bool unique_users: If True, a given admin will only be yielded once
during iteration.
"""
visited_user_ids = []
for node in self.node_and_primary_descendants(*args, **kwargs):
for contrib in node.active_contributors(*args, **kwargs):
if unique_users:
if contrib._id not in visited_user_ids:
visited_user_ids.append(contrib._id)
yield (contrib, node)
else:
yield (contrib, node)
def _get_admin_contributors_query(self, users):
return Contributor.objects.select_related('user').filter(
node=self,
user__in=users,
user__is_active=True,
admin=True
)
def get_admin_contributors(self, users):
"""Return a set of all admin contributors for this node. Excludes contributors on node links and
inactive users.
"""
return (each.user for each in self._get_admin_contributors_query(users))
def get_admin_contributors_recursive(self, unique_users=False, *args, **kwargs):
"""Yield (admin, node) tuples for this node and
descendant nodes. Excludes contributors on node links and inactive users.
:param bool unique_users: If True, a given admin will only be yielded once
during iteration.
"""
visited_user_ids = []
for node in self.node_and_primary_descendants(*args, **kwargs):
for contrib in node.contributors.all():
if node.has_permission(contrib, ADMIN) and contrib.is_active:
if unique_users:
if contrib._id not in visited_user_ids:
visited_user_ids.append(contrib._id)
yield (contrib, node)
else:
yield (contrib, node)
# TODO: Optimize me
def manage_contributors(self, user_dicts, auth, save=False):
"""Reorder and remove contributors.
:param list user_dicts: Ordered list of contributors represented as
dictionaries of the form:
{'id': <id>, 'permission': <One of 'read', 'write', 'admin'>, 'visible': bool}
:param Auth auth: Consolidated authentication information
:param bool save: Save changes
:raises: ValueError if any users in `users` not in contributors or if
no admin contributors remaining
"""
with transaction.atomic():
users = []
user_ids = []
permissions_changed = {}
visibility_removed = []
to_retain = []
to_remove = []
for user_dict in user_dicts:
user = OSFUser.load(user_dict['id'])
if user is None:
raise ValueError('User not found')
if not self.contributors.filter(id=user.id).exists():
raise ValueError(
'User {0} not in contributors'.format(user.fullname)
)
permissions = expand_permissions(user_dict['permission'])
if set(permissions) != set(self.get_permissions(user)):
# Validate later
self.set_permissions(user, permissions, validate=False, save=False)
permissions_changed[user._id] = permissions
# visible must be added before removed to ensure they are validated properly
if user_dict['visible']:
self.set_visible(user,
visible=True,
auth=auth)
else:
visibility_removed.append(user)
users.append(user)
user_ids.append(user_dict['id'])
for user in visibility_removed:
self.set_visible(user,
visible=False,
auth=auth)
for user in self.contributors.all():
if user._id in user_ids:
to_retain.append(user)
else:
to_remove.append(user)
if users is None or not self._get_admin_contributors_query(users).exists():
raise NodeStateError(
'Must have at least one registered admin contributor'
)
if to_retain != users:
# Ordered Contributor PKs, sorted according to the passed list of user IDs
sorted_contrib_ids = [
each.id for each in sorted(self.contributor_set.all(), key=lambda c: user_ids.index(c.user._id))
]
self.set_contributor_order(sorted_contrib_ids)
self.add_log(
action=NodeLog.CONTRIB_REORDERED,
params={
'project': self.parent_id,
'node': self._id,
'contributors': [
user._id
for user in users
],
},
auth=auth,
save=False,
)
if to_remove:
self.remove_contributors(to_remove, auth=auth, save=False)
if permissions_changed:
self.add_log(
action=NodeLog.PERMISSIONS_UPDATED,
params={
'project': self.parent_id,
'node': self._id,
'contributors': permissions_changed,
},
auth=auth,
save=False,
)
if save:
self.save()
self.save_node_preprints()
with transaction.atomic():
if to_remove or permissions_changed and ['read'] in permissions_changed.values():
project_signals.write_permissions_revoked.send(self)
# TODO: optimize me
def update_contributor(self, user, permission, visible, auth, save=False):
""" TODO: this method should be updated as a replacement for the main loop of
Node#manage_contributors. Right now there are redundancies, but to avoid major
        feature creep this will not be included at this time.
Also checks to make sure unique admin is not removing own admin privilege.
"""
if not self.has_permission(auth.user, ADMIN):
raise PermissionsError('Only admins can modify contributor permissions')
if permission:
permissions = expand_permissions(permission)
admins = self.contributor_set.filter(admin=True)
if not admins.count() > 1:
# has only one admin
admin = admins.first()
if admin.user == user and ADMIN not in permissions:
raise NodeStateError('{} is the only admin.'.format(user.fullname))
if not self.contributor_set.filter(user=user).exists():
raise ValueError(
'User {0} not in contributors'.format(user.fullname)
)
if set(permissions) != set(self.get_permissions(user)):
self.set_permissions(user, permissions, save=save)
permissions_changed = {
user._id: permissions
}
self.add_log(
action=NodeLog.PERMISSIONS_UPDATED,
params={
'project': self.parent_id,
'node': self._id,
'contributors': permissions_changed,
},
auth=auth,
save=save
)
with transaction.atomic():
if ['read'] in permissions_changed.values():
project_signals.write_permissions_revoked.send(self)
if visible is not None:
self.set_visible(user, visible, auth=auth)
self.save_node_preprints()
def save(self, *args, **kwargs):
first_save = not bool(self.pk)
if 'suppress_log' in kwargs.keys():
self._suppress_log = kwargs['suppress_log']
del kwargs['suppress_log']
else:
self._suppress_log = False
saved_fields = self.get_dirty_fields(check_relationship=True) or []
ret = super(AbstractNode, self).save(*args, **kwargs)
if saved_fields:
self.on_update(first_save, saved_fields)
if 'node_license' in saved_fields:
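                # Re-index public, non-deleted descendants that have no license of their own,
                # pushing the search updates in slices of 99 nodes per bulk call.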
children = list(self.descendants.filter(node_license=None, is_public=True, is_deleted=False))
while len(children):
batch = children[:99]
self.bulk_update_search(batch)
children = children[99:]
return ret
def on_update(self, first_save, saved_fields):
User = apps.get_model('osf.OSFUser')
request, user_id = get_request_and_user_id()
request_headers = {}
if not isinstance(request, DummyRequest):
request_headers = {
k: v
for k, v in get_headers_from_request(request).items()
if isinstance(v, basestring)
}
enqueue_task(node_tasks.on_node_updated.s(self._id, user_id, first_save, saved_fields, request_headers))
if self.preprint_file:
# avoid circular imports
from website.preprints.tasks import on_preprint_updated
PreprintService = apps.get_model('osf.PreprintService')
# .preprints wouldn't return a single deleted preprint
for preprint in PreprintService.objects.filter(node_id=self.id, is_published=True):
enqueue_task(on_preprint_updated.s(preprint._id))
user = User.load(user_id)
if user and self.check_spam(user, saved_fields, request_headers):
# Specifically call the super class save method to avoid recursion into model save method.
super(AbstractNode, self).save()
def _get_spam_content(self, saved_fields):
NodeWikiPage = apps.get_model('addons_wiki.NodeWikiPage')
spam_fields = self.SPAM_CHECK_FIELDS if self.is_public and 'is_public' in saved_fields else self.SPAM_CHECK_FIELDS.intersection(
saved_fields)
content = []
for field in spam_fields:
if field == 'wiki_pages_current':
newest_wiki_page = None
for wiki_page_id in self.wiki_pages_current.values():
wiki_page = NodeWikiPage.load(wiki_page_id)
if not newest_wiki_page:
newest_wiki_page = wiki_page
elif wiki_page.date > newest_wiki_page.date:
newest_wiki_page = wiki_page
if newest_wiki_page:
content.append(newest_wiki_page.raw_text(self).encode('utf-8'))
else:
content.append((getattr(self, field, None) or '').encode('utf-8'))
if not content:
return None
return ' '.join(content)
def check_spam(self, user, saved_fields, request_headers):
if not settings.SPAM_CHECK_ENABLED:
return False
if settings.SPAM_CHECK_PUBLIC_ONLY and not self.is_public:
return False
if 'ham_confirmed' in user.system_tags:
return False
content = self._get_spam_content(saved_fields)
if not content:
return
is_spam = self.do_check_spam(
user.fullname,
user.username,
content,
request_headers
)
logger.info("Node ({}) '{}' smells like {} (tip: {})".format(
self._id, self.title.encode('utf-8'), 'SPAM' if is_spam else 'HAM', self.spam_pro_tip
))
if is_spam:
self._check_spam_user(user)
return is_spam
def _check_spam_user(self, user):
if (
settings.SPAM_ACCOUNT_SUSPENSION_ENABLED
and (timezone.now() - user.date_confirmed) <= settings.SPAM_ACCOUNT_SUSPENSION_THRESHOLD
):
self.set_privacy('private', log=False, save=False)
# Suspend the flagged user for spam.
if 'spam_flagged' not in user.system_tags:
user.add_system_tag('spam_flagged')
if not user.is_disabled:
user.disable_account()
user.is_registered = False
mails.send_mail(
to_addr=user.username,
mail=mails.SPAM_USER_BANNED,
user=user,
osf_support_email=settings.OSF_SUPPORT_EMAIL
)
user.save()
# Make public nodes private from this contributor
for node in user.contributed:
if self._id != node._id and len(node.contributors) == 1 and node.is_public and not node.is_quickfiles:
node.set_privacy('private', log=False, save=True)
def flag_spam(self):
""" Overrides SpamMixin#flag_spam.
"""
super(AbstractNode, self).flag_spam()
if settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE:
self.set_privacy(Node.PRIVATE, auth=None, log=False, save=False, check_addons=False)
log = self.add_log(
action=NodeLog.MADE_PRIVATE,
params={
'project': self.parent_id,
'node': self._primary_key,
},
auth=None,
save=False
)
log.should_hide = True
log.save()
def confirm_spam(self, save=False):
super(AbstractNode, self).confirm_spam(save=False)
self.set_privacy(Node.PRIVATE, auth=None, log=False, save=False)
log = self.add_log(
action=NodeLog.MADE_PRIVATE,
params={
'project': self.parent_id,
'node': self._primary_key,
},
auth=None,
save=False
)
log.should_hide = True
log.save()
if save:
self.save()
def resolve(self):
"""For compat with v1 Pointers."""
return self
def set_title(self, title, auth, save=False):
"""Set the title of this Node and log it.
:param str title: The new title.
:param auth: All the auth information including user, API key.
"""
# Called so validation does not have to wait until save.
validate_title(title)
original_title = self.title
new_title = sanitize.strip_html(title)
        # Title hasn't changed after sanitization, bail out
if original_title == new_title:
return False
self.title = new_title
self.add_log(
action=NodeLog.EDITED_TITLE,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'title_new': self.title,
'title_original': original_title,
},
auth=auth,
save=False,
)
if save:
self.save()
return None
def set_description(self, description, auth, save=False):
"""Set the description and log the event.
:param str description: The new description
        :param auth: All the auth information including user, API key.
:param bool save: Save self after updating.
"""
original = self.description
new_description = sanitize.strip_html(description)
if original == new_description:
return False
self.description = new_description
self.add_log(
action=NodeLog.EDITED_DESCRIPTION,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'description_new': self.description,
'description_original': original
},
auth=auth,
save=False,
)
if save:
self.save()
return None
def update(self, fields, auth=None, save=True):
"""Update the node with the given fields.
:param dict fields: Dictionary of field_name:value pairs.
:param Auth auth: Auth object for the user making the update.
:param bool save: Whether to save after updating the object.
"""
if not fields: # Bail out early if there are no fields to update
return False
values = {}
for key, value in fields.iteritems():
if key not in self.WRITABLE_WHITELIST:
continue
if self.is_registration and key != 'is_public':
raise NodeUpdateError(reason='Registered content cannot be updated', key=key)
# Title and description have special methods for logging purposes
if key == 'title':
                if not self.is_bookmark_collection and not self.is_quickfiles:
self.set_title(title=value, auth=auth, save=False)
else:
raise NodeUpdateError(reason='Bookmark collections or QuickFilesNodes cannot be renamed.', key=key)
elif key == 'description':
self.set_description(description=value, auth=auth, save=False)
elif key == 'is_public':
self.set_privacy(
Node.PUBLIC if value else Node.PRIVATE,
auth=auth,
log=True,
save=False
)
elif key == 'node_license':
self.set_node_license(
{
'id': value.get('id'),
'year': value.get('year'),
'copyrightHolders': value.get('copyrightHolders') or value.get('copyright_holders', [])
},
auth,
save=save
)
else:
with warnings.catch_warnings():
try:
# This is in place because historically projects and components
# live on different ElasticSearch indexes, and at the time of Node.save
# there is no reliable way to check what the old Node.category
                        # value was. When the category changes it is possible to have duplicate/dead
                        # search entries, so always delete the ES doc on category change
# TODO: consolidate Node indexes into a single index, refactor search
if key == 'category':
self.delete_search_entry()
###############
old_value = getattr(self, key)
if old_value != value:
values[key] = {
'old': old_value,
'new': value,
}
setattr(self, key, value)
except AttributeError:
raise NodeUpdateError(reason="Invalid value for attribute '{0}'".format(key), key=key)
except warnings.Warning:
raise NodeUpdateError(reason="Attribute '{0}' doesn't exist on the Node class".format(key), key=key)
if save:
updated = self.get_dirty_fields()
self.save()
else:
updated = []
for key in values:
values[key]['new'] = getattr(self, key)
if values:
self.add_log(
NodeLog.UPDATED_FIELDS,
params={
'node': self._id,
'updated_fields': {
key: {
'old': values[key]['old'],
'new': values[key]['new']
}
for key in values
}
},
auth=auth)
return updated
def remove_node(self, auth, date=None):
"""Marks a node as deleted.
TODO: Call a hook on addons
Adds a log to the parent node if applicable
:param auth: an instance of :class:`Auth`.
:param date: Date node was removed
:type date: `datetime.datetime` or `None`
"""
# TODO: rename "date" param - it's shadowing a global
if not self.can_edit(auth):
raise PermissionsError(
'{0!r} does not have permission to modify this {1}'.format(auth.user, self.category or 'node')
)
if Node.objects.get_children(self, active=True):
raise NodeStateError('Any child components must be deleted prior to deleting this project.')
# After delete callback
for addon in self.get_addons():
message = addon.after_delete(self, auth.user)
if message:
status.push_status_message(message, kind='info', trust=False)
log_date = date or timezone.now()
# Add log to parent
if self.parent_node:
self.parent_node.add_log(
NodeLog.NODE_REMOVED,
params={
'project': self._primary_key,
},
auth=auth,
log_date=log_date,
save=True,
)
else:
self.add_log(
NodeLog.PROJECT_DELETED,
params={
'project': self._primary_key,
},
auth=auth,
log_date=log_date,
save=True,
)
self.is_deleted = True
self.deleted_date = date
self.save()
project_signals.node_deleted.send(self)
return True
def admin_public_wiki(self, user):
return (
self.has_addon('wiki') and
self.has_permission(user, 'admin') and
self.is_public
)
def admin_of_wiki(self, user):
return (
self.has_addon('wiki') and
self.has_permission(user, 'admin')
)
def include_wiki_settings(self, user):
"""Check if node meets requirements to make publicly editable."""
return self.get_descendants_recursive()
def get_wiki_page(self, name=None, version=None, id=None):
NodeWikiPage = apps.get_model('addons_wiki.NodeWikiPage')
if name:
name = (name or '').strip()
key = to_mongo_key(name)
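            # version may be a 1-based integer index into the page's history,
            # the string 'previous' or 'current', or None (treated as current).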
try:
if version and (isinstance(version, int) or version.isdigit()):
id = self.wiki_pages_versions[key][int(version) - 1]
elif version == 'previous':
id = self.wiki_pages_versions[key][-2]
elif version == 'current' or version is None:
id = self.wiki_pages_current[key]
else:
return None
except (KeyError, IndexError):
return None
return NodeWikiPage.load(id)
def update_node_wiki(self, name, content, auth):
"""Update the node's wiki page with new content.
        :param name: A string, the page's name, e.g. ``"home"``.
:param content: A string, the posted content.
:param auth: All the auth information including user, API key.
"""
NodeWikiPage = apps.get_model('addons_wiki.NodeWikiPage')
Comment = apps.get_model('osf.Comment')
name = (name or '').strip()
key = to_mongo_key(name)
has_comments = False
current = None
if key not in self.wiki_pages_current:
if key in self.wiki_pages_versions:
version = len(self.wiki_pages_versions[key]) + 1
else:
version = 1
else:
current = NodeWikiPage.load(self.wiki_pages_current[key])
version = current.version + 1
current.save()
if Comment.objects.filter(root_target=current.guids.all()[0]).exists():
has_comments = True
new_page = NodeWikiPage(
page_name=name,
version=version,
user=auth.user,
node=self,
content=content
)
new_page.save()
if has_comments:
Comment.objects.filter(root_target=current.guids.all()[0]).update(root_target=Guid.load(new_page._id))
Comment.objects.filter(target=current.guids.all()[0]).update(target=Guid.load(new_page._id))
if current:
for contrib in self.contributors:
if contrib.comments_viewed_timestamp.get(current._id, None):
timestamp = contrib.comments_viewed_timestamp[current._id]
contrib.comments_viewed_timestamp[new_page._id] = timestamp
del contrib.comments_viewed_timestamp[current._id]
contrib.save()
# check if the wiki page already exists in versions (existed once and is now deleted)
if key not in self.wiki_pages_versions:
self.wiki_pages_versions[key] = []
self.wiki_pages_versions[key].append(new_page._primary_key)
self.wiki_pages_current[key] = new_page._primary_key
self.add_log(
action=NodeLog.WIKI_UPDATED,
params={
'project': self.parent_id,
'node': self._primary_key,
'page': new_page.page_name,
'page_id': new_page._primary_key,
'version': new_page.version,
},
auth=auth,
log_date=new_page.date,
save=False,
)
self.save()
# TODO: Move to wiki add-on
def rename_node_wiki(self, name, new_name, auth):
"""Rename the node's wiki page with new name.
:param name: A string, the page's name, e.g. ``"My Page"``.
:param new_name: A string, the new page's name, e.g. ``"My Renamed Page"``.
:param auth: All the auth information including user, API key.
"""
# TODO: Fix circular imports
from addons.wiki.exceptions import (
PageCannotRenameError,
PageConflictError,
PageNotFoundError,
)
name = (name or '').strip()
key = to_mongo_key(name)
new_name = (new_name or '').strip()
new_key = to_mongo_key(new_name)
page = self.get_wiki_page(name)
if key == 'home':
raise PageCannotRenameError('Cannot rename wiki home page')
if not page:
raise PageNotFoundError('Wiki page not found')
if (new_key in self.wiki_pages_current and key != new_key) or new_key == 'home':
raise PageConflictError(
'Page already exists with name {0}'.format(
new_name,
)
)
# rename the page first in case we hit a validation exception.
old_name = page.page_name
page.rename(new_name)
# TODO: merge historical records like update (prevents log breaks)
# transfer the old page versions/current keys to the new name.
if key != new_key:
self.wiki_pages_versions[new_key] = self.wiki_pages_versions[key]
del self.wiki_pages_versions[key]
self.wiki_pages_current[new_key] = self.wiki_pages_current[key]
del self.wiki_pages_current[key]
if key in self.wiki_private_uuids:
self.wiki_private_uuids[new_key] = self.wiki_private_uuids[key]
del self.wiki_private_uuids[key]
self.add_log(
action=NodeLog.WIKI_RENAMED,
params={
'project': self.parent_id,
'node': self._primary_key,
'page': page.page_name,
'page_id': page._primary_key,
'old_page': old_name,
'version': page.version,
},
auth=auth,
save=True,
)
def delete_node_wiki(self, name, auth):
name = (name or '').strip()
key = to_mongo_key(name)
page = self.get_wiki_page(key)
del self.wiki_pages_current[key]
if key != 'home':
del self.wiki_pages_versions[key]
self.add_log(
action=NodeLog.WIKI_DELETED,
params={
'project': self.parent_id,
'node': self._primary_key,
'page': page.page_name,
'page_id': page._primary_key,
},
auth=auth,
save=False,
)
self.save()
def add_addon(self, name, auth, log=True):
ret = super(AbstractNode, self).add_addon(name, auth)
if ret and log:
self.add_log(
action=NodeLog.ADDON_ADDED,
params={
'project': self.parent_id,
'node': self._id,
'addon': ret.__class__._meta.app_config.full_name,
},
auth=auth,
save=False,
)
self.save() # TODO Required?
return ret
def delete_addon(self, addon_name, auth, _force=False):
"""Delete an add-on from the node.
:param str addon_name: Name of add-on
:param Auth auth: Consolidated authorization object
:param bool _force: For migration testing ONLY. Do not set to True
in the application, or else projects will be allowed to delete
mandatory add-ons!
:return bool: Add-on was deleted
"""
ret = super(AbstractNode, self).delete_addon(addon_name, auth, _force)
if ret:
config = settings.ADDONS_AVAILABLE_DICT[addon_name]
self.add_log(
action=NodeLog.ADDON_REMOVED,
params={
'project': self.parent_id,
'node': self._primary_key,
'addon': config.full_name,
},
auth=auth,
save=False,
)
self.save()
# TODO: save here or outside the conditional? @mambocab
return ret
def has_addon_on_children(self, addon):
"""Checks if a given node has a specific addon on child nodes
that are not registrations or deleted
"""
if self.has_addon(addon):
return True
# TODO: Optimize me into one query
for node_relation in self.node_relations.filter(is_node_link=False, child__is_deleted=False).select_related(
'child'):
node = node_relation.child
if node.has_addon_on_children(addon):
return True
return False
def is_derived_from(self, other, attr):
derived_from = getattr(self, attr)
while True:
if derived_from is None:
return False
if derived_from == other:
return True
derived_from = getattr(derived_from, attr)
def is_fork_of(self, other):
return self.is_derived_from(other, 'forked_from')
def is_registration_of(self, other):
return self.is_derived_from(other, 'registered_from')
class Node(AbstractNode):
"""
Concrete Node class: Instance of AbstractNode(TypedModel). All things that inherit
from AbstractNode will appear in the same table and will be differentiated by the `type` column.
FYI: Behaviors common between Registration and Node should be on the parent class.
"""
@property
def api_v2_url(self):
return reverse('nodes:node-detail', kwargs={'node_id': self._id, 'version': 'v2'})
@property
def is_bookmark_collection(self):
"""For v1 compat"""
return False
class Meta:
# custom permissions for use in the OSF Admin App
permissions = (
('view_node', 'Can view node details'),
)
class Collection(AbstractNode):
is_bookmark_collection = models.NullBooleanField(default=False, db_index=True)
@property
def is_collection(self):
"""For v1 compat."""
return True
@property
def is_registration(self):
"""For v1 compat."""
return False
def remove_node(self, auth, date=None):
if self.is_bookmark_collection:
raise NodeStateError('Bookmark collections may not be deleted.')
# Remove all the collections that this is pointing at.
for pointed in self.linked_nodes.all():
if pointed.is_collection:
pointed.remove_node(auth=auth)
return super(Collection, self).remove_node(auth=auth, date=date)
def save(self, *args, **kwargs):
# Bookmark collections are always named 'Bookmarks'
if self.is_bookmark_collection and self.title != 'Bookmarks':
self.title = 'Bookmarks'
return super(Collection, self).save(*args, **kwargs)
##### Signal listeners #####
@receiver(post_save, sender=Collection)
@receiver(post_save, sender=Node)
@receiver(post_save, sender='osf.QuickFilesNode')
def add_creator_as_contributor(sender, instance, created, **kwargs):
if created:
Contributor.objects.get_or_create(
user=instance.creator,
node=instance,
visible=True,
read=True,
write=True,
admin=True
)
@receiver(post_save, sender=Collection)
@receiver(post_save, sender=Node)
def add_project_created_log(sender, instance, created, **kwargs):
if created and instance.is_original and not instance._suppress_log:
# Define log fields for non-component project
log_action = NodeLog.PROJECT_CREATED
log_params = {
'node': instance._id,
}
if getattr(instance, 'parent_node', None):
log_params.update({'parent_node': instance.parent_node._id})
# Add log with appropriate fields
instance.add_log(
log_action,
params=log_params,
auth=Auth(user=instance.creator),
log_date=instance.created,
save=True,
)
@receiver(post_save, sender=Collection)
@receiver(post_save, sender=Node)
def send_osf_signal(sender, instance, created, **kwargs):
if created and instance.is_original and not instance._suppress_log:
project_signals.project_created.send(instance)
@receiver(post_save, sender=Collection)
@receiver(post_save, sender=Node)
def add_default_node_addons(sender, instance, created, **kwargs):
if (created or instance._is_templated_clone) and instance.is_original and not instance._suppress_log:
for addon in settings.ADDONS_AVAILABLE:
if 'node' in addon.added_default:
instance.add_addon(addon.short_name, auth=None, log=False)
@receiver(post_save, sender=Collection)
@receiver(post_save, sender=Node)
@receiver(post_save, sender='osf.Registration')
@receiver(post_save, sender='osf.QuickFilesNode')
def set_parent_and_root(sender, instance, created, *args, **kwargs):
if getattr(instance, '_parent', None):
NodeRelation.objects.get_or_create(
parent=instance._parent,
child=instance,
is_node_link=False
)
# remove cached copy of parent_node
try:
del instance.__dict__['parent_node']
except KeyError:
pass
if not instance.root:
instance.root = instance.get_root()
instance.save()
| leb2dg/osf.io | osf/models/node.py | Python | apache-2.0 | 117,415 | 0.001823 |
class Solution:
"""
@param: nums: A list of integers
    @return: An integer indicating the sum of the max subarray
"""
def maxSubArray(self, nums):
# write your code here
        if not nums: return 0
cur = res = nums[0]
for i in xrange(1, len(nums)):
cur = max(cur + nums[i], nums[i])
res = max(res, cur)
return res
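

# Minimal usage sketch (illustrative addition, not part of the original LintCode stub):
# Kadane's scan keeps the best sum ending at the current index and the best sum seen so far;
# for the classic example below the best subarray is [4, -1, 2, 1] with sum 6.
if __name__ == '__main__':
    print Solution().maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4])  # expected: 6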
| shawncaojob/LC | LINTCODE/41_maximum_subarray.py | Python | gpl-3.0 | 410 | 0.004878 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from .publish import ArchivePublishResource, ArchivePublishService # NOQA
from .kill import KillPublishResource, KillPublishService # NOQA
from .correct import CorrectPublishResource, CorrectPublishService # NOQA
| plamut/superdesk | server/apps/publish/content/__init__.py | Python | agpl-3.0 | 524 | 0 |
import unittest
from openmdao.main.api import VariableTree, Component, Assembly
from openmdao.main.datatypes.api import Float, VarTree
class VT(VariableTree):
x = Float()
class C(Component):
x = Float(iotype='in')
out = Float(iotype='out')
def execute(self):
self.out = 2 * self.x
class A(Assembly):
vt = VarTree(VT(), iotype='in')
def configure(self):
self.add('c', C())
self.driver.workflow.add(['c'])
self.connect('vt.x', 'c.x')
self.create_passthrough('c.out')
class TestCase(unittest.TestCase):
def test_vtree(self):
a = A()
a.vt.x = 1.0
a.run()
self.assertEqual(a.out, 2.0)
if __name__ == '__main__':
unittest.main()
| DailyActie/Surrogate-Model | 01-codes/OpenMDAO-Framework-dev/openmdao.main/src/openmdao/main/test/test_aningvtree.py | Python | mit | 742 | 0 |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import webapp2,jinja2,os
import logging
import wowapi
from datetime import datetime
from google.appengine.ext import ndb
from google.appengine.api.memcache import Client
from google.appengine.api import taskqueue
from google.appengine.api.taskqueue import Queue
from google.appengine.api.taskqueue import QueueStatistics
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'])
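

# Datastore models (summary comment added for clarity): Progression stores per-difficulty
# boss kill counts for a single raid, and Group ties a raid team's member toons to its
# Highmaul (hm) and Blackrock Foundry (brf) progression plus an average item level.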
class Progression(ndb.Model):
raidname = ndb.StringProperty(indexed = True, required = True)
numbosses = ndb.IntegerProperty(default = 0, required = True)
normal = ndb.IntegerProperty(default = 0, required = True)
heroic = ndb.IntegerProperty(default = 0, required = True)
mythic = ndb.IntegerProperty(default = 0, required = True)
class Group(ndb.Model):
name = ndb.StringProperty(indexed=True, required = True)
toons = ndb.StringProperty(repeated=True)
brf = ndb.StructuredProperty(Progression, required = True)
hm = ndb.StructuredProperty(Progression, required = True)
lastupdated = ndb.DateTimeProperty(auto_now=True)
avgilvl = ndb.IntegerProperty(default = 0)
class Global(ndb.Model):
lastupdated = ndb.DateTimeProperty(auto_now=True)
class ProgressBuilder(webapp2.RequestHandler):
difficulties = ['normal','heroic','mythic']
hmbosses = ['Kargath Bladefist','The Butcher','Brackenspore','Tectus','Twin Ogron','Ko\'ragh','Imperator Mar\'gok']
brfbosses = ['Oregorger','Gruul','The Blast Furnace','Hans\'gar and Franzok','Flamebender Ka\'graz','Kromog','Beastlord Darmac','Operator Thogar','The Iron Maidens','Blackhand']
def post(self):
start = self.request.get('start')
end = self.request.get('end')
logging.info('%s %s' % (start,end))
importer = wowapi.Importer()
q = Group.query()
groups = q.fetch()
logging.info('Builder task for range %s to %s started' % (start, end))
for group in groups:
firstchar = group.name[0]
if firstchar < start or firstchar > end:
continue
data = list()
importer.load(group.toons, data)
progress = dict()
self.parse(ProgressBuilder.difficulties, ProgressBuilder.hmbosses,
data, 'Highmaul', progress)
self.parse(ProgressBuilder.difficulties, ProgressBuilder.brfbosses,
data, 'Blackrock Foundry', progress)
# calculate the avg ilvl values from the toon data
group.avgilvl = 0
numtoons = 0
for toon in data:
if 'items' in toon:
numtoons += 1
group.avgilvl += toon['items']['averageItemLevel']
if numtoons != 0:
group.avgilvl /= numtoons
self.response.write(group.name + " data generated<br/>")
# update the entry in ndb with the new progression data for this
# group. this also checks to make sure that the progress only ever
            # increases, in case of weirdness with the data.
group.brf.normal = max(group.brf.normal,
progress['Blackrock Foundry']['normal'])
group.brf.heroic = max(group.brf.heroic,
progress['Blackrock Foundry']['heroic'])
group.brf.mythic = max(group.brf.mythic,
progress['Blackrock Foundry']['mythic'])
group.hm.normal = max(group.hm.normal,
progress['Highmaul']['normal'])
group.hm.heroic = max(group.hm.heroic,
progress['Highmaul']['heroic'])
group.hm.mythic = max(group.hm.mythic,
progress['Highmaul']['mythic'])
group.put()
logging.info('Finished building group %s' % group.name)
logging.info('Builder task for range %s to %s completed' % (start, end))
# update the last updated for the whole dataset. don't actually
# have to set the time here, the auto_now flag on the property does
# it for us.
q = Global.query()
r = q.fetch()
if (len(r) == 0):
g = Global()
else:
g = r[0]
g.put()
def parse(self, difficulties, bosses, toondata, raidname, progress):
progress[raidname] = dict()
bossdata = dict()
for boss in bosses:
bossdata[boss] = dict()
for d in difficulties:
bossdata[boss][d] = dict()
bossdata[boss][d]['times'] = list()
bossdata[boss][d]['timeset'] = set()
bossdata[boss][d]['killed'] = True
bossdata[boss][d]['killtime'] = 0
bossdata[boss][d]['killinv'] = 0
# loop through each toon in the data from the blizzard API
for toon in toondata:
if 'progression' not in toon:
continue
# get just the raid data for this toon
raids = toon['progression']['raids']
            # this filters the raid data down to just the raid we're looking
            # at on this pass
raid = [d for d in raids if d['name'] == raidname][0]
# loop through the individual bosses and get the timestamp for
# the last kill for this toon for each boss
for boss in bosses:
# this filters the raid data down to just a single boss
b = [d for d in raid['bosses'] if d['name'] == boss][0]
# loop through each difficulty level and grab each timestamp.
# skip any timestamps of zero. that means the toon never
# killed the boss.
for d in difficulties:
if b[d+'Timestamp'] != 0:
bossdata[boss][d]['times'].append(b[d+'Timestamp'])
bossdata[boss][d]['timeset'].add(b[d+'Timestamp'])
# loop back through the difficulties and bosses and build up the
# progress data
for d in difficulties:
progress[raidname][d] = 0
for boss in bosses:
# for each boss, grab the set of unique timestamps and sort it
# with the last kill first
timelist = list(bossdata[boss][d]['timeset'])
timelist.sort(reverse=True)
# now loop through that time list. a kill involving 5 or more
# players from the group is considered a kill for the whole
# group and counts towards progress.
for t in timelist:
count = bossdata[boss][d]['times'].count(t)
if count >= 5:
bossdata[boss][d]['killed'] = True
bossdata[boss][d]['killtime'] = t
bossdata[boss][d]['killinv'] = count
progress[raidname][d] += 1
ts = datetime.fromtimestamp(t/1000)
# logging.info('count for %s %s at time %s (involved %d members)' % (boss, d, ts.strftime("%Y-%m-%d %H:%M:%S"), count))
break
class Ranker(webapp2.RequestHandler):
def get(self):
queue = Queue()
stats = queue.fetch_statistics()
template_values={
'tasks': stats.tasks,
'in_flight': stats.in_flight,
}
template = JINJA_ENVIRONMENT.get_template('templates/ranker.html')
self.response.write(template.render(template_values))
def post(self):
# refuse to start the tasks if there are some already running
queue = Queue()
stats = queue.fetch_statistics()
if stats.tasks == 0:
print 'nop'
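            # Partition the groups alphabetically by the first letter of their name so that
            # each builder task processes a slice of the dataset (see ProgressBuilder.post).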
taskqueue.add(url='/builder', params={'start':'A', 'end':'B'})
taskqueue.add(url='/builder', params={'start':'C', 'end':'E'})
taskqueue.add(url='/builder', params={'start':'F', 'end':'G'})
taskqueue.add(url='/builder', params={'start':'H', 'end':'H'})
taskqueue.add(url='/builder', params={'start':'I', 'end':'M'})
taskqueue.add(url='/builder', params={'start':'N', 'end':'O'})
taskqueue.add(url='/builder', params={'start':'P', 'end':'R'})
taskqueue.add(url='/builder', params={'start':'S', 'end':'S'})
taskqueue.add(url='/builder', params={'start':'T', 'end':'T'})
taskqueue.add(url='/builder', params={'start':'U', 'end':'Z'})
self.redirect('/rank')
class Display(webapp2.RequestHandler):
def get(self):
q = Global.query()
r = q.fetch()
template_values = {
'last_updated': r[0].lastupdated
}
template = JINJA_ENVIRONMENT.get_template('templates/header.html')
self.response.write(template.render(template_values))
# get the group data from the datastore, and order it in decreasing order
# so that further progressed teams show up first. break ties by
# alphabetical order of group names
q = Group.query().order(-Group.brf.mythic, -Group.brf.heroic, -Group.brf.normal).order(-Group.hm.mythic, -Group.hm.heroic, -Group.hm.normal).order(Group.name)
groups = q.fetch()
for group in groups:
template_values = {'group' : group}
template = JINJA_ENVIRONMENT.get_template('templates/group.html')
self.response.write(template.render(template_values))
self.response.write(" <div style='clear: both;font-size: 12px;text-align:center'>Site code by Tamen - Aerie Peak(US) • <a href='http://github.com/timwoj/ctrprogress'>http://github.com/timwoj/ctrprogress<a/></div>")
self.response.write('</body></html>')
class DisplayText(webapp2.RequestHandler):
def get(self):
q = Global.query()
r = q.fetch()
if (len(r)):
print r[0]
template_values = {
'last_updated': r[0].lastupdated
}
template = JINJA_ENVIRONMENT.get_template('templates/header.html')
self.response.write(template.render(template_values))
# get the group data from the datastore, and order it in decreasing order
# so that further progressed teams show up first. break ties by
# alphabetical order of group names
q = Group.query().order(-Group.brf.mythic, -Group.brf.heroic, -Group.brf.normal).order(-Group.hm.mythic, -Group.hm.heroic, -Group.hm.normal).order(Group.name)
groups = q.fetch()
for group in groups:
self.response.write('%s (Avg ilvl: %d)<br/>' % (group.name,group.avgilvl))
self.writeProgress(group.brf)
self.writeProgress(group.hm)
self.response.write('<br/>')
self.response.write('</body></html>')
def writeProgress(self, raid):
self.response.write("%s: %d/%dN %d/%dH %d/%dM<br/>" %
(raid.raidname, raid.normal, raid.numbosses,
raid.heroic, raid.numbosses, raid.mythic,
raid.numbosses))
| AndyHannon/ctrprogress | ranker.py | Python | mit | 11,305 | 0.008138 |
#!/usr/bin/env python3
# Copyright 2014, 2015, 2017 Endless Mobile, Inc.
# This file is part of eos-event-recorder-daemon.
#
# eos-event-recorder-daemon is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at your
# option) any later version.
#
# eos-event-recorder-daemon is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with eos-event-recorder-daemon. If not, see
# <http://www.gnu.org/licenses/>.
import configparser
import dbus
import os
import shlex
import subprocess
import taptestrunner
import tempfile
import time
import unittest
import dbusmock
_METRICS_IFACE = 'com.endlessm.Metrics.EventRecorderServer'
class TestOptOutIntegration(dbusmock.DBusTestCase):
"""
Makes sure the Enabled property can be set and retrieved.
"""
@classmethod
def setUpClass(klass):
"""Set up a mock system bus."""
klass.start_system_bus()
klass.dbus_con = klass.get_dbus(system_bus=True)
def setUp(self):
"""Start the event recorder on the mock system bus."""
# Put polkitd mocks onto the mock system bus.
(self.polkit_popen, self.polkit_obj) = self.spawn_server_template('polkitd')
self.test_dir = tempfile.TemporaryDirectory(
prefix='eos-event-recorder-daemon-test.')
persistent_cache_directory = os.path.join(self.test_dir.name, 'cache')
os.mkdir(persistent_cache_directory)
escaped_dir = shlex.quote(persistent_cache_directory)
persistent_cache_dir_arg = '--persistent-cache-directory=' + escaped_dir
self.config_file = os.path.join(self.test_dir.name, 'permissions.conf')
config_file_arg = '--config-file-path={}'.format(self.config_file)
# TODO: The daemon attempts to create CONFIG_DIR / cache-size.conf when
# launched; this will typically fail while running this test because
# either CONFIG_DIR does not exist, or it exists and is not owned by
# the user running the test. The daemon logs a warning in this case.
# (If the test is running as root or the metrics user, and the
# directory exists, then the test will overwrite the file within!)
# TODO: The daemon assumes it is running on an OSTree system and
# attempts to open /ostree/repo/config to determine whether to adjust
# the environment in its own configuration (self.config_file above).
# When running on a non-OSTree system such as a build server or
# development container, this fails, and logs a warning. (This could
# be addressed by, for example, checking ostree_sysroot_is_booted().)
# TODO: Address both issues above, then enable fatal warnings.
daemon_path = os.environ.get('EMER_PATH', './eos-metrics-event-recorder')
self.daemon = subprocess.Popen([daemon_path,
persistent_cache_dir_arg,
config_file_arg])
# Wait for the service to come up
self.wait_for_bus_object('com.endlessm.Metrics',
'/com/endlessm/Metrics', system_bus=True)
metrics_object = self.dbus_con.get_object('com.endlessm.Metrics',
'/com/endlessm/Metrics')
self.interface = dbus.Interface(metrics_object, _METRICS_IFACE)
def tearDown(self):
self.polkit_popen.terminate()
self.daemon.terminate()
self.polkit_popen.wait()
self.assertEqual(self.daemon.wait(), 0)
self.test_dir.cleanup()
def test_opt_out_readable(self):
"""Make sure the Enabled property exists."""
self.interface.Get(_METRICS_IFACE, 'Enabled',
dbus_interface=dbus.PROPERTIES_IFACE)
def test_opt_out_not_writable(self):
"""Make sure the Enabled property is not writable."""
with self.assertRaisesRegex(dbus.DBusException, 'org\.freedesktop\.DBus\.Error\.InvalidArgs'):
self.interface.Set(_METRICS_IFACE, 'Enabled', False,
dbus_interface=dbus.PROPERTIES_IFACE)
def test_set_enabled_authorized(self):
"""
        Make sure the Enabled property's value persists and that calling SetEnabled
        succeeds when the polkit action is allowed.
"""
# Check defaults look good and erase the file before our next change
self._check_config_file(enabled='true', uploading_enabled='false')
self.polkit_obj.SetAllowed(['com.endlessm.Metrics.SetEnabled'])
self.interface.SetEnabled(True)
self.assertTrue(self.interface.Get(_METRICS_IFACE, 'Enabled',
dbus_interface=dbus.PROPERTIES_IFACE))
self._check_config_file(enabled='true', uploading_enabled='true')
self.interface.SetEnabled(False)
self.assertFalse(self.interface.Get(_METRICS_IFACE, 'Enabled',
dbus_interface=dbus.PROPERTIES_IFACE))
self._check_config_file(enabled='false', uploading_enabled='false')
def test_set_enabled_unauthorized(self):
"""
Make sure that accessing SetEnabled fails if not explicitly authorized.
"""
with self.assertRaisesRegex(dbus.DBusException, 'org\.freedesktop\.DBus\.Error\.AuthFailed'):
self.interface.SetEnabled(True)
def test_upload_doesnt_change_config(self):
"""
Make sure that calling UploadEvents() doesn't spontaneously enable
uploading. This seems implausible but did actually happen.
UploadEvents() causes the config to be re-read, triggering a change
notification on EmerPermissionsProvider:enabled, triggering a (no-op)
update of the Enabled D-Bus property to TRUE, which was bound to
EmerPermissionsProvider:uploading-enabled so caused that property to
be set to TRUE.
"""
# Check defaults look good and erase the file before our next change
self._check_config_file(enabled='true', uploading_enabled='false')
with self.assertRaisesRegex(dbus.exceptions.DBusException,
r'uploading is disabled') as context:
self.interface.UploadEvents()
self.assertEqual(context.exception.get_dbus_name(),
"com.endlessm.Metrics.Error.UploadingDisabled")
self._check_config_file(enabled='true', uploading_enabled='false')
def test_UploadEvents_fails_if_disabled(self):
self.polkit_obj.SetAllowed(['com.endlessm.Metrics.SetEnabled'])
self.interface.SetEnabled(False)
with self.assertRaisesRegex(dbus.exceptions.DBusException,
r'metrics system is disabled') as context:
self.interface.UploadEvents()
self.assertEqual(context.exception.get_dbus_name(),
"com.endlessm.Metrics.Error.MetricsDisabled")
def _check_config_file(self, enabled, uploading_enabled):
# the config file is written asynchronously by the daemon,
# so may not exist immediately after a change is made - wait
# for up to 1 second for it to be written
for i in range(20):
if os.path.exists(self.config_file):
break
else:
time.sleep(0.05)
config = configparser.ConfigParser()
self.assertEqual(config.read(self.config_file), [self.config_file])
self.assertEqual(config.get("global", "enabled"), enabled)
self.assertEqual(config.get("global", "uploading_enabled"), uploading_enabled)
# erase the file after reading it to guarantee that the next time it
# exists, it's up to date. the daemon doesn't read it once started.
os.unlink(self.config_file)
if __name__ == '__main__':
unittest.main(testRunner=taptestrunner.TAPTestRunner())
| endlessm/eos-event-recorder-daemon | tests/test-opt-out-integration.py | Python | gpl-2.0 | 8,113 | 0.002588 |
from __future__ import unicode_literals
import boto
import copy
import itertools
import re
import six
from collections import defaultdict
from datetime import datetime
from boto.ec2.instance import Instance as BotoInstance, Reservation
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
from boto.ec2.spotinstancerequest import SpotInstanceRequest as BotoSpotRequest
from boto.ec2.launchspecification import LaunchSpecification
from moto.core import BaseBackend
from moto.core.models import Model
from moto.core.utils import iso_8601_datetime_with_milliseconds
from .exceptions import (
EC2ClientError,
DependencyViolationError,
MissingParameterError,
InvalidParameterValueError,
InvalidParameterValueErrorTagNull,
InvalidDHCPOptionsIdError,
MalformedDHCPOptionsIdError,
InvalidKeyPairNameError,
InvalidKeyPairDuplicateError,
InvalidInternetGatewayIdError,
GatewayNotAttachedError,
ResourceAlreadyAssociatedError,
InvalidVPCIdError,
InvalidSubnetIdError,
InvalidNetworkInterfaceIdError,
InvalidNetworkAttachmentIdError,
InvalidSecurityGroupDuplicateError,
InvalidSecurityGroupNotFoundError,
InvalidPermissionNotFoundError,
InvalidRouteTableIdError,
InvalidRouteError,
InvalidInstanceIdError,
MalformedAMIIdError,
InvalidAMIIdError,
InvalidAMIAttributeItemValueError,
InvalidSnapshotIdError,
InvalidVolumeIdError,
InvalidVolumeAttachmentError,
InvalidDomainError,
InvalidAddressError,
InvalidAllocationIdError,
InvalidAssociationIdError,
InvalidVPCPeeringConnectionIdError,
InvalidVPCPeeringConnectionStateTransitionError,
TagLimitExceeded,
InvalidID,
InvalidCIDRSubnetError,
InvalidNetworkAclIdError,
InvalidVpnGatewayIdError,
InvalidVpnConnectionIdError,
InvalidCustomerGatewayIdError,
)
from .utils import (
EC2_RESOURCE_TO_PREFIX,
EC2_PREFIX_TO_RESOURCE,
random_ami_id,
random_dhcp_option_id,
random_eip_allocation_id,
random_eip_association_id,
random_eni_attach_id,
random_eni_id,
random_instance_id,
random_internet_gateway_id,
random_ip,
random_nat_gateway_id,
random_key_pair,
random_private_ip,
random_public_ip,
random_reservation_id,
random_route_table_id,
generate_route_id,
split_route_id,
random_security_group_id,
random_snapshot_id,
random_spot_request_id,
random_subnet_id,
random_subnet_association_id,
random_volume_id,
random_vpc_id,
random_vpc_peering_connection_id,
generic_filter,
is_valid_resource_id,
get_prefix,
simple_aws_filter_to_re,
is_valid_cidr,
filter_internet_gateways,
filter_reservations,
random_network_acl_id,
random_network_acl_subnet_association_id,
random_vpn_gateway_id,
random_vpn_connection_id,
random_customer_gateway_id,
is_tag_filter,
)
def utc_date_and_time():
return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.000Z')
def validate_resource_ids(resource_ids):
for resource_id in resource_ids:
if not is_valid_resource_id(resource_id):
raise InvalidID(resource_id=resource_id)
return True
class InstanceState(object):
def __init__(self, name='pending', code=0):
self.name = name
self.code = code
class StateReason(object):
def __init__(self, message="", code=""):
self.message = message
self.code = code
class TaggedEC2Resource(object):
def get_tags(self, *args, **kwargs):
tags = self.ec2_backend.describe_tags(filters={'resource-id': [self.id]})
return tags
def add_tag(self, key, value):
self.ec2_backend.create_tags([self.id], {key: value})
def get_filter_value(self, filter_name):
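        # Resolve the tag-oriented filters ('tag:<key>', 'tag-key', 'tag-value') against
        # this resource's tags; subclasses handle their own filters and fall back here.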
tags = self.get_tags()
if filter_name.startswith('tag:'):
tagname = filter_name.replace('tag:', '', 1)
for tag in tags:
if tag['key'] == tagname:
return tag['value']
return ''
if filter_name == 'tag-key':
return [tag['key'] for tag in tags]
if filter_name == 'tag-value':
return [tag['value'] for tag in tags]
class NetworkInterface(TaggedEC2Resource):
def __init__(self, ec2_backend, subnet, private_ip_address, device_index=0,
public_ip_auto_assign=True, group_ids=None):
self.ec2_backend = ec2_backend
self.id = random_eni_id()
self.device_index = device_index
self.private_ip_address = private_ip_address
self.subnet = subnet
self.instance = None
self.attachment_id = None
self.public_ip = None
self.public_ip_auto_assign = public_ip_auto_assign
self.start()
self.attachments = []
# Local set to the ENI. When attached to an instance, @property group_set
# returns groups for both self and the attached instance.
self._group_set = []
group = None
if group_ids:
for group_id in group_ids:
group = self.ec2_backend.get_security_group_from_id(group_id)
if not group:
# Create with specific group ID.
group = SecurityGroup(self.ec2_backend, group_id, group_id, group_id, vpc_id=subnet.vpc_id)
self.ec2_backend.groups[subnet.vpc_id][group_id] = group
if group:
self._group_set.append(group)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
security_group_ids = properties.get('SecurityGroups', [])
ec2_backend = ec2_backends[region_name]
subnet_id = properties.get('SubnetId')
if subnet_id:
subnet = ec2_backend.get_subnet(subnet_id)
else:
subnet = None
private_ip_address = properties.get('PrivateIpAddress', None)
network_interface = ec2_backend.create_network_interface(
subnet,
private_ip_address,
group_ids=security_group_ids
)
return network_interface
def stop(self):
if self.public_ip_auto_assign:
self.public_ip = None
def start(self):
self.check_auto_public_ip()
def check_auto_public_ip(self):
if self.public_ip_auto_assign:
self.public_ip = random_public_ip()
@property
def group_set(self):
if self.instance and self.instance.security_groups:
return set(self._group_set) | set(self.instance.security_groups)
else:
return self._group_set
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'PrimaryPrivateIpAddress':
return self.private_ip_address
elif attribute_name == 'SecondaryPrivateIpAddresses':
raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "SecondaryPrivateIpAddresses" ]"')
raise UnformattedGetAttTemplateException()
@property
def physical_resource_id(self):
return self.id
def get_filter_value(self, filter_name):
if filter_name == 'network-interface-id':
return self.id
elif filter_name in ('addresses.private-ip-address', 'private-ip-address'):
return self.private_ip_address
elif filter_name == 'subnet-id':
return self.subnet.id
elif filter_name == 'vpc-id':
return self.subnet.vpc_id
elif filter_name == 'group-id':
return [group.id for group in self._group_set]
filter_value = super(NetworkInterface, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error(
"The filter '{0}' for DescribeNetworkInterfaces".format(filter_name)
)
return filter_value
class NetworkInterfaceBackend(object):
def __init__(self):
self.enis = {}
super(NetworkInterfaceBackend, self).__init__()
def create_network_interface(self, subnet, private_ip_address, group_ids=None, **kwargs):
eni = NetworkInterface(self, subnet, private_ip_address, group_ids=group_ids, **kwargs)
self.enis[eni.id] = eni
return eni
def get_network_interface(self, eni_id):
for eni in self.enis.values():
if eni_id == eni.id:
return eni
raise InvalidNetworkInterfaceIdError(eni_id)
def delete_network_interface(self, eni_id):
deleted = self.enis.pop(eni_id, None)
if not deleted:
raise InvalidNetworkInterfaceIdError(eni_id)
return deleted
def describe_network_interfaces(self, filters=None):
enis = self.enis.values()
if filters:
for (_filter, _filter_value) in filters.items():
if _filter == 'network-interface-id':
_filter = 'id'
enis = [eni for eni in enis if getattr(eni, _filter) in _filter_value]
elif _filter == 'group-id':
original_enis = enis
enis = []
for eni in original_enis:
for group in eni.group_set:
if group.id in _filter_value:
enis.append(eni)
break
else:
self.raise_not_implemented_error("The filter '{0}' for DescribeNetworkInterfaces".format(_filter))
return enis
def attach_network_interface(self, eni_id, instance_id, device_index):
eni = self.get_network_interface(eni_id)
instance = self.get_instance(instance_id)
return instance.attach_eni(eni, device_index)
def detach_network_interface(self, attachment_id):
found_eni = None
for eni in self.enis.values():
if eni.attachment_id == attachment_id:
found_eni = eni
break
else:
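            # for/else: reached only when the loop completes without a break,
            # i.e. no ENI matched the given attachment id.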
raise InvalidNetworkAttachmentIdError(attachment_id)
found_eni.instance.detach_eni(found_eni)
def modify_network_interface_attribute(self, eni_id, group_id):
eni = self.get_network_interface(eni_id)
group = self.get_security_group_from_id(group_id)
eni._group_set = [group]
def get_all_network_interfaces(self, eni_ids=None, filters=None):
enis = self.enis.values()
if eni_ids:
enis = [eni for eni in enis if eni.id in eni_ids]
if len(enis) != len(eni_ids):
invalid_id = list(set(eni_ids).difference(set([eni.id for eni in enis])))[0]
raise InvalidNetworkInterfaceIdError(invalid_id)
return generic_filter(filters, enis)
class Instance(BotoInstance, TaggedEC2Resource):
def __init__(self, ec2_backend, image_id, user_data, security_groups, **kwargs):
super(Instance, self).__init__()
self.ec2_backend = ec2_backend
self.id = random_instance_id()
self.image_id = image_id
self._state = InstanceState("running", 16)
self._reason = ""
self._state_reason = StateReason()
self.user_data = user_data
self.security_groups = security_groups
self.instance_type = kwargs.get("instance_type", "m1.small")
placement = kwargs.get("placement", None)
self.vpc_id = None
self.subnet_id = kwargs.get("subnet_id")
in_ec2_classic = not bool(self.subnet_id)
self.key_name = kwargs.get("key_name")
self.source_dest_check = "true"
self.launch_time = utc_date_and_time()
associate_public_ip = kwargs.get("associate_public_ip", False)
if in_ec2_classic:
# If we are in EC2-Classic, autoassign a public IP
associate_public_ip = True
amis = self.ec2_backend.describe_images(filters={'image-id': image_id})
ami = amis[0] if amis else None
self.platform = ami.platform if ami else None
self.virtualization_type = ami.virtualization_type if ami else 'paravirtual'
self.architecture = ami.architecture if ami else 'x86_64'
# handle weird bug around user_data -- something grabs the repr(), so it must be clean
if isinstance(self.user_data, list) and len(self.user_data) > 0:
if six.PY3 and isinstance(self.user_data[0], six.binary_type):
# string will have a "b" prefix -- need to get rid of it
self.user_data[0] = self.user_data[0].decode('utf-8')
elif six.PY2 and isinstance(self.user_data[0], six.text_type):
# string will have a "u" prefix -- need to get rid of it
self.user_data[0] = self.user_data[0].encode('utf-8')
if self.subnet_id:
subnet = ec2_backend.get_subnet(self.subnet_id)
self.vpc_id = subnet.vpc_id
self._placement.zone = subnet.availability_zone
elif placement:
self._placement.zone = placement
else:
self._placement.zone = ec2_backend.region_name + 'a'
self.block_device_mapping = BlockDeviceMapping()
self.prep_nics(kwargs.get("nics", {}),
subnet_id=self.subnet_id,
private_ip=kwargs.get("private_ip"),
associate_public_ip=associate_public_ip)
def setup_defaults(self):
        # By default an instance gets a root volume, should you not wish to override it with an attach volume cmd.
volume = self.ec2_backend.create_volume(8, 'us-east-1a')
self.ec2_backend.attach_volume(volume.id, self.id, '/dev/sda1')
def teardown_defaults(self):
volume_id = self.block_device_mapping['/dev/sda1'].volume_id
self.ec2_backend.detach_volume(volume_id, self.id, '/dev/sda1')
self.ec2_backend.delete_volume(volume_id)
@property
def get_block_device_mapping(self):
return self.block_device_mapping.items()
@property
def private_ip(self):
return self.nics[0].private_ip_address
@property
def private_dns(self):
return "ip-{0}.ec2.internal".format(self.private_ip)
@property
def public_ip(self):
return self.nics[0].public_ip
@property
def public_dns(self):
if self.public_ip:
return "ec2-{0}.compute-1.amazonaws.com".format(self.public_ip)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
security_group_ids = properties.get('SecurityGroups', [])
group_names = [ec2_backend.get_security_group_from_id(group_id).name for group_id in security_group_ids]
reservation = ec2_backend.add_instances(
image_id=properties['ImageId'],
user_data=properties.get('UserData'),
count=1,
security_group_names=group_names,
instance_type=properties.get("InstanceType", "m1.small"),
subnet_id=properties.get("SubnetId"),
key_name=properties.get("KeyName"),
private_ip=properties.get('PrivateIpAddress'),
)
return reservation.instances[0]
@property
def physical_resource_id(self):
return self.id
def start(self, *args, **kwargs):
for nic in self.nics.values():
nic.start()
self._state.name = "running"
self._state.code = 16
self._reason = ""
self._state_reason = StateReason()
def stop(self, *args, **kwargs):
for nic in self.nics.values():
nic.stop()
self._state.name = "stopped"
self._state.code = 80
self._reason = "User initiated ({0})".format(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
self._state_reason = StateReason("Client.UserInitiatedShutdown: User initiated shutdown",
"Client.UserInitiatedShutdown")
def delete(self, region):
self.terminate()
def terminate(self, *args, **kwargs):
for nic in self.nics.values():
nic.stop()
self.teardown_defaults()
self._state.name = "terminated"
self._state.code = 48
self._reason = "User initiated ({0})".format(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
self._state_reason = StateReason("Client.UserInitiatedShutdown: User initiated shutdown",
"Client.UserInitiatedShutdown")
def reboot(self, *args, **kwargs):
self._state.name = "running"
self._state.code = 16
self._reason = ""
self._state_reason = StateReason()
@property
def dynamic_group_list(self):
if self.nics:
groups = []
for nic in self.nics.values():
for group in nic.group_set:
groups.append(group)
return groups
else:
return self.security_groups
def prep_nics(self, nic_spec, subnet_id=None, private_ip=None, associate_public_ip=None):
self.nics = {}
if not private_ip:
private_ip = random_private_ip()
# Primary NIC defaults
primary_nic = {'SubnetId': subnet_id,
'PrivateIpAddress': private_ip,
'AssociatePublicIpAddress': associate_public_ip}
primary_nic = dict((k, v) for k, v in primary_nic.items() if v)
# If empty NIC spec but primary NIC values provided, create NIC from them.
if primary_nic and not nic_spec:
nic_spec[0] = primary_nic
nic_spec[0]['DeviceIndex'] = 0
# Flesh out data structures and associations
for nic in nic_spec.values():
device_index = int(nic.get('DeviceIndex'))
nic_id = nic.get('NetworkInterfaceId')
if nic_id:
# If existing NIC found, use it.
use_nic = self.ec2_backend.get_network_interface(nic_id)
use_nic.device_index = device_index
use_nic.public_ip_auto_assign = False
else:
# If primary NIC values provided, use them for the primary NIC.
if device_index == 0 and primary_nic:
nic.update(primary_nic)
if 'SubnetId' in nic:
subnet = self.ec2_backend.get_subnet(nic['SubnetId'])
else:
subnet = None
group_id = nic.get('SecurityGroupId')
group_ids = [group_id] if group_id else []
use_nic = self.ec2_backend.create_network_interface(subnet,
nic.get('PrivateIpAddress'),
device_index=device_index,
public_ip_auto_assign=nic.get('AssociatePublicIpAddress', False),
group_ids=group_ids)
self.attach_eni(use_nic, device_index)
def attach_eni(self, eni, device_index):
device_index = int(device_index)
self.nics[device_index] = eni
eni.instance = self # This is used upon associate/disassociate public IP.
eni.attachment_id = random_eni_attach_id()
eni.device_index = device_index
return eni.attachment_id
def detach_eni(self, eni):
self.nics.pop(eni.device_index, None)
eni.instance = None
eni.attachment_id = None
eni.device_index = None
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'AvailabilityZone':
return self.placement
elif attribute_name == 'PrivateDnsName':
return self.private_dns
elif attribute_name == 'PublicDnsName':
return self.public_dns
elif attribute_name == 'PrivateIp':
return self.private_ip
elif attribute_name == 'PublicIp':
return self.public_ip
raise UnformattedGetAttTemplateException()
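# Note (sketch): the state codes toggled by start/stop/terminate/reboot above
# follow the standard EC2 instance-state convention (0 pending, 16 running,
# 32 shutting-down, 48 terminated, 64 stopping, 80 stopped); only the codes
# this mock actually needs (16, 48, 80) are set here.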
class InstanceBackend(object):
def __init__(self):
self.reservations = {}
super(InstanceBackend, self).__init__()
def get_instance(self, instance_id):
for instance in self.all_instances():
if instance.id == instance_id:
return instance
raise InvalidInstanceIdError(instance_id)
def add_instances(self, image_id, count, user_data, security_group_names,
**kwargs):
new_reservation = Reservation()
new_reservation.id = random_reservation_id()
security_groups = [self.get_security_group_from_name(name)
for name in security_group_names]
security_groups.extend(self.get_security_group_from_id(sg_id)
for sg_id in kwargs.pop("security_group_ids", []))
self.reservations[new_reservation.id] = new_reservation
for index in range(count):
new_instance = Instance(
self,
image_id,
user_data,
security_groups,
**kwargs
)
new_reservation.instances.append(new_instance)
new_instance.setup_defaults()
return new_reservation
def start_instances(self, instance_ids):
started_instances = []
for instance in self.get_multi_instances_by_id(instance_ids):
instance.start()
started_instances.append(instance)
return started_instances
def stop_instances(self, instance_ids):
stopped_instances = []
for instance in self.get_multi_instances_by_id(instance_ids):
instance.stop()
stopped_instances.append(instance)
return stopped_instances
def terminate_instances(self, instance_ids):
terminated_instances = []
for instance in self.get_multi_instances_by_id(instance_ids):
instance.terminate()
terminated_instances.append(instance)
return terminated_instances
def reboot_instances(self, instance_ids):
rebooted_instances = []
for instance in self.get_multi_instances_by_id(instance_ids):
instance.reboot()
rebooted_instances.append(instance)
return rebooted_instances
def modify_instance_attribute(self, instance_id, key, value):
instance = self.get_instance(instance_id)
setattr(instance, key, value)
return instance
def modify_instance_security_groups(self, instance_id, new_group_list):
instance = self.get_instance(instance_id)
setattr(instance, 'security_groups', new_group_list)
return instance
def describe_instance_attribute(self, instance_id, key):
if key == 'group_set':
key = 'security_groups'
instance = self.get_instance(instance_id)
value = getattr(instance, key)
return instance, value
def all_instances(self):
instances = []
for reservation in self.all_reservations():
for instance in reservation.instances:
instances.append(instance)
return instances
def all_running_instances(self):
instances = []
for reservation in self.all_reservations():
for instance in reservation.instances:
if instance.state_code == 16:
instances.append(instance)
return instances
def get_multi_instances_by_id(self, instance_ids):
"""
:param instance_ids: A string list with instance ids
:return: A list with instance objects
"""
result = []
for reservation in self.all_reservations():
for instance in reservation.instances:
if instance.id in instance_ids:
result.append(instance)
# TODO: Trim error message down to specific invalid id.
if instance_ids and len(instance_ids) > len(result):
raise InvalidInstanceIdError(instance_ids)
return result
def get_instance_by_id(self, instance_id):
for reservation in self.all_reservations():
for instance in reservation.instances:
if instance.id == instance_id:
return instance
def get_reservations_by_instance_ids(self, instance_ids, filters=None):
""" Go through all of the reservations and filter to only return those
associated with the given instance_ids.
"""
reservations = []
for reservation in self.all_reservations(make_copy=True):
reservation_instance_ids = [instance.id for instance in reservation.instances]
matching_reservation = any(instance_id in reservation_instance_ids for instance_id in instance_ids)
if matching_reservation:
# We need to make a copy of the reservation because we have to modify the
# instances to limit to those requested
reservation.instances = [instance for instance in reservation.instances if instance.id in instance_ids]
reservations.append(reservation)
found_instance_ids = [instance.id for reservation in reservations for instance in reservation.instances]
if len(found_instance_ids) != len(instance_ids):
invalid_id = list(set(instance_ids).difference(set(found_instance_ids)))[0]
raise InvalidInstanceIdError(invalid_id)
if filters is not None:
reservations = filter_reservations(reservations, filters)
return reservations
def all_reservations(self, make_copy=False, filters=None):
if make_copy:
            # Return copies so that other functions can modify them without
            # changing the originals
reservations = [copy.deepcopy(reservation) for reservation in self.reservations.values()]
else:
reservations = [reservation for reservation in self.reservations.values()]
if filters is not None:
reservations = filter_reservations(reservations, filters)
return reservations
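# Usage sketch (assumption: InstanceBackend is mixed into the per-region
# EC2Backend exposed via ec2_backends[region], as the CloudFormation helpers
# above imply; 'ami-12345678' is a placeholder image id):
#   backend = ec2_backends['us-east-1']
#   reservation = backend.add_instances('ami-12345678', count=1, user_data='',
#                                       security_group_names=[])
#   instance = reservation.instances[0]
#   backend.stop_instances([instance.id])       # state -> stopped (80)
#   backend.start_instances([instance.id])      # state -> running (16)
#   backend.terminate_instances([instance.id])  # state -> terminated (48)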
class KeyPairBackend(object):
def __init__(self):
self.keypairs = defaultdict(dict)
super(KeyPairBackend, self).__init__()
def create_key_pair(self, name):
if name in self.keypairs:
raise InvalidKeyPairDuplicateError(name)
self.keypairs[name] = keypair = random_key_pair()
keypair['name'] = name
return keypair
def delete_key_pair(self, name):
if name in self.keypairs:
self.keypairs.pop(name)
return True
def describe_key_pairs(self, filter_names=None):
results = []
for name, keypair in self.keypairs.items():
if not filter_names or name in filter_names:
keypair['name'] = name
results.append(keypair)
# TODO: Trim error message down to specific invalid name.
if filter_names and len(filter_names) > len(results):
raise InvalidKeyPairNameError(filter_names)
return results
def import_key_pair(self, key_name, public_key_material):
if key_name in self.keypairs:
raise InvalidKeyPairDuplicateError(key_name)
self.keypairs[key_name] = keypair = random_key_pair()
keypair['name'] = key_name
return keypair
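# Usage sketch (assumption: KeyPairBackend is part of the same composed
# EC2Backend; 'my-key' is a placeholder name):
#   keypair = backend.create_key_pair('my-key')   # raises on duplicate names
#   backend.describe_key_pairs(['my-key'])        # -> [keypair]
#   backend.delete_key_pair('my-key')             # always returns True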
class TagBackend(object):
VALID_TAG_FILTERS = ['key',
'resource-id',
'resource-type',
'value']
VALID_TAG_RESOURCE_FILTER_TYPES = ['customer-gateway',
'dhcp-options',
'image',
'instance',
'internet-gateway',
'network-acl',
'network-interface',
'reserved-instances',
'route-table',
'security-group',
'snapshot',
'spot-instances-request',
'subnet',
'volume',
'vpc',
                                       'vpc-peering-connection',
'vpn-connection',
'vpn-gateway']
def __init__(self):
self.tags = defaultdict(dict)
super(TagBackend, self).__init__()
def create_tags(self, resource_ids, tags):
if None in set([tags[tag] for tag in tags]):
raise InvalidParameterValueErrorTagNull()
for resource_id in resource_ids:
if resource_id in self.tags:
if len(self.tags[resource_id]) + len([tag for tag in tags if not tag.startswith("aws:")]) > 10:
raise TagLimitExceeded()
elif len([tag for tag in tags if not tag.startswith("aws:")]) > 10:
raise TagLimitExceeded()
for resource_id in resource_ids:
for tag in tags:
self.tags[resource_id][tag] = tags[tag]
return True
def delete_tags(self, resource_ids, tags):
for resource_id in resource_ids:
for tag in tags:
if tag in self.tags[resource_id]:
if tags[tag] is None:
self.tags[resource_id].pop(tag)
elif tags[tag] == self.tags[resource_id][tag]:
self.tags[resource_id].pop(tag)
return True
def describe_tags(self, filters=None):
import re
results = []
key_filters = []
resource_id_filters = []
resource_type_filters = []
value_filters = []
if filters is not None:
for tag_filter in filters:
if tag_filter in self.VALID_TAG_FILTERS:
if tag_filter == 'key':
for value in filters[tag_filter]:
key_filters.append(re.compile(simple_aws_filter_to_re(value)))
if tag_filter == 'resource-id':
for value in filters[tag_filter]:
resource_id_filters.append(re.compile(simple_aws_filter_to_re(value)))
if tag_filter == 'resource-type':
for value in filters[tag_filter]:
if value in self.VALID_TAG_RESOURCE_FILTER_TYPES:
resource_type_filters.append(value)
if tag_filter == 'value':
for value in filters[tag_filter]:
value_filters.append(re.compile(simple_aws_filter_to_re(value)))
for resource_id, tags in self.tags.items():
for key, value in tags.items():
add_result = False
if filters is None:
add_result = True
else:
key_pass = False
id_pass = False
type_pass = False
value_pass = False
if key_filters:
for pattern in key_filters:
if pattern.match(key) is not None:
key_pass = True
else:
key_pass = True
if resource_id_filters:
for pattern in resource_id_filters:
if pattern.match(resource_id) is not None:
id_pass = True
else:
id_pass = True
if resource_type_filters:
for resource_type in resource_type_filters:
if EC2_PREFIX_TO_RESOURCE[get_prefix(resource_id)] == resource_type:
type_pass = True
else:
type_pass = True
if value_filters:
for pattern in value_filters:
if pattern.match(value) is not None:
value_pass = True
else:
value_pass = True
if key_pass and id_pass and type_pass and value_pass:
add_result = True
                # If we're not filtering, or we are filtering and this tag passed all of the active filters, include it.
if add_result:
result = {
'resource_id': resource_id,
'key': key,
'value': value,
'resource_type': EC2_PREFIX_TO_RESOURCE[get_prefix(resource_id)],
}
results.append(result)
return results
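# Usage sketch (assumption: TagBackend is part of the composed EC2Backend and
# resource ids carry their usual prefixes, e.g. 'i-...', so resource-type can
# be derived via EC2_PREFIX_TO_RESOURCE):
#   backend.create_tags([instance.id], {'Name': 'web-1'})
#   backend.describe_tags(filters={'resource-id': [instance.id]})
#   backend.delete_tags([instance.id], {'Name': None})   # None matches any value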
class Ami(TaggedEC2Resource):
def __init__(self, ec2_backend, ami_id, instance=None, source_ami=None,
name=None, description=None):
self.ec2_backend = ec2_backend
self.id = ami_id
self.state = "available"
self.name = name
self.description = description
self.virtualization_type = None
self.architecture = None
self.kernel_id = None
self.platform = None
if instance:
self.instance = instance
self.instance_id = instance.id
self.virtualization_type = instance.virtualization_type
self.architecture = instance.architecture
self.kernel_id = instance.kernel
self.platform = instance.platform
elif source_ami:
"""
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html
"We don't copy launch permissions, user-defined tags, or Amazon S3 bucket permissions from the source AMI to the new AMI."
~ 2014.09.29
"""
self.virtualization_type = source_ami.virtualization_type
self.architecture = source_ami.architecture
self.kernel_id = source_ami.kernel_id
self.platform = source_ami.platform
if not name:
self.name = source_ami.name
if not description:
self.description = source_ami.description
self.launch_permission_groups = set()
self.launch_permission_users = set()
# AWS auto-creates these, we should reflect the same.
volume = self.ec2_backend.create_volume(15, "us-east-1a")
self.ebs_snapshot = self.ec2_backend.create_snapshot(volume.id, "Auto-created snapshot for AMI %s" % self.id)
@property
def is_public(self):
return 'all' in self.launch_permission_groups
@property
def is_public_string(self):
return str(self.is_public).lower()
def get_filter_value(self, filter_name):
if filter_name == 'virtualization-type':
return self.virtualization_type
elif filter_name == 'kernel-id':
return self.kernel_id
elif filter_name in ['architecture', 'platform']:
return getattr(self, filter_name)
elif filter_name == 'image-id':
return self.id
elif filter_name == 'is-public':
return str(self.is_public)
elif filter_name == 'state':
return self.state
elif filter_name == 'name':
return self.name
filter_value = super(Ami, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeImages".format(filter_name))
return filter_value
class AmiBackend(object):
def __init__(self):
self.amis = {}
super(AmiBackend, self).__init__()
def create_image(self, instance_id, name=None, description=None):
# TODO: check that instance exists and pull info from it.
ami_id = random_ami_id()
instance = self.get_instance(instance_id)
ami = Ami(self, ami_id, instance=instance, source_ami=None, name=name, description=description)
self.amis[ami_id] = ami
return ami
def copy_image(self, source_image_id, source_region, name=None, description=None):
source_ami = ec2_backends[source_region].describe_images(ami_ids=[source_image_id])[0]
ami_id = random_ami_id()
ami = Ami(self, ami_id, instance=None, source_ami=source_ami, name=name, description=description)
self.amis[ami_id] = ami
return ami
def describe_images(self, ami_ids=(), filters=None):
if filters:
images = self.amis.values()
return generic_filter(filters, images)
else:
images = []
for ami_id in ami_ids:
if ami_id in self.amis:
images.append(self.amis[ami_id])
elif not ami_id.startswith("ami-"):
raise MalformedAMIIdError(ami_id)
else:
raise InvalidAMIIdError(ami_id)
return images or self.amis.values()
def deregister_image(self, ami_id):
if ami_id in self.amis:
self.amis.pop(ami_id)
return True
raise InvalidAMIIdError(ami_id)
def get_launch_permission_groups(self, ami_id):
ami = self.describe_images(ami_ids=[ami_id])[0]
return ami.launch_permission_groups
def get_launch_permission_users(self, ami_id):
ami = self.describe_images(ami_ids=[ami_id])[0]
return ami.launch_permission_users
def validate_permission_targets(self, user_ids=None, group=None):
# If anything is invalid, nothing is added. (No partial success.)
if user_ids:
"""
AWS docs:
"The AWS account ID is a 12-digit number, such as 123456789012, that you use to construct Amazon Resource Names (ARNs)."
http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
"""
for user_id in user_ids:
if len(user_id) != 12 or not user_id.isdigit():
raise InvalidAMIAttributeItemValueError("userId", user_id)
if group and group != 'all':
raise InvalidAMIAttributeItemValueError("UserGroup", group)
def add_launch_permission(self, ami_id, user_ids=None, group=None):
ami = self.describe_images(ami_ids=[ami_id])[0]
self.validate_permission_targets(user_ids=user_ids, group=group)
if user_ids:
for user_id in user_ids:
ami.launch_permission_users.add(user_id)
if group:
ami.launch_permission_groups.add(group)
return True
def remove_launch_permission(self, ami_id, user_ids=None, group=None):
ami = self.describe_images(ami_ids=[ami_id])[0]
self.validate_permission_targets(user_ids=user_ids, group=group)
if user_ids:
for user_id in user_ids:
ami.launch_permission_users.discard(user_id)
if group:
ami.launch_permission_groups.discard(group)
return True
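# Usage sketch (assumption: AmiBackend is part of the composed EC2Backend;
# the 12-digit account id below is a placeholder):
#   ami = backend.create_image(instance.id, name='web-image')
#   backend.add_launch_permission(ami.id, user_ids=['123456789012'])
#   backend.get_launch_permission_users(ami.id)   # -> {'123456789012'}
#   backend.deregister_image(ami.id)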
class Region(object):
def __init__(self, name, endpoint):
self.name = name
self.endpoint = endpoint
class Zone(object):
def __init__(self, name, region_name):
self.name = name
self.region_name = region_name
class RegionsAndZonesBackend(object):
regions = [
Region("eu-west-1", "ec2.eu-west-1.amazonaws.com"),
Region("sa-east-1", "ec2.sa-east-1.amazonaws.com"),
Region("us-east-1", "ec2.us-east-1.amazonaws.com"),
Region("ap-northeast-1", "ec2.ap-northeast-1.amazonaws.com"),
Region("us-west-2", "ec2.us-west-2.amazonaws.com"),
Region("us-west-1", "ec2.us-west-1.amazonaws.com"),
Region("ap-southeast-1", "ec2.ap-southeast-1.amazonaws.com"),
Region("ap-southeast-2", "ec2.ap-southeast-2.amazonaws.com"),
]
# TODO: cleanup. For now, pretend everything is us-east-1. 'merica.
zones = [
Zone("us-east-1a", "us-east-1"),
Zone("us-east-1b", "us-east-1"),
Zone("us-east-1c", "us-east-1"),
Zone("us-east-1d", "us-east-1"),
Zone("us-east-1e", "us-east-1"),
]
def describe_regions(self):
return self.regions
def describe_availability_zones(self):
return self.zones
def get_zone_by_name(self, name):
for zone in self.zones:
if zone.name == name:
return zone
class SecurityRule(object):
def __init__(self, ip_protocol, from_port, to_port, ip_ranges, source_groups):
self.ip_protocol = ip_protocol
self.from_port = from_port
self.to_port = to_port
self.ip_ranges = ip_ranges or []
self.source_groups = source_groups
@property
def unique_representation(self):
return "{0}-{1}-{2}-{3}-{4}".format(
self.ip_protocol,
self.from_port,
self.to_port,
self.ip_ranges,
self.source_groups
)
def __eq__(self, other):
return self.unique_representation == other.unique_representation
class SecurityGroup(TaggedEC2Resource):
def __init__(self, ec2_backend, group_id, name, description, vpc_id=None):
self.ec2_backend = ec2_backend
self.id = group_id
self.name = name
self.description = description
self.ingress_rules = []
self.egress_rules = [SecurityRule(-1, -1, -1, ['0.0.0.0/0'], [])]
self.enis = {}
self.vpc_id = vpc_id
self.owner_id = "123456789012"
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
vpc_id = properties.get('VpcId')
security_group = ec2_backend.create_security_group(
name=resource_name,
description=properties.get('GroupDescription'),
vpc_id=vpc_id,
)
for tag in properties.get("Tags", []):
tag_key = tag["Key"]
tag_value = tag["Value"]
security_group.add_tag(tag_key, tag_value)
for ingress_rule in properties.get('SecurityGroupIngress', []):
source_group_id = ingress_rule.get('SourceSecurityGroupId')
ec2_backend.authorize_security_group_ingress(
group_name_or_id=security_group.id,
ip_protocol=ingress_rule['IpProtocol'],
from_port=ingress_rule['FromPort'],
to_port=ingress_rule['ToPort'],
ip_ranges=ingress_rule.get('CidrIp'),
source_group_ids=[source_group_id],
vpc_id=vpc_id,
)
return security_group
@classmethod
def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
cls._delete_security_group_given_vpc_id(original_resource.name, original_resource.vpc_id, region_name)
return cls.create_from_cloudformation_json(new_resource_name, cloudformation_json, region_name)
@classmethod
def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
vpc_id = properties.get('VpcId')
cls._delete_security_group_given_vpc_id(resource_name, vpc_id, region_name)
@classmethod
def _delete_security_group_given_vpc_id(cls, resource_name, vpc_id, region_name):
ec2_backend = ec2_backends[region_name]
security_group = ec2_backend.get_security_group_from_name(resource_name, vpc_id)
if security_group:
security_group.delete(region_name)
def delete(self, region_name):
''' Not exposed as part of the ELB API - used for CloudFormation. '''
self.ec2_backend.delete_security_group(group_id=self.id)
@property
def physical_resource_id(self):
return self.id
def matches_filter(self, key, filter_value):
def to_attr(filter_name):
attr = None
if filter_name == 'group-name':
attr = 'name'
elif filter_name == 'group-id':
attr = 'id'
elif filter_name == 'vpc-id':
attr = 'vpc_id'
else:
attr = filter_name.replace('-', '_')
return attr
if key.startswith('ip-permission'):
            match = re.search(r"ip-permission.(.*)", key)
ingress_attr = to_attr(match.groups()[0])
for ingress in self.ingress_rules:
if getattr(ingress, ingress_attr) in filter_value:
return True
elif is_tag_filter(key):
tag_value = self.get_filter_value(key)
return tag_value in filter_value
else:
attr_name = to_attr(key)
return getattr(self, attr_name) in filter_value
return False
def matches_filters(self, filters):
for key, value in filters.items():
if not self.matches_filter(key, value):
return False
return True
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'GroupId':
return self.id
raise UnformattedGetAttTemplateException()
class SecurityGroupBackend(object):
def __init__(self):
# the key in the dict group is the vpc_id or None (non-vpc)
self.groups = defaultdict(dict)
# Create the default security group
self.create_security_group("default", "default group")
super(SecurityGroupBackend, self).__init__()
def create_security_group(self, name, description, vpc_id=None, force=False):
if not description:
raise MissingParameterError('GroupDescription')
group_id = random_security_group_id()
if not force:
existing_group = self.get_security_group_from_name(name, vpc_id)
if existing_group:
raise InvalidSecurityGroupDuplicateError(name)
group = SecurityGroup(self, group_id, name, description, vpc_id=vpc_id)
self.groups[vpc_id][group_id] = group
return group
def describe_security_groups(self, group_ids=None, groupnames=None, filters=None):
all_groups = itertools.chain(*[x.values() for x in self.groups.values()])
groups = []
if group_ids or groupnames or filters:
for group in all_groups:
if ((group_ids and group.id in group_ids) or
(groupnames and group.name in groupnames) or
(filters and group.matches_filters(filters))):
groups.append(group)
else:
groups = all_groups
return groups
def _delete_security_group(self, vpc_id, group_id):
if self.groups[vpc_id][group_id].enis:
raise DependencyViolationError("{0} is being utilized by {1}".format(group_id, 'ENIs'))
return self.groups[vpc_id].pop(group_id)
def delete_security_group(self, name=None, group_id=None):
if group_id:
# loop over all the SGs, find the right one
for vpc_id, groups in self.groups.items():
if group_id in groups:
return self._delete_security_group(vpc_id, group_id)
raise InvalidSecurityGroupNotFoundError(group_id)
elif name:
            # Group name lookups only work for non-VPC (EC2-Classic) groups; VPC groups must be identified by group_id.
group = self.get_security_group_from_name(name)
if group:
return self._delete_security_group(None, group.id)
raise InvalidSecurityGroupNotFoundError(name)
def get_security_group_from_id(self, group_id):
# 2 levels of chaining necessary since it's a complex structure
all_groups = itertools.chain.from_iterable([x.values() for x in self.groups.values()])
for group in all_groups:
if group.id == group_id:
return group
def get_security_group_from_name(self, name, vpc_id=None):
for group_id, group in self.groups[vpc_id].items():
if group.name == name:
return group
def get_security_group_by_name_or_id(self, group_name_or_id, vpc_id):
# try searching by id, fallbacks to name search
group = self.get_security_group_from_id(group_name_or_id)
if group is None:
group = self.get_security_group_from_name(group_name_or_id, vpc_id)
return group
def authorize_security_group_ingress(self,
group_name_or_id,
ip_protocol,
from_port,
to_port,
ip_ranges,
source_group_names=None,
source_group_ids=None,
vpc_id=None):
group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
if ip_ranges and not isinstance(ip_ranges, list):
ip_ranges = [ip_ranges]
if ip_ranges:
for cidr in ip_ranges:
if not is_valid_cidr(cidr):
raise InvalidCIDRSubnetError(cidr=cidr)
source_group_names = source_group_names if source_group_names else []
source_group_ids = source_group_ids if source_group_ids else []
source_groups = []
for source_group_name in source_group_names:
source_group = self.get_security_group_from_name(source_group_name, vpc_id)
if source_group:
source_groups.append(source_group)
# for VPCs
for source_group_id in source_group_ids:
source_group = self.get_security_group_from_id(source_group_id)
if source_group:
source_groups.append(source_group)
security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
group.ingress_rules.append(security_rule)
def revoke_security_group_ingress(self,
group_name_or_id,
ip_protocol,
from_port,
to_port,
ip_ranges,
source_group_names=None,
source_group_ids=None,
vpc_id=None):
group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
source_groups = []
for source_group_name in source_group_names:
source_group = self.get_security_group_from_name(source_group_name, vpc_id)
if source_group:
source_groups.append(source_group)
for source_group_id in source_group_ids:
source_group = self.get_security_group_from_id(source_group_id)
if source_group:
source_groups.append(source_group)
security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
if security_rule in group.ingress_rules:
group.ingress_rules.remove(security_rule)
return security_rule
raise InvalidPermissionNotFoundError()
def authorize_security_group_egress(self,
group_name_or_id,
ip_protocol,
from_port,
to_port,
ip_ranges,
source_group_names=None,
source_group_ids=None,
vpc_id=None):
group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
if ip_ranges and not isinstance(ip_ranges, list):
ip_ranges = [ip_ranges]
if ip_ranges:
for cidr in ip_ranges:
if not is_valid_cidr(cidr):
raise InvalidCIDRSubnetError(cidr=cidr)
source_group_names = source_group_names if source_group_names else []
source_group_ids = source_group_ids if source_group_ids else []
source_groups = []
for source_group_name in source_group_names:
source_group = self.get_security_group_from_name(source_group_name, vpc_id)
if source_group:
source_groups.append(source_group)
# for VPCs
for source_group_id in source_group_ids:
source_group = self.get_security_group_from_id(source_group_id)
if source_group:
source_groups.append(source_group)
security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
group.egress_rules.append(security_rule)
def revoke_security_group_egress(self,
group_name_or_id,
ip_protocol,
from_port,
to_port,
ip_ranges,
source_group_names=None,
source_group_ids=None,
vpc_id=None):
group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
source_groups = []
for source_group_name in source_group_names:
source_group = self.get_security_group_from_name(source_group_name, vpc_id)
if source_group:
source_groups.append(source_group)
for source_group_id in source_group_ids:
source_group = self.get_security_group_from_id(source_group_id)
if source_group:
source_groups.append(source_group)
security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
if security_rule in group.egress_rules:
group.egress_rules.remove(security_rule)
return security_rule
raise InvalidPermissionNotFoundError()
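# Usage sketch (assumption: SecurityGroupBackend is part of the composed
# EC2Backend). Note that the revoke_* methods iterate the source_group_*
# arguments directly, so pass empty lists rather than None:
#   sg = backend.create_security_group('web', 'web servers')
#   backend.authorize_security_group_ingress(sg.id, 'tcp', 80, 80,
#                                            ip_ranges=['0.0.0.0/0'])
#   backend.revoke_security_group_ingress(sg.id, 'tcp', 80, 80,
#                                         ip_ranges=['0.0.0.0/0'],
#                                         source_group_names=[],
#                                         source_group_ids=[])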
class SecurityGroupIngress(object):
def __init__(self, security_group, properties):
self.security_group = security_group
self.properties = properties
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
group_name = properties.get('GroupName')
group_id = properties.get('GroupId')
ip_protocol = properties.get("IpProtocol")
cidr_ip = properties.get("CidrIp")
from_port = properties.get("FromPort")
source_security_group_id = properties.get("SourceSecurityGroupId")
source_security_group_name = properties.get("SourceSecurityGroupName")
# source_security_owner_id = properties.get("SourceSecurityGroupOwnerId") # IGNORED AT THE MOMENT
to_port = properties.get("ToPort")
assert group_id or group_name
assert source_security_group_name or cidr_ip or source_security_group_id
assert ip_protocol
if source_security_group_id:
source_security_group_ids = [source_security_group_id]
else:
source_security_group_ids = None
if source_security_group_name:
source_security_group_names = [source_security_group_name]
else:
source_security_group_names = None
if cidr_ip:
ip_ranges = [cidr_ip]
else:
ip_ranges = []
if group_id:
security_group = ec2_backend.describe_security_groups(group_ids=[group_id])[0]
else:
security_group = ec2_backend.describe_security_groups(groupnames=[group_name])[0]
ec2_backend.authorize_security_group_ingress(
group_name_or_id=security_group.id,
ip_protocol=ip_protocol,
from_port=from_port,
to_port=to_port,
ip_ranges=ip_ranges,
source_group_ids=source_security_group_ids,
source_group_names=source_security_group_names,
)
return cls(security_group, properties)
class VolumeAttachment(object):
def __init__(self, volume, instance, device):
self.volume = volume
self.attach_time = utc_date_and_time()
self.instance = instance
self.device = device
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
instance_id = properties['InstanceId']
volume_id = properties['VolumeId']
ec2_backend = ec2_backends[region_name]
attachment = ec2_backend.attach_volume(
volume_id=volume_id,
instance_id=instance_id,
device_path=properties['Device'],
)
return attachment
class Volume(TaggedEC2Resource):
def __init__(self, ec2_backend, volume_id, size, zone, snapshot_id=None):
self.id = volume_id
self.size = size
self.zone = zone
self.create_time = utc_date_and_time()
self.attachment = None
self.snapshot_id = snapshot_id
self.ec2_backend = ec2_backend
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
volume = ec2_backend.create_volume(
size=properties.get('Size'),
zone_name=properties.get('AvailabilityZone'),
)
return volume
@property
def physical_resource_id(self):
return self.id
@property
def status(self):
if self.attachment:
return 'in-use'
else:
return 'available'
def get_filter_value(self, filter_name):
if filter_name.startswith('attachment') and not self.attachment:
return None
if filter_name == 'attachment.attach-time':
return self.attachment.attach_time
if filter_name == 'attachment.device':
return self.attachment.device
if filter_name == 'attachment.instance-id':
return self.attachment.instance.id
if filter_name == 'create-time':
return self.create_time
if filter_name == 'size':
return self.size
if filter_name == 'snapshot-id':
return self.snapshot_id
if filter_name == 'status':
return self.status
if filter_name == 'volume-id':
return self.id
filter_value = super(Volume, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeVolumes".format(filter_name))
return filter_value
class Snapshot(TaggedEC2Resource):
def __init__(self, ec2_backend, snapshot_id, volume, description):
self.id = snapshot_id
self.volume = volume
self.description = description
self.start_time = utc_date_and_time()
self.create_volume_permission_groups = set()
self.ec2_backend = ec2_backend
self.status = 'completed'
def get_filter_value(self, filter_name):
if filter_name == 'description':
return self.description
if filter_name == 'snapshot-id':
return self.id
if filter_name == 'start-time':
return self.start_time
if filter_name == 'volume-id':
return self.volume.id
if filter_name == 'volume-size':
return self.volume.size
filter_value = super(Snapshot, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeSnapshots".format(filter_name))
return filter_value
class EBSBackend(object):
def __init__(self):
self.volumes = {}
self.attachments = {}
self.snapshots = {}
super(EBSBackend, self).__init__()
def create_volume(self, size, zone_name, snapshot_id=None):
volume_id = random_volume_id()
zone = self.get_zone_by_name(zone_name)
if snapshot_id:
snapshot = self.get_snapshot(snapshot_id)
if size is None:
size = snapshot.volume.size
volume = Volume(self, volume_id, size, zone, snapshot_id)
self.volumes[volume_id] = volume
return volume
def describe_volumes(self, filters=None):
if filters:
volumes = self.volumes.values()
return generic_filter(filters, volumes)
return self.volumes.values()
def get_volume(self, volume_id):
volume = self.volumes.get(volume_id, None)
if not volume:
raise InvalidVolumeIdError(volume_id)
return volume
def delete_volume(self, volume_id):
if volume_id in self.volumes:
return self.volumes.pop(volume_id)
raise InvalidVolumeIdError(volume_id)
def attach_volume(self, volume_id, instance_id, device_path):
volume = self.get_volume(volume_id)
instance = self.get_instance(instance_id)
if not volume or not instance:
return False
volume.attachment = VolumeAttachment(volume, instance, device_path)
# Modify instance to capture mount of block device.
bdt = BlockDeviceType(volume_id=volume_id, status=volume.status, size=volume.size,
attach_time=utc_date_and_time())
instance.block_device_mapping[device_path] = bdt
return volume.attachment
def detach_volume(self, volume_id, instance_id, device_path):
volume = self.get_volume(volume_id)
self.get_instance(instance_id)
old_attachment = volume.attachment
if not old_attachment:
raise InvalidVolumeAttachmentError(volume_id, instance_id)
volume.attachment = None
return old_attachment
def create_snapshot(self, volume_id, description):
snapshot_id = random_snapshot_id()
volume = self.get_volume(volume_id)
snapshot = Snapshot(self, snapshot_id, volume, description)
self.snapshots[snapshot_id] = snapshot
return snapshot
def describe_snapshots(self, filters=None):
if filters:
snapshots = self.snapshots.values()
return generic_filter(filters, snapshots)
return self.snapshots.values()
def get_snapshot(self, snapshot_id):
snapshot = self.snapshots.get(snapshot_id, None)
if not snapshot:
raise InvalidSnapshotIdError(snapshot_id)
return snapshot
def delete_snapshot(self, snapshot_id):
if snapshot_id in self.snapshots:
return self.snapshots.pop(snapshot_id)
raise InvalidSnapshotIdError(snapshot_id)
def get_create_volume_permission_groups(self, snapshot_id):
snapshot = self.get_snapshot(snapshot_id)
return snapshot.create_volume_permission_groups
def add_create_volume_permission(self, snapshot_id, user_id=None, group=None):
if user_id:
self.raise_not_implemented_error("The UserId parameter for ModifySnapshotAttribute")
if group != 'all':
raise InvalidAMIAttributeItemValueError("UserGroup", group)
snapshot = self.get_snapshot(snapshot_id)
snapshot.create_volume_permission_groups.add(group)
return True
def remove_create_volume_permission(self, snapshot_id, user_id=None, group=None):
if user_id:
self.raise_not_implemented_error("The UserId parameter for ModifySnapshotAttribute")
if group != 'all':
raise InvalidAMIAttributeItemValueError("UserGroup", group)
snapshot = self.get_snapshot(snapshot_id)
snapshot.create_volume_permission_groups.discard(group)
return True
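# Usage sketch (assumption: EBSBackend is part of the composed EC2Backend;
# '/dev/sdh' is a placeholder device path):
#   volume = backend.create_volume(size=80, zone_name='us-east-1a')
#   backend.attach_volume(volume.id, instance.id, '/dev/sdh')
#   snapshot = backend.create_snapshot(volume.id, 'nightly backup')
#   backend.detach_volume(volume.id, instance.id, '/dev/sdh')
#   backend.delete_volume(volume.id)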
class VPC(TaggedEC2Resource):
def __init__(self, ec2_backend, vpc_id, cidr_block, is_default):
self.ec2_backend = ec2_backend
self.id = vpc_id
self.cidr_block = cidr_block
self.dhcp_options = None
self.state = 'available'
self.is_default = is_default
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
vpc = ec2_backend.create_vpc(
cidr_block=properties['CidrBlock'],
)
return vpc
@property
def physical_resource_id(self):
return self.id
def get_filter_value(self, filter_name):
if filter_name == 'vpc-id':
return self.id
elif filter_name == 'cidr':
return self.cidr_block
elif filter_name == 'dhcp-options-id':
if not self.dhcp_options:
return None
return self.dhcp_options.id
filter_value = super(VPC, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeVPCs".format(filter_name))
return filter_value
class VPCBackend(object):
def __init__(self):
self.vpcs = {}
super(VPCBackend, self).__init__()
def create_vpc(self, cidr_block):
vpc_id = random_vpc_id()
vpc = VPC(self, vpc_id, cidr_block, len(self.vpcs) == 0)
self.vpcs[vpc_id] = vpc
# AWS creates a default main route table and security group.
self.create_route_table(vpc_id, main=True)
# AWS creates a default Network ACL
self.create_network_acl(vpc_id, default=True)
default = self.get_security_group_from_name('default', vpc_id=vpc_id)
if not default:
self.create_security_group('default', 'default VPC security group', vpc_id=vpc_id)
return vpc
def get_vpc(self, vpc_id):
if vpc_id not in self.vpcs:
raise InvalidVPCIdError(vpc_id)
return self.vpcs.get(vpc_id)
def get_all_vpcs(self, vpc_ids=None, filters=None):
if vpc_ids:
vpcs = [vpc for vpc in self.vpcs.values() if vpc.id in vpc_ids]
else:
vpcs = self.vpcs.values()
return generic_filter(filters, vpcs)
def delete_vpc(self, vpc_id):
# Delete route table if only main route table remains.
route_tables = self.get_all_route_tables(filters={'vpc-id': vpc_id})
if len(route_tables) > 1:
raise DependencyViolationError(
"The vpc {0} has dependencies and cannot be deleted."
.format(vpc_id)
)
for route_table in route_tables:
self.delete_route_table(route_table.id)
# Delete default security group if exists.
default = self.get_security_group_from_name('default', vpc_id=vpc_id)
if default:
self.delete_security_group(group_id=default.id)
# Now delete VPC.
vpc = self.vpcs.pop(vpc_id, None)
if not vpc:
raise InvalidVPCIdError(vpc_id)
if vpc.dhcp_options:
vpc.dhcp_options.vpc = None
self.delete_dhcp_options_set(vpc.dhcp_options.id)
vpc.dhcp_options = None
return vpc
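# Usage sketch (assumption: VPCBackend is part of the composed EC2Backend;
# create_vpc also provisions the main route table, default network ACL and
# default security group, as implemented above):
#   vpc = backend.create_vpc('10.0.0.0/16')
#   backend.get_all_vpcs(vpc_ids=[vpc.id])
#   backend.delete_vpc(vpc.id)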
class VPCPeeringConnectionStatus(object):
def __init__(self, code='initiating-request', message=''):
self.code = code
self.message = message
def initiating(self):
self.code = 'initiating-request'
self.message = 'Initiating Request to {accepter ID}'
def pending(self):
self.code = 'pending-acceptance'
self.message = 'Pending Acceptance by {accepter ID}'
def accept(self):
self.code = 'active'
self.message = 'Active'
def reject(self):
self.code = 'rejected'
self.message = 'Inactive'
class VPCPeeringConnection(TaggedEC2Resource):
def __init__(self, vpc_pcx_id, vpc, peer_vpc):
self.id = vpc_pcx_id
self.vpc = vpc
self.peer_vpc = peer_vpc
self._status = VPCPeeringConnectionStatus()
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
vpc = ec2_backend.get_vpc(properties['VpcId'])
peer_vpc = ec2_backend.get_vpc(properties['PeerVpcId'])
vpc_pcx = ec2_backend.create_vpc_peering_connection(vpc, peer_vpc)
return vpc_pcx
@property
def physical_resource_id(self):
return self.id
class VPCPeeringConnectionBackend(object):
def __init__(self):
self.vpc_pcxs = {}
super(VPCPeeringConnectionBackend, self).__init__()
def create_vpc_peering_connection(self, vpc, peer_vpc):
vpc_pcx_id = random_vpc_peering_connection_id()
vpc_pcx = VPCPeeringConnection(vpc_pcx_id, vpc, peer_vpc)
vpc_pcx._status.pending()
self.vpc_pcxs[vpc_pcx_id] = vpc_pcx
return vpc_pcx
def get_all_vpc_peering_connections(self):
return self.vpc_pcxs.values()
def get_vpc_peering_connection(self, vpc_pcx_id):
if vpc_pcx_id not in self.vpc_pcxs:
raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
return self.vpc_pcxs.get(vpc_pcx_id)
def delete_vpc_peering_connection(self, vpc_pcx_id):
deleted = self.vpc_pcxs.pop(vpc_pcx_id, None)
if not deleted:
raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
return deleted
def accept_vpc_peering_connection(self, vpc_pcx_id):
vpc_pcx = self.get_vpc_peering_connection(vpc_pcx_id)
if vpc_pcx._status.code != 'pending-acceptance':
raise InvalidVPCPeeringConnectionStateTransitionError(vpc_pcx.id)
vpc_pcx._status.accept()
return vpc_pcx
def reject_vpc_peering_connection(self, vpc_pcx_id):
vpc_pcx = self.get_vpc_peering_connection(vpc_pcx_id)
if vpc_pcx._status.code != 'pending-acceptance':
raise InvalidVPCPeeringConnectionStateTransitionError(vpc_pcx.id)
vpc_pcx._status.reject()
return vpc_pcx
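# Usage sketch (assumption: the peering backend is part of the composed
# EC2Backend):
#   vpc_a = backend.create_vpc('10.0.0.0/16')
#   vpc_b = backend.create_vpc('10.1.0.0/16')
#   pcx = backend.create_vpc_peering_connection(vpc_a, vpc_b)  # pending-acceptance
#   backend.accept_vpc_peering_connection(pcx.id)              # -> active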
class Subnet(TaggedEC2Resource):
def __init__(self, ec2_backend, subnet_id, vpc_id, cidr_block, availability_zone, defaultForAz,
map_public_ip_on_launch):
self.ec2_backend = ec2_backend
self.id = subnet_id
self.vpc_id = vpc_id
self.cidr_block = cidr_block
self._availability_zone = availability_zone
self.defaultForAz = defaultForAz
self.map_public_ip_on_launch = map_public_ip_on_launch
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
vpc_id = properties['VpcId']
cidr_block = properties['CidrBlock']
availability_zone = properties.get('AvailabilityZone')
ec2_backend = ec2_backends[region_name]
subnet = ec2_backend.create_subnet(
vpc_id=vpc_id,
cidr_block=cidr_block,
availability_zone=availability_zone,
)
for tag in properties.get("Tags", []):
tag_key = tag["Key"]
tag_value = tag["Value"]
subnet.add_tag(tag_key, tag_value)
return subnet
@property
def availability_zone(self):
if self._availability_zone is None:
# This could probably be smarter, but there doesn't appear to be a
# way to pull AZs for a region in boto
return self.ec2_backend.region_name + "a"
else:
return self._availability_zone
@property
def physical_resource_id(self):
return self.id
def get_filter_value(self, filter_name):
"""
API Version 2014-10-01 defines the following filters for DescribeSubnets:
* availabilityZone
* available-ip-address-count
* cidrBlock
* defaultForAz
* state
* subnet-id
* tag:key=value
* tag-key
* tag-value
* vpc-id
Taken from: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html
"""
if filter_name in ['cidr', 'cidrBlock', 'cidr-block']:
return self.cidr_block
elif filter_name == 'vpc-id':
return self.vpc_id
elif filter_name == 'subnet-id':
return self.id
elif filter_name == 'availabilityZone':
return self.availability_zone
elif filter_name == 'defaultForAz':
return self.defaultForAz
filter_value = super(Subnet, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeSubnets".format(filter_name))
return filter_value
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'AvailabilityZone':
raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "AvailabilityZone" ]"')
raise UnformattedGetAttTemplateException()
class SubnetBackend(object):
def __init__(self):
self.subnets = {}
super(SubnetBackend, self).__init__()
def get_subnet(self, subnet_id):
subnet = self.subnets.get(subnet_id, None)
if not subnet:
raise InvalidSubnetIdError(subnet_id)
return subnet
def create_subnet(self, vpc_id, cidr_block, availability_zone=None):
subnet_id = random_subnet_id()
vpc = self.get_vpc(vpc_id) # Validate VPC exists
defaultForAz = "true" if vpc.is_default else "false"
map_public_ip_on_launch = "true" if vpc.is_default else "false"
subnet = Subnet(self, subnet_id, vpc_id, cidr_block, availability_zone, defaultForAz, map_public_ip_on_launch)
# AWS associates a new subnet with the default Network ACL
self.associate_default_network_acl_with_subnet(subnet_id)
self.subnets[subnet_id] = subnet
return subnet
def get_all_subnets(self, filters=None):
subnets = self.subnets.values()
return generic_filter(filters, subnets)
def delete_subnet(self, subnet_id):
deleted = self.subnets.pop(subnet_id, None)
if not deleted:
raise InvalidSubnetIdError(subnet_id)
return deleted
def modify_subnet_attribute(self, subnet_id, map_public_ip):
subnet = self.get_subnet(subnet_id)
if map_public_ip not in ('true', 'false'):
raise InvalidParameterValueError(map_public_ip)
subnet.map_public_ip_on_launch = map_public_ip
class SubnetRouteTableAssociation(object):
def __init__(self, route_table_id, subnet_id):
self.route_table_id = route_table_id
self.subnet_id = subnet_id
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
route_table_id = properties['RouteTableId']
subnet_id = properties['SubnetId']
ec2_backend = ec2_backends[region_name]
subnet_association = ec2_backend.create_subnet_association(
route_table_id=route_table_id,
subnet_id=subnet_id,
)
return subnet_association
class SubnetRouteTableAssociationBackend(object):
def __init__(self):
self.subnet_associations = {}
super(SubnetRouteTableAssociationBackend, self).__init__()
def create_subnet_association(self, route_table_id, subnet_id):
subnet_association = SubnetRouteTableAssociation(route_table_id, subnet_id)
self.subnet_associations["{0}:{1}".format(route_table_id, subnet_id)] = subnet_association
return subnet_association
class RouteTable(TaggedEC2Resource):
def __init__(self, ec2_backend, route_table_id, vpc_id, main=False):
self.ec2_backend = ec2_backend
self.id = route_table_id
self.vpc_id = vpc_id
self.main = main
self.associations = {}
self.routes = {}
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
vpc_id = properties['VpcId']
ec2_backend = ec2_backends[region_name]
route_table = ec2_backend.create_route_table(
vpc_id=vpc_id,
)
return route_table
@property
def physical_resource_id(self):
return self.id
def get_filter_value(self, filter_name):
if filter_name == "association.main":
# Note: Boto only supports 'true'.
# https://github.com/boto/boto/issues/1742
if self.main:
return 'true'
else:
return 'false'
elif filter_name == "route-table-id":
return self.id
elif filter_name == "vpc-id":
return self.vpc_id
elif filter_name == "association.route-table-id":
return self.id
elif filter_name == "association.route-table-association-id":
return self.associations.keys()
elif filter_name == "association.subnet-id":
return self.associations.values()
filter_value = super(RouteTable, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeRouteTables".format(filter_name))
return filter_value
class RouteTableBackend(object):
def __init__(self):
self.route_tables = {}
super(RouteTableBackend, self).__init__()
def create_route_table(self, vpc_id, main=False):
route_table_id = random_route_table_id()
vpc = self.get_vpc(vpc_id) # Validate VPC exists
route_table = RouteTable(self, route_table_id, vpc_id, main=main)
self.route_tables[route_table_id] = route_table
# AWS creates a default local route.
self.create_route(route_table_id, vpc.cidr_block, local=True)
return route_table
def get_route_table(self, route_table_id):
route_table = self.route_tables.get(route_table_id, None)
if not route_table:
raise InvalidRouteTableIdError(route_table_id)
return route_table
def get_all_route_tables(self, route_table_ids=None, filters=None):
route_tables = self.route_tables.values()
if route_table_ids:
route_tables = [route_table for route_table in route_tables if route_table.id in route_table_ids]
if len(route_tables) != len(route_table_ids):
invalid_id = list(set(route_table_ids).difference(set([route_table.id for route_table in route_tables])))[0]
raise InvalidRouteTableIdError(invalid_id)
return generic_filter(filters, route_tables)
def delete_route_table(self, route_table_id):
route_table = self.get_route_table(route_table_id)
if route_table.associations:
raise DependencyViolationError(
"The routeTable '{0}' has dependencies and cannot be deleted."
.format(route_table_id)
)
self.route_tables.pop(route_table_id)
return True
def associate_route_table(self, route_table_id, subnet_id):
# Idempotent if association already exists.
route_tables_by_subnet = self.get_all_route_tables(filters={'association.subnet-id': [subnet_id]})
if route_tables_by_subnet:
for association_id, check_subnet_id in route_tables_by_subnet[0].associations.items():
if subnet_id == check_subnet_id:
return association_id
# Association does not yet exist, so create it.
route_table = self.get_route_table(route_table_id)
self.get_subnet(subnet_id) # Validate subnet exists
association_id = random_subnet_association_id()
route_table.associations[association_id] = subnet_id
return association_id
def disassociate_route_table(self, association_id):
for route_table in self.route_tables.values():
if association_id in route_table.associations:
return route_table.associations.pop(association_id, None)
raise InvalidAssociationIdError(association_id)
def replace_route_table_association(self, association_id, route_table_id):
# Idempotent if association already exists.
new_route_table = self.get_route_table(route_table_id)
if association_id in new_route_table.associations:
return association_id
# Find route table which currently has the association, error if none.
route_tables_by_association_id = self.get_all_route_tables(filters={'association.route-table-association-id': [association_id]})
if not route_tables_by_association_id:
raise InvalidAssociationIdError(association_id)
# Remove existing association, create new one.
previous_route_table = route_tables_by_association_id[0]
subnet_id = previous_route_table.associations.pop(association_id, None)
return self.associate_route_table(route_table_id, subnet_id)
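# Usage sketch (assumption: RouteTableBackend is part of the composed
# EC2Backend; vpc, subnet and other_route_table are assumed to already exist):
#   route_table = backend.create_route_table(vpc.id)   # includes the local route
#   association_id = backend.associate_route_table(route_table.id, subnet.id)
#   backend.replace_route_table_association(association_id, other_route_table.id)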
class Route(object):
def __init__(self, route_table, destination_cidr_block, local=False,
gateway=None, instance=None, interface=None, vpc_pcx=None):
self.id = generate_route_id(route_table.id, destination_cidr_block)
self.route_table = route_table
self.destination_cidr_block = destination_cidr_block
self.local = local
self.gateway = gateway
self.instance = instance
self.interface = interface
self.vpc_pcx = vpc_pcx
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
gateway_id = properties.get('GatewayId')
instance_id = properties.get('InstanceId')
interface_id = properties.get('NetworkInterfaceId')
pcx_id = properties.get('VpcPeeringConnectionId')
route_table_id = properties['RouteTableId']
ec2_backend = ec2_backends[region_name]
route_table = ec2_backend.create_route(
route_table_id=route_table_id,
destination_cidr_block=properties['DestinationCidrBlock'],
gateway_id=gateway_id,
instance_id=instance_id,
interface_id=interface_id,
vpc_peering_connection_id=pcx_id,
)
return route_table
class RouteBackend(object):
def __init__(self):
super(RouteBackend, self).__init__()
def create_route(self, route_table_id, destination_cidr_block, local=False,
gateway_id=None, instance_id=None, interface_id=None,
vpc_peering_connection_id=None):
route_table = self.get_route_table(route_table_id)
if interface_id:
self.raise_not_implemented_error("CreateRoute to NetworkInterfaceId")
gateway = None
if gateway_id:
if EC2_RESOURCE_TO_PREFIX['vpn-gateway'] in gateway_id:
gateway = self.get_vpn_gateway(gateway_id)
elif EC2_RESOURCE_TO_PREFIX['internet-gateway'] in gateway_id:
gateway = self.get_internet_gateway(gateway_id)
route = Route(route_table, destination_cidr_block, local=local,
gateway=gateway,
instance=self.get_instance(instance_id) if instance_id else None,
interface=None,
vpc_pcx=self.get_vpc_peering_connection(vpc_peering_connection_id) if vpc_peering_connection_id else None)
route_table.routes[route.id] = route
return route
def replace_route(self, route_table_id, destination_cidr_block,
gateway_id=None, instance_id=None, interface_id=None,
vpc_peering_connection_id=None):
route_table = self.get_route_table(route_table_id)
route_id = generate_route_id(route_table.id, destination_cidr_block)
route = route_table.routes[route_id]
if interface_id:
self.raise_not_implemented_error("ReplaceRoute to NetworkInterfaceId")
route.gateway = None
if gateway_id:
if EC2_RESOURCE_TO_PREFIX['vpn-gateway'] in gateway_id:
route.gateway = self.get_vpn_gateway(gateway_id)
elif EC2_RESOURCE_TO_PREFIX['internet-gateway'] in gateway_id:
route.gateway = self.get_internet_gateway(gateway_id)
route.instance = self.get_instance(instance_id) if instance_id else None
route.interface = None
route.vpc_pcx = self.get_vpc_peering_connection(vpc_peering_connection_id) if vpc_peering_connection_id else None
route_table.routes[route.id] = route
return route
def get_route(self, route_id):
route_table_id, destination_cidr_block = split_route_id(route_id)
route_table = self.get_route_table(route_table_id)
        return route_table.routes.get(route_id)
def delete_route(self, route_table_id, destination_cidr_block):
route_table = self.get_route_table(route_table_id)
route_id = generate_route_id(route_table_id, destination_cidr_block)
deleted = route_table.routes.pop(route_id, None)
if not deleted:
raise InvalidRouteError(route_table_id, destination_cidr_block)
return deleted
class InternetGateway(TaggedEC2Resource):
def __init__(self, ec2_backend):
self.ec2_backend = ec2_backend
self.id = random_internet_gateway_id()
self.vpc = None
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
ec2_backend = ec2_backends[region_name]
return ec2_backend.create_internet_gateway()
@property
def physical_resource_id(self):
return self.id
@property
def attachment_state(self):
if self.vpc:
return "available"
else:
return "detached"
class InternetGatewayBackend(object):
def __init__(self):
self.internet_gateways = {}
super(InternetGatewayBackend, self).__init__()
def create_internet_gateway(self):
igw = InternetGateway(self)
self.internet_gateways[igw.id] = igw
return igw
def describe_internet_gateways(self, internet_gateway_ids=None, filters=None):
igws = []
if internet_gateway_ids is None:
igws = self.internet_gateways.values()
else:
for igw_id in internet_gateway_ids:
if igw_id in self.internet_gateways:
igws.append(self.internet_gateways[igw_id])
else:
raise InvalidInternetGatewayIdError(igw_id)
if filters is not None:
igws = filter_internet_gateways(igws, filters)
return igws
def delete_internet_gateway(self, internet_gateway_id):
igw = self.get_internet_gateway(internet_gateway_id)
if igw.vpc:
raise DependencyViolationError(
"{0} is being utilized by {1}"
.format(internet_gateway_id, igw.vpc.id)
)
self.internet_gateways.pop(internet_gateway_id)
return True
def detach_internet_gateway(self, internet_gateway_id, vpc_id):
igw = self.get_internet_gateway(internet_gateway_id)
if not igw.vpc or igw.vpc.id != vpc_id:
raise GatewayNotAttachedError(internet_gateway_id, vpc_id)
igw.vpc = None
return True
def attach_internet_gateway(self, internet_gateway_id, vpc_id):
igw = self.get_internet_gateway(internet_gateway_id)
if igw.vpc:
raise ResourceAlreadyAssociatedError(internet_gateway_id)
vpc = self.get_vpc(vpc_id)
igw.vpc = vpc
return True
def get_internet_gateway(self, internet_gateway_id):
igw_ids = [internet_gateway_id]
return self.describe_internet_gateways(internet_gateway_ids=igw_ids)[0]
class VPCGatewayAttachment(object):
def __init__(self, gateway_id, vpc_id):
self.gateway_id = gateway_id
self.vpc_id = vpc_id
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ec2_backend = ec2_backends[region_name]
attachment = ec2_backend.create_vpc_gateway_attachment(
gateway_id=properties['InternetGatewayId'],
vpc_id=properties['VpcId'],
)
ec2_backend.attach_internet_gateway(properties['InternetGatewayId'], properties['VpcId'])
return attachment
@property
def physical_resource_id(self):
return self.id
class VPCGatewayAttachmentBackend(object):
def __init__(self):
self.gateway_attachments = {}
super(VPCGatewayAttachmentBackend, self).__init__()
def create_vpc_gateway_attachment(self, vpc_id, gateway_id):
        attachment = VPCGatewayAttachment(gateway_id=gateway_id, vpc_id=vpc_id)
self.gateway_attachments[gateway_id] = attachment
return attachment
class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource):
def __init__(self, ec2_backend, spot_request_id, price, image_id, type,
valid_from, valid_until, launch_group, availability_zone_group,
key_name, security_groups, user_data, instance_type, placement,
kernel_id, ramdisk_id, monitoring_enabled, subnet_id,
**kwargs):
super(SpotInstanceRequest, self).__init__(**kwargs)
ls = LaunchSpecification()
self.ec2_backend = ec2_backend
self.launch_specification = ls
self.id = spot_request_id
self.state = "open"
self.price = price
self.type = type
self.valid_from = valid_from
self.valid_until = valid_until
self.launch_group = launch_group
self.availability_zone_group = availability_zone_group
        self.user_data = user_data  # NOT part of the launch specification below
ls.kernel = kernel_id
ls.ramdisk = ramdisk_id
ls.image_id = image_id
ls.key_name = key_name
ls.instance_type = instance_type
ls.placement = placement
ls.monitored = monitoring_enabled
ls.subnet_id = subnet_id
if security_groups:
for group_name in security_groups:
group = self.ec2_backend.get_security_group_from_name(group_name)
if group:
ls.groups.append(group)
else:
            # If no security groups were specified, add the default group
default_group = self.ec2_backend.get_security_group_from_name("default")
ls.groups.append(default_group)
def get_filter_value(self, filter_name):
if filter_name == 'state':
return self.state
if filter_name == 'spot-instance-request-id':
return self.id
filter_value = super(SpotInstanceRequest, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeSpotInstanceRequests".format(filter_name))
return filter_value
@six.add_metaclass(Model)
class SpotRequestBackend(object):
def __init__(self):
self.spot_instance_requests = {}
super(SpotRequestBackend, self).__init__()
def request_spot_instances(self, price, image_id, count, type, valid_from,
valid_until, launch_group, availability_zone_group,
key_name, security_groups, user_data,
instance_type, placement, kernel_id, ramdisk_id,
monitoring_enabled, subnet_id):
requests = []
for _ in range(count):
spot_request_id = random_spot_request_id()
request = SpotInstanceRequest(self,
spot_request_id, price, image_id, type, valid_from, valid_until,
launch_group, availability_zone_group, key_name, security_groups,
user_data, instance_type, placement, kernel_id, ramdisk_id,
monitoring_enabled, subnet_id)
self.spot_instance_requests[spot_request_id] = request
requests.append(request)
return requests
@Model.prop('SpotInstanceRequest')
def describe_spot_instance_requests(self, filters=None):
requests = self.spot_instance_requests.values()
return generic_filter(filters, requests)
def cancel_spot_instance_requests(self, request_ids):
requests = []
for request_id in request_ids:
requests.append(self.spot_instance_requests.pop(request_id))
return requests
class ElasticAddress(object):
def __init__(self, domain):
self.public_ip = random_ip()
self.allocation_id = random_eip_allocation_id() if domain == "vpc" else None
self.domain = domain
self.instance = None
self.eni = None
self.association_id = None
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
ec2_backend = ec2_backends[region_name]
properties = cloudformation_json.get('Properties')
instance_id = None
if properties:
domain = properties.get('Domain')
eip = ec2_backend.allocate_address(
domain=domain if domain else 'standard')
instance_id = properties.get('InstanceId')
else:
eip = ec2_backend.allocate_address(domain='standard')
if instance_id:
instance = ec2_backend.get_instance_by_id(instance_id)
ec2_backend.associate_address(instance, address=eip.public_ip)
return eip
@property
def physical_resource_id(self):
return self.allocation_id if self.allocation_id else self.public_ip
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'AllocationId':
return self.allocation_id
raise UnformattedGetAttTemplateException()
class ElasticAddressBackend(object):
def __init__(self):
self.addresses = []
super(ElasticAddressBackend, self).__init__()
def allocate_address(self, domain):
if domain not in ['standard', 'vpc']:
raise InvalidDomainError(domain)
address = ElasticAddress(domain)
self.addresses.append(address)
return address
def address_by_ip(self, ips):
eips = [address for address in self.addresses
if address.public_ip in ips]
# TODO: Trim error message down to specific invalid address.
if not eips or len(ips) > len(eips):
raise InvalidAddressError(ips)
return eips
def address_by_allocation(self, allocation_ids):
eips = [address for address in self.addresses
if address.allocation_id in allocation_ids]
# TODO: Trim error message down to specific invalid id.
if not eips or len(allocation_ids) > len(eips):
raise InvalidAllocationIdError(allocation_ids)
return eips
def address_by_association(self, association_ids):
eips = [address for address in self.addresses
if address.association_id in association_ids]
# TODO: Trim error message down to specific invalid id.
if not eips or len(association_ids) > len(eips):
raise InvalidAssociationIdError(association_ids)
return eips
def associate_address(self, instance=None, eni=None, address=None, allocation_id=None, reassociate=False):
eips = []
if address:
eips = self.address_by_ip([address])
elif allocation_id:
eips = self.address_by_allocation([allocation_id])
eip = eips[0]
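        # Allow the association when the EIP is not yet bound, is already bound
        # to this same instance/ENI, or when the caller explicitly reassociates.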
new_instance_association = bool(instance and (not eip.instance or eip.instance.id == instance.id))
new_eni_association = bool(eni and (not eip.eni or eni.id == eip.eni.id))
if new_instance_association or new_eni_association or reassociate:
eip.instance = instance
eip.eni = eni
if eip.eni:
eip.eni.public_ip = eip.public_ip
if eip.domain == "vpc":
eip.association_id = random_eip_association_id()
return eip
raise ResourceAlreadyAssociatedError(eip.public_ip)
def describe_addresses(self):
return self.addresses
def disassociate_address(self, address=None, association_id=None):
eips = []
if address:
eips = self.address_by_ip([address])
elif association_id:
eips = self.address_by_association([association_id])
eip = eips[0]
if eip.eni:
if eip.eni.instance and eip.eni.instance._state.name == "running":
eip.eni.check_auto_public_ip()
else:
eip.eni.public_ip = None
eip.eni = None
eip.instance = None
eip.association_id = None
return True
def release_address(self, address=None, allocation_id=None):
eips = []
if address:
eips = self.address_by_ip([address])
elif allocation_id:
eips = self.address_by_allocation([allocation_id])
eip = eips[0]
self.disassociate_address(address=eip.public_ip)
eip.allocation_id = None
self.addresses.remove(eip)
return True
class DHCPOptionsSet(TaggedEC2Resource):
def __init__(self, ec2_backend, domain_name_servers=None, domain_name=None,
ntp_servers=None, netbios_name_servers=None,
netbios_node_type=None):
self.ec2_backend = ec2_backend
self._options = {
"domain-name-servers": domain_name_servers,
"domain-name": domain_name,
"ntp-servers": ntp_servers,
"netbios-name-servers": netbios_name_servers,
"netbios-node-type": netbios_node_type,
}
self.id = random_dhcp_option_id()
self.vpc = None
def get_filter_value(self, filter_name):
"""
API Version 2015-10-01 defines the following filters for DescribeDhcpOptions:
* dhcp-options-id
* key
* value
* tag:key=value
* tag-key
* tag-value
Taken from: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeDhcpOptions.html
"""
if filter_name == 'dhcp-options-id':
return self.id
elif filter_name == 'key':
return list(self._options.keys())
elif filter_name == 'value':
values = [item for item in list(self._options.values()) if item]
return itertools.chain(*values)
filter_value = super(DHCPOptionsSet, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeDhcpOptions".format(filter_name))
return filter_value
@property
def options(self):
return self._options
class DHCPOptionsSetBackend(object):
def __init__(self):
self.dhcp_options_sets = {}
super(DHCPOptionsSetBackend, self).__init__()
def associate_dhcp_options(self, dhcp_options, vpc):
dhcp_options.vpc = vpc
vpc.dhcp_options = dhcp_options
def create_dhcp_options(
self, domain_name_servers=None, domain_name=None,
ntp_servers=None, netbios_name_servers=None,
netbios_node_type=None):
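        # Enforce the limits modeled here: at most four servers per option and
        # a NetBIOS node type of 1, 2, 4 or 8.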
NETBIOS_NODE_TYPES = [1, 2, 4, 8]
for field_value in domain_name_servers, ntp_servers, netbios_name_servers:
if field_value and len(field_value) > 4:
raise InvalidParameterValueError(",".join(field_value))
if netbios_node_type and int(netbios_node_type[0]) not in NETBIOS_NODE_TYPES:
raise InvalidParameterValueError(netbios_node_type)
options = DHCPOptionsSet(
self, domain_name_servers, domain_name, ntp_servers,
netbios_name_servers, netbios_node_type
)
self.dhcp_options_sets[options.id] = options
return options
def describe_dhcp_options(self, options_ids=None):
options_sets = []
for option_id in options_ids or []:
if option_id in self.dhcp_options_sets:
options_sets.append(self.dhcp_options_sets[option_id])
else:
raise InvalidDHCPOptionsIdError(option_id)
return options_sets or self.dhcp_options_sets.values()
def delete_dhcp_options_set(self, options_id):
if not (options_id and options_id.startswith('dopt-')):
raise MalformedDHCPOptionsIdError(options_id)
if options_id in self.dhcp_options_sets:
if self.dhcp_options_sets[options_id].vpc:
raise DependencyViolationError("Cannot delete assigned DHCP options.")
self.dhcp_options_sets.pop(options_id)
else:
raise InvalidDHCPOptionsIdError(options_id)
return True
def get_all_dhcp_options(self, dhcp_options_ids=None, filters=None):
dhcp_options_sets = self.dhcp_options_sets.values()
if dhcp_options_ids:
dhcp_options_sets = [dhcp_options_set for dhcp_options_set in dhcp_options_sets if dhcp_options_set.id in dhcp_options_ids]
if len(dhcp_options_sets) != len(dhcp_options_ids):
invalid_id = list(set(dhcp_options_ids).difference(set([dhcp_options_set.id for dhcp_options_set in dhcp_options_sets])))[0]
raise InvalidDHCPOptionsIdError(invalid_id)
return generic_filter(filters, dhcp_options_sets)
class VPNConnection(TaggedEC2Resource):
def __init__(self, ec2_backend, id, type,
customer_gateway_id, vpn_gateway_id):
self.ec2_backend = ec2_backend
self.id = id
self.state = 'available'
self.customer_gateway_configuration = {}
self.type = type
self.customer_gateway_id = customer_gateway_id
self.vpn_gateway_id = vpn_gateway_id
self.tunnels = None
self.options = None
self.static_routes = None
class VPNConnectionBackend(object):
def __init__(self):
self.vpn_connections = {}
super(VPNConnectionBackend, self).__init__()
def create_vpn_connection(self, type, customer_gateway_id,
vpn_gateway_id,
static_routes_only=None):
vpn_connection_id = random_vpn_connection_id()
if static_routes_only:
pass
vpn_connection = VPNConnection(
self, id=vpn_connection_id, type=type,
customer_gateway_id=customer_gateway_id,
vpn_gateway_id=vpn_gateway_id
)
self.vpn_connections[vpn_connection.id] = vpn_connection
return vpn_connection
def delete_vpn_connection(self, vpn_connection_id):
if vpn_connection_id in self.vpn_connections:
self.vpn_connections.pop(vpn_connection_id)
else:
raise InvalidVpnConnectionIdError(vpn_connection_id)
return True
def describe_vpn_connections(self, vpn_connection_ids=None):
vpn_connections = []
for vpn_connection_id in vpn_connection_ids or []:
if vpn_connection_id in self.vpn_connections:
vpn_connections.append(self.vpn_connections[vpn_connection_id])
else:
raise InvalidVpnConnectionIdError(vpn_connection_id)
return vpn_connections or self.vpn_connections.values()
def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None):
vpn_connections = self.vpn_connections.values()
if vpn_connection_ids:
vpn_connections = [vpn_connection for vpn_connection in vpn_connections
if vpn_connection.id in vpn_connection_ids]
if len(vpn_connections) != len(vpn_connection_ids):
invalid_id = list(set(vpn_connection_ids).difference(set([vpn_connection.id for vpn_connection in vpn_connections])))[0]
raise InvalidVpnConnectionIdError(invalid_id)
return generic_filter(filters, vpn_connections)
class NetworkAclBackend(object):
def __init__(self):
self.network_acls = {}
super(NetworkAclBackend, self).__init__()
def get_network_acl(self, network_acl_id):
network_acl = self.network_acls.get(network_acl_id, None)
if not network_acl:
raise InvalidNetworkAclIdError(network_acl_id)
return network_acl
def create_network_acl(self, vpc_id, default=False):
network_acl_id = random_network_acl_id()
self.get_vpc(vpc_id)
network_acl = NetworkAcl(self, network_acl_id, vpc_id, default)
self.network_acls[network_acl_id] = network_acl
return network_acl
def get_all_network_acls(self, network_acl_ids=None, filters=None):
network_acls = self.network_acls.values()
if network_acl_ids:
network_acls = [network_acl for network_acl in network_acls
if network_acl.id in network_acl_ids]
if len(network_acls) != len(network_acl_ids):
invalid_id = list(set(network_acl_ids).difference(set([network_acl.id for network_acl in network_acls])))[0]
                raise InvalidNetworkAclIdError(invalid_id)
return generic_filter(filters, network_acls)
def delete_network_acl(self, network_acl_id):
deleted = self.network_acls.pop(network_acl_id, None)
if not deleted:
raise InvalidNetworkAclIdError(network_acl_id)
return deleted
def create_network_acl_entry(self, network_acl_id, rule_number,
protocol, rule_action, egress, cidr_block,
icmp_code, icmp_type, port_range_from,
port_range_to):
network_acl_entry = NetworkAclEntry(self, network_acl_id, rule_number,
protocol, rule_action, egress,
cidr_block, icmp_code, icmp_type,
port_range_from, port_range_to)
network_acl = self.get_network_acl(network_acl_id)
network_acl.network_acl_entries.append(network_acl_entry)
return network_acl_entry
def replace_network_acl_association(self, association_id,
network_acl_id):
# lookup existing association for subnet and delete it
default_acl = next(value for key, value in self.network_acls.items()
if association_id in value.associations.keys())
subnet_id = None
for key, value in default_acl.associations.items():
if key == association_id:
subnet_id = default_acl.associations[key].subnet_id
del default_acl.associations[key]
break
new_assoc_id = random_network_acl_subnet_association_id()
association = NetworkAclAssociation(self,
new_assoc_id,
subnet_id,
network_acl_id)
new_acl = self.get_network_acl(network_acl_id)
new_acl.associations[new_assoc_id] = association
return association
def associate_default_network_acl_with_subnet(self, subnet_id):
association_id = random_network_acl_subnet_association_id()
acl = next(acl for acl in self.network_acls.values() if acl.default)
acl.associations[association_id] = NetworkAclAssociation(self, association_id,
subnet_id, acl.id)
class NetworkAclAssociation(object):
def __init__(self, ec2_backend, new_association_id,
subnet_id, network_acl_id):
self.ec2_backend = ec2_backend
self.id = new_association_id
self.new_association_id = new_association_id
self.subnet_id = subnet_id
self.network_acl_id = network_acl_id
super(NetworkAclAssociation, self).__init__()
class NetworkAcl(TaggedEC2Resource):
def __init__(self, ec2_backend, network_acl_id, vpc_id, default=False):
self.ec2_backend = ec2_backend
self.id = network_acl_id
self.vpc_id = vpc_id
self.network_acl_entries = []
self.associations = {}
self.default = 'true' if default is True else 'false'
def get_filter_value(self, filter_name):
if filter_name == "default":
return self.default
elif filter_name == "vpc-id":
return self.vpc_id
elif filter_name == "association.network-acl-id":
return self.id
elif filter_name == "association.subnet-id":
return [assoc.subnet_id for assoc in self.associations.values()]
filter_value = super(NetworkAcl, self).get_filter_value(filter_name)
if filter_value is None:
self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeNetworkAcls".format(filter_name))
return filter_value
class NetworkAclEntry(TaggedEC2Resource):
def __init__(self, ec2_backend, network_acl_id, rule_number,
protocol, rule_action, egress, cidr_block,
icmp_code, icmp_type, port_range_from,
port_range_to):
self.ec2_backend = ec2_backend
self.network_acl_id = network_acl_id
self.rule_number = rule_number
self.protocol = protocol
self.rule_action = rule_action
self.egress = egress
self.cidr_block = cidr_block
self.icmp_code = icmp_code
self.icmp_type = icmp_type
self.port_range_from = port_range_from
self.port_range_to = port_range_to
class VpnGateway(TaggedEC2Resource):
def __init__(self, ec2_backend, id, type):
self.ec2_backend = ec2_backend
self.id = id
self.type = type
self.attachments = {}
super(VpnGateway, self).__init__()
class VpnGatewayAttachment(object):
def __init__(self, vpc_id, state):
self.vpc_id = vpc_id
self.state = state
super(VpnGatewayAttachment, self).__init__()
class VpnGatewayBackend(object):
def __init__(self):
self.vpn_gateways = {}
super(VpnGatewayBackend, self).__init__()
def create_vpn_gateway(self, type='ipsec.1'):
vpn_gateway_id = random_vpn_gateway_id()
vpn_gateway = VpnGateway(self, vpn_gateway_id, type)
self.vpn_gateways[vpn_gateway_id] = vpn_gateway
return vpn_gateway
def get_all_vpn_gateways(self, filters=None):
vpn_gateways = self.vpn_gateways.values()
return generic_filter(filters, vpn_gateways)
def get_vpn_gateway(self, vpn_gateway_id):
vpn_gateway = self.vpn_gateways.get(vpn_gateway_id, None)
if not vpn_gateway:
raise InvalidVpnGatewayIdError(vpn_gateway_id)
return vpn_gateway
def attach_vpn_gateway(self, vpn_gateway_id, vpc_id):
vpn_gateway = self.get_vpn_gateway(vpn_gateway_id)
self.get_vpc(vpc_id)
attachment = VpnGatewayAttachment(vpc_id, state='attached')
vpn_gateway.attachments[vpc_id] = attachment
return attachment
def delete_vpn_gateway(self, vpn_gateway_id):
deleted = self.vpn_gateways.pop(vpn_gateway_id, None)
if not deleted:
raise InvalidVpnGatewayIdError(vpn_gateway_id)
return deleted
def detach_vpn_gateway(self, vpn_gateway_id, vpc_id):
vpn_gateway = self.get_vpn_gateway(vpn_gateway_id)
self.get_vpc(vpc_id)
detached = vpn_gateway.attachments.pop(vpc_id, None)
if not detached:
raise InvalidVPCIdError(vpc_id)
return detached
class CustomerGateway(TaggedEC2Resource):
def __init__(self, ec2_backend, id, type, ip_address, bgp_asn):
self.ec2_backend = ec2_backend
self.id = id
self.type = type
self.ip_address = ip_address
self.bgp_asn = bgp_asn
self.attachments = {}
super(CustomerGateway, self).__init__()
class CustomerGatewayBackend(object):
def __init__(self):
self.customer_gateways = {}
super(CustomerGatewayBackend, self).__init__()
def create_customer_gateway(self, type='ipsec.1', ip_address=None, bgp_asn=None):
customer_gateway_id = random_customer_gateway_id()
customer_gateway = CustomerGateway(self, customer_gateway_id, type, ip_address, bgp_asn)
self.customer_gateways[customer_gateway_id] = customer_gateway
return customer_gateway
def get_all_customer_gateways(self, filters=None):
customer_gateways = self.customer_gateways.values()
return generic_filter(filters, customer_gateways)
def get_customer_gateway(self, customer_gateway_id):
customer_gateway = self.customer_gateways.get(customer_gateway_id, None)
if not customer_gateway:
raise InvalidCustomerGatewayIdError(customer_gateway_id)
return customer_gateway
def delete_customer_gateway(self, customer_gateway_id):
deleted = self.customer_gateways.pop(customer_gateway_id, None)
if not deleted:
raise InvalidCustomerGatewayIdError(customer_gateway_id)
return deleted
class NatGateway(object):
def __init__(self, backend, subnet_id, allocation_id):
# public properties
self.id = random_nat_gateway_id()
self.subnet_id = subnet_id
self.allocation_id = allocation_id
self.state = 'available'
self.private_ip = random_private_ip()
# protected properties
self._created_at = datetime.utcnow()
self._backend = backend
# NOTE: this is the core of NAT Gateways creation
self._eni = self._backend.create_network_interface(backend.get_subnet(self.subnet_id), self.private_ip)
# associate allocation with ENI
self._backend.associate_address(eni=self._eni, allocation_id=self.allocation_id)
@property
def vpc_id(self):
subnet = self._backend.get_subnet(self.subnet_id)
return subnet.vpc_id
@property
def create_time(self):
return iso_8601_datetime_with_milliseconds(self._created_at)
@property
def network_interface_id(self):
return self._eni.id
@property
def public_ip(self):
eips = self._backend.address_by_allocation([self.allocation_id])
return eips[0].public_ip
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
ec2_backend = ec2_backends[region_name]
nat_gateway = ec2_backend.create_nat_gateway(
cloudformation_json['Properties']['SubnetId'],
cloudformation_json['Properties']['AllocationId'],
)
return nat_gateway
class NatGatewayBackend(object):
def __init__(self):
self.nat_gateways = {}
def get_all_nat_gateways(self, filters):
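        # NOTE: the filters argument is accepted for API compatibility but is
        # not applied yet; every NAT gateway is returned.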
return self.nat_gateways.values()
def create_nat_gateway(self, subnet_id, allocation_id):
nat_gateway = NatGateway(self, subnet_id, allocation_id)
self.nat_gateways[nat_gateway.id] = nat_gateway
return nat_gateway
def delete_nat_gateway(self, nat_gateway_id):
return self.nat_gateways.pop(nat_gateway_id)
class EC2Backend(BaseBackend, InstanceBackend, TagBackend, AmiBackend,
RegionsAndZonesBackend, SecurityGroupBackend, EBSBackend,
VPCBackend, SubnetBackend, SubnetRouteTableAssociationBackend,
NetworkInterfaceBackend, VPNConnectionBackend,
VPCPeeringConnectionBackend,
RouteTableBackend, RouteBackend, InternetGatewayBackend,
VPCGatewayAttachmentBackend, SpotRequestBackend,
ElasticAddressBackend, KeyPairBackend, DHCPOptionsSetBackend,
NetworkAclBackend, VpnGatewayBackend, CustomerGatewayBackend,
NatGatewayBackend):
def __init__(self, region_name):
super(EC2Backend, self).__init__()
self.region_name = region_name
def reset(self):
region_name = self.region_name
self.__dict__ = {}
self.__init__(region_name)
# Use this to generate a proper error template response when in a response handler.
def raise_error(self, code, message):
raise EC2ClientError(code, message)
def raise_not_implemented_error(self, blurb):
msg = "{0} has not been implemented in Moto yet." \
" Feel free to open an issue at" \
" https://github.com/spulec/moto/issues".format(blurb)
raise NotImplementedError(msg)
def do_resources_exist(self, resource_ids):
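        # Dispatch on the resource-id prefix (e.g. 'igw-', 'vpc-') and call the
        # matching describe/get method, which raises if the id does not exist.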
for resource_id in resource_ids:
resource_prefix = get_prefix(resource_id)
if resource_prefix == EC2_RESOURCE_TO_PREFIX['customer-gateway']:
self.get_customer_gateway(customer_gateway_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['dhcp-options']:
self.describe_dhcp_options(options_ids=[resource_id])
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['image']:
self.describe_images(ami_ids=[resource_id])
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['instance']:
self.get_instance_by_id(instance_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['internet-gateway']:
self.describe_internet_gateways(internet_gateway_ids=[resource_id])
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-acl']:
self.get_all_network_acls()
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-interface']:
self.describe_network_interfaces(filters={'network-interface-id': resource_id})
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['reserved-instance']:
self.raise_not_implemented_error('DescribeReservedInstances')
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['route-table']:
self.get_route_table(route_table_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['security-group']:
self.describe_security_groups(group_ids=[resource_id])
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['snapshot']:
self.get_snapshot(snapshot_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['spot-instance-request']:
self.describe_spot_instance_requests(filters={'spot-instance-request-id': resource_id})
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['subnet']:
self.get_subnet(subnet_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['volume']:
self.get_volume(volume_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpc']:
self.get_vpc(vpc_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpc-peering-connection']:
self.get_vpc_peering_connection(vpc_pcx_id=resource_id)
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpn-connection']:
self.describe_vpn_connections(vpn_connection_ids=[resource_id])
elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpn-gateway']:
self.get_vpn_gateway(vpn_gateway_id=resource_id)
return True
ec2_backends = {}
for region in boto.ec2.regions():
ec2_backends[region.name] = EC2Backend(region.name)
| mrucci/moto | moto/ec2/models.py | Python | apache-2.0 | 121,494 | 0.001523 |
#!/usr/bin/env python
"""
Make a pie chart where peaks fall in annotations; see \
:mod:`pybedtools.contrib.Classifier` for more flexibility.
The results here are similar to CEAS (http://liulab.dfci.harvard.edu/CEAS/).
However, multi-featuretype classes are reported. That is, if a peak falls in
an exon in one isoform and an intron in another isoform, the class is "exon,
intron".
"""
import sys
import urllib
import urllib2
import argparse
import pybedtools
from collections import defaultdict
def make_pie(bed, gff, stranded=False, out='out.png',
include=None, exclude=None, thresh=0):
a = pybedtools.BedTool(bed)
b = pybedtools.BedTool(gff).remove_invalid()
c = a.intersect(b,
wao=True,
s=stranded,
stream=True)
# So we can grab just `a` features later...
afields = a.field_count()
# Where we can find the featuretype in the -wao output. Assumes GFF.
type_idx = afields + 2
# 3 different code paths depending on include/exclude to cut down on
# if/else checks.
#
# For un-included featuretypes, put them in the '.' category (unnannotated)
if include and exclude:
raise ValueError('Can only specify one of `include` or `exclude`.')
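    # Map each `a` feature (peak), keyed by its original fields, to the set of
    # featuretypes it overlaps.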
d = defaultdict(set)
if include:
for feature in c:
featuretype = feature[type_idx]
key = '\t'.join(feature[:afields])
if featuretype in include:
d[key].update([featuretype])
else:
d[key].update(['.'])
elif exclude:
for feature in c:
featuretype = feature[type_idx]
key = '\t'.join(feature[:afields])
if featuretype not in exclude:
d[key].update([featuretype])
else:
d[key].update(['.'])
else:
for feature in c:
featuretype = feature[type_idx]
key = '\t'.join(feature[:afields])
d[key].update([featuretype])
def labelmaker(x):
x.difference_update('.')
label = []
for i in list(x):
if i == 'three_prime_UTR':
i = "3' UTR"
if i == 'five_prime_UTR':
i = "5' UTR"
label.append(i)
return ', '.join(sorted(label))
# Prepare results for Google Charts API
npeaks = float(len(d))
count_d = defaultdict(int)
for peak, featuretypes in d.items():
if featuretypes == set('.'):
featuretype = 'unannotated'
else:
featuretype = labelmaker(featuretypes)
count_d[featuretype] += 1
results = count_d.items()
results.sort(key=lambda x: x[1])
labels, counts = zip(*results)
labels = []
counts_to_use = []
for label, count in results:
perc = count / npeaks * 100
if perc > thresh:
labels.append('%s: %s (%.1f%%)' % (label,
count,
perc))
counts_to_use.append(perc)
# Set up the Gchart data
data = {'cht': 'p',
'chs': '750x350',
'chd': 't:' + ','.join(map(str, counts_to_use)),
'chl': '|'.join(labels)}
# Encode it correctly
encoded_data = urllib.urlencode(data)
# Send request and get data; write to file
url = 'https://chart.googleapis.com/chart?'
req = urllib2.Request(url, encoded_data)
response = urllib2.urlopen(req)
f = open(out, 'w')
f.write(response.read())
f.close()
def main():
"""
Make a pie chart of features overlapping annotations (e.g., peaks in
introns, exons, etc)
"""
ap = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
ap.add_argument('--bed', help='BED file of e.g. peaks')
ap.add_argument('--gff', help='GFF file of e.g. annotations')
ap.add_argument('--out', default='out.png', help='Output PNG file')
ap.add_argument('--stranded', action='store_true',
help='Use strand-specific intersections')
ap.add_argument('--include', nargs='*', help='Featuretypes to include')
ap.add_argument('--exclude', nargs='*', help='Featuretypes to exclude')
    ap.add_argument('--thresh', type=float, default=0,
                    help='Threshold percentage below which output will be '
                    'suppressed')
ap.add_argument('--test', action='store_true',
help='Run test, overwriting all other args. Result will '
'be "out.png" in current directory.')
args = ap.parse_args()
if not (args.bed and args.gff) and not args.test:
ap.print_help()
sys.exit(1)
if not args.test:
if args.include and args.exclude:
raise ValueError('Cannot specify both --include and --exclude')
make_pie(bed=args.bed,
gff=args.gff,
out=args.out,
thresh=args.thresh,
stranded=args.stranded,
include=args.include,
exclude=args.exclude)
else:
make_pie(bed=pybedtools.example_filename('gdc.bed'),
gff=pybedtools.example_filename('gdc.gff'),
stranded=True,
out='out.png',
include=['exon',
'CDS',
'intron',
'five_prime_UTR',
'three_prime_UTR'])
if __name__ == "__main__":
import doctest
if doctest.testmod(optionflags=doctest.ELLIPSIS).failed == 0:
main()
| jos4uke/getSeqFlankBlatHit | lib/python2.7/site-packages/pybedtools/scripts/peak_pie.py | Python | gpl-2.0 | 5,681 | 0.000176 |
"""
Tests for gravity-style spatial interaction models.
Test data is the Austria migration dataset used in Dennett's (2012) practical primer
on spatial interaction modeling. The data was made available through the
following dropbox link: http://dl.dropbox.com/u/8649795/AT_Austria.csv.
The data has been pre-filtered so that there are no intra-zonal flows.
Dennett, A. (2012). Estimating flows between geographical locations: get me
started in spatial interaction modelling (Working Paper No. 184). UCL: Citeseer.
"""
import unittest
import math
import numpy as np
from ..gravity import BaseGravity, Gravity, Production, Attraction, Doubly
class TestGravity(unittest.TestCase):
"""Tests for gravity-type models"""
def setUp(self):
self.f = np.array([1131, 1887, 69, 738, 98, 31, 43, 19, 1633,
14055, 416, 1276, 1850, 388, 303, 159, 2301, 20164,
1080, 1831, 1943, 742, 674, 407, 85, 379, 1597,
1608, 328, 317, 469, 114, 762, 1110, 2973, 1252,
1081, 622, 425, 262, 196, 2027, 3498, 346, 1332,
2144, 821, 274, 49, 378, 1349, 310, 851, 2117,
630, 106, 87, 424, 978, 490, 670, 577, 546,
569, 33, 128, 643, 154, 328, 199, 112, 587])
self.o = np.array(['AT11',
'AT11',
'AT11',
'AT11',
'AT11',
'AT11',
'AT11',
'AT11',
'AT12',
'AT12',
'AT12',
'AT12',
'AT12',
'AT12',
'AT12',
'AT12',
'AT13',
'AT13',
'AT13',
'AT13',
'AT13',
'AT13',
'AT13',
'AT13',
'AT21',
'AT21',
'AT21',
'AT21',
'AT21',
'AT21',
'AT21',
'AT21',
'AT22',
'AT22',
'AT22',
'AT22',
'AT22',
'AT22',
'AT22',
'AT22',
'AT31',
'AT31',
'AT31',
'AT31',
'AT31',
'AT31',
'AT31',
'AT31',
'AT32',
'AT32',
'AT32',
'AT32',
'AT32',
'AT32',
'AT32',
'AT32',
'AT33',
'AT33',
'AT33',
'AT33',
'AT33',
'AT33',
'AT33',
'AT33',
'AT34',
'AT34',
'AT34',
'AT34',
'AT34',
'AT34',
'AT34',
'AT34'])
self.d = np.array(['AT12',
'AT13',
'AT21',
'AT22',
'AT31',
'AT32',
'AT33',
'AT34',
'AT11',
'AT13',
'AT21',
'AT22',
'AT31',
'AT32',
'AT33',
'AT34',
'AT11',
'AT12',
'AT21',
'AT22',
'AT31',
'AT32',
'AT33',
'AT34',
'AT11',
'AT12',
'AT13',
'AT22',
'AT31',
'AT32',
'AT33',
'AT34',
'AT11',
'AT12',
'AT13',
'AT21',
'AT31',
'AT32',
'AT33',
'AT34',
'AT11',
'AT12',
'AT13',
'AT21',
'AT22',
'AT32',
'AT33',
'AT34',
'AT11',
'AT12',
'AT13',
'AT21',
'AT22',
'AT31',
'AT33',
'AT34',
'AT11',
'AT12',
'AT13',
'AT21',
'AT22',
'AT31',
'AT32',
'AT34',
'AT11',
'AT12',
'AT13',
'AT21',
'AT22',
'AT31',
'AT32',
'AT33'])
self.dij = np.array([103.001845,
84.204666,
220.811933,
132.00748,
214.511814,
246.933305,
390.85611,
505.089539,
103.001845,
45.796272,
216.994739,
129.878172,
140.706671,
201.232355,
343.50075,
453.515594,
84.204666,
45.796272,
249.932874,
158.630661,
186.420738,
244.108305,
387.61776,
498.407152,
220.811933,
216.994739,
249.932874,
92.407958,
151.777157,
92.894408,
194.851669,
306.105825,
132.00748,
129.878172,
158.630661,
92.407958,
124.563096,
122.433524,
261.893783,
376.34667,
214.511814,
140.706671,
186.420738,
151.777157,
124.563096,
81.753652,
208.456383,
314.793199,
246.933305,
201.232355,
244.108305,
92.894408,
122.433524,
81.753652,
145.076472,
258.591197,
390.85611,
343.50075,
387.61776,
194.851669,
261.893783,
208.456383,
145.076472,
114.46325,
505.089539,
453.515594,
498.407152,
306.105825,
376.34667,
314.793199,
258.591197,
114.46325])
self.o_var = np.array([4320,
4320,
4320,
4320,
4320,
4320,
4320,
4320,
21478,
21478,
21478,
21478,
21478,
21478,
21478,
21478,
30500,
30500,
30500,
30500,
30500,
30500,
30500,
30500,
5012,
5012,
5012,
5012,
5012,
5012,
5012,
5012,
8811,
8811,
8811,
8811,
8811,
8811,
8811,
8811,
11349,
11349,
11349,
11349,
11349,
11349,
11349,
11349,
6021,
6021,
6021,
6021,
6021,
6021,
6021,
6021,
4477,
4477,
4477,
4477,
4477,
4477,
4477,
4477,
2275,
2275,
2275,
2275,
2275,
2275,
2275,
2275])
self.d_var = np.array([27169,
28710,
4354,
9069,
8577,
4963,
3923,
2026,
5452,
28710,
4354,
9069,
8577,
4963,
3923,
2026,
5452,
27169,
4354,
9069,
8577,
4963,
3923,
2026,
5452,
27169,
28710,
9069,
8577,
4963,
3923,
2026,
5452,
27169,
28710,
4354,
8577,
4963,
3923,
2026,
5452,
27169,
28710,
4354,
9069,
4963,
3923,
2026,
5452,
27169,
28710,
4354,
9069,
8577,
3923,
2026,
5452,
27169,
28710,
4354,
9069,
8577,
4963,
2026,
5452,
27169,
28710,
4354,
9069,
8577,
4963,
3923])
def test_BaseGravity_exp(self):
f = np.array(self.f).reshape((-1, 1))
dij = np.array(self.dij).reshape((-1, 1))
model = BaseGravity(f, dij, 'exp', constant=False)
np.testing.assert_allclose(model.params, [0.01641585], atol=.0001)
self.assertAlmostEqual(model.AIC, 957622.28429746185, delta=.0001)
np.testing.assert_allclose(model.cov_params, [[1.92096665e-10]])
self.assertAlmostEqual(model.deviance, 1087408.9707170483, delta=.0001)
self.assertAlmostEqual(model.llf, -478810.14214873099, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues, [0.])
np.testing.assert_allclose(model.std_err, [1.38598941e-05])
np.testing.assert_allclose(model.tvalues, [1184.41355888])
np.testing.assert_allclose(model.yhat,
[5.42415692e+00, 3.98401807e+00, 3.75177744e+01,
8.73217546e+00, 3.38315236e+01, 5.76055685e+01,
6.11695077e+02, 3.98970414e+03, 5.42415692e+00,
2.12078133e+00, 3.52389616e+01, 8.43222048e+00,
1.00726025e+01, 2.72049640e+01, 2.81140796e+02,
1.71101560e+03, 3.98401807e+00, 2.12078133e+00,
6.05130899e+01, 1.35184658e+01, 2.13329799e+01,
5.49951210e+01, 5.80026424e+02, 3.57519614e+03,
3.75177744e+01, 3.52389616e+01, 6.05130899e+01,
4.55832329e+00, 1.20799918e+01, 4.59486946e+00,
2.44995584e+01, 1.52168163e+02, 8.73217546e+00,
8.43222048e+00, 1.35184658e+01, 4.55832329e+00,
7.72767984e+00, 7.46219749e+00, 7.36414576e+01,
4.82050643e+02, 3.38315236e+01, 1.00726025e+01,
2.13329799e+01, 1.20799918e+01, 7.72767984e+00,
3.82690126e+00, 3.06302472e+01, 1.75492594e+02,
5.76055685e+01, 2.72049640e+01, 5.49951210e+01,
4.59486946e+00, 7.46219749e+00, 3.82690126e+00,
1.08216970e+01, 6.97553001e+01, 6.11695077e+02,
2.81140796e+02, 5.80026424e+02, 2.44995584e+01,
7.36414576e+01, 3.06302472e+01, 1.08216970e+01,
6.54702760e+00, 3.98970414e+03, 1.71101560e+03,
3.57519614e+03, 1.52168163e+02, 4.82050643e+02,
1.75492594e+02, 6.97553001e+01, 6.54702760e+00])
def test_BaseGravity_pow(self):
f = np.array(self.f).reshape((-1, 1))
dij = np.array(self.dij).reshape((-1, 1))
model = BaseGravity(f, dij, 'pow', constant=False)
np.testing.assert_allclose(model.params, [1.27223738], atol=.0001)
self.assertAlmostEqual(model.AIC, 377298.04716333596, delta=.0001)
np.testing.assert_allclose(model.cov_params, [[4.31955426e-07]])
self.assertAlmostEqual(model.deviance, 409811.34329065739, delta=.0001)
self.assertAlmostEqual(model.llf, -188648.02358166798, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues, [0.])
np.testing.assert_allclose(model.std_err, [0.00065723], atol=.000001)
np.testing.assert_allclose(model.tvalues, [1935.74740017])
np.testing.assert_allclose(model.yhat,
[363.76143383,
281.50403714,
959.7388893,
498.77506053,
925.03759732,
1106.44361848,
1984.54428735,
2749.95948574,
363.76143383,
129.70901679,
938.68096943,
488.56203387,
540.96136464,
852.80642651,
1683.84456031,
2397.81642174,
281.50403714,
129.70901679,
1123.57104159,
630.10766251,
773.76239688,
1090.36467516,
1963.64917204,
2703.75625368,
959.7388893,
938.68096943,
1123.57104159,
316.84652033,
595.67905738,
318.9700416,
818.55371165,
1454.18199247,
498.77506053,
488.56203387,
630.10766251,
316.84652033,
463.26843623,
453.2156204,
1192.42000515,
1891.29566175,
925.03759732,
540.96136464,
773.76239688,
595.67905738,
463.26843623,
271.12096396,
891.94447199,
1506.88882976,
1106.44361848,
852.80642651,
1090.36467516,
318.9700416,
453.2156204,
271.12096396,
562.42482847,
1173.32244253,
1984.54428735,
1683.84456031,
1963.64917204,
818.55371165,
1192.42000515,
891.94447199,
562.42482847,
416.01781589,
2749.95948574,
2397.81642174,
2703.75625368,
1454.18199247,
1891.29566175,
1506.88882976,
1173.32244253,
416.01781589])
def test_QuasiPoisson(self):
f = np.array(self.f).reshape((-1, 1))
dij = np.array(self.dij).reshape((-1, 1))
model = BaseGravity(f, dij, 'exp', constant=False, Quasi=True)
np.testing.assert_allclose(model.params, [0.01641585], atol=.0001)
self.assertTrue(math.isnan(model.AIC))
np.testing.assert_allclose(model.cov_params, [[0.00079749]],
atol=1.0e-8)
self.assertAlmostEqual(model.deviance, 1087408.9707170483, delta=.0001)
self.assertTrue(np.isnan(model.llf))
self.assertTrue(np.isnan(model.llnull))
np.testing.assert_allclose(model.pvalues, [0.56103881])
np.testing.assert_allclose(model.std_err, [0.02823993], atol=1.0e-8)
np.testing.assert_allclose(model.tvalues, [0.58129922])
np.testing.assert_allclose(model.yhat,
[5.42415692e+00, 3.98401807e+00, 3.75177744e+01,
8.73217546e+00, 3.38315236e+01, 5.76055685e+01,
6.11695077e+02, 3.98970414e+03, 5.42415692e+00,
2.12078133e+00, 3.52389616e+01, 8.43222048e+00,
1.00726025e+01, 2.72049640e+01, 2.81140796e+02,
1.71101560e+03, 3.98401807e+00, 2.12078133e+00,
6.05130899e+01, 1.35184658e+01, 2.13329799e+01,
5.49951210e+01, 5.80026424e+02, 3.57519614e+03,
3.75177744e+01, 3.52389616e+01, 6.05130899e+01,
4.55832329e+00, 1.20799918e+01, 4.59486946e+00,
2.44995584e+01, 1.52168163e+02, 8.73217546e+00,
8.43222048e+00, 1.35184658e+01, 4.55832329e+00,
7.72767984e+00, 7.46219749e+00, 7.36414576e+01,
4.82050643e+02, 3.38315236e+01, 1.00726025e+01,
2.13329799e+01, 1.20799918e+01, 7.72767984e+00,
3.82690126e+00, 3.06302472e+01, 1.75492594e+02,
5.76055685e+01, 2.72049640e+01, 5.49951210e+01,
4.59486946e+00, 7.46219749e+00, 3.82690126e+00,
1.08216970e+01, 6.97553001e+01, 6.11695077e+02,
2.81140796e+02, 5.80026424e+02, 2.44995584e+01,
7.36414576e+01, 3.06302472e+01, 1.08216970e+01,
6.54702760e+00, 3.98970414e+03, 1.71101560e+03,
3.57519614e+03, 1.52168163e+02, 4.82050643e+02,
1.75492594e+02, 6.97553001e+01, 6.54702760e+00])
def test_Gravity(self):
model = Gravity(self.f, self.o_var, self.d_var,
self.dij, 'exp', constant=True)
np.testing.assert_allclose(
model.params, [-7.95447436e+00, 8.63867812e-01, 8.80474585e-01, -6.20544765e-03])
self.assertAlmostEqual(model.AIC, 20395.085388908723, delta=.0001)
np.testing.assert_allclose(model.cov_params,
[[5.70906352e-03, -3.00814799e-04, -2.62650384e-04,
-2.40317578e-06],
[-3.00814799e-04, 2.67121974e-05, 3.21466745e-06,
1.16544737e-07],
[-2.62650384e-04, 3.21466745e-06, 2.28600781e-05,
9.94368232e-08],
[-2.40317578e-06, 1.16544737e-07, 9.94368232e-08,
2.68830005e-09]])
self.assertAlmostEqual(model.deviance, 19806.408696637576, delta=.0001)
self.assertAlmostEqual(model.llf, -10193.542694454361, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues, [0., 0., 0., 0.])
np.testing.assert_allclose(
model.std_err, [
7.55583451e-02, 5.16838440e-03, 4.78122141e-03, 5.18488192e-05])
np.testing.assert_allclose(
model.tvalues, [-105.27592086, 167.14465196, 184.15264854, -119.68349034])
np.testing.assert_allclose(model.yhat,
[2053.49248374,
2422.40705883,
197.17666947,
652.77645945,
372.46664089,
188.15630595,
62.62225447,
17.22633782,
1995.44179687,
12287.11927555,
806.92929317,
2643.59913196,
2353.33783354,
998.56216427,
335.77135891,
94.81498069,
3035.77484367,
15846.25211871,
890.511914,
2994.19536934,
2399.15053753,
1036.08892279,
345.71715146,
97.15537629,
273.27020389,
1150.87005074,
984.81363732,
948.91636667,
625.0285152,
556.41059801,
240.2714148,
67.32796418,
771.93257863,
3217.0998412,
2825.35215036,
809.66631035,
1204.76218438,
754.13231343,
258.03819482,
70.88540396,
575.70226041,
3743.25042014,
2959.00444172,
697.06556556,
1574.69697708,
1207.94322877,
447.3674688,
129.24387416,
272.27577768,
1487.02882957,
1196.36810195,
580.9635273,
922.83252627,
1130.90519845,
383.40648414,
105.94015788,
86.29277039,
476.14958977,
380.14055538,
238.89720288,
300.687118,
398.84078404,
365.10261002,
200.59513613,
23.66650989,
134.05168303,
106.50884151,
66.7421182,
82.35371404,
114.87900692,
100.58000293,
199.99352826])
self.assertAlmostEquals(model.D2, 0.88713874099960177)
self.assertAlmostEquals(model.adj_D2, 0.88215956780840776)
self.assertAlmostEquals(model.SSI, 0.72706171189789603)
self.assertAlmostEquals(model.pseudoR2, 0.88421303645743465)
self.assertAlmostEquals(model.adj_pseudoR2, 0.88416760104130376)
self.assertAlmostEquals(model.SRMSE, 0.62063116008447083)
def test_local_Gravity(self):
model = Gravity(self.f, self.o_var, self.d_var, self.dij, 'exp')
local = model.local(loc_index=self.o, locs=np.unique(self.o))
self.assertEqual(list(local.keys()).sort(), ['stde0',
'stde1',
'stde2',
'pvalue2',
'SRMSE',
'pvalue0',
'deviance',
'adj_pseudoR2',
'pvalue1',
'tvalue0',
'tvalue2',
'adj_D2',
'tvalue1',
'SSI',
'AIC',
'param1',
'param0',
'D2',
'pseudoR2',
'param2'].sort())
def test_Production(self):
model = Production(self.f, self.o, self.d_var,
self.dij, 'exp', constant=True)
np.testing.assert_allclose(model.params,
[-1.11700938, 1.68662317, 2.15188689, 0.60300297,
0.88380784, 1.20926104, 0.68938983, 1.15472804,
1.02479968, 0.89278717, -0.00727113], atol=.0001)
self.assertAlmostEqual(model.AIC, 15882.651018068489, delta=.0001)
np.testing.assert_allclose(model.cov_params,
[[2.58467540e-03, -3.29423877e-04, -3.27686611e-04,
-3.08689103e-04, -2.97140418e-04, -2.89494010e-04,
-3.24014540e-04, -3.00776842e-04, -2.84393168e-04,
-2.24000219e-04, -9.64855587e-07],
[-3.29423877e-04, 3.03025458e-04, 2.53009591e-04,
2.47263232e-04, 2.49058621e-04, 2.47981815e-04,
2.48504221e-04, 2.42350062e-04, 2.38815483e-04,
7.03380199e-06, 9.21233182e-08],
[-3.27686611e-04, 2.53009591e-04, 2.87125687e-04,
2.47623385e-04, 2.49193103e-04, 2.48208882e-04,
2.48776786e-04, 2.43211814e-04, 2.40006717e-04,
6.93978830e-06, 8.51141937e-08],
[-3.08689103e-04, 2.47263232e-04, 2.47623385e-04,
4.64543893e-04, 2.54358195e-04, 2.56271647e-04,
2.58881194e-04, 2.72166616e-04, 2.79448200e-04,
7.48669925e-06, -1.28025978e-07],
[-2.97140418e-04, 2.49058621e-04, 2.49193103e-04,
2.54358195e-04, 3.69553926e-04, 2.52457119e-04,
2.53895776e-04, 2.59259137e-04, 2.62162787e-04,
5.35894223e-06, -4.43827259e-08],
[-2.89494010e-04, 2.47981815e-04, 2.48208882e-04,
2.56271647e-04, 2.52457119e-04, 3.47667496e-04,
2.55361893e-04, 2.63782581e-04, 2.68393880e-04,
5.00405857e-06, -8.03379392e-08],
[-3.24014540e-04, 2.48504221e-04, 2.48776786e-04,
2.58881194e-04, 2.53895776e-04, 2.55361893e-04,
4.30583201e-04, 2.68391703e-04, 2.74115589e-04,
8.62466197e-06, -9.29399372e-08],
[-3.00776842e-04, 2.42350062e-04, 2.43211814e-04,
2.72166616e-04, 2.59259137e-04, 2.63782581e-04,
2.68391703e-04, 5.29121755e-04, 3.15535312e-04,
8.88670616e-06, -3.18385859e-07],
[-2.84393168e-04, 2.38815483e-04, 2.40006717e-04,
2.79448200e-04, 2.62162787e-04, 2.68393880e-04,
2.74115589e-04, 3.15535312e-04, 7.96308690e-04,
8.69726183e-06, -4.44906514e-07],
[-2.24000219e-04, 7.03380199e-06, 6.93978830e-06,
7.48669925e-06, 5.35894223e-06, 5.00405857e-06,
8.62466197e-06, 8.88670616e-06, 8.69726183e-06,
2.17985878e-05, 6.51339971e-08],
[-9.64855587e-07, 9.21233182e-08, 8.51141937e-08,
-1.28025978e-07, -4.43827259e-08, -8.03379392e-08,
-9.29399372e-08, -3.18385859e-07, -4.44906514e-07,
6.51339971e-08, 2.77308674e-09]])
self.assertAlmostEqual(model.deviance, 15279.974241770311, delta=.0001)
self.assertAlmostEqual(model.llf, -7930.3255090342445, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues,
[5.43122293e-107,
0.00000000e+000,
0.00000000e+000,
3.06800447e-172,
0.00000000e+000,
0.00000000e+000,
5.04395549e-242,
0.00000000e+000,
9.03955976e-289,
0.00000000e+000,
0.00000000e+000])
np.testing.assert_allclose(model.std_err,
[5.08397030e-02,
1.74076264e-02,
1.69447835e-02,
2.15532803e-02,
1.92237854e-02,
1.86458439e-02,
2.07504988e-02,
2.30026467e-02,
2.82189420e-02,
4.66889578e-03,
5.26601057e-05])
np.testing.assert_allclose(model.tvalues,
[-21.97120187, 96.88989939, 126.99406254, 27.97731759,
45.97470357, 64.85418671, 33.22280753, 50.19979035,
36.31602055, 191.22019711, -138.07670549])
np.testing.assert_allclose(model.yhat,
[1.40705950e+03, 1.69457663e+03, 1.16508879e+02,
4.27850723e+02, 2.23425179e+02, 1.08301078e+02,
3.08300817e+01, 7.44793331e+00, 1.81162644e+03,
1.21014912e+04, 6.46999802e+02, 2.34696906e+03,
2.06388796e+03, 8.15528209e+02, 2.34966095e+02,
5.85312512e+01, 3.30741049e+03, 1.83446566e+04,
8.10873546e+02, 3.03231168e+03, 2.35717102e+03,
9.50837295e+02, 2.71489717e+02, 6.72496632e+01,
2.60277189e+02, 1.12260001e+03, 9.28118288e+02,
1.04284804e+03, 6.44343295e+02, 6.06652130e+02,
2.34315477e+02, 5.78455649e+01, 6.57379261e+02,
2.80075361e+03, 2.38710037e+03, 7.17245241e+02,
1.03993511e+03, 6.48056270e+02, 1.90566474e+02,
4.59636590e+01, 4.99603238e+02, 3.58445439e+03,
2.70058180e+03, 6.44960859e+02, 1.51347637e+03,
1.20618713e+03, 3.89165529e+02, 9.95706858e+01,
2.34675109e+02, 1.37251483e+03, 1.05563448e+03,
5.88432822e+02, 9.13951678e+02, 1.16884200e+03,
3.66858927e+02, 8.90901579e+01, 1.31244011e+02,
7.76879800e+02, 5.92149430e+02, 4.46507449e+02,
5.27992298e+02, 7.40876898e+02, 7.20725128e+02,
4.04624989e+02, 5.02255240e+01, 3.06563409e+02,
2.32354948e+02, 1.74615053e+02, 2.01734215e+02,
3.00280455e+02, 2.77258060e+02, 6.40968342e+02])
self.assertAlmostEquals(model.D2, 0.912931356874)
self.assertAlmostEquals(model.adj_D2, 0.89865780882)
self.assertAlmostEquals(model.SSI, 0.740619203383)
self.assertAlmostEquals(model.pseudoR2, 0.909920590111)
self.assertAlmostEquals(model.adj_pseudoR2, 0.909795642717)
self.assertAlmostEquals(model.SRMSE, 0.46622685091043831)
def test_local_Production(self):
model = Production(self.f, self.o, self.d_var, self.dij, 'exp')
local = model.local(locs=np.unique(self.o))
self.assertEqual(list(local.keys()).sort(), ['stde0',
'stde1',
'stde2',
'pvalue2',
'SRMSE',
'pvalue0',
'deviance',
'adj_pseudoR2',
'pvalue1',
'tvalue0',
'tvalue2',
'adj_D2',
'tvalue1',
'SSI',
'AIC',
'param1',
'param0',
'D2',
'pseudoR2',
'param2'].sort())
def test_Attraction(self):
model = Production(self.f, self.d, self.o_var,
self.dij, 'exp', constant=True)
np.testing.assert_allclose(model.params,
[-0.88439723,
1.62180605,
1.92772078,
0.12462001,
0.62378812,
0.69646073,
0.20909411,
0.6856777,
0.48539625,
0.89235874,
-0.00693755],
atol=.001)
self.assertAlmostEqual(model.AIC, 16275.899321893821, delta=.0001)
np.testing.assert_allclose(model.cov_params,
[[3.01436996e-03, -2.61742292e-04, -3.18191276e-04,
-2.61736294e-04, -2.53401872e-04, -2.53545012e-04,
-2.81169571e-04, -2.43409544e-04, -2.12802803e-04,
-2.71488782e-04, -1.17108280e-06],
[-2.61742292e-04, 2.36371652e-04, 1.97978106e-04,
1.92565769e-04, 1.94367290e-04, 1.93561823e-04,
1.93929484e-04, 1.87837851e-04, 1.84018218e-04,
5.78923328e-06, 8.62912701e-08],
[-3.18191276e-04, 1.97978106e-04, 2.37130911e-04,
1.95813824e-04, 1.96321084e-04, 1.95974290e-04,
1.97059881e-04, 1.93136341e-04, 1.90444087e-04,
1.16187824e-05, 7.68842070e-08],
[-2.61736294e-04, 1.92565769e-04, 1.95813824e-04,
4.45977428e-04, 1.98639315e-04, 2.00358776e-04,
2.01640218e-04, 2.11745720e-04, 2.17565021e-04,
7.97756072e-06, -9.56753770e-08],
[-2.53401872e-04, 1.94367290e-04, 1.96321084e-04,
1.98639315e-04, 3.12561535e-04, 1.97440629e-04,
1.98271627e-04, 2.01952018e-04, 2.03971780e-04,
6.29181262e-06, -2.57004528e-08],
[-2.53545012e-04, 1.93561823e-04, 1.95974290e-04,
2.00358776e-04, 1.97440629e-04, 3.20607776e-04,
1.99534150e-04, 2.05855338e-04, 2.09446226e-04,
6.66273501e-06, -5.53303117e-08],
[-2.81169571e-04, 1.93929484e-04, 1.97059881e-04,
2.01640218e-04, 1.98271627e-04, 1.99534150e-04,
4.04837719e-04, 2.07747990e-04, 2.11608257e-04,
9.45143925e-06, -5.46040064e-08],
[-2.43409544e-04, 1.87837851e-04, 1.93136341e-04,
2.11745720e-04, 2.01952018e-04, 2.05855338e-04,
2.07747990e-04, 4.85148555e-04, 2.46472592e-04,
8.10725781e-06, -2.60737252e-07],
[-2.12802803e-04, 1.84018218e-04, 1.90444087e-04,
2.17565021e-04, 2.03971780e-04, 2.09446226e-04,
2.11608257e-04, 2.46472592e-04, 7.90692415e-04,
6.52409863e-06, -3.86785704e-07],
[-2.71488782e-04, 5.78923328e-06, 1.16187824e-05,
7.97756072e-06, 6.29181262e-06, 6.66273501e-06,
9.45143925e-06, 8.10725781e-06, 6.52409863e-06,
2.64461183e-05, 8.70031728e-08],
[-1.17108280e-06, 8.62912701e-08, 7.68842070e-08,
-9.56753770e-08, -2.57004528e-08, -5.53303117e-08,
-5.46040064e-08, -2.60737252e-07, -3.86785704e-07,
8.70031728e-08, 2.62593686e-09]])
self.assertAlmostEqual(model.deviance, 15673.222613627502, delta=.0001)
self.assertAlmostEqual(model.llf, -8126.9496609469106, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues,
[2.23154436e-058,
0.00000000e+000,
0.00000000e+000,
3.61133996e-009,
1.05877746e-272,
0.00000000e+000,
2.69492058e-025,
9.38664385e-213,
9.08121216e-067,
0.00000000e+000,
0.00000000e+000])
np.testing.assert_allclose(model.std_err,
[5.49032782e-02,
1.53743830e-02,
1.53990555e-02,
2.11181777e-02,
1.76794099e-02,
1.79055236e-02,
2.01205795e-02,
2.20260880e-02,
2.81192535e-02,
5.14257895e-03,
5.12438958e-05])
np.testing.assert_allclose(model.tvalues,
[-16.10827734, 105.487554, 125.18435157, 5.90107805,
35.28331128, 38.89641795, 10.39205191, 31.13025333,
17.2620603, 173.52358548, -135.38293645])
np.testing.assert_allclose(model.yhat,
[1.79502279e+03, 2.77690999e+03, 1.77376340e+02,
5.41058308e+02, 3.28265191e+02, 1.61020145e+02,
9.55492240e+01, 3.54052486e+01, 1.48342439e+03,
1.51642463e+04, 7.61962380e+02, 2.29718733e+03,
2.29156465e+03, 9.24935115e+02, 5.55191561e+02,
2.11833031e+02, 2.31106289e+03, 1.52712766e+04,
8.29095427e+02, 2.57322937e+03, 2.28197035e+03,
9.39377653e+02, 5.59026730e+02, 2.12153271e+02,
1.78795020e+02, 9.29389609e+02, 1.00418629e+03,
8.13086106e+02, 5.79191340e+02, 5.35268359e+02,
4.24969889e+02, 1.60758895e+02, 5.47719688e+02,
2.81394269e+03, 3.12998907e+03, 8.16565623e+02,
1.15732912e+03, 7.21460431e+02, 4.41575377e+02,
1.63374443e+02, 3.87326254e+02, 3.27181524e+03,
3.23528198e+03, 6.77976176e+02, 1.34894643e+03,
1.19916138e+03, 8.01876754e+02, 3.13863001e+02,
1.75685709e+02, 1.22115852e+03, 1.23153422e+03,
5.79386090e+02, 7.77596785e+02, 1.10887286e+03,
7.06986190e+02, 2.63279368e+02, 4.96907636e+01,
3.49378290e+02, 3.49326167e+02, 2.19253703e+02,
2.26850151e+02, 3.53430501e+02, 3.36979293e+02,
5.49332748e+02, 1.22952888e+01, 8.90162551e+01,
8.85260032e+01, 5.53842615e+01, 5.60455225e+01,
9.23759900e+01, 8.37976212e+01, 3.66824277e+02])
self.assertAlmostEquals(model.D2, .910690541438)
self.assertAlmostEquals(model.adj_D2, .896049646592)
self.assertAlmostEquals(model.SSI, .750634498293)
self.assertAlmostEquals(model.pseudoR2, .90768716507)
self.assertAlmostEquals(model.adj_pseudoR2, .907562217676)
self.assertAlmostEquals(model.SRMSE, 0.59478477816884223)
def test_local_Attraction(self):
model = Attraction(self.f, self.d, self.o_var, self.dij, 'exp')
local = model.local(locs=np.unique(self.d))
self.assertEqual(list(local.keys()).sort(), ['stde0',
'stde1',
'stde2',
'pvalue2',
'SRMSE',
'pvalue0',
'deviance',
'adj_pseudoR2',
'pvalue1',
'tvalue0',
'tvalue2',
'adj_D2',
'tvalue1',
'SSI',
'AIC',
'param1',
'param0',
'D2',
'pseudoR2',
'param2'].sort())
def test_Doubly(self):
model = Doubly(self.f, self.o, self.d,
self.dij, 'exp', constant=True)
np.testing.assert_allclose(model.params,
[6.20471518, 1.5449095, 2.4414292, 0.69924374,
0.94869185, 1.28967637, 0.74270015, 1.19468573,
0.98874193, 1.49709841, 2.18492741, 0.18784818,
0.66434515, 0.74264938, 0.21334535, 0.66765781,
0.39986094, -0.00791533], atol=1e-05)
np.testing.assert_allclose(model.cov_params,
[[5.01690795e-04, -2.67085869e-04, -2.85861407e-04,
-2.47145002e-04, -2.56344375e-04, -2.47959694e-04,
-2.51858026e-04, -2.34909872e-04, -2.31532205e-04,
-2.12557582e-04, -2.30973877e-04, -1.98360054e-04,
-2.04848380e-04, -1.97315240e-04, -2.04713619e-04,
-1.92147501e-04, -1.90223393e-04, -2.88654296e-07],
[-2.67085869e-04, 3.09818975e-04, 2.38201819e-04,
2.40816440e-04, 2.44508571e-04, 2.41573651e-04,
2.43213918e-04, 2.36475186e-04, 2.35264739e-04,
2.70747190e-05, -4.06434204e-06, 3.54631504e-06,
6.73127801e-06, 3.48881444e-07, 4.84106698e-06,
-1.08816401e-07, -7.82227026e-07, 1.17369687e-07],
[-2.85861407e-04, 2.38201819e-04, 3.15835404e-04,
2.60714112e-04, 2.57317985e-04, 2.58287551e-04,
2.61026738e-04, 2.62571867e-04, 2.63204233e-04,
-1.46703716e-06, 6.08300790e-05, 2.43162304e-05,
2.12302255e-05, 1.90685319e-05, 2.32782320e-05,
2.47736982e-05, 2.53307733e-05, -1.06931968e-08],
[-2.47145002e-04, 2.40816440e-04, 2.60714112e-04,
4.73670174e-04, 2.57806206e-04, 2.62588091e-04,
2.64062738e-04, 2.76642894e-04, 2.84509772e-04,
1.49294364e-06, 2.51567145e-05, 1.84043845e-05,
-2.99190057e-06, 1.28512200e-06, -1.26738274e-05,
-7.38879982e-06, 8.72549111e-08, -1.47073267e-07],
[-2.56344375e-04, 2.44508571e-04, 2.57317985e-04,
2.57806206e-04, 3.74656775e-04, 2.56143467e-04,
2.58077967e-04, 2.64765554e-04, 2.67330191e-04,
5.69526013e-06, 2.17034297e-05, -1.88949992e-06,
2.20220200e-05, 4.13804378e-06, 1.27479768e-06,
6.91280894e-06, 9.28146927e-06, -7.56672892e-08],
[-2.47959694e-04, 2.41573651e-04, 2.58287551e-04,
2.62588091e-04, 2.56143467e-04, 3.54522394e-04,
2.58778390e-04, 2.69502689e-04, 2.75078094e-04,
-1.05126847e-06, 1.97946415e-05, 1.65176617e-06,
3.76490799e-06, 1.56828518e-05, -1.12614285e-05,
-3.07326187e-06, 1.83335365e-06, -1.15030115e-07],
[-2.51858026e-04, 2.43213918e-04, 2.61026738e-04,
2.64062738e-04, 2.58077967e-04, 2.58778390e-04,
4.39566465e-04, 2.72270083e-04, 2.82540973e-04,
1.91670954e-06, 2.37810191e-05, -1.14352620e-05,
1.02053574e-06, -1.13429776e-05, 9.96630546e-06,
-1.99290067e-05, -9.39196494e-06, -9.20519180e-08],
[-2.34909872e-04, 2.36475186e-04, 2.62571867e-04,
2.76642894e-04, 2.64765554e-04, 2.69502689e-04,
2.72270083e-04, 5.48320166e-04, 2.87269673e-04,
-4.65887794e-06, 2.58726435e-05, -8.69859569e-06,
5.47577328e-06, -4.51656124e-06, -2.45399627e-05,
1.73990976e-05, -1.18725973e-04, -2.41709452e-07],
[-2.31532205e-04, 2.35264739e-04, 2.63204233e-04,
2.84509772e-04, 2.67330191e-04, 2.75078094e-04,
2.82540973e-04, 2.87269673e-04, 8.14375093e-04,
-5.91338987e-06, 2.65660547e-05, -1.32176066e-06,
7.75773294e-06, 5.22185282e-07, -1.46830696e-05,
-1.23317429e-04, 1.69481305e-05, -2.70788264e-07],
[-2.12557582e-04, 2.70747190e-05, -1.46703716e-06,
1.49294364e-06, 5.69526013e-06, -1.05126847e-06,
1.91670954e-06, -4.65887794e-06, -5.91338987e-06,
2.42563703e-04, 1.84733809e-04, 1.89259451e-04,
1.91681710e-04, 1.89152965e-04, 1.92231256e-04,
1.87441436e-04, 1.86834624e-04, 1.13843139e-07],
[-2.30973877e-04, -4.06434204e-06, 6.08300790e-05,
2.51567145e-05, 2.17034297e-05, 1.97946415e-05,
2.37810191e-05, 2.58726435e-05, 2.65660547e-05,
1.84733809e-04, 2.63915732e-04, 2.06737361e-04,
2.02941436e-04, 2.03812109e-04, 2.06876793e-04,
2.08793972e-04, 2.09473765e-04, -1.94248549e-08],
[-1.98360054e-04, 3.54631504e-06, 2.43162304e-05,
1.84043845e-05, -1.88949992e-06, 1.65176617e-06,
-1.14352620e-05, -8.69859569e-06, -1.32176066e-06,
1.89259451e-04, 2.06737361e-04, 4.53792323e-04,
2.01217128e-04, 2.05155865e-04, 2.05806138e-04,
2.14332194e-04, 2.21550755e-04, -9.10506514e-08],
[-2.04848380e-04, 6.73127801e-06, 2.12302255e-05,
-2.99190057e-06, 2.20220200e-05, 3.76490799e-06,
1.02053574e-06, 5.47577328e-06, 7.75773294e-06,
1.91681710e-04, 2.02941436e-04, 2.01217128e-04,
3.17290867e-04, 2.00121482e-04, 2.02120689e-04,
2.06522637e-04, 2.08554008e-04, -4.37219119e-08],
[-1.97315240e-04, 3.48881444e-07, 1.90685319e-05,
1.28512200e-06, 4.13804378e-06, 1.56828518e-05,
-1.13429776e-05, -4.51656124e-06, 5.22185282e-07,
1.89152965e-04, 2.03812109e-04, 2.05155865e-04,
2.00121482e-04, 3.26458468e-04, 2.01391450e-04,
2.09628557e-04, 2.14889547e-04, -7.68167253e-08],
[-2.04713619e-04, 4.84106698e-06, 2.32782320e-05,
-1.26738274e-05, 1.27479768e-06, -1.12614285e-05,
9.96630546e-06, -2.45399627e-05, -1.46830696e-05,
1.92231256e-04, 2.06876793e-04, 2.05806138e-04,
2.02120689e-04, 2.01391450e-04, 4.14676504e-04,
2.11496728e-04, 2.21430978e-04, -2.25147281e-08],
[-1.92147501e-04, -1.08816401e-07, 2.47736982e-05,
-7.38879982e-06, 6.91280894e-06, -3.07326187e-06,
-1.99290067e-05, 1.73990976e-05, -1.23317429e-04,
1.87441436e-04, 2.08793972e-04, 2.14332194e-04,
2.06522637e-04, 2.09628557e-04, 2.11496728e-04,
5.06851801e-04, 2.14569472e-04, -1.33183180e-07],
[-1.90223393e-04, -7.82227026e-07, 2.53307733e-05,
8.72549111e-08, 9.28146927e-06, 1.83335365e-06,
-9.39196494e-06, -1.18725973e-04, 1.69481305e-05,
1.86834624e-04, 2.09473765e-04, 2.21550755e-04,
2.08554008e-04, 2.14889547e-04, 2.21430978e-04,
2.14569472e-04, 8.07696796e-04, -1.49419116e-07],
[-2.88654296e-07, 1.17369687e-07, -1.06931968e-08,
-1.47073267e-07, -7.56672892e-08, -1.15030115e-07,
-9.20519180e-08, -2.41709452e-07, -2.70788264e-07,
1.13843139e-07, -1.94248549e-08, -9.10506514e-08,
-4.37219119e-08, -7.68167253e-08, -2.25147281e-08,
-1.33183180e-07, -1.49419116e-07, 2.56252122e-09]])
self.assertAlmostEqual(model.deviance, 9360.482092561484, delta=.0001)
self.assertAlmostEqual(model.llf, -4970.5795707251054, delta=.0001)
self.assertAlmostEqual(model.llnull, -88037.0499629, delta=.0001)
np.testing.assert_allclose(model.pvalues,
[0.00000000e+000,
0.00000000e+000,
0.00000000e+000,
1.76331083e-226,
0.00000000e+000,
0.00000000e+000,
7.21755436e-275,
0.00000000e+000,
4.88760299e-263,
0.00000000e+000,
0.00000000e+000,
1.16346714e-018,
1.88877600e-304,
0.00000000e+000,
1.10421926e-025,
2.83322217e-193,
5.83172788e-045,
0.00000000e+000])
np.testing.assert_allclose(model.std_err,
[2.23984552e-02,
1.76016753e-02,
1.77717586e-02,
2.17639650e-02,
1.93560527e-02,
1.88287651e-02,
2.09658404e-02,
2.34162372e-02,
2.85372580e-02,
1.55744567e-02,
1.62454834e-02,
2.13024018e-02,
1.78126603e-02,
1.80681617e-02,
2.03636073e-02,
2.25133694e-02,
2.84200070e-02,
5.06213514e-05])
np.testing.assert_allclose(model.tvalues,
[277.01531761, 87.77059404, 137.37690538, 32.12850872,
49.0126713, 68.49500564, 35.42429674, 51.01954322,
34.64740487, 96.12524108, 134.49445277, 8.81816885,
37.29623407, 41.10265285, 10.47679558, 29.65605897,
14.06969877, -156.36352821])
np.testing.assert_allclose(model.yhat,
[9.78988280e+02, 2.26003279e+03, 1.04038742e+02,
3.38382580e+02, 1.90458075e+02, 8.67908467e+01,
4.37554720e+01, 1.35532201e+01, 1.02693176e+03,
1.43579537e+04, 5.02646536e+02, 1.61314478e+03,
1.60124044e+03, 5.84144805e+02, 2.98377549e+02,
9.55604104e+01, 2.92086883e+03, 1.76899160e+04,
9.49267467e+02, 3.14910952e+03, 2.73315395e+03,
1.01972797e+03, 5.15779061e+02, 1.64177257e+02,
1.73496758e+02, 7.99088484e+02, 1.22486311e+03,
9.31563443e+02, 6.29698756e+02, 5.91117070e+02,
4.15424488e+02, 1.31747905e+02, 4.49674437e+02,
2.04361676e+03, 3.23802841e+03, 7.42345992e+02,
1.00234547e+03, 6.00432512e+02, 3.13590596e+02,
9.69658353e+01, 3.29132064e+02, 2.63792996e+03,
3.65458094e+03, 6.52540343e+02, 1.30346098e+03,
1.16517842e+03, 6.73203489e+02, 2.21973821e+02,
1.47356669e+02, 9.45479887e+02, 1.33962391e+03,
6.01828982e+02, 7.67131590e+02, 1.14476805e+03,
6.43385796e+02, 2.00425139e+02, 7.41169755e+01,
4.81822820e+02, 6.76007805e+02, 4.21969575e+02,
3.99722086e+02, 6.59873779e+02, 6.41890452e+02,
9.85596546e+02, 2.44225078e+01, 1.64157859e+02,
2.28909306e+02, 1.42362371e+02, 1.31485029e+02,
2.31461478e+02, 2.12717926e+02, 1.04848355e+03])
        self.assertAlmostEqual(model.D2, .946661920897)
        self.assertAlmostEqual(model.adj_D2, .929870303401)
        self.assertAlmostEqual(model.SSI, .811852110904)
        self.assertAlmostEqual(model.pseudoR2, .943539912198)
        self.assertAlmostEqual(model.adj_pseudoR2, .943335452826)
        self.assertAlmostEqual(model.SRMSE, 0.37925654532618808)
if __name__ == '__main__':
unittest.main()
| TaylorOshan/spint | spint/tests/test_gravity.py | Python | bsd-3-clause | 67,670 | 0.004699 |
__version__ = "0.2.7"
from pypushover.Constants import PRIORITIES, SOUNDS, OS
from pypushover._base import BaseManager, send, base_url, PushoverError
from pypushover import client, groups, license, message, verification
__all__ = ['PRIORITIES', 'SOUNDS', 'OS', 'client', 'groups', 'license', 'message', 'verification']
| KronosKoderS/py_pushover | pypushover/__init__.py | Python | mit | 324 | 0.006173 |
# coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Volcano Point on
Population Impact Function.
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
import numpy
from safe.impact_functions.bases.classified_vh_continuous_re import \
ClassifiedVHContinuousRE
from safe.impact_functions.volcanic.volcano_point_population\
.metadata_definitions import VolcanoPointPopulationFunctionMetadata
from safe.impact_functions.core import population_rounding, has_no_data
from safe.engine.interpolation import assign_hazard_values_to_exposure_data
from safe.storage.raster import Raster
from safe.utilities.i18n import tr
from safe.common.utilities import (
format_int,
humanize_class,
create_classes,
create_label
)
from safe.gui.tools.minimum_needs.needs_profile import add_needs_parameters, \
filter_needs_parameters, get_needs_provenance_value
from safe.impact_reports.population_exposure_report_mixin import \
PopulationExposureReportMixin
from safe.definitions import no_data_warning
class VolcanoPointPopulationFunction(
ClassifiedVHContinuousRE,
PopulationExposureReportMixin):
"""Impact Function for Volcano Point on Population."""
_metadata = VolcanoPointPopulationFunctionMetadata()
def __init__(self):
super(VolcanoPointPopulationFunction, self).__init__()
PopulationExposureReportMixin.__init__(self)
# AG: Use the proper minimum needs, update the parameters
self.parameters = add_needs_parameters(self.parameters)
# TODO: alternatively to specifying the question here we should
# TODO: consider changing the 'population' metadata concept to 'people'
self.question = (
'In the event of a volcano point how many people might be '
'impacted?')
self.no_data_warning = False
# A set of volcano names
self.volcano_names = set()
self.hazard_zone_attribute = 'radius'
def notes(self):
"""Return the notes section of the report.
:return: The notes that should be attached to this impact report.
:rtype: list
"""
if get_needs_provenance_value(self.parameters) is None:
needs_provenance = ''
else:
needs_provenance = tr(get_needs_provenance_value(self.parameters))
if self.volcano_names:
sorted_volcano_names = ', '.join(sorted(self.volcano_names))
else:
sorted_volcano_names = tr('Not specified in data')
fields = [
            tr('Map shows people affected in each of the volcano buffered '
               'zones.'),
tr('Total population in the analysis area: %s') %
format_int(population_rounding(self.total_population)),
tr('<sup>1</sup>People need evacuation if they are within the '
'volcanic hazard zones.'),
tr('Volcanoes considered: %s.') % sorted_volcano_names,
]
if needs_provenance:
fields.append(needs_provenance)
if self.no_data_warning:
fields = fields + no_data_warning
# include any generic exposure specific notes from definitions.py
fields = fields + self.exposure_notes()
# include any generic hazard specific notes from definitions.py
fields = fields + self.hazard_notes()
return fields
def run(self):
"""Run volcano point population evacuation Impact Function.
Counts number of people exposed to volcano event.
:returns: Map of population exposed to the volcano hazard zone.
The returned dict will include a table with number of people
evacuated and supplies required.
:rtype: dict
:raises:
* Exception - When hazard layer is not vector layer
* RadiiException - When radii are not valid (they need to be
monotonically increasing)
"""
# Parameters
radii = self.parameters['distances'].value
# Get parameters from layer's keywords
volcano_name_attribute = self.hazard.keyword('volcano_name_field')
data_table = self.hazard.layer.get_data()
# Get names of volcanoes considered
if volcano_name_attribute in self.hazard.layer.get_attribute_names():
# Run through all polygons and get unique names
for row in data_table:
self.volcano_names.add(row[volcano_name_attribute])
# Run interpolation function for polygon2raster
interpolated_layer, covered_exposure_layer = \
assign_hazard_values_to_exposure_data(
self.hazard.layer,
self.exposure.layer,
attribute_name=self.target_field
)
# Initialise affected population per categories
impact_category_ordering = []
for radius in radii:
category = tr('Radius %s km ' % format_int(radius))
self.affected_population[category] = 0
impact_category_ordering.append(category)
self.impact_category_ordering = impact_category_ordering
if has_no_data(self.exposure.layer.get_data(nan=True)):
self.no_data_warning = True
# Count affected population per polygon and total
for row in interpolated_layer.get_data():
# Get population at this location
population = row[self.target_field]
if not numpy.isnan(population):
population = float(population)
# Update population count for this category
category = tr('Radius %s km ' % format_int(
row[self.hazard_zone_attribute]))
self.affected_population[category] += population
# Count totals
self.total_population = population_rounding(
int(numpy.nansum(self.exposure.layer.get_data())))
self.minimum_needs = [
parameter.serialize() for parameter in
filter_needs_parameters(self.parameters['minimum needs'])
]
# Create style
colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
'#FFCC00', '#FF6600', '#FF0000', '#7A0000']
classes = create_classes(
covered_exposure_layer.get_data().flat[:], len(colours))
interval_classes = humanize_class(classes)
# Define style info for output polygons showing population counts
style_classes = []
for i in xrange(len(colours)):
style_class = dict()
style_class['label'] = create_label(interval_classes[i])
if i == 1:
label = create_label(
interval_classes[i],
tr('Low Population [%i people/cell]' % classes[i]))
elif i == 4:
label = create_label(
interval_classes[i],
tr('Medium Population [%i people/cell]' % classes[i]))
elif i == 7:
label = create_label(
interval_classes[i],
tr('High Population [%i people/cell]' % classes[i]))
else:
label = create_label(interval_classes[i])
style_class['label'] = label
style_class['quantity'] = classes[i]
style_class['colour'] = colours[i]
style_class['transparency'] = 0
style_classes.append(style_class)
# Override style info with new classes and name
style_info = dict(
target_field=None,
style_classes=style_classes,
style_type='rasterStyle')
impact_data = self.generate_data()
# Create vector layer and return
extra_keywords = {
'target_field': self.target_field,
'map_title': self.map_title(),
'legend_notes': self.metadata().key('legend_notes'),
'legend_units': self.metadata().key('legend_units'),
'legend_title': self.metadata().key('legend_title'),
'total_needs': self.total_needs
}
impact_layer_keywords = self.generate_impact_keywords(extra_keywords)
impact_layer = Raster(
data=covered_exposure_layer.get_data(),
projection=covered_exposure_layer.get_projection(),
geotransform=covered_exposure_layer.get_geotransform(),
name=self.metadata().key('layer_name'),
keywords=impact_layer_keywords,
style_info=style_info)
impact_layer.impact_data = impact_data
self._impact = impact_layer
return impact_layer
| Samweli/inasafe | safe/impact_functions/volcanic/volcano_point_population/impact_function.py | Python | gpl-3.0 | 8,892 | 0 |
__author__ = 'shinyorke_mbp'
| Shinichi-Nakagawa/xp2015_baseball_tools | service/__init__.py | Python | mit | 29 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# test_cancel.py - unit test for query cancellation
#
# Copyright (C) 2010-2011 Jan Urbański <wulczer@wulczer.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import time
import threading
import psycopg2
import psycopg2.extensions
from psycopg2 import extras
from testconfig import dsn
from testutils import unittest, ConnectingTestCase, skip_before_postgres, slow
class CancelTests(ConnectingTestCase):
def setUp(self):
ConnectingTestCase.setUp(self)
cur = self.conn.cursor()
cur.execute('''
CREATE TEMPORARY TABLE table1 (
id int PRIMARY KEY
)''')
self.conn.commit()
def test_empty_cancel(self):
self.conn.cancel()
@slow
@skip_before_postgres(8, 2)
def test_cancel(self):
errors = []
def neverending(conn):
cur = conn.cursor()
try:
self.assertRaises(psycopg2.extensions.QueryCanceledError,
cur.execute, "select pg_sleep(60)")
# make sure the connection still works
conn.rollback()
cur.execute("select 1")
self.assertEqual(cur.fetchall(), [(1, )])
except Exception, e:
errors.append(e)
raise
def canceller(conn):
cur = conn.cursor()
try:
conn.cancel()
except Exception, e:
errors.append(e)
raise
del cur
thread1 = threading.Thread(target=neverending, args=(self.conn, ))
# wait a bit to make sure that the other thread is already in
# pg_sleep -- ugly and racy, but the chances are ridiculously low
thread2 = threading.Timer(0.3, canceller, args=(self.conn, ))
thread1.start()
thread2.start()
thread1.join()
thread2.join()
self.assertEqual(errors, [])
@slow
@skip_before_postgres(8, 2)
def test_async_cancel(self):
async_conn = psycopg2.connect(dsn, async_=True)
self.assertRaises(psycopg2.OperationalError, async_conn.cancel)
extras.wait_select(async_conn)
cur = async_conn.cursor()
cur.execute("select pg_sleep(10)")
time.sleep(1)
self.assertTrue(async_conn.isexecuting())
async_conn.cancel()
self.assertRaises(psycopg2.extensions.QueryCanceledError,
extras.wait_select, async_conn)
cur.execute("select 1")
extras.wait_select(async_conn)
self.assertEqual(cur.fetchall(), [(1, )])
def test_async_connection_cancel(self):
async_conn = psycopg2.connect(dsn, async_=True)
async_conn.close()
self.assertTrue(async_conn.closed)
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main()
| nwokeo/supysonic | venv/lib/python2.7/site-packages/psycopg2/tests/test_cancel.py | Python | agpl-3.0 | 3,785 | 0.000264 |
# type: ignore
#
# Inthe.AM documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 13 15:53:25 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.todo",
"sphinx.ext.imgmath",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Inthe.AM"
copyright = "2015, Adam Coddington"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "IntheAMdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
("index", "IntheAM.tex", "Inthe.AM Documentation", "Adam Coddington", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "intheam", "Inthe.AM Documentation", ["Adam Coddington"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"IntheAM",
"Inthe.AM Documentation",
"Adam Coddington",
"IntheAM",
"One line description of project.",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
| coddingtonbear/inthe.am | docs/conf.py | Python | agpl-3.0 | 8,300 | 0.000602 |
x = lambda x: (lambda x: (lambda x: x + 2)(x+2))(x+2)
print x(2)
| buchuki/pyjaco | tests/basic/lambda2.py | Python | mit | 67 | 0.014925 |
import unittest
from mistletoe.span_token import tokenize_inner
from mistletoe.latex_token import Math
from mistletoe.latex_renderer import LaTeXRenderer
class TestLaTeXToken(unittest.TestCase):
def setUp(self):
self.renderer = LaTeXRenderer()
self.renderer.__enter__()
self.addCleanup(self.renderer.__exit__, None, None, None)
def test_span(self):
token = next(iter(tokenize_inner('$ 1 + 2 = 3 $')))
self.assertIsInstance(token, Math)
self.assertEqual(token.content, '$ 1 + 2 = 3 $')
| miyuchina/mistletoe | test/test_latex_token.py | Python | mit | 544 | 0 |
import collections.abc
import copy
import datetime
import decimal
import math
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.duration import duration_microseconds, duration_string
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
__all__ = [
'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField',
'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField',
'DateField', 'DateTimeField', 'DecimalField', 'DurationField',
'EmailField', 'Empty', 'Field', 'FilePathField', 'FloatField',
'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED',
'NullBooleanField', 'PositiveBigIntegerField', 'PositiveIntegerField',
'PositiveSmallIntegerField', 'SlugField', 'SmallAutoField',
'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField',
]
class Empty:
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
def return_None():
return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s '
'already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
descriptor_class = DeferredAttribute
# Generic field type description, usually overridden by subclasses
def _description(self):
return _('Field of type: %(field_type)s') % {
'field_type': self.__class__.__name__
}
description = property(_description)
def __init__(self, verbose_name=None, name=None, primary_key=False,
max_length=None, unique=False, blank=False, null=False,
db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
serialize=True, unique_for_date=None, unique_for_month=None,
unique_for_year=None, choices=None, help_text='', db_column=None,
db_tablespace=None, auto_created=False, validators=(),
error_messages=None):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, collections.abc.Iterator):
choices = list(choices)
self.choices = choices
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self._db_tablespace = db_tablespace
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = list(validators) # Store for deconstruction later
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self._error_messages = error_messages # Store for deconstruction later
self.error_messages = messages
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, 'model'):
return super().__str__()
model = self.model
return '%s.%s' % (model._meta.label, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, 'name', None)
if name is not None:
return '<%s: %s>' % (path, name)
return '<%s>' % path
def check(self, **kwargs):
return [
*self._check_field_name(),
*self._check_choices(),
*self._check_db_index(),
*self._check_null_allowed_for_primary_keys(),
*self._check_backend_specific_checks(**kwargs),
*self._check_validators(),
*self._check_deprecation_details(),
]
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith('_'):
return [
checks.Error(
'Field names must not end with an underscore.',
obj=self,
id='fields.E001',
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % LOOKUP_SEP,
obj=self,
id='fields.E002',
)
]
elif self.name == 'pk':
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id='fields.E003',
)
]
else:
return []
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (str, Promise)) or not is_iterable(value)
def _check_choices(self):
if not self.choices:
return []
if not is_iterable(self.choices) or isinstance(self.choices, str):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id='fields.E004',
)
]
choice_max_length = 0
# Expect [group_name, [value, display]]
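        # Illustrative shapes this loop accepts (example values assumed, not
        # from the original source):
        #   choices = [('a', 'A'), ('b', 'B')]                  # flat pairs
        #   choices = [('Group', [('a', 'A'), ('b', 'B')])]     # named groups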
for choices_group in self.choices:
try:
group_name, group_choices = choices_group
except (TypeError, ValueError):
# Containing non-pairs
break
try:
if not all(
self._choices_is_value(value) and self._choices_is_value(human_name)
for value, human_name in group_choices
):
break
if self.max_length is not None and group_choices:
choice_max_length = max([
choice_max_length,
*(len(value) for value, _ in group_choices if isinstance(value, str)),
])
except (TypeError, ValueError):
# No groups, choices in the form [value, display]
value, human_name = group_name, group_choices
if not self._choices_is_value(value) or not self._choices_is_value(human_name):
break
if self.max_length is not None and isinstance(value, str):
choice_max_length = max(choice_max_length, len(value))
# Special case: choices=['ab']
if isinstance(choices_group, str):
break
else:
if self.max_length is not None and choice_max_length > self.max_length:
return [
checks.Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=self,
id='fields.E009',
),
]
return []
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id='fields.E005',
)
]
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id='fields.E006',
)
]
else:
return []
def _check_null_allowed_for_primary_keys(self):
if (self.primary_key and self.null and
not connection.features.interprets_empty_strings_as_nulls):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
'Primary keys must not have null=True.',
hint=('Set null=False on the field, or '
'remove primary_key=True argument.'),
obj=self,
id='fields.E007',
)
]
else:
return []
def _check_backend_specific_checks(self, databases=None, **kwargs):
if databases is None:
return []
app_label = self.model._meta.app_label
errors = []
for alias in databases:
if router.allow_migrate(alias, app_label, model_name=self.model._meta.model_name):
errors.extend(connections[alias].validation.check_field(self, **kwargs))
return errors
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i, repr=repr(validator),
)
),
obj=self,
id='fields.E008',
)
)
return errors
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
'msg',
'%s has been removed except for support in historical '
'migrations.' % self.__class__.__name__
),
hint=self.system_check_removed_details.get('hint'),
obj=self,
id=self.system_check_removed_details.get('id', 'fields.EXXX'),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
'msg',
'%s has been deprecated.' % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get('hint'),
obj=self,
id=self.system_check_deprecated_details.get('id', 'fields.WXXX'),
)
]
return []
def get_col(self, alias, output_field=None):
if (
alias == self.model._meta.db_table and
(output_field is None or output_field == self)
):
return self.cached_col
from django.db.models.expressions import Col
return Col(alias, self, output_field)
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be
used by Django.
"""
return sql, params
def deconstruct(self):
"""
Return enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class() has
been run.
* The import path of the field, including the class, e.g.
django.db.models.IntegerField. This should be the most portable
version, so less specific may be better.
* A list of positional arguments.
* A dict of keyword arguments.
Note that the positional or keyword arguments must contain values of
the following types (including inner values of collection types):
* None, bool, str, int, float, complex, set, frozenset, list, tuple,
dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their
full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
There's no need to return the exact way the field was instantiated this
time, just ensure that the resulting field is the same - prefer keyword
arguments over positional ones, and omit parameters with their default
values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": None,
"help_text": '',
"db_column": None,
"db_tablespace": None,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
"db_tablespace": "_db_tablespace",
}
equals_comparison = {"choices", "validators"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.abc.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
elif path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
elif path.startswith('django.db.models.fields.json'):
path = path.replace('django.db.models.fields.json', 'django.db.models')
elif path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
elif path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (self.name, path, [], keywords)
def clone(self):
"""
Uses deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return (
self.creation_counter == other.creation_counter and
getattr(self, 'model', None) == getattr(other, 'model', None)
)
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
# Order by creation_counter first for backward compatibility.
if isinstance(other, Field):
if (
self.creation_counter != other.creation_counter or
not hasattr(self, 'model') and not hasattr(other, 'model')
):
return self.creation_counter < other.creation_counter
elif hasattr(self, 'model') != hasattr(other, 'model'):
return not hasattr(self, 'model') # Order no-model fields first
else:
# creation_counter's are equal, compare only models.
return (
(self.model._meta.app_label, self.model._meta.model_name) <
(other.model._meta.app_label, other.model._meta.model_name)
)
return NotImplemented
def __hash__(self):
return hash((
self.creation_counter,
self.model._meta.app_label if hasattr(self, 'model') else None,
self.model._meta.model_name if hasattr(self, 'model') else None,
))
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, 'field') and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, use the app registry to load the
model and then the field back.
"""
if not hasattr(self, 'model'):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
state = self.__dict__.copy()
# The _get_default cached_property can't be pickled due to lambda
# usage.
state.pop('_get_default', None)
return _empty, (self.__class__,), state
return _load_field, (self.model._meta.app_label, self.model._meta.object_name,
self.name)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
        something other than None, then the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Convert the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Return the converted value. Subclasses should override this.
"""
return value
@cached_property
def validators(self):
"""
Some validators can't be created at field initialization time.
This method provides a way to delay their creation until required.
"""
return [*self.default_validators, *self._validators]
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, 'code') and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validate value and raise ValidationError if necessary. Subclasses
should override this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices is not None and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages['null'], code='null')
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages['blank'], code='blank')
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python() and validate() are propagated. Return the correct
value if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type_parameters(self, connection):
return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_')
def db_check(self, connection):
"""
Return the database column check constraint for this field, for the
provided connection. Works the same way as db_type() for the case that
get_internal_type() does not map to a preexisting model field.
"""
data = self.db_type_parameters(connection)
try:
return connection.data_type_check_constraints[self.get_internal_type()] % data
except KeyError:
return None
def db_type(self, connection):
"""
Return the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
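        # For illustration only (hypothetical custom field, not part of this
        # module): a field whose column type has no built-in mapping can
        # return it directly, e.g.
        #   class HandField(Field):
        #       def db_type(self, connection):
        #           return 'char(104)'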
data = self.db_type_parameters(connection)
try:
return connection.data_types[self.get_internal_type()] % data
except KeyError:
return None
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
to determine its data type.
"""
return self.db_type(connection)
def cast_db_type(self, connection):
"""Return the data type to use in the Cast() function."""
db_type = connection.ops.cast_data_types.get(self.get_internal_type())
if db_type:
return db_type % self.db_type_parameters(connection)
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, 'from_db_value'):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
@property
def db_tablespace(self):
return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
@property
def db_returning(self):
"""
Private API intended only to be used by Django itself. Currently only
the PostgreSQL backend supports returning multiple fields on a model.
"""
return False
def set_attributes_from_name(self, name):
self.name = self.name or name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace('_', ' ')
def contribute_to_class(self, cls, name, private_only=False):
"""
Register the field with the model class it belongs to.
If private_only is True, create a separate instance of this field
for every subclass of cls, even if cls is not an abstract model.
"""
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self, private=private_only)
if self.column:
setattr(cls, self.attname, self.descriptor_class(self))
if self.choices is not None:
# Don't override a get_FOO_display() method defined explicitly on
# this class, but don't check methods derived from inheritance, to
# allow overriding inherited choices. For more complex inheritance
# structures users should override contribute_to_class().
if 'get_%s_display' % self.name not in cls.__dict__:
setattr(
cls,
'get_%s_display' % self.name,
partialmethod(cls._get_FIELD_display, field=self),
)
def get_filter_kwargs_for_object(self, obj):
"""
Return a dict that when passed as kwargs to self.model.filter(), would
yield all instances having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""Return field's value just before saving."""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""Perform preliminary non-db specific value checks and conversions."""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""
Return field's value prepared for interacting with the database backend.
Used by the default implementations of get_db_prep_save().
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""Return field's value prepared for saving into a database."""
return self.get_db_prep_value(value, connection=connection, prepared=False)
def has_default(self):
"""Return a boolean of whether this field has a default value."""
return self.default is not NOT_PROVIDED
def get_default(self):
"""Return the default value for this field."""
return self._get_default()
@cached_property
def _get_default(self):
if self.has_default():
if callable(self.default):
return self.default
return lambda: self.default
if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls:
return return_None
return str # return empty string
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=()):
"""
        Return choices with a default blank choice included, for use
as <select> choices for this field.
"""
if self.choices is not None:
choices = list(self.choices)
if include_blank:
blank_defined = any(choice in ('', None) for choice, _ in self.flatchoices)
if not blank_defined:
choices = blank_choice + choices
return choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
choice_func = operator.attrgetter(
self.remote_field.get_related_field().attname
if hasattr(self.remote_field, 'get_related_field')
else 'pk'
)
qs = rel_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [
(choice_func(x), str(x)) for x in qs
]
def value_to_string(self, obj):
"""
Return a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
if self.choices is None:
return []
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""Return a django.forms.Field instance for this field."""
defaults = {
'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text,
}
if self.has_default():
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
if self.choices is not None:
# Fields with choices get special treatment.
include_blank = (self.blank or
not (self.has_default() or 'initial' in kwargs))
defaults['choices'] = self.get_choices(include_blank=include_blank)
defaults['coerce'] = self.to_python
if self.null:
defaults['empty_value'] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in ('coerce', 'empty_value', 'choices', 'required',
'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial', 'disabled'):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
def value_from_object(self, obj):
"""Return the value of this field in the given model instance."""
return getattr(obj, self.attname)
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value must be either True or False.'),
'invalid_nullable': _('“%(value)s” value must be either True, False, or None.'),
}
description = _("Boolean (Either True or False)")
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if self.null and value in self.empty_values:
return None
if value in (True, False):
# 1/0 are equal to True/False. bool() converts former to latter.
return bool(value)
if value in ('t', 'True', '1'):
return True
if value in ('f', 'False', '0'):
return False
raise exceptions.ValidationError(
self.error_messages['invalid_nullable' if self.null else 'invalid'],
code='invalid',
params={'value': value},
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
def formfield(self, **kwargs):
if self.choices is not None:
include_blank = not (self.has_default() or 'initial' in kwargs)
defaults = {'choices': self.get_choices(include_blank=include_blank)}
else:
form_class = forms.NullBooleanField if self.null else forms.BooleanField
# In HTML checkboxes, 'required' means "must be checked" which is
# different from the choices case ("must select some value").
# required=False allows unchecked checkboxes.
defaults = {'form_class': form_class, 'required': False}
return super().formfield(**{**defaults, **kwargs})
class CharField(Field):
description = _("String (up to %(max_length)s)")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
databases = kwargs.get('databases') or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
*self._check_max_length_attribute(**kwargs),
]
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id='fields.E120',
)
]
elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or
self.max_length <= 0):
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id='fields.E121',
)
]
else:
return []
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None or
'supports_collation_on_charfield' in self.model._meta.required_db_features or
connection.features.supports_collation_on_charfield
):
errors.append(
checks.Error(
'%s does not support a database collation on '
'CharFields.' % connection.display_name,
obj=self,
id='fields.E190',
),
)
return errors
def cast_db_type(self, connection):
if self.max_length is None:
return connection.ops.cast_char_field_without_max_length
return super().cast_db_type(connection)
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
defaults = {'max_length': self.max_length}
# TODO: Handle multiple backends with different feature flags.
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults['empty_value'] = None
defaults.update(kwargs)
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs['db_collation'] = self.db_collation
return name, path, args, kwargs
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_removed_details = {
'msg': (
'CommaSeparatedIntegerField is removed except for support in '
'historical migrations.'
),
'hint': (
'Use CharField(validators=[validate_comma_separated_integer_list]) '
'instead.'
),
'id': 'fields.E901',
}
def _to_naive(value):
if timezone.is_aware(value):
value = timezone.make_naive(value, timezone.utc)
return value
def _get_naive_now():
return _to_naive(timezone.now())
class DateTimeCheckMixin:
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_mutually_exclusive_options(),
*self._check_fix_default_value(),
]
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
# will trigger an Error
mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()]
enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id='fields.E160',
)
]
else:
return []
def _check_fix_default_value(self):
return []
# Concrete subclasses use this in their implementations of
# _check_fix_default_value().
def _check_if_value_fixed(self, value, now=None):
"""
Check if the given value appears to have been provided as a "fixed"
time value, and include a warning in the returned list if it does. The
value argument must be a date object or aware/naive datetime object. If
now is provided, it must be a naive datetime object.
"""
if now is None:
now = _get_naive_now()
offset = datetime.timedelta(seconds=10)
lower = now - offset
upper = now + offset
if isinstance(value, datetime.datetime):
value = _to_naive(value)
else:
assert isinstance(value, datetime.date)
lower = lower.date()
upper = upper.date()
if lower <= value <= upper:
return [
checks.Warning(
'Fixed default value provided.',
hint=(
'It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`'
),
obj=self,
id='fields.W161',
)
]
return []
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value has an invalid date format. It must be '
'in YYYY-MM-DD format.'),
'invalid_date': _('“%(value)s” value has the correct format (YYYY-MM-DD) '
'but it is an invalid date.'),
}
description = _("Date (without time)")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
value = _to_naive(value).date()
elif isinstance(value, datetime.date):
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
# At this point, value is a date object.
return self._check_if_value_fixed(value)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs['auto_now'] = True
if self.auto_now_add:
kwargs['auto_now_add'] = True
if self.auto_now or self.auto_now_add:
del kwargs['editable']
del kwargs['blank']
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_date'],
code='invalid_date',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls, 'get_next_by_%s' % self.name,
partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True)
)
setattr(
cls, 'get_previous_by_%s' % self.name,
partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False)
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.DateField,
**kwargs,
})
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value has an invalid format. It must be in '
'YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.'),
'invalid_date': _("“%(value)s” value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."),
'invalid_datetime': _('“%(value)s” value has the correct format '
'(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) '
'but it is an invalid date/time.'),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, (datetime.datetime, datetime.date)):
return self._check_if_value_fixed(value)
# No explicit date / datetime value -- no checks necessary.
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn("DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active." %
(self.model.__name__, self.name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_datetime'],
code='invalid_datetime',
params={'value': value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_date'],
code='invalid_date',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
def get_prep_value(self, value):
value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = '%s.%s' % (self.model.__name__, self.name)
except AttributeError:
name = '(unbound)'
warnings.warn("DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." %
(name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.DateTimeField,
**kwargs,
})
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value must be a decimal number.'),
}
description = _("Decimal number")
def __init__(self, verbose_name=None, name=None, max_digits=None,
decimal_places=None, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
digits_errors = [
*self._check_decimal_places(),
*self._check_max_digits(),
]
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id='fields.E130',
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id='fields.E131',
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id='fields.E132',
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id='fields.E133',
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id='fields.E134',
)
]
return []
@cached_property
def validators(self):
return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
@cached_property
def context(self):
return decimal.Context(prec=self.max_digits)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs['max_digits'] = self.max_digits
if self.decimal_places is not None:
kwargs['decimal_places'] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, float):
if math.isnan(value):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
return self.context.create_decimal_from_float(value)
try:
return decimal.Decimal(value)
except (decimal.InvalidOperation, TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def get_db_prep_save(self, value, connection):
return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
return super().formfield(**{
'max_digits': self.max_digits,
'decimal_places': self.decimal_places,
'form_class': forms.DecimalField,
**kwargs,
})
class DurationField(Field):
"""
Store timedelta objects.
Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
of microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value has an invalid format. It must be in '
'[DD] [[HH:]MM:]ss[.uuuuuu] format.')
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return duration_microseconds(value)
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super().get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else duration_string(val)
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.DurationField,
**kwargs,
})
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs.setdefault('max_length', 254)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# We do not exclude max_length if it matches default as we want to change
# the default in future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
return super().formfield(**{
'form_class': forms.EmailField,
**kwargs,
})
class FilePathField(Field):
description = _("File path")
def __init__(self, verbose_name=None, name=None, path='', match=None,
recursive=False, allow_files=True, allow_folders=False, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs.setdefault('max_length', 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_allowing_files_or_folders(**kwargs),
]
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
obj=self,
id='fields.E140',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.path != '':
kwargs['path'] = self.path
if self.match is not None:
kwargs['match'] = self.match
if self.recursive is not False:
kwargs['recursive'] = self.recursive
if self.allow_files is not True:
kwargs['allow_files'] = self.allow_files
if self.allow_folders is not False:
kwargs['allow_folders'] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def formfield(self, **kwargs):
return super().formfield(**{
'path': self.path() if callable(self.path) else self.path,
'match': self.match,
'recursive': self.recursive,
'form_class': forms.FilePathField,
'allow_files': self.allow_files,
'allow_folders': self.allow_folders,
**kwargs,
})
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value must be a float.'),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.FloatField,
**kwargs,
})
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value must be an integer.'),
}
description = _("Integer")
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_max_length_warning(),
]
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with %s." % self.__class__.__name__,
hint="Remove 'max_length' from field",
obj=self,
id='fields.W122',
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None and not any(
(
isinstance(validator, validators.MinValueValidator) and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
) >= min_value
) for validator in validators_
):
validators_.append(validators.MinValueValidator(min_value))
if max_value is not None and not any(
(
isinstance(validator, validators.MaxValueValidator) and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
) <= max_value
) for validator in validators_
):
validators_.append(validators.MaxValueValidator(max_value))
return validators_
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return int(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.IntegerField,
**kwargs,
})
class BigIntegerField(IntegerField):
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
return super().formfield(**{
'min_value': -BigIntegerField.MAX_BIGINT - 1,
'max_value': BigIntegerField.MAX_BIGINT,
**kwargs,
})
class SmallIntegerField(IntegerField):
description = _('Small integer')
def get_internal_type(self):
return 'SmallIntegerField'
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
'msg': (
'IPAddressField has been removed except for support in '
'historical migrations.'
),
'hint': 'Use GenericIPAddressField instead.',
'id': 'fields.E900',
}
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 15
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(self, verbose_name=None, name=None, protocol='both',
unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
self.default_validators, invalid_error_message = \
validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages['invalid'] = invalid_error_message
kwargs['max_length'] = 39
super().__init__(verbose_name, name, *args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_blank_and_null_values(**kwargs),
]
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, 'null', False) and getattr(self, 'blank', False):
return [
checks.Error(
'GenericIPAddressFields cannot have blank=True if null=False, '
'as blank values are stored as nulls.',
obj=self,
id='fields.E150',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs['unpack_ipv4'] = self.unpack_ipv4
if self.protocol != "both":
kwargs['protocol'] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs['max_length']
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, str):
value = str(value)
value = value.strip()
if ':' in value:
return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid'])
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
if value and ':' in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return str(value)
def formfield(self, **kwargs):
return super().formfield(**{
'protocol': self.protocol,
'form_class': forms.GenericIPAddressField,
**kwargs,
})
class NullBooleanField(BooleanField):
default_error_messages = {
'invalid': _('“%(value)s” value must be either None, True or False.'),
'invalid_nullable': _('“%(value)s” value must be either None, True or False.'),
}
description = _("Boolean (Either True, False or None)")
system_check_removed_details = {
'msg': (
'NullBooleanField is removed except for support in historical '
'migrations.'
),
'hint': 'Use BooleanField(null=True) instead.',
'id': 'fields.E903',
}
def __init__(self, *args, **kwargs):
kwargs['null'] = True
kwargs['blank'] = True
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['null']
del kwargs['blank']
return name, path, args, kwargs
class PositiveIntegerRelDbTypeMixin:
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
if not hasattr(cls, 'integer_field_class'):
cls.integer_field_class = next(
(
parent
for parent in cls.__mro__[1:]
if issubclass(parent, IntegerField)
),
None,
)
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return self.integer_field_class().db_type(connection=connection)
class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
description = _('Positive big integer')
def get_internal_type(self):
return 'PositiveBigIntegerField'
def formfield(self, **kwargs):
return super().formfield(**{
'min_value': 0,
**kwargs,
})
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
return super().formfield(**{
'min_value': 0,
**kwargs,
})
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
return super().formfield(**{
'min_value': 0,
**kwargs,
})
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs['max_length']
if self.db_index is False:
kwargs['db_index'] = False
else:
del kwargs['db_index']
if self.allow_unicode is not False:
kwargs['allow_unicode'] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.SlugField,
'allow_unicode': self.allow_unicode,
**kwargs,
})
class TextField(Field):
description = _("Text")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
def check(self, **kwargs):
databases = kwargs.get('databases') or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
]
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None or
'supports_collation_on_textfield' in self.model._meta.required_db_features or
connection.features.supports_collation_on_textfield
):
errors.append(
checks.Error(
'%s does not support a database collation on '
'TextFields.' % connection.display_name,
obj=self,
id='fields.E190',
),
)
return errors
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
return super().formfield(**{
'max_length': self.max_length,
**({} if self.choices is not None else {'widget': forms.Textarea}),
**kwargs,
})
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs['db_collation'] = self.db_collation
return name, path, args, kwargs
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('“%(value)s” value has an invalid format. It must be in '
'HH:MM[:ss[.uuuuuu]] format.'),
'invalid_time': _('“%(value)s” value has the correct format '
'(HH:MM[:ss[.uuuuuu]]) but it is an invalid time.'),
}
description = _("Time")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
now = None
elif isinstance(value, datetime.time):
now = _get_naive_now()
# This will not use the right date in the race condition where now
# is just before the date change and value is just past 0:00.
value = datetime.datetime.combine(now.date(), value)
else:
# No explicit time / datetime value -- no checks necessary
return []
# At this point, value is a datetime object.
return self._check_if_value_fixed(value, now=now)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs['blank']
del kwargs['editable']
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_time'],
code='invalid_time',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.TimeField,
**kwargs,
})
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs.setdefault('max_length', 200)
super().__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs['max_length']
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
return super().formfield(**{
'form_class': forms.URLField,
**kwargs,
})
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b'']
def __init__(self, *args, **kwargs):
kwargs.setdefault('editable', False)
super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
return [*super().check(**kwargs), *self._check_str_default_value()]
def _check_str_default_value(self):
if self.has_default() and isinstance(self.default, str):
return [
checks.Error(
"BinaryField's default cannot be a string. Use bytes "
"content instead.",
obj=self,
id='fields.E170',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.editable:
kwargs['editable'] = True
else:
del kwargs['editable']
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_placeholder(self, value, compiler, connection):
return connection.ops.binary_placeholder_sql(value)
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super().get_default()
if default == '':
return b''
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(self.value_from_object(obj)).decode('ascii')
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, str):
return memoryview(b64decode(value.encode('ascii')))
return value
class UUIDField(Field):
default_error_messages = {
'invalid': _('“%(value)s” is not a valid UUID.'),
}
description = _('Universally unique identifier')
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs['max_length'] = 32
super().__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
value = self.to_python(value)
if connection.features.has_native_uuid_field:
return value
return value.hex
def to_python(self, value):
if value is not None and not isinstance(value, uuid.UUID):
input_form = 'int' if isinstance(value, int) else 'hex'
try:
return uuid.UUID(**{input_form: value})
except (AttributeError, ValueError):
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
return value
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': forms.UUIDField,
**kwargs,
})
class AutoFieldMixin:
db_returning = True
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super().__init__(*args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_primary_key(),
]
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
'AutoFields must set primary_key=True.',
obj=self,
id='fields.E100',
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['blank']
kwargs['primary_key'] = True
return name, path, args, kwargs
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def contribute_to_class(self, cls, name, **kwargs):
if cls._meta.auto_field:
raise ValueError(
"Model %s can't have more than one auto-generated field."
% cls._meta.label
)
super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class AutoFieldMeta(type):
"""
Metaclass to maintain backward inheritance compatibility for AutoField.
It is intended that AutoFieldMixin become public API when it is possible to
create a non-integer automatically-generated field using column defaults
stored in the database.
In many areas Django also relies on using isinstance() to check for an
automatically-generated field as a subclass of AutoField. A new flag needs
to be implemented on Field to be used instead.
When these issues have been addressed, this metaclass could be used to
deprecate inheritance from AutoField and use of isinstance() with AutoField
for detecting automatically-generated fields.
"""
@property
def _subclasses(self):
return (BigAutoField, SmallAutoField)
def __instancecheck__(self, instance):
return isinstance(instance, self._subclasses) or super().__instancecheck__(instance)
def __subclasscheck__(self, subclass):
return issubclass(subclass, self._subclasses) or super().__subclasscheck__(subclass)
class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
def get_internal_type(self):
return 'AutoField'
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
class BigAutoField(AutoFieldMixin, BigIntegerField):
def get_internal_type(self):
return 'BigAutoField'
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class SmallAutoField(AutoFieldMixin, SmallIntegerField):
def get_internal_type(self):
return 'SmallAutoField'
def rel_db_type(self, connection):
return SmallIntegerField().db_type(connection=connection)
| ar4s/django | django/db/models/fields/__init__.py | Python | bsd-3-clause | 90,874 | 0.000694 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Moves a C++ file to a new location, updating any include paths that
point to it, and re-ordering headers as needed. Updates include
guards in moved header files. Assumes Chromium coding style.
Attempts to update paths used in .gyp(i) files, but does not reorder
or restructure .gyp(i) files in any way.
Updates full-path references to files in // comments in source files.
Must run in a git checkout, as it relies on git grep for a fast way to
find files that reference the moved file.
"""
import os
import re
import subprocess
import sys
import mffr
if __name__ == '__main__':
# Need to add the directory containing sort-headers.py to the Python
# classpath.
sys.path.append(os.path.abspath(os.path.join(sys.path[0], '..')))
sort_headers = __import__('sort-headers')
HANDLED_EXTENSIONS = ['.cc', '.mm', '.h', '.hh']
def MakeDestinationPath(from_path, to_path):
"""Given the from and to paths, return a correct destination path.
  The initial destination path may be either a full path or a directory,
in which case the path must end with /. Also does basic sanity
checks.
"""
if os.path.splitext(from_path)[1] not in HANDLED_EXTENSIONS:
raise Exception('Only intended to move individual source files.')
dest_extension = os.path.splitext(to_path)[1]
if dest_extension not in HANDLED_EXTENSIONS:
if to_path.endswith('/') or to_path.endswith('\\'):
to_path += os.path.basename(from_path)
else:
raise Exception('Destination must be either full path or end with /.')
return to_path
def MoveFile(from_path, to_path):
"""Performs a git mv command to move a file from |from_path| to |to_path|.
"""
if not os.system('git mv %s %s' % (from_path, to_path)) == 0:
raise Exception('Fatal: Failed to run git mv command.')
def UpdatePostMove(from_path, to_path):
"""Given a file that has moved from |from_path| to |to_path|,
updates the moved file's include guard to match the new path and
updates all references to the file in other source files. Also tries
to update references in .gyp(i) files using a heuristic.
"""
# Include paths always use forward slashes.
from_path = from_path.replace('\\', '/')
to_path = to_path.replace('\\', '/')
if os.path.splitext(from_path)[1] in ['.h', '.hh']:
UpdateIncludeGuard(from_path, to_path)
# Update include/import references.
files_with_changed_includes = mffr.MultiFileFindReplace(
r'(#(include|import)\s*["<])%s([>"])' % re.escape(from_path),
r'\1%s\3' % to_path,
['*.cc', '*.h', '*.m', '*.mm'])
# Reorder headers in files that changed.
for changed_file in files_with_changed_includes:
def AlwaysConfirm(a, b): return True
sort_headers.FixFileWithConfirmFunction(changed_file, AlwaysConfirm)
# Update comments; only supports // comments, which are primarily
# used in our code.
#
# This work takes a bit of time. If this script starts feeling too
# slow, one good way to speed it up is to make the comment handling
# optional under a flag.
mffr.MultiFileFindReplace(
r'(//.*)%s' % re.escape(from_path),
r'\1%s' % to_path,
['*.cc', '*.h', '*.m', '*.mm'])
# Update references in .gyp(i) files.
def PathMinusFirstComponent(path):
"""foo/bar/baz -> bar/baz"""
parts = re.split(r"[/\\]", path, 1)
if len(parts) == 2:
return parts[1]
else:
return parts[0]
mffr.MultiFileFindReplace(
r'([\'"])%s([\'"])' % re.escape(PathMinusFirstComponent(from_path)),
r'\1%s\2' % PathMinusFirstComponent(to_path),
['*.gyp*'])
def MakeIncludeGuardName(path_from_root):
"""Returns an include guard name given a path from root."""
guard = path_from_root.replace('/', '_')
guard = guard.replace('\\', '_')
guard = guard.replace('.', '_')
guard += '_'
return guard.upper()
def UpdateIncludeGuard(old_path, new_path):
"""Updates the include guard in a file now residing at |new_path|,
previously residing at |old_path|, with an up-to-date include guard.
Errors out if an include guard per Chromium style guide cannot be
found for the old path.
"""
old_guard = MakeIncludeGuardName(old_path)
new_guard = MakeIncludeGuardName(new_path)
with open(new_path) as f:
contents = f.read()
new_contents = contents.replace(old_guard, new_guard)
if new_contents == contents:
raise Exception(
'Error updating include guard; perhaps old guard is not per style guide?')
with open(new_path, 'w') as f:
f.write(new_contents)
def main():
if not os.path.isdir('.git'):
print 'Fatal: You must run from the root of a git checkout.'
return 1
args = sys.argv[1:]
if not len(args) in [2, 3]:
print ('Usage: move_source_file.py [--already-moved] FROM_PATH TO_PATH'
'\n\n%s' % __doc__)
return 1
already_moved = False
if args[0] == '--already-moved':
args = args[1:]
already_moved = True
from_path = args[0]
to_path = args[1]
to_path = MakeDestinationPath(from_path, to_path)
if not already_moved:
MoveFile(from_path, to_path)
UpdatePostMove(from_path, to_path)
return 0
if __name__ == '__main__':
sys.exit(main())
| zcbenz/cefode-chromium | tools/git/move_source_file.py | Python | bsd-3-clause | 5,348 | 0.010845 |
#! /usr/bin/env python
import unittest
import time
from Communication import Communication
class CommunicationTest(unittest.TestCase):
def setUp(self):
'''
        Verify environment is set up properly.
'''
self.controller = Communication()
self.b_list = self.controller.get_bluetooth_list()
def tearDown(self):
'''
        Verify environment is torn down properly.
'''
pass
def test_get_bluetooth_list(self):
'''
        Verify that the bluetooth list was retrieved without problems.
'''
value = False
if "Empty" not in self.b_list[0]:
value = True
self.assertTrue(value)
def test_send(self):
'''
        Verify that the instruction was sent without problems.
'''
for b_name in self.b_list:
if "CURIOSITY"in b_name:
break
self.controller.connect(b_name)
value = self.controller.send("Hello")
time.sleep(5)
self.controller.disconnect()
self.assertTrue(value)
if __name__ == '__main__':
unittest.main() | mparra-mpz/CURIOSITY | CURIOSITY/test/CommunicationTest.py | Python | gpl-2.0 | 1,149 | 0.007833 |
from numpy import *
from matplotlib import pyplot
import scripts.skewtools as st
import sys
X,Y,t,Pe = st.importDatasets(sys.argv[1],'X','Y','Time','Peclet')
figscale = 5.
fig,ax = pyplot.subplots(1,1,figsize=(4*figscale,figscale))
uwall = 2./3.
xmax = X.max() + uwall*Pe*t[-1]
for i in range(len(t)):
#for i in [52]:
ax.cla()
# ax.hold(True)
ax.plot([0,xmax],[1,1],linewidth=0.5,color='k')
ax.plot([0,xmax],[-1,-1],linewidth=0.5,color='k')
subset1 = ((Y[:,i]<1.)*(Y[:,i] > -1.))
subset2 = ~subset1
ax.scatter(X[subset1,i],Y[subset1,i],facecolor=[0,0,0.9],edgecolor=[0,0,0,0],s=1,alpha=0.2)
ax.scatter(X[subset2,i],Y[subset2,i],facecolor=[0.9,0,0],edgecolor=[0,0,0,0],s=1,alpha=0.2)
# ax.hist2d(X[subset,i] + uwall*Pe*t[i],Y[subset,i],cmap=pyplot.cm.inferno,)
# ax.hold(False)
ax.set_xlim([0.,xmax])
ax.set_ylim([-1.05,1.05])
print '%i active particles, %i of %i frames'%(sum(subset1),i,len(t)-1)
pyplot.savefig('cloudframe_'+str(i).zfill(4)+'.png',dpi=80,bbox_inches='tight')
# end for
#pyplot.tight_layout()
| maminian/skewtools | scripts/animate_particles_2d_labframe.py | Python | gpl-3.0 | 1,101 | 0.070845 |
#
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tools to generate data sources.
"""
import numpy as np
import pandas as pd
from zipline.gens.utils import hash_args
from zipline.sources.data_source import DataSource
class DataFrameSource(DataSource):
"""
Data source that yields from a pandas DataFrame.
:Axis layout:
* columns : sids
* index : datetime
:Note:
Bars where the price is nan are filtered out.
"""
def __init__(self, data, **kwargs):
assert isinstance(data.index, pd.tseries.index.DatetimeIndex)
# Only accept integer SIDs as the items of the DataFrame
assert isinstance(data.columns, pd.Int64Index)
# TODO is ffilling correct/necessary?
# Forward fill prices
self.data = data.fillna(method='ffill')
# Unpack config dictionary with default values.
self.start = kwargs.get('start', self.data.index[0])
self.end = kwargs.get('end', self.data.index[-1])
self.sids = self.data.columns
# Hash_value for downstream sorting.
self.arg_string = hash_args(data, **kwargs)
self._raw_data = None
self.started_sids = set()
@property
def mapping(self):
return {
'dt': (lambda x: x, 'dt'),
'sid': (lambda x: x, 'sid'),
'price': (float, 'price'),
'volume': (int, 'volume'),
}
@property
def instance_hash(self):
return self.arg_string
def raw_data_gen(self):
for dt, series in self.data.iterrows():
for sid, price in series.iteritems():
# Skip SIDs that can not be forward filled
if np.isnan(price) and \
sid not in self.started_sids:
continue
self.started_sids.add(sid)
event = {
'dt': dt,
'sid': sid,
'price': price,
# Just chose something large
# if no volume available.
'volume': 1e9,
}
yield event
@property
def raw_data(self):
if not self._raw_data:
self._raw_data = self.raw_data_gen()
return self._raw_data
class DataPanelSource(DataSource):
"""
Data source that yields from a pandas Panel.
:Axis layout:
* items : sids
* major_axis : datetime
* minor_axis : price, volume, ...
:Note:
Bars where the price is nan are filtered out.
"""
def __init__(self, data, **kwargs):
assert isinstance(data.major_axis, pd.tseries.index.DatetimeIndex)
# Only accept integer SIDs as the items of the Panel
assert isinstance(data.items, pd.Int64Index)
# TODO is ffilling correct/necessary?
# forward fill with volumes of 0
self.data = data.fillna(value={'volume': 0})
# Unpack config dictionary with default values.
self.start = kwargs.get('start', self.data.major_axis[0])
self.end = kwargs.get('end', self.data.major_axis[-1])
self.sids = self.data.items
# Hash_value for downstream sorting.
self.arg_string = hash_args(data, **kwargs)
self._raw_data = None
self.started_sids = set()
@property
def mapping(self):
mapping = {
'dt': (lambda x: x, 'dt'),
'sid': (lambda x: x, 'sid'),
'price': (float, 'price'),
'volume': (int, 'volume'),
}
# Add additional fields.
for field_name in self.data.minor_axis:
if field_name in ['price', 'volume', 'dt', 'sid']:
continue
mapping[field_name] = (lambda x: x, field_name)
return mapping
@property
def instance_hash(self):
return self.arg_string
def raw_data_gen(self):
for dt in self.data.major_axis:
df = self.data.major_xs(dt)
for sid, series in df.iteritems():
# Skip SIDs that can not be forward filled
if np.isnan(series['price']):
continue
self.started_sids.add(sid)
event = {
'dt': dt,
'sid': sid,
}
for field_name, value in series.iteritems():
event[field_name] = value
yield event
@property
def raw_data(self):
if not self._raw_data:
self._raw_data = self.raw_data_gen()
return self._raw_data
| wilsonkichoi/zipline | zipline/sources/data_frame_source.py | Python | apache-2.0 | 5,146 | 0 |
#-----------------------------------------------------------------------------
# Copyright (c) 2010 Justin Riley
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------
import sys
import zmq
import pymongo
import pymongo.json_util
import json
class MongoZMQ(object):
"""
    ZMQ server that adds/fetches documents (i.e. dictionaries) to/from a MongoDB database.
NOTE: mongod must be started before using this class
"""
def __init__(self, db_name, table_name, bind_addr="tcp://127.0.0.1:5000"):
"""
bind_addr: address to bind zmq socket on
        db_name: name of database to write to (created if it doesn't exist)
        table_name: name of mongodb 'table' in the db to write to (created if it doesn't exist)
"""
self._bind_addr = bind_addr
self._db_name = db_name
self._table_name = table_name
self._conn = pymongo.Connection()
self._db = self._conn[self._db_name]
self._table = self._db[self._table_name]
def _doc_to_json(self, doc):
return json.dumps(doc,default=pymongo.json_util.default)
def add_document(self, doc):
"""
Inserts a document (dictionary) into mongo database table
"""
        print 'adding document %s' % (doc)
try:
self._table.insert(doc)
except Exception,e:
return 'Error: %s' % e
def get_document_by_keys(self, keys):
"""
Attempts to return a single document from database table that matches
each key/value in keys dictionary.
"""
print 'attempting to retrieve document using keys: %s' % keys
try:
return self._table.find_one(keys)
except Exception,e:
return 'Error: %s' % e
def start(self):
context = zmq.Context()
socket = context.socket(zmq.ROUTER)
socket.bind(self._bind_addr)
while True:
msg = socket.recv_multipart()
print "Received msg: ", msg
if len(msg) != 3:
error_msg = 'invalid message received: %s' % msg
print error_msg
reply = [msg[0], error_msg]
socket.send_multipart(reply)
continue
id = msg[0]
operation = msg[1]
contents = json.loads(msg[2])
# always send back the id with ROUTER
reply = [id]
if operation == 'add':
self.add_document(contents)
reply.append("success")
elif operation == 'get':
doc = self.get_document_by_keys(contents)
json_doc = self._doc_to_json(doc)
reply.append(json_doc)
else:
print 'unknown request'
socket.send_multipart(reply)
def main():
MongoZMQ('ipcontroller','jobs').start()
if __name__ == "__main__":
main()
| mgadi/naemonbox | sources/psdash/pyzmq-13.1.0/examples/mongodb/controller.py | Python | gpl-2.0 | 3,050 | 0.003934 |
import os
from conda_build import api
from conda_build import render
def test_output_with_noarch_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch'] = 'python'
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
def test_output_with_noarch_python_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch_python'] = True
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
def test_reduce_duplicate_specs(testing_metadata):
reqs = {'build': ['exact', 'exact 1.2.3 1', 'exact >1.0,<2'],
'host': ['exact', 'exact 1.2.3 1']
}
testing_metadata.meta['requirements'] = reqs
render._simplify_to_exact_constraints(testing_metadata)
assert (testing_metadata.meta['requirements']['build'] ==
testing_metadata.meta['requirements']['host'])
simplified_deps = testing_metadata.meta['requirements']
assert len(simplified_deps['build']) == 1
assert 'exact 1.2.3 1' in simplified_deps['build']
def test_pin_run_as_build_preserve_string(testing_metadata):
m = testing_metadata
m.config.variant['pin_run_as_build']['pkg'] = {
'max_pin': 'x.x'
}
dep = render.get_pin_from_build(
m,
'pkg * somestring*',
{'pkg': '1.2.3 somestring_h1234'}
)
assert dep == 'pkg >=1.2.3,<1.3.0a0 somestring*'
| pelson/conda-build | tests/test_render.py | Python | bsd-3-clause | 1,460 | 0.000685 |
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from django.conf import settings
from oslo_serialization import jsonutils
from openstack_dashboard import api
from openstack_dashboard.api.rest import keystone
from openstack_dashboard.test import helpers as test
class KeystoneRestTestCase(test.TestCase):
#
# Version
#
@test.create_mocks({api.keystone: ['get_version']})
def test_version_get(self):
request = self.mock_rest_request()
self.mock_get_version.return_value = '3'
response = keystone.Version().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"version": "3"})
self.mock_get_version.assert_called_once_with()
#
# Users
#
@test.create_mocks({api.keystone: ['user_get']})
def test_user_get(self):
request = self.mock_rest_request()
self.mock_user_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.User().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_user_get.assert_called_once_with(
request, 'the_id', admin=False)
@test.create_mocks({api.keystone: ['user_get']})
def test_user_get_current(self):
request = self.mock_rest_request(**{'user.id': 'current_id'})
self.mock_user_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.User().get(request, 'current')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_user_get.assert_called_once_with(
request, 'current_id', admin=False)
@test.create_mocks({api.keystone: ['user_list']})
def test_user_get_list(self):
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': {},
})
self.mock_user_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Users().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_user_list.assert_called_once_with(request, project=None,
domain='the_domain',
group=None,
filters=None)
@test.create_mocks({api.keystone: ['user_list']})
def test_user_get_list_with_filters(self):
filters = {'enabled': True}
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': dict(**filters),
})
self.mock_user_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Users().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_user_list.assert_called_once_with(request, project=None,
domain='the_domain',
group=None,
filters=filters)
def test_user_create_full(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project": "123", '
'"email": "spam@company.example", '
'"description": "hello, puff"}',
{
'name': 'bob',
'password': 'sekrit',
'email': 'spam@company.example',
'project': '123',
'domain': 'the_domain',
'enabled': True,
'description': 'hello, puff'
}
)
def test_user_create_existing_role(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project": "123", '
'"email": "spam@company.example"}',
{
'name': 'bob',
'password': 'sekrit',
'email': 'spam@company.example',
'project': '123',
'domain': 'the_domain',
'enabled': True,
'description': None
}
)
def test_user_create_no_project(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project": "", '
'"email": "spam@company.example"}',
{
'name': 'bob',
'password': 'sekrit',
'email': 'spam@company.example',
'project': None,
'domain': 'the_domain',
'enabled': True,
'description': None
}
)
def test_user_create_partial(self):
self._test_user_create(
'{"name": "bob", "project": ""}',
{
'name': 'bob',
'password': None,
'email': None,
'project': None,
'domain': 'the_domain',
'enabled': True,
'description': None
}
)
@test.create_mocks({api.keystone: ['get_default_domain',
'user_create']})
def _test_user_create(self, supplied_body, add_user_call):
request = self.mock_rest_request(body=supplied_body)
self.mock_get_default_domain.return_value = \
mock.Mock(**{'id': 'the_domain'})
self.mock_user_create.return_value = mock.Mock(**{
'id': 'user123',
'to_dict.return_value': {'id': 'user123', 'name': 'bob'}
})
response = keystone.Users().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/users/user123')
self.assertEqual(response.json,
{"id": "user123", "name": "bob"})
self.mock_user_create.assert_called_once_with(request, **add_user_call)
self.mock_get_default_domain.assert_called_once_with(request)
@test.create_mocks({api.keystone: ['user_delete']})
def test_user_delete_many(self):
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
self.mock_user_delete.return_value = None
response = keystone.Users().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@test.create_mocks({api.keystone: ['user_delete']})
def test_user_delete(self):
request = self.mock_rest_request()
self.mock_user_delete.return_value = None
response = keystone.User().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_delete.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['user_get',
'user_update_password']})
def test_user_patch_password(self):
request = self.mock_rest_request(body='''
{"password": "sekrit"}
''')
user = keystone.User()
self.mock_user_get.return_value = mock.sentinel.user
self.mock_user_update_password.return_value = None
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_get.assert_called_once_with(request, 'user123')
self.mock_user_update_password.assert_called_once_with(
request, mock.sentinel.user, 'sekrit')
@test.create_mocks({api.keystone: ['user_get',
'user_update_enabled']})
def test_user_patch_enabled(self):
request = self.mock_rest_request(body='''
{"enabled": false}
''')
user = keystone.User()
self.mock_user_get.return_value = mock.sentinel.user
self.mock_user_update_enabled.return_value = None
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_get.assert_called_once_with(request, 'user123')
self.mock_user_update_enabled.assert_called_once_with(
request, mock.sentinel.user, False)
@test.create_mocks({api.keystone: ['user_get',
'user_update']})
def test_user_patch_project(self):
request = self.mock_rest_request(body='''
{"project": "other123"}
''')
user = keystone.User()
self.mock_user_get.return_value = mock.sentinel.user
self.mock_user_update.return_value = self.users.first()
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_get.assert_called_once_with(request, 'user123')
self.mock_user_update.assert_called_once_with(
request, mock.sentinel.user, project='other123')
@test.create_mocks({api.keystone: ['user_get',
'user_update']})
def test_user_patch_multiple(self):
request = self.mock_rest_request(body='''
{"project": "other123", "name": "something"}
''')
user = keystone.User()
self.mock_user_get.return_value = mock.sentinel.user
self.mock_user_update.return_value = self.users.first()
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_user_get.assert_called_once_with(request, 'user123')
self.mock_user_update.assert_called_once_with(
request, mock.sentinel.user, project='other123', name='something')
#
# Roles
#
@test.create_mocks({api.keystone: ['role_get']})
def test_role_get(self):
request = self.mock_rest_request()
self.mock_role_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Role().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_role_get.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['get_default_role']})
def test_role_get_default(self):
request = self.mock_rest_request()
ret_val_role = self.mock_get_default_role.return_value
ret_val_role.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Role().get(request, 'default')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_get_default_role.assert_called_once_with(request)
@test.create_mocks({api.keystone: ['role_list']})
def test_role_get_list(self):
request = self.mock_rest_request(**{'GET': {}})
self.mock_role_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Roles().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_role_list.assert_called_once_with(request)
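    # When user_id and project_id query parameters are supplied, the Roles API
    # is expected to defer to roles_for_user instead of listing every role.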
@test.create_mocks({api.keystone: ['roles_for_user']})
def test_role_get_for_user(self):
request = self.mock_rest_request(**{'GET': {'user_id': 'user123',
'project_id': 'project123'}})
self.mock_roles_for_user.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Roles().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_roles_for_user.assert_called_once_with(request, 'user123',
'project123')
@test.create_mocks({api.keystone: ['role_create']})
def test_role_create(self):
request = self.mock_rest_request(body='''
{"name": "bob"}
''')
self.mock_role_create.return_value.id = 'role123'
self.mock_role_create.return_value.to_dict.return_value = {
'id': 'role123', 'name': 'bob'
}
response = keystone.Roles().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/roles/role123')
self.assertEqual(response.json, {"id": "role123", "name": "bob"})
self.mock_role_create.assert_called_once_with(request, 'bob')
@test.create_mocks({api.keystone: ['add_tenant_user_role']})
def test_role_grant(self):
self.mock_add_tenant_user_role.return_value = None
request = self.mock_rest_request(body='''
{"action": "grant", "data": {"user_id": "user123",
"role_id": "role123", "project_id": "project123"}}
''')
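        # the project, role and user identifiers come from the URL path
        # arguments, not from the request body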
response = keystone.ProjectRole().put(request, "project1", "role2",
"user3")
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_add_tenant_user_role.assert_called_once_with(
request, 'project1', 'user3', 'role2')
@test.create_mocks({api.keystone: ['role_delete']})
def test_role_delete_many(self):
self.mock_role_delete.return_value = None
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Roles().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_role_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@test.create_mocks({api.keystone: ['role_delete']})
def test_role_delete(self):
self.mock_role_delete.return_value = None
request = self.mock_rest_request()
response = keystone.Role().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_role_delete.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['role_update']})
def test_role_patch(self):
self.mock_role_update.return_value = self.roles.first()
request = self.mock_rest_request(body='{"name": "spam"}')
response = keystone.Role().patch(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_role_update.assert_called_once_with(request,
'the_id',
'spam')
#
# Domains
#
@test.create_mocks({api.keystone: ['get_default_domain']})
def test_default_domain_get(self):
request = self.mock_rest_request()
domain = api.base.APIDictWrapper({'id': 'the_id', 'name': 'the_name'})
self.mock_get_default_domain.return_value = domain
response = keystone.DefaultDomain().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json, domain.to_dict())
self.mock_get_default_domain.assert_called_once_with(request)
@test.create_mocks({api.keystone: ['domain_get']})
def test_domain_get(self):
request = self.mock_rest_request()
ret_val_domain = self.mock_domain_get.return_value
ret_val_domain.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Domain().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_domain_get.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['get_default_domain']})
def test_domain_get_default(self):
request = self.mock_rest_request()
self.mock_get_default_domain.return_value.to_dict.return_value = {
'name': 'Ni!'
}
response = keystone.Domain().get(request, 'default')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_get_default_domain.assert_called_once_with(request)
@test.create_mocks({api.keystone: ['domain_list']})
def test_domain_get_list(self):
request = self.mock_rest_request()
self.mock_domain_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Domains().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_domain_list.assert_called_once_with(request)
def test_domain_create_full(self):
self._test_domain_create(
'{"name": "bob", '
'"description": "sekrit", "enabled": false}',
{
'description': 'sekrit',
'enabled': False
}
)
def test_domain_create_partial(self):
self._test_domain_create(
'{"name": "bob"}',
{
'description': None,
'enabled': True
}
)
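    # Shared helper for the domain creation tests above: the domain name is
    # passed positionally, the remaining fields as keyword arguments.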
@test.create_mocks({api.keystone: ['domain_create']})
def _test_domain_create(self, supplied_body, expected_call):
request = self.mock_rest_request(body=supplied_body)
ret_val_domain = self.mock_domain_create.return_value
ret_val_domain.id = 'domain123'
ret_val_domain.to_dict.return_value = {
'id': 'domain123', 'name': 'bob'
}
response = keystone.Domains().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/domains/domain123')
self.assertEqual(response.json, {"id": "domain123", "name": "bob"})
self.mock_domain_create.assert_called_once_with(request, 'bob',
**expected_call)
@test.create_mocks({api.keystone: ['domain_delete']})
def test_domain_delete_many(self):
self.mock_domain_delete.return_value = None
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Domains().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_domain_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@test.create_mocks({api.keystone: ['domain_delete']})
def test_domain_delete(self):
self.mock_domain_delete.return_value = None
request = self.mock_rest_request()
response = keystone.Domain().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_domain_delete.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['domain_update']})
def test_domain_patch(self):
self.mock_domain_update.return_value = self.domains.first()
request = self.mock_rest_request(body='{"name": "spam"}')
response = keystone.Domain().patch(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_domain_update.assert_called_once_with(request,
'the_id',
name='spam',
description=None,
enabled=None)
#
# Projects
#
@test.create_mocks({api.keystone: ['tenant_get']})
def test_project_get(self):
request = self.mock_rest_request()
ret_val_tenant = self.mock_tenant_get.return_value
ret_val_tenant.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Project().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "Ni!"})
self.mock_tenant_get.assert_called_once_with(
request, 'the_id', admin=False)
def test_project_get_list(self):
self._test_project_get_list(
{},
{
'paginate': False,
'marker': None,
'domain': None,
'user': None,
'admin': True,
'filters': None
}
)
def test_project_get_list_with_params_true(self):
self._test_project_get_list(
{
'paginate': 'true',
'admin': 'true'
},
{
'paginate': True,
'marker': None,
'domain': None,
'user': None,
'admin': True,
'filters': None
}
)
def test_project_get_list_with_params_false(self):
self._test_project_get_list(
{
'paginate': 'false',
'admin': 'false'
},
{
'paginate': False,
'marker': None,
'domain': None,
'user': None,
'admin': False,
'filters': None
}
)
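    # Shared helper: issues the GET with the supplied query parameters and
    # checks the keyword arguments passed through to api.keystone.tenant_list.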
@test.create_mocks({api.keystone: ['tenant_list']})
def _test_project_get_list(self, params, expected_call):
request = self.mock_rest_request(**{'GET': dict(**params)})
self.mock_tenant_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
], False)
with mock.patch.object(settings, 'DEBUG', True):
response = keystone.Projects().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"has_more": False,
"items": [{"name": "Ni!"}, {"name": "Ptang!"}]})
self.mock_tenant_list.assert_called_once_with(request, **expected_call)
@test.create_mocks({api.keystone: ['tenant_list']})
def test_project_get_list_with_filters(self):
filters = {'name': 'Ni!'}
request = self.mock_rest_request(**{'GET': dict(**filters)})
self.mock_tenant_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}})
], False)
with mock.patch.object(settings, 'DEBUG', True):
response = keystone.Projects().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"has_more": False,
"items": [{"name": "Ni!"}, {"name": "Ni!"}]})
self.mock_tenant_list.assert_called_once_with(request, paginate=False,
marker=None, domain=None,
user=None, admin=True,
filters=filters)
def test_project_create_full(self):
self._test_project_create(
'{"name": "bob", '
'"domain_id": "domain123", "description": "sekrit", '
'"enabled": false}',
{
'name': 'bob',
'description': 'sekrit',
'domain': 'domain123',
'enabled': False
}
)
def test_project_create_partial(self):
self._test_project_create(
'{"name": "bob"}',
{
'name': 'bob',
'description': None,
'domain': None,
'enabled': True
}
)
@test.create_mocks({api.keystone: ['tenant_create']})
def _test_project_create(self, supplied_body, expected_args):
request = self.mock_rest_request(body=supplied_body)
self.mock_tenant_create.return_value.id = 'project123'
self.mock_tenant_create.return_value.to_dict.return_value = {
'id': 'project123', 'name': 'bob'
}
response = keystone.Projects().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/projects/project123')
self.assertEqual(response.json,
{"id": "project123", "name": "bob"})
self.mock_tenant_create.assert_called_once_with(request,
**expected_args)
@test.create_mocks({api.keystone: ['tenant_delete']})
def test_project_delete_many(self):
self.mock_tenant_delete.return_value = None
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Projects().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_tenant_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@test.create_mocks({api.keystone: ['tenant_delete']})
def test_project_delete(self):
self.mock_tenant_delete.return_value = None
request = self.mock_rest_request()
response = keystone.Project().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_tenant_delete.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['tenant_update']})
def test_project_patch(self):
# nothing in the Horizon code documents what additional parameters are
# allowed, so we'll just assume GIGO
self.mock_tenant_update.return_value = self.tenants.first()
request = self.mock_rest_request(body='''
{"name": "spam", "domain_id": "domain123", "foo": "bar"}
''')
response = keystone.Project().patch(request, 'spam123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_tenant_update.assert_called_once_with(request,
'spam123',
name='spam', foo='bar',
description=None,
domain='domain123',
enabled=None)
#
# Service Catalog
#
def test_service_catalog_get(self):
request = self.mock_rest_request()
request.user = mock.MagicMock(**{'service_catalog': [
{'endpoints': [
{'url': 'http://cool_url/image',
'interface': 'admin',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': 'test'},
{'url': 'http://cool_url/image',
'interface': 'public',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': 'test'},
{'url': 'http://cool_url/image',
'interface': 'internal',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': 'test'}],
'type': 'image',
'id': '2b5bc2e59b094f898a43f5e8ce446240',
'name': 'glance'},
{'endpoints': [
{'url': 'http://cool_url/volume/v3/test',
'interface': 'public',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': '29a629afb80547ea9baa4266e97b4cb5'},
{'url': 'http://cool_url/volume/v3/test',
'interface': 'admin',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': '29a629afb80547ea9baa4266e97b4cb5'}],
'type': 'volumev3',
'id': '55ef272cfa714e54b8f2046c157b027d',
'name': 'cinderv3'},
{'endpoints': [
{'url': 'http://cool_url/compute/v2/check',
'interface': 'internal',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': 'e8c440e025d94355ab82c78cc2062129'}],
'type': 'compute_legacy',
'id': 'b7f1d3f4119643508d5ca2325eb8af87',
'name': 'nova_legacy'}]})
response = keystone.ServiceCatalog().get(request)
self.assertStatusCode(response, 200)
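        # only endpoints with the 'public' interface should be returned;
        # services without a public endpoint (the legacy compute entry)
        # are dropped entirely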
content = [{'endpoints': [
{'url': 'http://cool_url/image',
'interface': 'public',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': 'test'}],
'type': 'image',
'id': '2b5bc2e59b094f898a43f5e8ce446240',
'name': 'glance'},
{'endpoints': [
{'url': 'http://cool_url/volume/v3/test',
'interface': 'public',
'region': 'RegionOne',
'region_id': 'RegionOne',
'id': '29a629afb80547ea9baa4266e97b4cb5'}],
'type': 'volumev3',
'id': '55ef272cfa714e54b8f2046c157b027d',
'name': 'cinderv3'}]
self.assertEqual(content, jsonutils.loads(response.content))
#
# User Session
#
def test_user_session_get(self):
request = self.mock_rest_request()
request.user = mock.Mock(
services_region='some region',
super_secret_thing='not here',
token=type('', (object,), {'id': 'token here'}),
is_authenticated=lambda: True,
spec=['services_region', 'super_secret_thing']
)
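        # only the expected session attributes may be exposed;
        # 'super_secret_thing' must not leak into the response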
response = keystone.UserSession().get(request)
self.assertStatusCode(response, 200)
content = jsonutils.loads(response.content)
self.assertEqual(content['services_region'], 'some region')
self.assertEqual(content['token'], 'token here')
self.assertNotIn('super_secret_thing', content)
#
# Groups
#
@test.create_mocks({api.keystone: ['group_list']})
def test_group_get_list(self):
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': {},
})
self.mock_group_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'uno!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'dos!'}})
]
response = keystone.Groups().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.json,
{"items": [{"name": "uno!"}, {"name": "dos!"}]})
self.mock_group_list.assert_called_once_with(request,
domain='the_domain')
@test.create_mocks({api.keystone: ['group_create']})
def test_group_create(self):
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': {},
'body': '{"name": "bug!", "description": "bugaboo!!"}',
})
self.mock_group_create.return_value.id = 'group789'
self.mock_group_create.return_value.to_dict.return_value = {
'id': 'group789', 'name': 'bug!', 'description': 'bugaboo!!'
}
response = keystone.Groups().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/groups/group789')
self.assertEqual(response.json,
{"id": "group789",
"name": "bug!",
"description": "bugaboo!!"})
self.mock_group_create.assert_called_once_with(request, 'the_domain',
'bug!', 'bugaboo!!')
@test.create_mocks({api.keystone: ['group_create']})
def test_group_create_without_description(self):
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': {},
'body': '{"name": "bug!"}',
})
self.mock_group_create.return_value.id = 'group789'
self.mock_group_create.return_value.to_dict.return_value = {
'id': 'group789', 'name': 'bug!'
}
response = keystone.Groups().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/groups/group789')
self.assertEqual(response.json,
{"id": "group789",
"name": "bug!"})
self.mock_group_create.assert_called_once_with(request, 'the_domain',
'bug!', None)
@test.create_mocks({api.keystone: ['group_get']})
def test_group_get(self):
request = self.mock_rest_request()
self.mock_group_get.return_value.to_dict.return_value = {
'name': 'bug!', 'description': 'bugaboo!!'}
response = keystone.Group().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.json, {"name": "bug!",
"description": "bugaboo!!"})
self.mock_group_get.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['group_delete']})
def test_group_delete(self):
self.mock_group_delete.return_value = None
request = self.mock_rest_request()
response = keystone.Group().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_group_delete.assert_called_once_with(request, 'the_id')
@test.create_mocks({api.keystone: ['group_update']})
def test_group_patch(self):
self.mock_group_update.return_value = self.groups.first()
request = self.mock_rest_request(
body='{"name": "spam_i_am", "description": "Sir Spam"}')
response = keystone.Group().patch(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_group_update.assert_called_once_with(request,
'the_id',
'spam_i_am',
'Sir Spam')
@test.create_mocks({api.keystone: ['group_delete']})
def test_group_delete_many(self):
self.mock_group_delete.return_value = None
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Groups().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, b'')
self.mock_group_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
#
# Services
#
@test.create_mocks({api.keystone: ['Service']})
def test_services_get(self):
request = self.mock_rest_request()
mock_service = {
"name": "srv_name",
"type": "srv_type",
"host": "srv_host"
}
request.user = mock.Mock(
service_catalog=[mock_service],
services_region='some region'
)
self.mock_Service.return_value.to_dict.return_value = mock_service
response = keystone.Services().get(request)
self.assertStatusCode(response, 200)
self.mock_Service.assert_called_once_with(mock_service, "some region")
| openstack/horizon | openstack_dashboard/test/unit/api/rest/test_keystone.py | Python | apache-2.0 | 37,897 | 0 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
import copy
from django.db import connection
from cms.api import create_page
from cms.menu import CMSMenu, get_visible_pages
from cms.models import Page
from cms.models.permissionmodels import GlobalPagePermission, PagePermission
from cms.test_utils.fixtures.menus import (MenusFixture, SubMenusFixture,
SoftrootFixture, ExtendedMenusFixture)
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.test_utils.util.context_managers import (SettingsOverride,
LanguageOverride)
from cms.test_utils.util.mock import AttributeObject
from cms.utils import get_cms_setting
from cms.utils.i18n import force_language
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User, Permission, Group
from django.contrib.sites.models import Site
from django.template import Template, TemplateSyntaxError
from django.utils.translation import activate
from menus.base import NavigationNode
from menus.menu_pool import menu_pool, _build_nodes_inner_for_one_menu
from menus.models import CacheKey
from menus.utils import mark_descendants, find_selected, cut_levels
from django.utils.unittest.case import skipUnless
class BaseMenuTest(SettingsOverrideTestCase):
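    # Builds a small stand-alone navigation tree (two root nodes, the first
    # with a nested subtree) and runs the registered menu modifiers against it.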
def _get_nodes(self, path='/'):
node1 = NavigationNode('1', '/1/', 1)
node2 = NavigationNode('2', '/2/', 2, 1)
node3 = NavigationNode('3', '/3/', 3, 2)
node4 = NavigationNode('4', '/4/', 4, 2)
node5 = NavigationNode('5', '/5/', 5)
nodes = [node1, node2, node3, node4, node5]
tree = _build_nodes_inner_for_one_menu([n for n in nodes], "test")
request = self.get_request(path)
menu_pool.apply_modifiers(tree, request)
return tree, nodes
def setUp(self):
super(BaseMenuTest, self).setUp()
if not menu_pool.discovered:
menu_pool.discover_menus()
self.old_menu = menu_pool.menus
menu_pool.menus = {'CMSMenu': self.old_menu['CMSMenu']}
menu_pool.clear(settings.SITE_ID)
activate("en")
def tearDown(self):
menu_pool.menus = self.old_menu
super(BaseMenuTest, self).tearDown()
def get_page(self, num):
return Page.objects.public().get(title_set__title='P%s' % num)
class ExtendedFixturesMenuTests(ExtendedMenusFixture, BaseMenuTest):
"""
Tree from fixture:
+ P1
| + P2
| + P3
| + P9
| + P10
| + P11
+ P4
| + P5
+ P6 (not in menu)
+ P7
+ P8
"""
def get_page(self, num):
return Page.objects.public().get(title_set__title='P%s' % num)
def get_level(self, num):
return Page.objects.public().filter(level=num)
def get_all_pages(self):
return Page.objects.public()
def test_menu_failfast_on_invalid_usage(self):
context = self.get_context()
context['child'] = self.get_page(1)
        # passing a page object as an extra argument is invalid usage and must
        # fail fast with a TemplateSyntaxError when template debugging is on
with SettingsOverride(DEBUG=True, TEMPLATE_DEBUG=True):
tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 'menu/menu.html' child %}")
self.assertRaises(TemplateSyntaxError, tpl.render, context)
def test_show_submenu_nephews(self):
context = self.get_context(path=self.get_page(2).get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 1 %}")
tpl.render(context)
nodes = context["children"]
# P2 is the selected node
self.assertTrue(nodes[0].selected)
# Should include P10 but not P11
self.assertEqual(len(nodes[1].children), 1)
self.assertFalse(nodes[1].children[0].children)
tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 %}")
tpl.render(context)
nodes = context["children"]
# should now include both P10 and P11
self.assertEqual(len(nodes[1].children), 1)
self.assertEqual(len(nodes[1].children[0].children), 1)
class FixturesMenuTests(MenusFixture, BaseMenuTest):
"""
Tree from fixture:
+ P1
| + P2
| + P3
+ P4
| + P5
+ P6 (not in menu)
+ P7
+ P8
"""
def get_page(self, num):
return Page.objects.public().get(title_set__title='P%s' % num)
def get_level(self, num):
return Page.objects.public().filter(level=num)
def get_all_pages(self):
return Page.objects.public()
def test_menu_failfast_on_invalid_usage(self):
context = self.get_context()
context['child'] = self.get_page(1)
        # passing a page object as an extra argument is invalid usage and must
        # fail fast with a TemplateSyntaxError when template debugging is on
with SettingsOverride(DEBUG=True, TEMPLATE_DEBUG=True):
tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 'menu/menu.html' child %}")
self.assertRaises(TemplateSyntaxError, tpl.render, context)
def test_basic_cms_menu(self):
self.assertEqual(len(menu_pool.menus), 1)
with force_language("en"):
response = self.client.get(self.get_pages_root()) # path = '/'
            self.assertEqual(response.status_code, 200)
request = self.get_request()
# test the cms menu class
menu = CMSMenu()
nodes = menu.get_nodes(request)
self.assertEqual(len(nodes), len(self.get_all_pages()))
def test_show_menu(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 2)
self.assertEqual(nodes[0].selected, True)
self.assertEqual(nodes[0].sibling, False)
self.assertEqual(nodes[0].descendant, False)
self.assertEqual(nodes[0].children[0].descendant, True)
self.assertEqual(nodes[0].children[0].children[0].descendant, True)
self.assertEqual(nodes[0].get_absolute_url(), self.get_pages_root())
self.assertEqual(nodes[1].get_absolute_url(), self.get_page(4).get_absolute_url())
self.assertEqual(nodes[1].sibling, True)
self.assertEqual(nodes[1].selected, False)
@skipUnless(settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3', 'transaction queries')
def test_show_menu_num_queries(self):
context = self.get_context()
# test standard show_menu
with self.assertNumQueries(5):
"""
The queries should be:
get all pages
get all page permissions
get all titles
get the menu cache key
set the menu cache key
"""
tpl = Template("{% load menu_tags %}{% show_menu %}")
tpl.render(context)
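    # Rendering the same menu twice must reuse the existing cache key rather
    # than creating a new CacheKey row on every render.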
def test_show_menu_cache_key_leak(self):
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_menu %}")
self.assertEqual(CacheKey.objects.count(), 0)
tpl.render(context)
self.assertEqual(CacheKey.objects.count(), 1)
tpl.render(context)
self.assertEqual(CacheKey.objects.count(), 1)
def test_menu_keys_duplicate_truncates(self):
"""
When two objects with the same characteristics are present in the
database, get_or_create truncates the database table to "invalidate"
the cache, before retrying. This can happen after migrations, and since
it's only cache, we don't want any propagation of errors.
"""
CacheKey.objects.create(language="fr", site=1, key="a")
CacheKey.objects.create(language="fr", site=1, key="a")
CacheKey.objects.get_or_create(language="fr", site=1, key="a")
self.assertEqual(CacheKey.objects.count(), 1)
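    # The integer arguments to show_menu are (from_level, to_level,
    # extra_inactive, extra_active); 100 acts as an effectively unlimited bound.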
def test_only_active_tree(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes[1].children), 0)
self.assertEqual(len(nodes[0].children), 1)
self.assertEqual(len(nodes[0].children[0].children), 1)
context = self.get_context(path=self.get_page(4).get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes[1].children), 1)
self.assertEqual(len(nodes[0].children), 0)
def test_only_one_active_level(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 1 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes[1].children), 0)
self.assertEqual(len(nodes[0].children), 1)
self.assertEqual(len(nodes[0].children[0].children), 0)
def test_only_level_zero(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 %}")
tpl.render(context)
nodes = context['children']
for node in nodes:
self.assertEqual(len(node.children), 0)
def test_only_level_one(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 1 1 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), len(self.get_level(1)))
for node in nodes:
self.assertEqual(len(node.children), 0)
def test_only_level_one_active(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 1 1 0 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].descendant, True)
self.assertEqual(len(nodes[0].children), 0)
def test_level_zero_and_one(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_menu 0 1 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 2)
for node in nodes:
self.assertEqual(len(node.children), 1)
def test_show_submenu(self):
context = self.get_context()
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_sub_menu %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(nodes[0].descendant, True)
self.assertEqual(len(nodes), 1)
self.assertEqual(len(nodes[0].children), 1)
tpl = Template("{% load menu_tags %}{% show_sub_menu 1 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
self.assertEqual(len(nodes[0].children), 0)
context = self.get_context(path=self.get_page(3).get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 %}")
tpl.render(context)
nodes = context["children"]
# P3 is the selected node
self.assertFalse(nodes[0].selected)
self.assertTrue(nodes[0].children[0].selected)
# top level node should be P2
self.assertEqual(nodes[0].get_absolute_url(), self.get_page(2).get_absolute_url())
# should include P3 as well
self.assertEqual(len(nodes[0].children), 1)
# but not P1 as it's at the root_level
self.assertEqual(nodes[0].parent, None)
context = self.get_context(path=self.get_page(2).get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_sub_menu 100 0 %}")
tpl.render(context)
nodes = context["children"]
# P1 should be in the nav
self.assertEqual(nodes[0].get_absolute_url(), self.get_page(1).get_absolute_url())
# P2 is selected
self.assertTrue(nodes[0].children[0].selected)
def test_show_breadcrumb(self):
context = self.get_context(path=self.get_page(3).get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 3)
tpl = Template("{% load menu_tags %}{% show_breadcrumb 1 %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 2)
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 1)
tpl = Template("{% load menu_tags %}{% show_breadcrumb 1 %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 0)
page1 = self.get_page(1)
page1.in_navigation = False
page1.save()
page2 = self.get_page(2)
context = self.get_context(path=page2.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 2)
self.assertEqual(nodes[0].get_absolute_url(), self.get_pages_root())
self.assertEqual(isinstance(nodes[0], NavigationNode), True)
self.assertEqual(nodes[1].get_absolute_url(), page2.get_absolute_url())
def test_language_chooser(self):
# test simple language chooser with default args
lang_settings = copy.deepcopy(get_cms_setting('LANGUAGES'))
lang_settings[1][0]['public'] = False
with SettingsOverride(CMS_LANGUAGES=lang_settings):
context = self.get_context(path=self.get_page(3).get_absolute_url())
tpl = Template("{% load menu_tags %}{% language_chooser %}")
tpl.render(context)
self.assertEqual(len(context['languages']), 3)
# try a different template and some different args
tpl = Template("{% load menu_tags %}{% language_chooser 'menu/test_language_chooser.html' %}")
tpl.render(context)
self.assertEqual(context['template'], 'menu/test_language_chooser.html')
tpl = Template("{% load menu_tags %}{% language_chooser 'short' 'menu/test_language_chooser.html' %}")
tpl.render(context)
self.assertEqual(context['template'], 'menu/test_language_chooser.html')
for lang in context['languages']:
self.assertEqual(*lang)
def test_page_language_url(self):
path = self.get_page(3).get_absolute_url()
context = self.get_context(path=path)
tpl = Template("{%% load menu_tags %%}{%% page_language_url '%s' %%}" % settings.LANGUAGES[0][0])
url = tpl.render(context)
self.assertEqual(url, "%s" % path)
def test_show_menu_below_id(self):
page2 = self.get_page(2)
page2.reverse_id = "hello"
page2.save()
page2 = self.reload(page2)
self.assertEqual(page2.reverse_id, "hello")
page5 = self.get_page(5)
context = self.get_context(path=page5.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu_below_id 'hello' %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
page3_url = self.get_page(3).get_absolute_url()
self.assertEqual(nodes[0].get_absolute_url(), page3_url)
page2.in_navigation = False
page2.save()
context = self.get_context(path=page5.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu_below_id 'hello' %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].get_absolute_url(), page3_url)
def test_unpublished(self):
page2 = self.get_page(2)
page2.published = False
page2.save()
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_menu %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 2)
self.assertEqual(len(nodes[0].children), 0)
def test_home_not_in_menu(self):
page1 = self.get_page(1)
page1.in_navigation = False
page1.save()
page4 = self.get_page(4)
page4.in_navigation = False
page4.save()
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].get_absolute_url(), self.get_page(2).get_absolute_url())
self.assertEqual(nodes[0].children[0].get_absolute_url(), self.get_page(3).get_absolute_url())
page4 = self.get_page(4)
page4.in_navigation = True
page4.save()
menu_pool.clear(settings.SITE_ID)
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 2)
def test_show_submenu_from_non_menu_page(self):
"""
Here's the structure bit we're interested in:
+ P6 (not in menu)
+ P7
+ P8
When we render P6, there should be a menu entry for P7 and P8 if the
tag parameters are "1 XXX XXX XXX"
"""
page6 = self.get_page(6)
context = self.get_context(page6.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 1 100 0 1 %}")
tpl.render(context)
nodes = context['children']
number_of_p6_children = len(page6.children.filter(in_navigation=True))
self.assertEqual(len(nodes), number_of_p6_children)
page7 = self.get_page(7)
context = self.get_context(page7.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 1 100 0 1 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), number_of_p6_children)
tpl = Template("{% load menu_tags %}{% show_menu 2 100 0 1 %}")
tpl.render(context)
nodes = context['children']
number_of_p7_children = len(page7.children.filter(in_navigation=True))
self.assertEqual(len(nodes), number_of_p7_children)
def test_show_breadcrumb_invisible(self):
# Must use the drafts to find the parent when calling create_page
parent = Page.objects.drafts().get(title_set__title='P3')
invisible_page = create_page("invisible", "nav_playground.html", "en",
parent=parent, published=True, in_navigation=False)
context = self.get_context(path=invisible_page.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 3)
tpl = Template("{% load menu_tags %}{% show_breadcrumb 0 'menu/breadcrumb.html' 1 %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 3)
tpl = Template("{% load menu_tags %}{% show_breadcrumb 0 'menu/breadcrumb.html' 0 %}")
tpl.render(context)
nodes = context['ancestors']
self.assertEqual(len(nodes), 4)
class MenuTests(BaseMenuTest):
def test_build_nodes_inner_for_worst_case_menu(self):
'''
Tests the worst case scenario
node5
node4
node3
node2
node1
'''
node1 = NavigationNode('Test1', '/test1/', 1, 2)
node2 = NavigationNode('Test2', '/test2/', 2, 3)
node3 = NavigationNode('Test3', '/test3/', 3, 4)
node4 = NavigationNode('Test4', '/test4/', 4, 5)
node5 = NavigationNode('Test5', '/test5/', 5, None)
menu_class_name = 'Test'
nodes = [node1, node2, node3, node4, node5, ]
len_nodes = len(nodes)
final_list = _build_nodes_inner_for_one_menu(nodes, menu_class_name)
self.assertEqual(len(final_list), len_nodes)
self.assertEqual(node1.parent, node2)
self.assertEqual(node2.parent, node3)
self.assertEqual(node3.parent, node4)
self.assertEqual(node4.parent, node5)
self.assertEqual(node5.parent, None)
self.assertEqual(node1.children, [])
self.assertEqual(node2.children, [node1])
self.assertEqual(node3.children, [node2])
self.assertEqual(node4.children, [node3])
self.assertEqual(node5.children, [node4])
def test_build_nodes_inner_for_circular_menu(self):
'''
TODO:
To properly handle this test we need to have a circular dependency
detection system.
Go nuts implementing it :)
'''
pass
def test_build_nodes_inner_for_broken_menu(self):
'''
Tests a broken menu tree (non-existing parent)
node5
node4
node3
                       <non-existent>
node2
node1
'''
node1 = NavigationNode('Test1', '/test1/', 1, 2)
node2 = NavigationNode('Test2', '/test2/', 2, 12)
node3 = NavigationNode('Test3', '/test3/', 3, 4)
node4 = NavigationNode('Test4', '/test4/', 4, 5)
node5 = NavigationNode('Test5', '/test5/', 5, None)
menu_class_name = 'Test'
nodes = [node1, node2, node3, node4, node5, ]
final_list = _build_nodes_inner_for_one_menu(nodes, menu_class_name)
self.assertEqual(len(final_list), 3)
self.assertFalse(node1 in final_list)
self.assertFalse(node2 in final_list)
self.assertEqual(node1.parent, None)
self.assertEqual(node2.parent, None)
self.assertEqual(node3.parent, node4)
self.assertEqual(node4.parent, node5)
self.assertEqual(node5.parent, None)
self.assertEqual(node1.children, [])
self.assertEqual(node2.children, [])
self.assertEqual(node3.children, [])
self.assertEqual(node4.children, [node3])
self.assertEqual(node5.children, [node4])
def test_utils_mark_descendants(self):
tree_nodes, flat_nodes = self._get_nodes()
mark_descendants(tree_nodes)
for node in flat_nodes:
self.assertTrue(node.descendant, node)
def test_utils_find_selected(self):
tree_nodes, flat_nodes = self._get_nodes()
node = flat_nodes[0]
selected = find_selected(tree_nodes)
self.assertEqual(selected, node)
selected = find_selected([])
self.assertEqual(selected, None)
def test_utils_cut_levels(self):
tree_nodes, flat_nodes = self._get_nodes()
self.assertEqual(cut_levels(tree_nodes, 1), [flat_nodes[1]])
def test_empty_menu(self):
context = self.get_context()
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 0)
class AdvancedSoftrootTests(SoftrootFixture, SettingsOverrideTestCase):
"""
Tree in fixture (as taken from issue 662):
top
root
aaa
111
ccc
ddd
222
bbb
333
444
In the fixture, all pages are "in_navigation", "published" and
NOT-"soft_root".
What is a soft root?
If a page is a soft root, it becomes the root page in the menu if
we are currently on or under that page.
If we are above that page, the children of this page are not shown.
"""
settings_overrides = {
'CMS_PERMISSION': False
}
def tearDown(self):
Page.objects.all().delete()
def get_page(self, name):
return Page.objects.public().get(title_set__slug=name)
def assertTreeQuality(self, a, b, *attrs):
"""
Checks that the node-lists a and b are the same for attrs.
This is recursive over the tree
"""
msg = '%r != %r with %r, %r' % (len(a), len(b), a, b)
self.assertEqual(len(a), len(b), msg)
for n1, n2 in zip(a, b):
for attr in attrs:
a1 = getattr(n1, attr)
a2 = getattr(n2, attr)
msg = '%r != %r with %r, %r (%s)' % (a1, a2, n1, n2, attr)
self.assertEqual(a1, a2, msg)
            self.assertTreeQuality(n1.children, n2.children, *attrs)
def test_top_not_in_nav(self):
"""
top: not in navigation
tag: show_menu 0 100 0 100
context shared: current page is aaa
context 1: root is NOT a softroot
context 2: root IS a softroot
expected result: the two node-trees should be equal
"""
top = self.get_page('top')
top.in_navigation = False
top.save()
aaa = self.get_page('aaa')
# root is NOT a soft root
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
hard_root = context['children']
# root IS a soft root
root = self.get_page('root')
root.soft_root = True
root.save()
aaa = self.get_page('aaa')
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
soft_root = context['children']
# assert the two trees are equal in terms of 'level' and 'title'
self.assertTreeQuality(hard_root, soft_root, 'level', 'title')
def test_top_in_nav(self):
"""
top: in navigation
tag: show_menu 0 100 0 100
context shared: current page is aaa
context 1: root is NOT a softroot
context 2: root IS a softroot
expected result 1:
0:top
1:root
2:aaa
3:111
4:ccc
5:ddd
3:222
2:bbb
expected result 2:
0:root
1:aaa
2:111
3:ccc
4:ddd
2:222
1:bbb
"""
aaa = self.get_page('aaa')
# root is NOT a soft root
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
hard_root = context['children']
mock_tree = [
AttributeObject(title='top', level=0, children=[
AttributeObject(title='root', level=1, children=[
AttributeObject(title='aaa', level=2, children=[
AttributeObject(title='111', level=3, children=[
AttributeObject(title='ccc', level=4, children=[
AttributeObject(title='ddd', level=5, children=[])
])
]),
AttributeObject(title='222', level=3, children=[])
]),
AttributeObject(title='bbb', level=2, children=[])
])
])
]
self.assertTreeQuality(hard_root, mock_tree)
# root IS a soft root
root = self.get_page('root')
root.soft_root = True
root.save()
aaa = self.get_page('aaa')
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
soft_root = context['children']
mock_tree = [
AttributeObject(title='root', level=0, children=[
AttributeObject(title='aaa', level=1, children=[
AttributeObject(title='111', level=2, children=[
AttributeObject(title='ccc', level=3, children=[
AttributeObject(title='ddd', level=4, children=[])
])
]),
AttributeObject(title='222', level=2, children=[])
]),
AttributeObject(title='bbb', level=1, children=[])
])
]
self.assertTreeQuality(soft_root, mock_tree, 'title', 'level')
class ShowSubMenuCheck(SubMenusFixture, BaseMenuTest):
"""
Tree from fixture:
+ P1
| + P2
| + P3
+ P4
| + P5
+ P6
+ P7 (not in menu)
+ P8
"""
def test_show_submenu(self):
page = self.get_page(6)
subpage = self.get_page(8)
context = self.get_context(page.get_absolute_url())
# test standard show_menu
tpl = Template("{% load menu_tags %}{% show_sub_menu %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1)
self.assertEqual(nodes[0].id, subpage.pk)
@skipUnless(settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3', 'transaction queries')
def test_show_submenu_num_queries(self):
page = self.get_page(6)
context = self.get_context(page.get_absolute_url())
# test standard show_menu
with self.assertNumQueries(5):
"""
The queries should be:
get all pages
get all page permissions
get all titles
get the menu cache key
set the menu cache key
"""
tpl = Template("{% load menu_tags %}{% show_sub_menu %}")
tpl.render(context)
class ShowMenuBelowIdTests(BaseMenuTest):
def test_not_in_navigation(self):
"""
Test for issue 521
Build the following tree:
A
|-B
|-C
\-D (not in nav)
"""
a = create_page('A', 'nav_playground.html', 'en', published=True,
in_navigation=True, reverse_id='a')
b = create_page('B', 'nav_playground.html', 'en', parent=a,
published=True, in_navigation=True)
c = create_page('C', 'nav_playground.html', 'en', parent=b,
published=True, in_navigation=True)
create_page('D', 'nav_playground.html', 'en', parent=self.reload(b),
published=True, in_navigation=False)
context = self.get_context(a.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu_below_id 'a' 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
self.assertEqual(len(nodes), 1, nodes)
node = nodes[0]
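        # create_page returns draft pages; the menu works on their published
        # counterparts, hence the comparison against publisher_public ids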
self.assertEqual(node.id, b.publisher_public.id)
children = node.children
self.assertEqual(len(children), 1, repr(children))
child = children[0]
self.assertEqual(child.id, c.publisher_public.id)
@skipUnless(settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3', 'transaction queries')
def test_not_in_navigation_num_queries(self):
"""
Test for issue 521
Build the following tree:
A
|-B
|-C
\-D (not in nav)
"""
a = create_page('A', 'nav_playground.html', 'en', published=True,
in_navigation=True, reverse_id='a')
b = create_page('B', 'nav_playground.html', 'en', parent=a,
published=True, in_navigation=True)
create_page('C', 'nav_playground.html', 'en', parent=b,
published=True, in_navigation=True)
create_page('D', 'nav_playground.html', 'en', parent=self.reload(b),
published=True, in_navigation=False)
with LanguageOverride('en'):
context = self.get_context(a.get_absolute_url())
with self.assertNumQueries(5):
"""
The queries should be:
get all pages
get all page permissions
get all titles
get the menu cache key
set the menu cache key
"""
# Actually seems to run:
tpl = Template("{% load menu_tags %}{% show_menu_below_id 'a' 0 100 100 100 %}")
tpl.render(context)
class ViewPermissionMenuTests(SettingsOverrideTestCase):
settings_overrides = {
'CMS_PERMISSION': True,
'CMS_PUBLIC_FOR': 'all',
}
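    # Builds a minimal request stand-in carrying only the attributes that
    # get_visible_pages() inspects.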
def get_request(self, user=None):
attrs = {
'user': user or AnonymousUser(),
'REQUEST': {},
'session': {},
}
return type('Request', (object,), attrs)
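    # With CMS_PUBLIC_FOR='all', unrestricted pages are visible to everyone;
    # the CMS_PUBLIC_FOR='staff' overrides further down require an explicit
    # view permission instead.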
def test_public_for_all_staff(self):
request = self.get_request()
request.user.is_staff = True
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [1])
def test_public_for_all_staff_assert_num_queries(self):
request = self.get_request()
request.user.is_staff = True
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(1):
"""
The queries are:
PagePermission count query
"""
get_visible_pages(request, pages)
def test_public_for_all(self):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [1])
def test_public_for_all_num_queries(self):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
site = Site()
site.pk = 1
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(2):
"""
The queries are:
PagePermission query for affected pages
GlobalpagePermission query for user
"""
get_visible_pages(request, pages, site)
def test_unauthed(self):
request = self.get_request()
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [1])
def test_unauthed_num_queries(self):
request = self.get_request()
site = Site()
site.pk = 1
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(1):
"""
The query is:
PagePermission query for affected pages
global is not executed because it's lazy
"""
get_visible_pages(request, pages, site)
def test_authed_basic_perm(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User()
user.username = "test"
user.is_staff = True
user.save()
user.user_permissions.add(Permission.objects.get(codename='view_page'))
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [1])
def test_authed_basic_perm_num_queries(self):
site = Site()
site.pk = 1
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User()
user.username = "test"
user.is_staff = True
user.save()
user.user_permissions.add(Permission.objects.get(codename='view_page'))
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(2):
"""
The queries are:
PagePermission count query
GlobalpagePermission count query
"""
get_visible_pages(request, pages, site)
def test_authed_no_access(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [])
def test_authed_no_access_num_queries(self):
site = Site()
site.pk = 1
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(2):
"""
The queries are:
View Permission Calculation Query
                    GlobalPagePermission calculation query
"""
get_visible_pages(request, pages, site)
def test_unauthed_no_access(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
request = self.get_request()
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [])
def test_unauthed_no_access_num_queries(self):
site = Site()
site.pk = 1
request = self.get_request()
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(1):
get_visible_pages(request, pages, site)
def test_page_permissions(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
page = create_page('A', 'nav_playground.html', 'en')
PagePermission.objects.create(can_view=True, user=user, page=page)
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [page.pk])
def test_page_permissions_num_queries(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
request = self.get_request(user)
page = create_page('A', 'nav_playground.html', 'en')
PagePermission.objects.create(can_view=True, user=user, page=page)
pages = [page]
with self.assertNumQueries(2):
"""
The queries are:
PagePermission query for affected pages
GlobalpagePermission query for user
"""
get_visible_pages(request, pages)
def test_page_permissions_view_groups(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
group = Group.objects.create(name='testgroup')
group.user_set.add(user)
request = self.get_request(user)
page = create_page('A', 'nav_playground.html', 'en')
PagePermission.objects.create(can_view=True, group=group, page=page)
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [page.pk])
def test_page_permissions_view_groups_num_queries(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
group = Group.objects.create(name='testgroup')
group.user_set.add(user)
request = self.get_request(user)
page = create_page('A', 'nav_playground.html', 'en')
PagePermission.objects.create(can_view=True, group=group, page=page)
pages = [page]
with self.assertNumQueries(3):
"""
The queries are:
PagePermission query for affected pages
GlobalpagePermission query for user
Group query via PagePermission
"""
get_visible_pages(request, pages)
def test_global_permission(self):
with SettingsOverride(CMS_PUBLIC_FOR='staff'):
user = User.objects.create_user('user', 'user@domain.com', 'user')
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
result = get_visible_pages(request, pages)
self.assertEqual(result, [1])
def test_global_permission_num_queries(self):
site = Site()
site.pk = 1
user = User.objects.create_user('user', 'user@domain.com', 'user')
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
page = Page()
page.pk = 1
page.level = 0
page.tree_id = 1
pages = [page]
with self.assertNumQueries(2):
"""
The queries are:
PagePermission query for affected pages
GlobalpagePermission query for user
"""
get_visible_pages(request, pages, site)
class SoftrootTests(SettingsOverrideTestCase):
"""
Ask evildmp/superdmp if you don't understand softroots!
Softroot description from the docs:
A soft root is a page that acts as the root for a menu navigation tree.
Typically, this will be a page that is the root of a significant new
section on your site.
When the soft root feature is enabled, the navigation menu for any page
will start at the nearest soft root, rather than at the real root of
the site’s page hierarchy.
This feature is useful when your site has deep page hierarchies (and
therefore multiple levels in its navigation trees). In such a case, you
usually don’t want to present site visitors with deep menus of nested
items.
For example, you’re on the page “Introduction to Bleeding”, so the menu
might look like this:
School of Medicine
Medical Education
Departments
Department of Lorem Ipsum
Department of Donec Imperdiet
Department of Cras Eros
Department of Mediaeval Surgery
Theory
Cures
Bleeding
Introduction to Bleeding <this is the current page>
Bleeding - the scientific evidence
Cleaning up the mess
Cupping
Leaches
Maggots
Techniques
Instruments
Department of Curabitur a Purus
Department of Sed Accumsan
Department of Etiam
Research
Administration
Contact us
Impressum
which is frankly overwhelming.
By making “Department of Mediaeval Surgery” a soft root, the menu
becomes much more manageable:
Department of Mediaeval Surgery
Theory
Cures
Bleeding
Introduction to Bleeding <current page>
Bleeding - the scientific evidence
Cleaning up the mess
Cupping
Leaches
Maggots
Techniques
Instruments
"""
settings_overrides = {
'CMS_SOFTROOT': True,
'CMS_PERMISSION': False
}
def test_basic_home(self):
"""
Given the tree:
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
Expected menu when on "Home" (0 100 100 100):
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
"""
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
home = create_page("Home", **stdkwargs)
projects = create_page("Projects", parent=home, soft_root=True, **stdkwargs)
djangocms = create_page("django CMS", parent=projects, **stdkwargs)
djangoshop = create_page("django Shop", parent=projects, **stdkwargs)
people = create_page("People", parent=home, **stdkwargs)
# On Home
context = self.get_context(home.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
# check everything
self.assertEqual(len(nodes), 1)
homenode = nodes[0]
self.assertEqual(homenode.id, home.publisher_public.pk)
self.assertEqual(len(homenode.children), 2)
projectsnode, peoplenode = homenode.children
self.assertEqual(projectsnode.id, projects.publisher_public.pk)
self.assertEqual(peoplenode.id, people.publisher_public.pk)
self.assertEqual(len(projectsnode.children), 2)
cmsnode, shopnode = projectsnode.children
self.assertEqual(cmsnode.id, djangocms.publisher_public.pk)
self.assertEqual(shopnode.id, djangoshop.publisher_public.pk)
self.assertEqual(len(cmsnode.children), 0)
self.assertEqual(len(shopnode.children), 0)
self.assertEqual(len(peoplenode.children), 0)
def test_basic_projects(self):
"""
Given the tree:
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
Expected menu when on "Projects" (0 100 100 100):
|- Projects (SOFTROOT)
| |- django CMS
| |- django Shop
"""
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
home = create_page("Home", **stdkwargs)
projects = create_page("Projects", parent=home, soft_root=True, **stdkwargs)
djangocms = create_page("django CMS", parent=projects, **stdkwargs)
djangoshop = create_page("django Shop", parent=projects, **stdkwargs)
people = create_page("People", parent=home, **stdkwargs)
# On Projects
context = self.get_context(projects.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
# check everything
self.assertEqual(len(nodes), 1)
projectsnode = nodes[0]
self.assertEqual(projectsnode.id, projects.publisher_public.pk)
self.assertEqual(len(projectsnode.children), 2)
cmsnode, shopnode = projectsnode.children
self.assertEqual(cmsnode.id, djangocms.publisher_public.pk)
self.assertEqual(shopnode.id, djangoshop.publisher_public.pk)
self.assertEqual(len(cmsnode.children), 0)
self.assertEqual(len(shopnode.children), 0)
def test_basic_djangocms(self):
"""
Given the tree:
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
Expected menu when on "django CMS" (0 100 100 100):
|- Projects (SOFTROOT)
| |- django CMS
| |- django Shop
"""
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
home = create_page("Home", **stdkwargs)
projects = create_page("Projects", parent=home, soft_root=True, **stdkwargs)
djangocms = create_page("django CMS", parent=projects, **stdkwargs)
djangoshop = create_page("django Shop", parent=projects, **stdkwargs)
people = create_page("People", parent=home, **stdkwargs)
# On django CMS
context = self.get_context(djangocms.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
# check everything
self.assertEqual(len(nodes), 1)
projectsnode = nodes[0]
self.assertEqual(projectsnode.id, projects.publisher_public.pk)
self.assertEqual(len(projectsnode.children), 2)
cmsnode, shopnode = projectsnode.children
self.assertEqual(cmsnode.id, djangocms.publisher_public.pk)
self.assertEqual(shopnode.id, djangoshop.publisher_public.pk)
self.assertEqual(len(cmsnode.children), 0)
self.assertEqual(len(shopnode.children), 0)
def test_basic_people(self):
"""
Given the tree:
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
Expected menu when on "People" (0 100 100 100):
|- Home
| |- Projects (SOFTROOT)
| | |- django CMS
| | |- django Shop
| |- People
"""
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
home = create_page("Home", **stdkwargs)
projects = create_page("Projects", parent=home, soft_root=True, **stdkwargs)
djangocms = create_page("django CMS", parent=projects, **stdkwargs)
djangoshop = create_page("django Shop", parent=projects, **stdkwargs)
people = create_page("People", parent=home, **stdkwargs)
# On People
        context = self.get_context(people.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
tpl.render(context)
nodes = context['children']
# check everything
self.assertEqual(len(nodes), 1)
homenode = nodes[0]
self.assertEqual(homenode.id, home.publisher_public.pk)
self.assertEqual(len(homenode.children), 2)
projectsnode, peoplenode = homenode.children
self.assertEqual(projectsnode.id, projects.publisher_public.pk)
self.assertEqual(peoplenode.id, people.publisher_public.pk)
self.assertEqual(len(projectsnode.children), 2)
cmsnode, shopnode = projectsnode.children
self.assertEqual(cmsnode.id, djangocms.publisher_public.pk)
self.assertEqual(shopnode.id, djangoshop.publisher_public.pk)
self.assertEqual(len(cmsnode.children), 0)
self.assertEqual(len(shopnode.children), 0)
self.assertEqual(len(peoplenode.children), 0)
| datakortet/django-cms | cms/tests/menu.py | Python | bsd-3-clause | 52,671 | 0.001956 |
# -*- coding: utf-8 -*-
# leapbackend.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Backend for GUI/Logic communication.
"""
import logging
from Queue import Queue, Empty
from twisted.internet import reactor
from twisted.internet import threads, defer
from twisted.internet.task import LoopingCall
import zope.interface
import zope.proxy
from leap.bitmask.backend.leapsignaler import Signaler
from leap.bitmask.backend import components
logger = logging.getLogger(__name__)
class Backend(object):
"""
Backend for everything, the UI should only use this class.
"""
PASSED_KEY = "passed"
ERROR_KEY = "error"
def __init__(self, bypass_checks=False):
"""
Constructor for the backend.
"""
# Components map for the commands received
self._components = {}
# Ongoing defers that will be cancelled at stop time
self._ongoing_defers = []
# Signaler object to translate commands into Qt signals
self._signaler = Signaler()
# Objects needed by several components, so we make a proxy and pass
# them around
self._soledad_proxy = zope.proxy.ProxyBase(None)
self._keymanager_proxy = zope.proxy.ProxyBase(None)
# Component registration
self._register(components.Provider(self._signaler, bypass_checks))
self._register(components.Register(self._signaler))
self._register(components.Authenticate(self._signaler))
self._register(components.EIP(self._signaler))
self._register(components.Soledad(self._soledad_proxy,
self._keymanager_proxy,
self._signaler))
self._register(components.Keymanager(self._keymanager_proxy,
self._signaler))
self._register(components.Mail(self._soledad_proxy,
self._keymanager_proxy,
self._signaler))
# We have a looping call on a thread executing all the
# commands in queue. Right now this queue is an actual Queue
# object, but it'll become the zmq recv_multipart queue
self._lc = LoopingCall(threads.deferToThread, self._worker)
        # Temporary call_queue for the worker, will be replaced with
        # recv_multipart or something equivalent in the looping call
self._call_queue = Queue()
@property
def signaler(self):
"""
Public signaler access to let the UI connect to its signals.
"""
return self._signaler
def start(self):
"""
Starts the looping call
"""
logger.debug("Starting worker...")
self._lc.start(0.01)
def stop(self):
"""
Stops the looping call and tries to cancel all the defers.
"""
reactor.callLater(2, self._stop)
def _stop(self):
"""
Delayed stopping of worker. Called from `stop`.
"""
logger.debug("Stopping worker...")
if self._lc.running:
self._lc.stop()
else:
logger.warning("Looping call is not running, cannot stop")
logger.debug("Cancelling ongoing defers...")
while len(self._ongoing_defers) > 0:
d = self._ongoing_defers.pop()
d.cancel()
logger.debug("Defers cancelled.")
def _register(self, component):
"""
Registers a component in this backend
:param component: Component to register
:type component: any object that implements ILEAPComponent
"""
# TODO: assert that the component implements the interfaces
# expected
try:
self._components[component.key] = component
except Exception:
logger.error("There was a problem registering %s" % (component,))
def _signal_back(self, _, signal):
"""
Helper method to signal back (callback like behavior) to the
UI that an operation finished.
:param signal: signal name
:type signal: str
"""
self._signaler.signal(signal)
def _worker(self):
"""
Worker method, called from a different thread and as a part of
a looping call
"""
try:
# this'll become recv_multipart
cmd = self._call_queue.get(block=False)
# cmd is: component, method, signalback, *args
func = getattr(self._components[cmd[0]], cmd[1])
d = func(*cmd[3:])
if d is not None: # d may be None if a defer chain is cancelled.
# A call might not have a callback signal, but if it does,
# we add it to the chain
if cmd[2] is not None:
d.addCallbacks(self._signal_back, logger.error, cmd[2])
d.addCallbacks(self._done_action, logger.error,
callbackKeywords={"d": d})
d.addErrback(logger.error)
self._ongoing_defers.append(d)
except Empty:
# If it's just empty we don't have anything to do.
pass
except defer.CancelledError:
logger.debug("defer cancelled somewhere (CancelledError).")
except Exception as e:
# But we log the rest
logger.exception("Unexpected exception: {0!r}".format(e))
def _done_action(self, _, d):
"""
Remover of the defer once it's done
:param d: defer to remove
:type d: twisted.internet.defer.Deferred
"""
if d in self._ongoing_defers:
self._ongoing_defers.remove(d)
    # XXX: Temporary interface until we migrate to zmq
    # We simulate the calls to zmq.send_multipart. Once we separate
    # this into two processes, the methods below can be changed to
    # send_multipart and this backend class will be really simple.
def provider_setup(self, provider):
"""
Initiate the setup for a provider.
:param provider: URL for the provider
:type provider: unicode
Signals:
prov_unsupported_client
prov_unsupported_api
prov_name_resolution -> { PASSED_KEY: bool, ERROR_KEY: str }
prov_https_connection -> { PASSED_KEY: bool, ERROR_KEY: str }
prov_download_provider_info -> { PASSED_KEY: bool, ERROR_KEY: str }
"""
self._call_queue.put(("provider", "setup_provider", None, provider))
def provider_cancel_setup(self):
"""
Cancel the ongoing setup provider (if any).
"""
self._call_queue.put(("provider", "cancel_setup_provider", None))
def provider_bootstrap(self, provider):
"""
Second stage of bootstrapping for a provider.
:param provider: URL for the provider
:type provider: unicode
Signals:
prov_problem_with_provider
prov_download_ca_cert -> {PASSED_KEY: bool, ERROR_KEY: str}
prov_check_ca_fingerprint -> {PASSED_KEY: bool, ERROR_KEY: str}
prov_check_api_certificate -> {PASSED_KEY: bool, ERROR_KEY: str}
"""
self._call_queue.put(("provider", "bootstrap", None, provider))
def provider_get_supported_services(self, domain):
"""
Signal a list of supported services provided by the given provider.
:param domain: the provider to get the services from.
:type domain: str
Signals:
prov_get_supported_services -> list of unicode
"""
self._call_queue.put(("provider", "get_supported_services", None,
domain))
def provider_get_all_services(self, providers):
"""
Signal a list of services provided by all the configured providers.
:param providers: the list of providers to get the services.
:type providers: list
Signals:
prov_get_all_services -> list of unicode
"""
self._call_queue.put(("provider", "get_all_services", None,
providers))
def provider_get_details(self, domain, lang):
"""
Signal a ProviderConfigLight object with the current ProviderConfig
settings.
:param domain: the domain name of the provider.
:type domain: str
:param lang: the language to use for localized strings.
:type lang: str
Signals:
prov_get_details -> ProviderConfigLight
"""
self._call_queue.put(("provider", "get_details", None, domain, lang))
def provider_get_pinned_providers(self):
"""
Signal the pinned providers.
Signals:
prov_get_pinned_providers -> list of provider domains
"""
self._call_queue.put(("provider", "get_pinned_providers", None))
def user_register(self, provider, username, password):
"""
Register a user using the domain and password given as parameters.
        :param provider: the provider where we need to register the user.
        :type provider: unicode
:param username: the user name
:type username: unicode
:param password: the password for the username
:type password: unicode
Signals:
srp_registration_finished
srp_registration_taken
srp_registration_failed
"""
self._call_queue.put(("register", "register_user", None, provider,
username, password))
def eip_setup(self, provider, skip_network=False):
"""
Initiate the setup for a provider
:param provider: URL for the provider
:type provider: unicode
:param skip_network: Whether checks that involve network should be done
or not
:type skip_network: bool
Signals:
eip_config_ready -> {PASSED_KEY: bool, ERROR_KEY: str}
eip_client_certificate_ready -> {PASSED_KEY: bool, ERROR_KEY: str}
eip_cancelled_setup
"""
self._call_queue.put(("eip", "setup_eip", None, provider,
skip_network))
def eip_cancel_setup(self):
"""
Cancel the ongoing setup EIP (if any).
"""
self._call_queue.put(("eip", "cancel_setup_eip", None))
def eip_start(self, restart=False):
"""
Start the EIP service.
Signals:
backend_bad_call
eip_alien_openvpn_already_running
eip_connected
eip_connection_aborted
eip_network_unreachable
eip_no_pkexec_error
eip_no_polkit_agent_error
eip_no_tun_kext_error
eip_openvpn_already_running
eip_openvpn_not_found_error
eip_process_finished
eip_process_restart_ping
eip_process_restart_tls
eip_state_changed -> str
eip_status_changed -> tuple of str (download, upload)
eip_vpn_launcher_exception
        :param restart: whether this is a restart.
:type restart: bool
"""
self._call_queue.put(("eip", "start", None, restart))
def eip_stop(self, shutdown=False, restart=False, failed=False):
"""
Stop the EIP service.
:param shutdown: whether this is the final shutdown.
:type shutdown: bool
:param restart: whether this is part of a restart.
:type restart: bool
"""
self._call_queue.put(("eip", "stop", None, shutdown, restart))
def eip_terminate(self):
"""
Terminate the EIP service, not necessarily in a nice way.
"""
self._call_queue.put(("eip", "terminate", None))
def eip_get_gateways_list(self, domain):
"""
Signal a list of gateways for the given provider.
:param domain: the domain to get the gateways.
:type domain: str
        # TODO discuss how to document the expected result object received from
# the signal
:signal type: list of str
Signals:
eip_get_gateways_list -> list of unicode
eip_get_gateways_list_error
eip_uninitialized_provider
"""
self._call_queue.put(("eip", "get_gateways_list", None, domain))
def eip_get_initialized_providers(self, domains):
"""
Signal a list of the given domains and if they are initialized or not.
:param domains: the list of domains to check.
        :type domains: list of str
Signals:
eip_get_initialized_providers -> list of tuple(unicode, bool)
"""
self._call_queue.put(("eip", "get_initialized_providers",
None, domains))
def eip_can_start(self, domain):
"""
Signal whether it has everything that is needed to run EIP or not
:param domain: the domain for the provider to check
:type domain: str
Signals:
eip_can_start
eip_cannot_start
"""
self._call_queue.put(("eip", "can_start",
None, domain))
def eip_check_dns(self, domain):
"""
Check if we can resolve the given domain name.
:param domain: the domain for the provider to check
:type domain: str
Signals:
eip_dns_ok
eip_dns_error
"""
self._call_queue.put(("eip", "check_dns", None, domain))
def tear_fw_down(self):
"""
Signal the need to tear the fw down.
"""
self._call_queue.put(("eip", "tear_fw_down", None))
def user_login(self, provider, username, password):
"""
Execute the whole authentication process for a user
        :param provider: the provider where we need to authenticate.
        :type provider: unicode
:param username: username for this session
:type username: str
:param password: password for this user
:type password: str
Signals:
srp_auth_error
srp_auth_ok
srp_auth_bad_user_or_password
srp_auth_server_error
srp_auth_connection_error
"""
self._call_queue.put(("authenticate", "login", None, provider,
username, password))
def user_logout(self):
"""
Log out the current session.
Signals:
srp_logout_ok
srp_logout_error
srp_not_logged_in_error
"""
self._call_queue.put(("authenticate", "logout", None))
def user_cancel_login(self):
"""
Cancel the ongoing login (if any).
"""
self._call_queue.put(("authenticate", "cancel_login", None))
def user_change_password(self, current_password, new_password):
"""
Change the user's password.
:param current_password: the current password of the user.
:type current_password: str
:param new_password: the new password for the user.
:type new_password: str
Signals:
srp_not_logged_in_error
srp_password_change_ok
srp_password_change_badpw
srp_password_change_error
"""
self._call_queue.put(("authenticate", "change_password", None,
current_password, new_password))
def soledad_change_password(self, new_password):
"""
Change the database's password.
:param new_password: the new password for the user.
:type new_password: unicode
Signals:
srp_not_logged_in_error
srp_password_change_ok
srp_password_change_badpw
srp_password_change_error
"""
self._call_queue.put(("soledad", "change_password", None,
new_password))
def user_get_logged_in_status(self):
"""
Signal if the user is currently logged in or not.
Signals:
srp_status_logged_in
srp_status_not_logged_in
"""
self._call_queue.put(("authenticate", "get_logged_in_status", None))
def soledad_bootstrap(self, username, domain, password):
"""
Bootstrap the soledad database.
:param username: the user name
:type username: unicode
:param domain: the domain that we are using.
:type domain: unicode
:param password: the password for the username
:type password: unicode
Signals:
soledad_bootstrap_finished
soledad_bootstrap_failed
soledad_invalid_auth_token
"""
self._call_queue.put(("soledad", "bootstrap", None,
username, domain, password))
def soledad_load_offline(self, username, password, uuid):
"""
Load the soledad database in offline mode.
:param username: full user id (user@provider)
:type username: str or unicode
:param password: the soledad passphrase
:type password: unicode
:param uuid: the user uuid
:type uuid: str or unicode
Signals:
"""
self._call_queue.put(("soledad", "load_offline", None,
username, password, uuid))
def soledad_cancel_bootstrap(self):
"""
Cancel the ongoing soledad bootstrapping process (if any).
"""
self._call_queue.put(("soledad", "cancel_bootstrap", None))
def soledad_close(self):
"""
Close soledad database.
"""
self._call_queue.put(("soledad", "close", None))
def keymanager_list_keys(self):
"""
Signal a list of public keys locally stored.
Signals:
keymanager_keys_list -> list
"""
self._call_queue.put(("keymanager", "list_keys", None))
def keymanager_export_keys(self, username, filename):
"""
Export the given username's keys to a file.
:param username: the username whos keys we need to export.
:type username: str
:param filename: the name of the file where we want to save the keys.
:type filename: str
Signals:
keymanager_export_ok
keymanager_export_error
"""
self._call_queue.put(("keymanager", "export_keys", None,
username, filename))
def keymanager_get_key_details(self, username):
"""
Signal the given username's key details.
:param username: the username whos keys we need to get details.
:type username: str
Signals:
keymanager_key_details
"""
self._call_queue.put(("keymanager", "get_key_details", None, username))
def smtp_start_service(self, full_user_id, download_if_needed=False):
"""
Start the SMTP service.
:param full_user_id: user id, in the form "user@provider"
:type full_user_id: str
:param download_if_needed: True if it should check for mtime
for the file
:type download_if_needed: bool
"""
self._call_queue.put(("mail", "start_smtp_service", None,
full_user_id, download_if_needed))
def imap_start_service(self, full_user_id, offline=False):
"""
Start the IMAP service.
:param full_user_id: user id, in the form "user@provider"
:type full_user_id: str
:param offline: whether imap should start in offline mode or not.
:type offline: bool
"""
self._call_queue.put(("mail", "start_imap_service", None,
full_user_id, offline))
def smtp_stop_service(self):
"""
Stop the SMTP service.
"""
self._call_queue.put(("mail", "stop_smtp_service", None))
def imap_stop_service(self):
"""
Stop imap service.
Signals:
imap_stopped
"""
self._call_queue.put(("mail", "stop_imap_service", None))
| meskio/bitmask_client | src/leap/bitmask/backend/leapbackend.py | Python | gpl-3.0 | 20,848 | 0 |
import sys
__author__ = "ilausuch"
__date__ = "$13-jun-2017 20:05:19$"
sys.path.append( "../Addons" )
from Client import Client
from Timer import Timer
count=1000
def test1():
print ("Test 1: Multiple entry set same connection (count={0})".format(count))
client = Client("localhost", 10001)
timer=Timer()
for i in range(0,count):
client.entry_set("test speed",i,i)
client.close()
print ("Seconds: {0}".format(timer.end()))
def test2():
print ("Test 2: Multiple entry set opening/closing connection (count={0})".format(count))
timer=Timer()
for i in range(0,count):
client = Client("localhost", 10001)
client.entry_set("test speed",i,i)
client.close()
print ("Seconds: {0}".format(timer.end()))
def test3():
print ("Test 3: Multiple entry get (count={0})".format(count))
client = Client("localhost", 10001)
timer=Timer()
for i in range(0,count):
client.entry_get("test speed",i)
client.close()
print ("Seconds: {0}".format(timer.end()))
def main():
test1()
test2()
test3()
if __name__ == "__main__":
main()
| ilausuch/CacheServer | src/test/testServer_speed.py | Python | mit | 1,247 | 0.035285 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateVersion
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2_Versions_CreateVersion_async]
from google.cloud import dialogflow_v2
async def sample_create_version():
# Create a client
client = dialogflow_v2.VersionsAsyncClient()
# Initialize request argument(s)
request = dialogflow_v2.CreateVersionRequest(
parent="parent_value",
)
# Make the request
response = await client.create_version(request=request)
# Handle the response
print(response)
# [END dialogflow_generated_dialogflow_v2_Versions_CreateVersion_async]
| googleapis/python-dialogflow | samples/generated_samples/dialogflow_generated_dialogflow_v2_versions_create_version_async.py | Python | apache-2.0 | 1,488 | 0.000672 |
#!/usr/bin/env python3
# coding: utf-8
import sys
class Solution:
def findPaths(self, m, n, N, i, j):
"""
:type m: int
:type n: int
:type N: int
:type i: int
:type j: int
:rtype: int
"""
if N == 0:
return 0
from collections import defaultdict
mod = 10 ** 9 + 7
ret = 0
maps = [defaultdict(int), defaultdict(int)]
for c in range(n):
maps[1][(0, c)] += 1
maps[1][(m - 1, c)] += 1
for r in range(m):
maps[1][(r, 0)] += 1
maps[1][(r, n - 1)] += 1
ret += maps[1].get((i, j), 0)
for step in range(2, N + 1):
midx = step % 2
for r in range(m):
for c in range(n):
maps[midx][(r, c)] = (maps[1 - midx].get((r - 1, c), 0) +
maps[1 - midx].get((r + 1, c), 0) +
maps[1 - midx].get((r, c - 1), 0) +
maps[1 - midx].get((r, c + 1), 0))
if maps[midx][(r, c)] > mod:
maps[midx][(r, c)] %= mod
ret = (ret + maps[midx].get((i, j), 0)) % mod
# print(step, maps[midx])
return ret
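    # Explanatory note (added, not in the original): maps[step % 2][(r, c)]
    # counts the paths of exactly `step` moves that leave the grid from cell
    # (r, c). For example, with m=2, n=2, N=2 and start (0, 0) there are
    # 2 one-move exits plus 4 two-move exit paths, giving the answer 6.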
def main(args):
sol = Solution()
print(sol.findPaths(2, 2, 2, 0, 0))
print(sol.findPaths(1, 3, 3, 0, 1))
print(sol.findPaths(50, 50, 50, 0, 0))
return
if __name__ == '__main__':
main(sys.argv[1:])
| feigaochn/leetcode | p576_out_of_boundary_paths.py | Python | mit | 1,552 | 0 |
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
IMPORTANT_FIELD_GUESSES = ['id', 'pk', 'name', 'last', 'first', 'full_name', 'summary', 'description', 'user', 'person']
def representation(model, field_names=[], max_fields=None):
"""Unicode representation of Django model instance (object/record/row)"""
representation.max_fields = max_fields if max_fields is not None else representation.max_fields
if not field_names:
field_names = getattr(model, 'IMPORTANT_FIELDS', None)
if field_names is None:
field_names = []
# model_fields = set([f.name for f in model._meta.fields])
for f in model._meta.fields:
field_names += [f.name] if f.name in IMPORTANT_FIELD_GUESSES else []
retval = model.__class__.__name__ + u'('
retval += ', '.join("{}".format(repr(getattr(model, s, '') or ''))
for s in field_names[:min(len(field_names), representation.max_fields)])
return retval + u')'
representation.max_fields = 5
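# Hedged usage sketch (not part of the original module): given a model whose
# fields hit the guesses above, representation() produces output such as
#
#   class Tweet(models.Model):                   # hypothetical model
#       name = models.CharField(max_length=140)
#       description = models.TextField()
#
#   representation(Tweet(id=1, name='hi', description='a tweet'))
#   # -> "Tweet(1, 'hi', 'a tweet')"
#
# The exact fields shown depend on the order Django reports them in
# model._meta.fields.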
def name_similarity():
"""Compute the similarity (inverse distance) matrix between committe names"""
pass
class LongCharField(models.CharField):
"An unlimited-length CharField to satisfy by Django and postgreSQL varchar."
description = _("Unlimited-length string")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = int(1e9) # Satisfy management validation.
super(models.CharField, self).__init__(*args, **kwargs)
# Don't add max-length validator like CharField does.
def get_internal_type(self):
# This has no function, since this value is used as a lookup in
# db_type(). Put something that isn't known by django so it
# raises an error if it is ever used.
return 'LongCharField'
def db_type(self, connection):
# *** This is probably only compatible with Postgres.
# 'varchar' with no max length is equivalent to 'text' in Postgres,
# but put 'varchar' so we can tell LongCharFields from TextFields
# when we're looking at the db.
return 'varchar'
def formfield(self, **kwargs):
# Don't pass max_length to form field like CharField does.
return super(models.CharField, self).formfield(**kwargs)
models.LongCharField = LongCharField
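# Hedged sketch (not in the original module): once monkey-patched onto
# django.db.models above, the field is declared like any other, e.g.
#
#   class Note(models.Model):          # hypothetical model
#       body = models.LongCharField()  # unconstrained varchar in PostgreSQL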
| totalgood/twote | twote/model_utils.py | Python | mit | 2,398 | 0.003336 |
"""
Module: plugin.py
Author: Rinke Hoekstra
Created: 2 October 2012
Copyright (c) 2012, Rinke Hoekstra, VU University Amsterdam
http://github.com/Data2Semantics/linkitup
"""
from flask.ext.login import login_required
import re
from linkitup import app
from linkitup.util.baseplugin import plugin, SPARQLPlugin
from linkitup.util.provenance import provenance
app.logger.debug("Initializing DrugBank")
endpoints = ['http://drugbank.bio2rdf.org/sparql','http://bioportal.bio2rdf.org/sparql','http://kegg.bio2rdf.org/sparql','http://affymetrix.bio2rdf.org/sparql']
@app.route('/bio2rdf', methods=['POST'])
@login_required
@plugin(fields=[('tags','id','name'),('categories','id','name')], link='mapping')
@provenance()
def link_to_bio2rdf(*args,**kwargs):
# Retrieve the article from the post
article_id = kwargs['article']['id']
match_items = kwargs['inputs']
match_type = kwargs['link']
app.logger.debug("Running Bio2RDF plugin for article {}".format(article_id))
try :
# Initialize the plugin
plugin = SPARQLPlugin(endpoint = endpoints,
template = "bio2rdf.query",
match_type = match_type,
id_base = 'label',
all=True)
# Run the plugin, and retrieve matches using the default label property (rdfs:label)
matches = plugin.match(match_items)
app.logger.debug("Plugin is done, returning the following matches")
app.logger.debug(matches)
# Return the matches
return matches
except Exception as e:
app.logger.error(e.message)
return {'error': e.message }
| Data2Semantics/linkitup | linkitup/bio2rdf/plugin.py | Python | mit | 1,736 | 0.021889 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.views.generic import View
from django.conf import settings
from geonode.base.enumerations import LINK_TYPES as _LT
# from geonode.base.models import Link
from geonode.utils import json_response
from geonode.geoserver import ows
LINK_TYPES = [L for L in _LT if L.startswith("OGC:")]
class OWSListView(View):
def get(self, request):
out = {'success': True}
data = []
out['data'] = data
# per-layer links
# for link in Link.objects.filter(link_type__in=LINK_TYPES): # .distinct('url'):
# data.append({'url': link.url, 'type': link.link_type})
data.append({'url': ows._wcs_get_capabilities(), 'type': 'OGC:WCS'})
data.append({'url': ows._wfs_get_capabilities(), 'type': 'OGC:WFS'})
data.append({'url': ows._wms_get_capabilities(), 'type': 'OGC:WMS'})
# catalogue from configuration
for catname, catconf in settings.CATALOGUE.items():
data.append({'url': catconf['URL'], 'type': 'OGC:CSW'})
# main site url
data.append({'url': settings.SITEURL, 'type': 'WWW:LINK'})
return json_response(out)
ows_endpoints = OWSListView.as_view()
| timlinux/geonode | geonode/contrib/ows_api/views.py | Python | gpl-3.0 | 2,017 | 0.000496 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# $Id$
#
# Copyright 2010 Glencoe Software, Inc. All rights reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
# Hudson launcher script which properly launches the script
# on the right system. This is used by most jobs via:
#
# cd src
# cd docs
# cd hudson
# python launcher.py
#
# which will:
#
# * download <BRANCH>.log from hudson
# * create hudson.log
# * run sh docs/hudson/OMERO-<BRANCH>-<COMPONENT>.sh
# or docs\hudson\OMERO-<BRANCH>-<COMPONENT>.bat
#
import os
import re
import sys
import urllib
import platform
import subprocess
LOG_URL = "http://hudson.openmicroscopy.org.uk/job/OMERO-%(BRANCH)s/lastSuccessfulBuild/artifact/src/target/%(BRANCH)s.log"
JOB_NAME_STR = "^OMERO-([^-]+)-(.*?)(/(.*))?$"
JOB_NAME_REG = re.compile(JOB_NAME_STR)
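# Illustrative examples (added, hypothetical job names) of how JOB_NAME_REG
# splits $JOB_NAME:
#
#   "OMERO-trunk-start" -> branch "trunk", build "start", no axises
#   "OMERO-trunk-components/component=java,label=linux"
#       -> branch "trunk", build "components",
#          axises "component=java,label=linux"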
class ConfigOpener(urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
if errcode and errcode > 400:
raise Exception("Error loading %s: %s" % (url, errcode))
if __name__ == "__main__":
#
# FIND JOB NAME
#
job_name = os.environ["JOB_NAME"]
m = JOB_NAME_REG.match(job_name)
if not m:
print "Bad job name: %s doesn't match %r" % (job_name, JOB_NAME_STR)
sys.exit(1)
branch = m.group(1)
build = m.group(2)
axises = m.group(4)
if axises:
values = {}
for axis in axises.split(","):
parts = axis.split("=")
values[parts[0]] = parts[1]
job = values["component"]
label = values["label"]
else:
job = build
#
# SETUP
#
os.chdir("..") # docs
os.chdir("..") # OMERO_HOME
top = os.path.abspath(".")
build_log = os.path.join(top, "%s.log" % branch)
hudson_log = os.path.join(top, "hudson.log")
config_file = os.path.join(top, "%s.config" % branch)
#
# LOG FILES
#
log_url = LOG_URL % {"BRANCH": branch}
print "Loading %s ..." % log_url
url = urllib.urlopen(log_url)
build_log_text = url.read()
url.close()
f = open(build_log, "w")
for line in build_log_text.split("\n"):
f.write(line)
f.write("\n")
# Also import the file into the environment
line = line.strip()
parts = line.split("=")
if parts and parts[0]:
k = str(parts[0])
try:
v = str(parts[1])
os.environ[k] = v
except:
os.environ[k] = ""
f.close()
f = open(hudson_log, "w")
for key in sorted(os.environ):
f.write("%s=%s\n" % (key, os.environ[key]))
    f.close()
#
# CONFIG FILE
# -----------
# If this is not the "start" job, then download
# the <BRANCH>.config file created by start in
# order to access the server.
#
if axises and job != "start":
build_url = os.environ["BUILD_URL"]
build_url = build_url.replace("component=%s" % job, "component=start")
# These jobs don't have their own
# "start" component, so let them use
# the "linux" label.
if label == "macosx" or label == "matlab":
build_url = build_url.replace("label=%s" % label, "label=linux")
build_url = "%s/%s" % (build_url, "artifact/src/%s.config" % branch)
if os.path.exists(config_file):
print "Removing %s ..." % config_file
os.remove(config_file)
print "Downloading %s ... " % build_url
ConfigOpener().retrieve(build_url, filename=config_file)
os.environ["ICE_CONFIG"] = config_file
#
# BUILD COMMAND
#
path = os.path.join("docs", "hudson")
base = "OMERO-%s" % job
if "Windows" == platform.system():
name = base + ".bat"
cmd = []
else:
name = base + ".sh"
cmd = ["sh"]
path = os.path.join(path, name)
cmd.append(path)
#
# RUN
#
print "Launching", " ".join(cmd)
print "="*60
popen = subprocess.Popen(cmd, env = os.environ)
rcode = popen.wait()
if rcode != 0:
print "="*60
print "Build failed with rcode=%s" % rcode
sys.exit(rcode)
| jballanc/openmicroscopy | docs/hudson/launcher.py | Python | gpl-2.0 | 4,193 | 0.0031 |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
"""Module to handle real user logins via GAE SSO"""
import logging
from google.appengine.api import users
from simian import auth as auth_init
from simian.auth import base
from simian.auth import gaeserver
from simian.mac.common import auth
from simian.mac.munki import handlers
class Error(Exception):
"""Base error."""
class NotAuthenticated(Error, base.NotAuthenticated):
"""Not Authenticated Error."""
class UserAuth(handlers.AuthenticationHandler):
"""Handle for user auth which provides Auth1 token."""
def get(self):
"""Handle GET."""
try:
# already munki authenticated? return, nothing to do.
gaeserver.DoMunkiAuth()
#logging.info('Uauth: session is already authenticated')
return
except gaeserver.NotAuthenticated:
pass
user = users.get_current_user()
if not user:
#logging.error('Uauth: user is not logged in')
raise NotAuthenticated
email = user.email()
if auth.IsAdminUser(email):
a = gaeserver.AuthSimianServer()
output = a.SessionCreateUserAuthToken(email, level=gaeserver.LEVEL_ADMIN)
elif auth.IsSupportUser(email):
a = gaeserver.AuthSimianServer()
output = a.SessionCreateUserAuthToken(email, level=gaeserver.LEVEL_BASE)
else:
logging.error('Uauth: user %s is not an admin', email)
raise NotAuthenticated
if output:
#logging.info('Uauth: success, token = %s', output)
self.response.headers['Set-Cookie'] = '%s=%s; secure; httponly;' % (
auth_init.AUTH_TOKEN_COOKIE, output)
self.response.out.write(auth_init.AUTH_TOKEN_COOKIE)
else:
#logging.info('Uauth: unknown token')
raise NotAuthenticated
def post(self):
"""Handle POST.
Because the appengine_rpc module, used by simian.client.UAuth class, uses
        the POST http method, define this handler which mirrors the functionality
of the GET method.
"""
return self.get()
| alexandregz/simian | src/simian/mac/munki/handlers/uauth.py | Python | apache-2.0 | 2,569 | 0.011288 |
"""
pgoapi - Pokemon Go API
Copyright (c) 2016 tjado <https://github.com/tejado>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
Author: tjado <https://github.com/tejado>
"""
import logging
import re
import requests
from utilities import f2i, h2f
from rpc_api import RpcApi
from auth_ptc import AuthPtc
from auth_google import AuthGoogle
from exceptions import AuthException, NotLoggedInException, ServerBusyOrOfflineException
import protos.RpcEnum_pb2 as RpcEnum
logger = logging.getLogger(__name__)
class PGoApi:
API_ENTRY = 'https://pgorelease.nianticlabs.com/plfe/rpc'
def __init__(self):
self.log = logging.getLogger(__name__)
self._auth_provider = None
self._api_endpoint = None
self._position_lat = 0
self._position_lng = 0
self._position_alt = 0
self._req_method_list = []
def call(self):
if not self._req_method_list:
return False
if self._auth_provider is None or not self._auth_provider.is_login():
self.log.info('Not logged in')
return False
player_position = self.get_position()
request = RpcApi(self._auth_provider)
if self._api_endpoint:
api_endpoint = self._api_endpoint
else:
api_endpoint = self.API_ENTRY
self.log.info('Execution of RPC')
response = None
try:
response = request.request(api_endpoint, self._req_method_list, player_position)
except ServerBusyOrOfflineException as e:
self.log.info('Server seems to be busy or offline - try again!')
# cleanup after call execution
self.log.info('Cleanup of request!')
self._req_method_list = []
return response
#def get_player(self):
def list_curr_methods(self):
for i in self._req_method_list:
print("{} ({})".format(RpcEnum.RequestMethod.Name(i),i))
def set_logger(self, logger):
        self.log = logger or logging.getLogger(__name__)
def get_position(self):
return (self._position_lat, self._position_lng, self._position_alt)
def set_position(self, lat, lng, alt):
self.log.debug('Set Position - Lat: %s Long: %s Alt: %s', lat, lng, alt)
self._position_lat = f2i(lat)
self._position_lng = f2i(lng)
self._position_alt = f2i(alt)
def __getattr__(self, func):
def function(**kwargs):
if not self._req_method_list:
self.log.info('Create new request...')
name = func.upper()
if kwargs:
self._req_method_list.append( { RpcEnum.RequestMethod.Value(name): kwargs } )
self.log.info("Adding '%s' to RPC request including arguments", name)
self.log.debug("Arguments of '%s': \n\r%s", name, kwargs)
else:
self._req_method_list.append( RpcEnum.RequestMethod.Value(name) )
self.log.info("Adding '%s' to RPC request", name)
return self
if func.upper() in RpcEnum.RequestMethod.keys():
return function
else:
raise AttributeError
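    # Hedged usage sketch (not part of the original source): because of
    # __getattr__ any RequestMethod name can be chained onto the api object
    # and executed later with call(), e.g.
    #
    #   api = PGoApi()
    #   api.set_position(40.7127, -74.0059, 0.0)   # example coordinates
    #   api.login('ptc', 'username', 'password')   # placeholder credentials
    #   api.get_player()
    #   api.get_inventory()
    #   response = api.call()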
def login(self, provider, username, password):
if not isinstance(username, basestring) or not isinstance(password, basestring):
raise AuthException("Username/password not correctly specified")
if provider == 'ptc':
self._auth_provider = AuthPtc()
elif provider == 'google':
self._auth_provider = AuthGoogle()
else:
raise AuthException("Invalid authentication provider - only ptc/google available.")
self.log.debug('Auth provider: %s', provider)
if not self._auth_provider.login(username, password):
self.log.info('Login process failed')
return False
self.log.info('Starting RPC login sequence (app simulation)')
# making a standard call, like it is also done by the client
self.get_player()
self.get_hatched_eggs()
self.get_inventory()
self.check_awarded_badges()
self.download_settings(hash="4a2e9bc330dae60e7b74fc85b98868ab4700802e")
response = self.call()
if not response:
self.log.info('Login failed!')
return False
if 'api_url' in response:
self._api_endpoint = ('https://{}/rpc'.format(response['api_url']))
self.log.debug('Setting API endpoint to: %s', self._api_endpoint)
else:
self.log.error('Login failed - unexpected server response!')
return False
if 'auth_ticket' in response:
self._auth_provider.set_ticket(response['auth_ticket'].values())
self.log.info('Finished RPC login sequence (app simulation)')
self.log.info('Login process completed')
return True
| charbec1/pokemapfuntimesyay | pogom/pgoapi/pgoapi.py | Python | mit | 6,103 | 0.00934 |
import os.path as op
from nose.tools import assert_true, assert_raises
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal
import warnings
import mne
from mne import compute_covariance
from mne.datasets import testing
from mne.beamformer import lcmv, lcmv_epochs, lcmv_raw, tf_lcmv
from mne.beamformer._lcmv import _lcmv_source_power
from mne.externals.six import advance_iterator
from mne.utils import run_tests_if_main, slow_test
data_path = testing.data_path(download=False)
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
fname_fwd_vol = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-vol-7-fwd.fif')
fname_event = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw-eve.fif')
label = 'Aud-lh'
fname_label = op.join(data_path, 'MEG', 'sample', 'labels', '%s.label' % label)
warnings.simplefilter('always') # enable b/c these tests throw warnings
def read_forward_solution_meg(*args, **kwargs):
fwd = mne.read_forward_solution(*args, **kwargs)
return mne.pick_types_forward(fwd, meg=True, eeg=False)
def _get_data(tmin=-0.1, tmax=0.15, all_forward=True, epochs=True,
epochs_preload=True, data_cov=True):
"""Read in data used in tests
"""
label = mne.read_label(fname_label)
events = mne.read_events(fname_event)
raw = mne.io.Raw(fname_raw, preload=True)
forward = mne.read_forward_solution(fname_fwd)
if all_forward:
forward_surf_ori = read_forward_solution_meg(fname_fwd, surf_ori=True)
forward_fixed = read_forward_solution_meg(fname_fwd, force_fixed=True,
surf_ori=True)
forward_vol = read_forward_solution_meg(fname_fwd_vol, surf_ori=True)
else:
forward_surf_ori = None
forward_fixed = None
forward_vol = None
event_id, tmin, tmax = 1, tmin, tmax
# Setup for reading the raw data
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # 2 bads channels
if epochs:
# Set up pick list: MEG - bad channels
left_temporal_channels = mne.read_selection('Left-temporal')
picks = mne.pick_types(raw.info, meg=True, eeg=False, stim=True,
eog=True, ref_meg=False, exclude='bads',
selection=left_temporal_channels)
# Read epochs
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, baseline=(None, 0),
preload=epochs_preload,
reject=dict(grad=4000e-13, mag=4e-12, eog=150e-6))
if epochs_preload:
epochs.resample(200, npad=0, n_jobs=2)
evoked = epochs.average()
info = evoked.info
else:
epochs = None
evoked = None
info = raw.info
noise_cov = mne.read_cov(fname_cov)
noise_cov = mne.cov.regularize(noise_cov, info, mag=0.05, grad=0.05,
eeg=0.1, proj=True)
if data_cov:
with warnings.catch_warnings(record=True):
data_cov = mne.compute_covariance(epochs, tmin=0.04, tmax=0.15)
else:
data_cov = None
return raw, epochs, evoked, data_cov, noise_cov, label, forward,\
forward_surf_ori, forward_fixed, forward_vol
@slow_test
@testing.requires_testing_data
def test_lcmv():
"""Test LCMV with evoked data and single trials
"""
raw, epochs, evoked, data_cov, noise_cov, label, forward,\
forward_surf_ori, forward_fixed, forward_vol = _get_data()
for fwd in [forward, forward_vol]:
stc = lcmv(evoked, fwd, noise_cov, data_cov, reg=0.01)
stc.crop(0.02, None)
stc_pow = np.sum(stc.data, axis=1)
idx = np.argmax(stc_pow)
max_stc = stc.data[idx]
tmax = stc.times[np.argmax(max_stc)]
assert_true(0.09 < tmax < 0.105, tmax)
assert_true(0.9 < np.max(max_stc) < 3., np.max(max_stc))
if fwd is forward:
# Test picking normal orientation (surface source space only)
stc_normal = lcmv(evoked, forward_surf_ori, noise_cov, data_cov,
reg=0.01, pick_ori="normal")
stc_normal.crop(0.02, None)
stc_pow = np.sum(np.abs(stc_normal.data), axis=1)
idx = np.argmax(stc_pow)
max_stc = stc_normal.data[idx]
tmax = stc_normal.times[np.argmax(max_stc)]
assert_true(0.04 < tmax < 0.11, tmax)
assert_true(0.4 < np.max(max_stc) < 2., np.max(max_stc))
# The amplitude of normal orientation results should always be
# smaller than free orientation results
assert_true((np.abs(stc_normal.data) <= stc.data).all())
# Test picking source orientation maximizing output source power
stc_max_power = lcmv(evoked, fwd, noise_cov, data_cov, reg=0.01,
pick_ori="max-power")
stc_max_power.crop(0.02, None)
stc_pow = np.sum(stc_max_power.data, axis=1)
idx = np.argmax(stc_pow)
max_stc = stc_max_power.data[idx]
tmax = stc.times[np.argmax(max_stc)]
assert_true(0.09 < tmax < 0.11, tmax)
assert_true(0.8 < np.max(max_stc) < 3., np.max(max_stc))
# Maximum output source power orientation results should be similar to
# free orientation results
assert_true((stc_max_power.data - stc.data < 1).all())
# Test if fixed forward operator is detected when picking normal or
# max-power orientation
assert_raises(ValueError, lcmv, evoked, forward_fixed, noise_cov, data_cov,
reg=0.01, pick_ori="normal")
assert_raises(ValueError, lcmv, evoked, forward_fixed, noise_cov, data_cov,
reg=0.01, pick_ori="max-power")
# Test if non-surface oriented forward operator is detected when picking
# normal orientation
assert_raises(ValueError, lcmv, evoked, forward, noise_cov, data_cov,
reg=0.01, pick_ori="normal")
# Test if volume forward operator is detected when picking normal
# orientation
assert_raises(ValueError, lcmv, evoked, forward_vol, noise_cov, data_cov,
reg=0.01, pick_ori="normal")
# Now test single trial using fixed orientation forward solution
# so we can compare it to the evoked solution
stcs = lcmv_epochs(epochs, forward_fixed, noise_cov, data_cov, reg=0.01)
stcs_ = lcmv_epochs(epochs, forward_fixed, noise_cov, data_cov, reg=0.01,
return_generator=True)
assert_array_equal(stcs[0].data, advance_iterator(stcs_).data)
epochs.drop_bad_epochs()
assert_true(len(epochs.events) == len(stcs))
# average the single trial estimates
stc_avg = np.zeros_like(stcs[0].data)
for this_stc in stcs:
stc_avg += this_stc.data
stc_avg /= len(stcs)
# compare it to the solution using evoked with fixed orientation
stc_fixed = lcmv(evoked, forward_fixed, noise_cov, data_cov, reg=0.01)
assert_array_almost_equal(stc_avg, stc_fixed.data)
# use a label so we have few source vertices and delayed computation is
# not used
stcs_label = lcmv_epochs(epochs, forward_fixed, noise_cov, data_cov,
reg=0.01, label=label)
assert_array_almost_equal(stcs_label[0].data, stcs[0].in_label(label).data)
@testing.requires_testing_data
def test_lcmv_raw():
"""Test LCMV with raw data
"""
raw, _, _, _, noise_cov, label, forward, _, _, _ =\
_get_data(all_forward=False, epochs=False, data_cov=False)
tmin, tmax = 0, 20
start, stop = raw.time_as_index([tmin, tmax])
# use only the left-temporal MEG channels for LCMV
left_temporal_channels = mne.read_selection('Left-temporal')
picks = mne.pick_types(raw.info, meg=True, exclude='bads',
selection=left_temporal_channels)
data_cov = mne.compute_raw_data_covariance(raw, tmin=tmin, tmax=tmax)
stc = lcmv_raw(raw, forward, noise_cov, data_cov, reg=0.01, label=label,
start=start, stop=stop, picks=picks)
assert_array_almost_equal(np.array([tmin, tmax]),
np.array([stc.times[0], stc.times[-1]]),
decimal=2)
# make sure we get an stc with vertices only in the lh
vertno = [forward['src'][0]['vertno'], forward['src'][1]['vertno']]
assert_true(len(stc.vertices[0]) == len(np.intersect1d(vertno[0],
label.vertices)))
assert_true(len(stc.vertices[1]) == 0)
@testing.requires_testing_data
def test_lcmv_source_power():
"""Test LCMV source power computation
"""
raw, epochs, evoked, data_cov, noise_cov, label, forward,\
forward_surf_ori, forward_fixed, forward_vol = _get_data()
stc_source_power = _lcmv_source_power(epochs.info, forward, noise_cov,
data_cov, label=label)
max_source_idx = np.argmax(stc_source_power.data)
max_source_power = np.max(stc_source_power.data)
assert_true(max_source_idx == 0, max_source_idx)
assert_true(0.4 < max_source_power < 2.4, max_source_power)
# Test picking normal orientation and using a list of CSD matrices
stc_normal = _lcmv_source_power(epochs.info, forward_surf_ori, noise_cov,
data_cov, pick_ori="normal", label=label)
# The normal orientation results should always be smaller than free
# orientation results
assert_true((np.abs(stc_normal.data[:, 0]) <=
stc_source_power.data[:, 0]).all())
# Test if fixed forward operator is detected when picking normal
# orientation
assert_raises(ValueError, _lcmv_source_power, raw.info, forward_fixed,
noise_cov, data_cov, pick_ori="normal")
# Test if non-surface oriented forward operator is detected when picking
# normal orientation
assert_raises(ValueError, _lcmv_source_power, raw.info, forward, noise_cov,
data_cov, pick_ori="normal")
# Test if volume forward operator is detected when picking normal
# orientation
assert_raises(ValueError, _lcmv_source_power, epochs.info, forward_vol,
noise_cov, data_cov, pick_ori="normal")
@testing.requires_testing_data
def test_tf_lcmv():
"""Test TF beamforming based on LCMV
"""
label = mne.read_label(fname_label)
events = mne.read_events(fname_event)
raw = mne.io.Raw(fname_raw, preload=True)
forward = mne.read_forward_solution(fname_fwd)
event_id, tmin, tmax = 1, -0.2, 0.2
# Setup for reading the raw data
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # 2 bads channels
# Set up pick list: MEG - bad channels
left_temporal_channels = mne.read_selection('Left-temporal')
picks = mne.pick_types(raw.info, meg=True, eeg=False,
stim=True, eog=True, exclude='bads',
selection=left_temporal_channels)
# Read epochs
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, baseline=None, preload=False,
reject=dict(grad=4000e-13, mag=4e-12, eog=150e-6))
epochs.drop_bad_epochs()
freq_bins = [(4, 12), (15, 40)]
time_windows = [(-0.1, 0.1), (0.0, 0.2)]
win_lengths = [0.2, 0.2]
tstep = 0.1
reg = 0.05
source_power = []
noise_covs = []
for (l_freq, h_freq), win_length in zip(freq_bins, win_lengths):
raw_band = raw.copy()
raw_band.filter(l_freq, h_freq, method='iir', n_jobs=1, picks=picks)
epochs_band = mne.Epochs(raw_band, epochs.events, epochs.event_id,
tmin=tmin, tmax=tmax, baseline=None,
proj=True, picks=picks)
with warnings.catch_warnings(record=True): # not enough samples
noise_cov = compute_covariance(epochs_band, tmin=tmin, tmax=tmin +
win_length)
noise_cov = mne.cov.regularize(noise_cov, epochs_band.info, mag=reg,
grad=reg, eeg=reg, proj=True)
noise_covs.append(noise_cov)
del raw_band # to save memory
    # Manually calculating source power in one frequency band and several
# time windows to compare to tf_lcmv results and test overlapping
if (l_freq, h_freq) == freq_bins[0]:
for time_window in time_windows:
with warnings.catch_warnings(record=True):
data_cov = compute_covariance(epochs_band,
tmin=time_window[0],
tmax=time_window[1])
stc_source_power = _lcmv_source_power(epochs.info, forward,
noise_cov, data_cov,
reg=reg, label=label)
source_power.append(stc_source_power.data)
with warnings.catch_warnings(record=True):
stcs = tf_lcmv(epochs, forward, noise_covs, tmin, tmax, tstep,
win_lengths, freq_bins, reg=reg, label=label)
assert_true(len(stcs) == len(freq_bins))
assert_true(stcs[0].shape[1] == 4)
# Averaging all time windows that overlap the time period 0 to 100 ms
source_power = np.mean(source_power, axis=0)
# Selecting the first frequency bin in tf_lcmv results
stc = stcs[0]
# Comparing tf_lcmv results with _lcmv_source_power results
assert_array_almost_equal(stc.data[:, 2], source_power[:, 0])
# Test if using unsupported max-power orientation is detected
assert_raises(ValueError, tf_lcmv, epochs, forward, noise_covs, tmin, tmax,
tstep, win_lengths, freq_bins=freq_bins,
pick_ori='max-power')
    # Test if incorrect number of noise covariances is detected
assert_raises(ValueError, tf_lcmv, epochs, forward, [noise_covs[0]], tmin,
tmax, tstep, win_lengths, freq_bins)
# Test if freq_bins and win_lengths incompatibility is detected
assert_raises(ValueError, tf_lcmv, epochs, forward, noise_covs, tmin, tmax,
tstep, win_lengths=[0, 1, 2], freq_bins=freq_bins)
# Test if time step exceeding window lengths is detected
assert_raises(ValueError, tf_lcmv, epochs, forward, noise_covs, tmin, tmax,
tstep=0.15, win_lengths=[0.2, 0.1], freq_bins=freq_bins)
# Test correct detection of preloaded epochs objects that do not contain
# the underlying raw object
epochs_preloaded = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
baseline=(None, 0), preload=True)
with warnings.catch_warnings(record=True): # not enough samples
assert_raises(ValueError, tf_lcmv, epochs_preloaded, forward,
noise_covs, tmin, tmax, tstep, win_lengths, freq_bins)
with warnings.catch_warnings(record=True): # not enough samples
# Pass only one epoch to test if subtracting evoked
# responses yields zeros
stcs = tf_lcmv(epochs[0], forward, noise_covs, tmin, tmax, tstep,
win_lengths, freq_bins, subtract_evoked=True, reg=reg,
label=label)
assert_array_almost_equal(stcs[0].data, np.zeros_like(stcs[0].data))
run_tests_if_main()
| dimkal/mne-python | mne/beamformer/tests/test_lcmv.py | Python | bsd-3-clause | 15,822 | 0 |
# sequences.py
# strings
>>> # 4 ways to make a string
>>> str1 = 'This is a string. We built it with single quotes.'
>>> str2 = "This is also a string, but built with double quotes."
>>> str3 = '''This is built using triple quotes,
... so it can span multiple lines.'''
>>> str4 = """This too
... is a multiline one
... built with triple double-quotes."""
>>> str4 #A
'This too\nis a multiline one\nbuilt with triple double-quotes.'
>>> print(str4) #B
This too
is a multiline one
built with triple double-quotes.
>>>
# encode / decode
>>> s = "This is üŋíc0de" # unicode string: code points
>>> type(s)
<class 'str'>
>>> encoded_s = s.encode('utf-8') # utf-8 encoded version of s
>>> encoded_s
b'This is \xc3\xbc\xc5\x8b\xc3\xadc0de' # result: bytes object
>>> type(encoded_s) # another way to verify it
<class 'bytes'>
>>> encoded_s.decode('utf-8') # let's revert to the original
'This is üŋíc0de'
>>> bytes_obj = b"A bytes object" # a bytes object
>>> type(bytes_obj)
<class 'bytes'>
# length
>>> len(str1)
49
# indexing and slicing
>>> s = "The trouble is you think you have time."
>>> s[0] # indexing at position 0, which is the first char
'T'
>>> s[5] # indexing at position 5, which is the sixth char
'r'
>>> s[:4] # slicing, we specify only the stop position
'The '
>>> s[4:] # slicing, we specify only the start position
'trouble is you think you have time.'
>>> s[2:14] # slicing, both start and stop positions
'e trouble is'
>>> s[2:14:3] # slicing, start, stop and step (every 3 chars)
'erb '
>>> s[:] # quick way of making a copy
'The trouble is you think you have time.'
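# negative indices (added aside, not in the original transcript)
>>> s[-5:] # slicing with a negative start counts from the end
'time.'
>>> s[::-1] # a negative step walks the string backwards
'.emit evah uoy kniht uoy si elbuort ehT'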
| mkhuthir/learnPython | Book_learning-python-r1.1/ch2/sequences.py | Python | mit | 1,614 | 0.019279 |
"""
The controller base class
"""
from .routes import Route
from .view import View
class Controller(object):
def __init__(self, entity, env):
"""Instantiate a controller with the name of the entity and the
environment dict.
"""
self.entity = entity.strip('/^$')
if not self.entity:
self.entity = 'index'
self.routes = []
self.register_routes()
self.env = env
def register_routes(self):
"""Simple internal method to run through all of the methods of this class
and see if they've been decorated to be endpoints.
"""
for funcname in dir(self):
func = getattr(self, funcname)
if hasattr(func, '_method') and hasattr(func, '_path'):
self.update_routes(func._method, func._path, func)
def update_routes(self, method, matcher, endpoint):
"""Adds an endpoint into the possible endpoints of a path based on
its HTTP method
"""
for route in self.routes:
if route.key == matcher:
route.update(method, endpoint)
return
# If the route has not been added to the routes yet
self.routes.append(Route(method, matcher, endpoint))
def route(self, env):
"""Called by the application to route the requests to the proper endpoint
in this controller.
"""
for route in self.routes:
if self.entity == 'index':
path = '/' + '/'.join(env['PATH_INFO'].split('/')[1:])
else:
path = '/' + '/'.join(env['PATH_INFO'].split('/')[2:])
if route.match(path):
ans = route.call(env['REQUEST_METHOD'], env['PATH_INFO'], env)
if ans[1] == 'no_template':
return ans[0]
if '/' in ans[0]:
view = View(ans[0].split('/')[0])
return view.render(ans[0], ans[1])
else:
view = View(self.entity)
return view.render(ans[0], ans[1])
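# Illustrative sketch (not part of the original module): one way an endpoint
# could be declared so that register_routes() discovers it. The `get`
# decorator and the endpoint signature below are hypothetical assumptions,
# shown only to illustrate the `_method`/`_path` attributes and the
# (template, context) return value that route() consumes above.
#
#   def get(path):
#       def wrap(func):
#           func._method = 'GET'
#           func._path = path
#           return func
#       return wrap
#
#   class Posts(Controller):
#       @get(r'^/list$')
#       def list(self, *args, **kwargs):
#           return 'list', {'posts': []}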
| bis12/yapwaf | yapwaf/controller.py | Python | mit | 2,107 | 0.001424 |
from theano import tensor
from theano.tensor.nnet import conv2d
def weights_std(weights, mask_outputs=None):
positions = tensor.arange(weights.shape[2])
expected = (weights * positions).sum(axis=2)
expected2 = (weights * positions ** 2).sum(axis=2)
result = (expected2 - expected ** 2) ** 0.5
if mask_outputs:
result *= mask_outputs
return result.sum() / weights.shape[0]
def monotonicity_penalty(weights, mask_x=None):
cumsums = tensor.cumsum(weights, axis=2)
penalties = tensor.maximum(cumsums[1:] - cumsums[:-1], 0).sum(axis=2)
if mask_x:
penalties *= mask_x[1:]
return penalties.sum()
def entropy(weights, mask_x):
entropies = (weights * tensor.log(weights + 1e-7)).sum(axis=2)
entropies *= mask_x
return entropies.sum()
def conv1d(sequences, masks, **kwargs):
"""Wraps Theano conv2d to perform 1D convolution.
Parameters
----------
    sequences : :class:`~theano.Variable`
(batch_size, length)
masks : :class:`~theano.Variable`
(num_filters, filter_length)
**kwargs
Will be passed to `conv2d`
Returns
-------
result : :class:`~theano.Variable`
(batch_size, num_filters, position)
"""
# For testability
sequences = tensor.as_tensor_variable(sequences)
masks = tensor.as_tensor_variable(masks)
image = sequences.dimshuffle('x', 'x', 0, 1)
filters = masks.dimshuffle(0, 'x', 'x', 1)
result = conv2d(image, filters, **kwargs)
# Now number of rows is the actual batch size
result = result.dimshuffle(2, 1, 3, 0)
return result.reshape(result.shape[:-1], ndim=3)
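# Illustrative shape check for conv1d (not part of the original module),
# assuming conv2d's default 'valid' border mode:
#   >>> import numpy as np
#   >>> from theano import function
#   >>> seqs = np.zeros((3, 10))     # (batch_size, length)
#   >>> masks = np.ones((2, 4))      # (num_filters, filter_length)
#   >>> function([], conv1d(seqs, masks))().shape
#   (3, 2, 7)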
def pad_to_a_multiple(tensor_, k, pad_with):
"""Pad a tensor to make its first dimension a multiple of a number.
Parameters
----------
tensor_ : :class:`~theano.Variable`
k : int
        The number that the first dimension of the tensor is padded to be a multiple of.
pad_with : float or int
The value for padding.
"""
new_length = (
tensor.ceil(tensor_.shape[0].astype('float32') / k) * k).astype('int64')
new_shape = tensor.set_subtensor(tensor_.shape[:1], new_length)
canvas = tensor.alloc(pad_with, tensor.prod(new_shape)).reshape(
new_shape, ndim=tensor_.ndim)
return tensor.set_subtensor(canvas[:tensor_.shape[0]], tensor_)
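# Illustrative check for pad_to_a_multiple (not part of the original module):
# padding a (5, 2) constant so its first dimension becomes a multiple of 4
# gives shape (8, 2), with the new rows filled with the pad value.
#   >>> import numpy as np
#   >>> from theano import function
#   >>> x = tensor.constant(np.ones((5, 2)))
#   >>> function([], pad_to_a_multiple(x, 4, 0.))().shape
#   (8, 2)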
| rizar/attention-lvcsr | lvsr/expressions.py | Python | mit | 2,325 | 0.00129 |
from pandas import compat
import sys
import itertools
import functools
import numpy as np
from pandas.core.common import isnull, notnull, _values_from_object, is_float
import pandas.core.common as com
import pandas.lib as lib
import pandas.algos as algos
import pandas.hashtable as _hash
import pandas.tslib as tslib
from pandas.compat import builtins
try:
import bottleneck as bn
_USE_BOTTLENECK = True
except ImportError: # pragma: no cover
_USE_BOTTLENECK = False
class disallow(object):
def __init__(self, *dtypes):
super(disallow, self).__init__()
self.dtypes = tuple(np.dtype(dtype).type for dtype in dtypes)
def check(self, obj):
return hasattr(obj, 'dtype') and issubclass(obj.dtype.type,
self.dtypes)
def __call__(self, f):
@functools.wraps(f)
def _f(*args, **kwargs):
obj_iter = itertools.chain(args, compat.itervalues(kwargs))
if any(self.check(obj) for obj in obj_iter):
raise TypeError('reduction operation {0!r} not allowed for '
'this dtype'.format(f.__name__.replace('nan',
'')))
return f(*args, **kwargs)
return _f
class bottleneck_switch(object):
def __init__(self, zero_value=None, **kwargs):
self.zero_value = zero_value
self.kwargs = kwargs
def __call__(self, alt):
bn_name = alt.__name__
try:
bn_func = getattr(bn, bn_name)
except (AttributeError, NameError): # pragma: no cover
bn_func = None
@functools.wraps(alt)
def f(values, axis=None, skipna=True, **kwds):
if len(self.kwargs) > 0:
for k, v in compat.iteritems(self.kwargs):
if k not in kwds:
kwds[k] = v
try:
if self.zero_value is not None and values.size == 0:
if values.ndim == 1:
return 0
else:
result_shape = (values.shape[:axis] +
values.shape[axis + 1:])
result = np.empty(result_shape)
result.fill(0)
return result
if _USE_BOTTLENECK and skipna and _bn_ok_dtype(values.dtype):
result = bn_func(values, axis=axis, **kwds)
# prefer to treat inf/-inf as NA, but must compute the func
# twice :(
if _has_infs(result):
result = alt(values, axis=axis, skipna=skipna, **kwds)
else:
result = alt(values, axis=axis, skipna=skipna, **kwds)
except Exception:
result = alt(values, axis=axis, skipna=skipna, **kwds)
return result
return f
def _bn_ok_dtype(dt):
# Bottleneck chokes on datetime64
time_types = np.datetime64, np.timedelta64
return dt != np.object_ and not issubclass(dt.type, time_types)
def _has_infs(result):
if isinstance(result, np.ndarray):
if result.dtype == 'f8':
return lib.has_infs_f8(result)
elif result.dtype == 'f4':
return lib.has_infs_f4(result)
return False
return np.isinf(result) or np.isneginf(result)
def _get_fill_value(dtype, fill_value=None, fill_value_typ=None):
""" return the correct fill value for the dtype of the values """
if fill_value is not None:
return fill_value
if _na_ok_dtype(dtype):
if fill_value_typ is None:
return np.nan
else:
if fill_value_typ == '+inf':
return np.inf
else:
return -np.inf
else:
if fill_value_typ is None:
return tslib.iNaT
else:
if fill_value_typ == '+inf':
# need the max int here
return np.iinfo(np.int64).max
else:
return tslib.iNaT
def _get_values(values, skipna, fill_value=None, fill_value_typ=None,
isfinite=False, copy=True):
""" utility to get the values view, mask, dtype
if necessary copy and mask using the specified fill_value
copy = True will force the copy """
values = _values_from_object(values)
if isfinite:
mask = _isfinite(values)
else:
mask = isnull(values)
dtype = values.dtype
dtype_ok = _na_ok_dtype(dtype)
# get our fill value (in case we need to provide an alternative
# dtype for it)
fill_value = _get_fill_value(dtype, fill_value=fill_value,
fill_value_typ=fill_value_typ)
if skipna:
if copy:
values = values.copy()
if dtype_ok:
np.putmask(values, mask, fill_value)
# promote if needed
else:
values, changed = com._maybe_upcast_putmask(values, mask,
fill_value)
elif copy:
values = values.copy()
values = _view_if_needed(values)
return values, mask, dtype
def _isfinite(values):
if issubclass(values.dtype.type, (np.timedelta64, np.datetime64)):
return isnull(values)
    elif issubclass(values.dtype.type, np.object_):
return -np.isfinite(values.astype('float64'))
return -np.isfinite(values)
def _na_ok_dtype(dtype):
return not issubclass(dtype.type, (np.integer, np.datetime64,
np.timedelta64))
def _view_if_needed(values):
if issubclass(values.dtype.type, (np.datetime64, np.timedelta64)):
return values.view(np.int64)
return values
def _wrap_results(result, dtype):
""" wrap our results if needed """
if issubclass(dtype.type, np.datetime64):
if not isinstance(result, np.ndarray):
result = lib.Timestamp(result)
else:
result = result.view(dtype)
elif issubclass(dtype.type, np.timedelta64):
if not isinstance(result, np.ndarray):
            # this is a scalar timedelta result!
            # let Series do the conversion, then take the element (scalar),
            # as Series will do the right thing in py3 (and deal with a numpy
            # 1.6.2 bug where the result dtype becomes timedelta64[us])
from pandas import Series
            # coerce float results to int before constructing the Series
if is_float(result):
result = int(result)
result = Series([result], dtype='timedelta64[ns]')
else:
result = result.view(dtype)
return result
def nanany(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, False, copy=skipna)
return values.any(axis)
def nanall(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, True, copy=skipna)
return values.all(axis)
@disallow('M8')
@bottleneck_switch(zero_value=0)
def nansum(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, 0)
the_sum = values.sum(axis)
the_sum = _maybe_null_out(the_sum, axis, mask)
return the_sum
@disallow('M8')
@bottleneck_switch()
def nanmean(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, 0)
the_sum = _ensure_numeric(values.sum(axis))
count = _get_counts(mask, axis)
if axis is not None:
the_mean = the_sum / count
ct_mask = count == 0
if ct_mask.any():
the_mean[ct_mask] = np.nan
else:
the_mean = the_sum / count if count > 0 else np.nan
return _wrap_results(the_mean, dtype)
@disallow('M8')
@bottleneck_switch()
def nanmedian(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna)
def get_median(x):
mask = notnull(x)
if not skipna and not mask.all():
return np.nan
return algos.median(_values_from_object(x[mask]))
if values.dtype != np.float64:
values = values.astype('f8')
notempty = values.size
# an array from a frame
if values.ndim > 1:
# there's a non-empty array to apply over otherwise numpy raises
if notempty:
return np.apply_along_axis(get_median, axis, values)
# must return the correct shape, but median is not defined for the
# empty set so return nans of shape "everything but the passed axis"
# since "axis" is where the reduction would occur if we had a nonempty
# array
shp = np.array(values.shape)
dims = np.arange(values.ndim)
ret = np.empty(shp[dims != axis])
ret.fill(np.nan)
return ret
# otherwise return a scalar value
return _wrap_results(get_median(values), dtype) if notempty else np.nan
@disallow('M8')
@bottleneck_switch(ddof=1)
def nanvar(values, axis=None, skipna=True, ddof=1):
    if not issubclass(values.dtype.type, np.floating):
values = values.astype('f8')
mask = isnull(values)
if axis is not None:
count = (values.shape[axis] - mask.sum(axis)).astype(float)
else:
count = float(values.size - mask.sum())
d = count-ddof
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
# always return NaN, never inf
if np.isscalar(count):
if count <= ddof:
count = np.nan
d = np.nan
else:
mask = count <= ddof
if mask.any():
np.putmask(d, mask, np.nan)
np.putmask(count, mask, np.nan)
X = _ensure_numeric(values.sum(axis))
XX = _ensure_numeric((values ** 2).sum(axis))
return np.fabs((XX - X ** 2 / count) / d)
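# Worked check of the shortcut formula above (illustrative, not part of the
# original module): for values = [1., 2., 3., 4.] with ddof=1 and no NaNs,
# X = 10, XX = 30, count = 4 and d = 3, so (XX - X ** 2 / count) / d
# = (30 - 25) / 3 = 5 / 3 ~= 1.667, matching np.var([1., 2., 3., 4.], ddof=1).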
@bottleneck_switch()
def nanmin(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, fill_value_typ='+inf')
# numpy 1.6.1 workaround in Python 3.x
if (values.dtype == np.object_ and compat.PY3):
if values.ndim > 1:
apply_ax = axis if axis is not None else 0
result = np.apply_along_axis(builtins.min, apply_ax, values)
else:
try:
result = builtins.min(values)
except:
result = np.nan
else:
if ((axis is not None and values.shape[axis] == 0)
or values.size == 0):
try:
result = com.ensure_float(values.sum(axis))
result.fill(np.nan)
except:
result = np.nan
else:
result = values.min(axis)
result = _wrap_results(result, dtype)
return _maybe_null_out(result, axis, mask)
@bottleneck_switch()
def nanmax(values, axis=None, skipna=True):
values, mask, dtype = _get_values(values, skipna, fill_value_typ='-inf')
# numpy 1.6.1 workaround in Python 3.x
if (values.dtype == np.object_ and compat.PY3):
if values.ndim > 1:
apply_ax = axis if axis is not None else 0
result = np.apply_along_axis(builtins.max, apply_ax, values)
else:
try:
result = builtins.max(values)
except:
result = np.nan
else:
if ((axis is not None and values.shape[axis] == 0)
or values.size == 0):
try:
result = com.ensure_float(values.sum(axis))
result.fill(np.nan)
except:
result = np.nan
else:
result = values.max(axis)
result = _wrap_results(result, dtype)
return _maybe_null_out(result, axis, mask)
def nanargmax(values, axis=None, skipna=True):
"""
Returns -1 in the NA case
"""
values, mask, dtype = _get_values(values, skipna, fill_value_typ='-inf',
isfinite=True)
result = values.argmax(axis)
result = _maybe_arg_null_out(result, axis, mask, skipna)
return result
def nanargmin(values, axis=None, skipna=True):
"""
Returns -1 in the NA case
"""
values, mask, dtype = _get_values(values, skipna, fill_value_typ='+inf',
isfinite=True)
result = values.argmin(axis)
result = _maybe_arg_null_out(result, axis, mask, skipna)
return result
@disallow('M8')
def nanskew(values, axis=None, skipna=True):
    if not issubclass(values.dtype.type, np.floating):
values = values.astype('f8')
mask = isnull(values)
count = _get_counts(mask, axis)
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
A = values.sum(axis) / count
B = (values ** 2).sum(axis) / count - A ** 2
C = (values ** 3).sum(axis) / count - A ** 3 - 3 * A * B
# floating point error
B = _zero_out_fperr(B)
C = _zero_out_fperr(C)
result = ((np.sqrt((count ** 2 - count)) * C) /
((count - 2) * np.sqrt(B) ** 3))
if isinstance(result, np.ndarray):
result = np.where(B == 0, 0, result)
result[count < 3] = np.nan
return result
else:
result = 0 if B == 0 else result
if count < 3:
return np.nan
return result
@disallow('M8')
def nankurt(values, axis=None, skipna=True):
    if not issubclass(values.dtype.type, np.floating):
values = values.astype('f8')
mask = isnull(values)
count = _get_counts(mask, axis)
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
A = values.sum(axis) / count
B = (values ** 2).sum(axis) / count - A ** 2
C = (values ** 3).sum(axis) / count - A ** 3 - 3 * A * B
D = (values ** 4).sum(axis) / count - A ** 4 - 6 * B * A * A - 4 * C * A
B = _zero_out_fperr(B)
C = _zero_out_fperr(C)
D = _zero_out_fperr(D)
result = (((count * count - 1.) * D / (B * B) - 3 * ((count - 1.) ** 2)) /
((count - 2.) * (count - 3.)))
if isinstance(result, np.ndarray):
result = np.where(B == 0, 0, result)
result[count < 4] = np.nan
return result
else:
result = 0 if B == 0 else result
if count < 4:
return np.nan
return result
@disallow('M8')
def nanprod(values, axis=None, skipna=True):
mask = isnull(values)
if skipna and not issubclass(values.dtype.type, np.integer):
values = values.copy()
values[mask] = 1
result = values.prod(axis)
return _maybe_null_out(result, axis, mask)
def _maybe_arg_null_out(result, axis, mask, skipna):
# helper function for nanargmin/nanargmax
if axis is None:
if skipna:
if mask.all():
result = -1
else:
if mask.any():
result = -1
else:
if skipna:
na_mask = mask.all(axis)
else:
na_mask = mask.any(axis)
if na_mask.any():
result[na_mask] = -1
return result
def _get_counts(mask, axis):
if axis is not None:
count = (mask.shape[axis] - mask.sum(axis)).astype(float)
else:
count = float(mask.size - mask.sum())
return count
def _maybe_null_out(result, axis, mask):
if axis is not None:
null_mask = (mask.shape[axis] - mask.sum(axis)) == 0
if null_mask.any():
result = result.astype('f8')
result[null_mask] = np.nan
else:
null_mask = mask.size - mask.sum()
if null_mask == 0:
result = np.nan
return result
def _zero_out_fperr(arg):
if isinstance(arg, np.ndarray):
return np.where(np.abs(arg) < 1e-14, 0, arg)
else:
return 0 if np.abs(arg) < 1e-14 else arg
@disallow('M8')
def nancorr(a, b, method='pearson', min_periods=None):
"""
a, b: ndarrays
"""
if len(a) != len(b):
raise AssertionError('Operands to nancorr must have same size')
if min_periods is None:
min_periods = 1
valid = notnull(a) & notnull(b)
if not valid.all():
a = a[valid]
b = b[valid]
if len(a) < min_periods:
return np.nan
f = get_corr_func(method)
return f(a, b)
def get_corr_func(method):
if method in ['kendall', 'spearman']:
from scipy.stats import kendalltau, spearmanr
def _pearson(a, b):
return np.corrcoef(a, b)[0, 1]
def _kendall(a, b):
rs = kendalltau(a, b)
if isinstance(rs, tuple):
return rs[0]
return rs
def _spearman(a, b):
return spearmanr(a, b)[0]
_cor_methods = {
'pearson': _pearson,
'kendall': _kendall,
'spearman': _spearman
}
return _cor_methods[method]
@disallow('M8')
def nancov(a, b, min_periods=None):
if len(a) != len(b):
raise AssertionError('Operands to nancov must have same size')
if min_periods is None:
min_periods = 1
valid = notnull(a) & notnull(b)
if not valid.all():
a = a[valid]
b = b[valid]
if len(a) < min_periods:
return np.nan
return np.cov(a, b)[0, 1]
def _ensure_numeric(x):
if isinstance(x, np.ndarray):
if x.dtype == np.object_:
x = x.astype(np.float64)
elif not (com.is_float(x) or com.is_integer(x) or com.is_complex(x)):
try:
x = float(x)
except Exception:
try:
x = complex(x)
except Exception:
raise TypeError('Could not convert %s to numeric' % str(x))
return x
# NA-friendly array comparisons
import operator
def make_nancomp(op):
def f(x, y):
xmask = isnull(x)
ymask = isnull(y)
mask = xmask | ymask
result = op(x, y)
if mask.any():
if result.dtype == np.bool_:
result = result.astype('O')
np.putmask(result, mask, np.nan)
return result
return f
nangt = make_nancomp(operator.gt)
nange = make_nancomp(operator.ge)
nanlt = make_nancomp(operator.lt)
nanle = make_nancomp(operator.le)
naneq = make_nancomp(operator.eq)
nanne = make_nancomp(operator.ne)
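# Illustrative behaviour of the generated comparators (not part of the
# original module): when either operand holds NaN, the boolean result is
# upcast to object dtype and the masked slots are replaced with NaN, e.g.
#   >>> nangt(np.array([1., np.nan]), np.array([0., 0.]))
#   array([True, nan], dtype=object)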
def unique1d(values):
"""
Hash table-based unique
"""
if np.issubdtype(values.dtype, np.floating):
table = _hash.Float64HashTable(len(values))
uniques = np.array(table.unique(com._ensure_float64(values)),
dtype=np.float64)
elif np.issubdtype(values.dtype, np.datetime64):
table = _hash.Int64HashTable(len(values))
uniques = table.unique(com._ensure_int64(values))
uniques = uniques.view('M8[ns]')
elif np.issubdtype(values.dtype, np.integer):
table = _hash.Int64HashTable(len(values))
uniques = table.unique(com._ensure_int64(values))
else:
table = _hash.PyObjectHashTable(len(values))
uniques = table.unique(com._ensure_object(values))
return uniques
| alephu5/Soundbyte | environment/lib/python3.3/site-packages/pandas/core/nanops.py | Python | gpl-3.0 | 18,949 | 0.000422 |
from uhashring.ring import HashRing
__all__ = ["HashRing", "monkey"]
| ultrabug/uhashring | uhashring/__init__.py | Python | bsd-3-clause | 70 | 0 |
#!/usr/bin/env python3
import connexion
if __name__ == '__main__':
app = connexion.App(__name__, specification_dir='./swagger/')
app.add_api('swagger.yaml', arguments={'title': 'A second cut at writing the code initial formed in web.py for the PiMoroni UnicornHAT\n'})
app.run(port=8080)
| dthoreau/rpi_rally | services/daemons/uniserve-flask/app.py | Python | mit | 302 | 0.003311 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2020 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Santiago Dueñas <sduenas@bitergia.com>
# Stephan Barth <stephan.barth@gmail.com>
# Valerio Cosentino <valcos@bitergia.com>
# Miguel Ángel Fernández <mafesan@bitergia.com>
# Harshal Mittal <harshalmittal4@gmail.com>
#
import copy
import datetime
import dateutil.tz
import httpretty
import os
import pkg_resources
import time
import unittest
import unittest.mock
import warnings
import requests
pkg_resources.declare_namespace('perceval.backends')
from perceval.backend import BackendCommandArgumentParser
from perceval.errors import RateLimitError, RepositoryError
from perceval.utils import DEFAULT_DATETIME
from perceval.backends.core.meetup import (Meetup,
MeetupCommand,
MeetupClient,
MIN_RATE_LIMIT)
from base import TestCaseBackendArchive
warnings.filterwarnings("ignore")
MEETUP_URL = 'https://api.meetup.com'
MEETUP_GROUP_URL = MEETUP_URL + '/sqlpass-es'
MEETUP_EVENTS_URL = MEETUP_GROUP_URL + '/events'
MEETUP_EVENT_1_URL = MEETUP_EVENTS_URL + '/1'
MEETUP_EVENT_2_URL = MEETUP_EVENTS_URL + '/2'
MEETUP_EVENT_3_URL = MEETUP_EVENTS_URL + '/3'
MEETUP_EVENT_1_COMMENTS_URL = MEETUP_EVENT_1_URL + '/comments'
MEETUP_EVENT_2_COMMENTS_URL = MEETUP_EVENT_2_URL + '/comments'
MEETUP_EVENT_3_COMMENTS_URL = MEETUP_EVENT_3_URL + '/comments'
MEETUP_EVENT_1_RSVPS_URL = MEETUP_EVENT_1_URL + '/rsvps'
MEETUP_EVENT_2_RSVPS_URL = MEETUP_EVENT_2_URL + '/rsvps'
MEETUP_EVENT_3_RSVPS_URL = MEETUP_EVENT_3_URL + '/rsvps'
MEETUP_COMMENTS_URL = [
MEETUP_EVENT_1_COMMENTS_URL,
MEETUP_EVENT_2_COMMENTS_URL,
MEETUP_EVENT_3_COMMENTS_URL
]
MEETUP_RSVPS_URL = [
MEETUP_EVENT_1_RSVPS_URL,
MEETUP_EVENT_2_RSVPS_URL,
MEETUP_EVENT_3_RSVPS_URL
]
def read_file(filename, mode='r'):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), filename), mode) as f:
content = f.read()
return content
def setup_http_server(rate_limit=-1, reset_rate_limit=-1):
"""Setup a mock HTTP server"""
http_requests = []
events_bodies = [
read_file('data/meetup/meetup_events.json', 'rb'),
read_file('data/meetup/meetup_events_next.json', 'rb')
]
events_range_body = read_file('data/meetup/meetup_events_range.json', 'rb')
events_empty_body = read_file('data/meetup/meetup_events_empty.json', 'rb')
event_comments_body = read_file('data/meetup/meetup_comments.json', 'rb')
event_rsvps_body = read_file('data/meetup/meetup_rsvps.json', 'rb')
def request_callback(method, uri, headers, too_many_requests=False):
last_request = httpretty.last_request()
if uri.startswith(MEETUP_EVENT_1_COMMENTS_URL):
body = event_comments_body
elif uri.startswith(MEETUP_EVENT_2_COMMENTS_URL):
body = event_comments_body
elif uri.startswith(MEETUP_EVENT_3_COMMENTS_URL):
body = event_comments_body
elif uri.startswith(MEETUP_EVENT_1_RSVPS_URL):
body = event_rsvps_body
elif uri.startswith(MEETUP_EVENT_2_RSVPS_URL):
body = event_rsvps_body
elif uri.startswith(MEETUP_EVENT_3_RSVPS_URL):
body = event_rsvps_body
elif uri.startswith(MEETUP_EVENTS_URL):
params = last_request.querystring
scroll = params.get('scroll', None)
if scroll and scroll[0] == 'since:2016-09-25T00:00:00.000Z':
# Last events and no pagination
body = events_bodies[-1]
elif scroll and scroll[0] == 'since:2016-04-08T00:00:00.000Z':
body = events_range_body
elif scroll and scroll[0] == 'since:2017-01-01T00:00:00.000Z':
body = events_empty_body
else:
body = events_bodies.pop(0)
if events_bodies:
# Mock the 'Link' header with a fake URL
headers['Link'] = '<' + MEETUP_EVENTS_URL + '>; rel="next"'
if rate_limit != -1:
headers['X-RateLimit-Remaining'] = str(rate_limit)
if reset_rate_limit != -1:
headers['X-RateLimit-Reset'] = str(reset_rate_limit)
else:
            raise ValueError("unexpected URI in mock server: " + uri)
if rate_limit == -1:
headers['X-RateLimit-Remaining'] = '10000000'
if reset_rate_limit == -1:
headers['X-RateLimit-Reset'] = '0'
http_requests.append(last_request)
return (200, headers, body)
httpretty.register_uri(httpretty.GET,
MEETUP_EVENTS_URL,
responses=[
httpretty.Response(body=request_callback)
for _ in range(2)
])
for url in MEETUP_COMMENTS_URL:
httpretty.register_uri(httpretty.GET,
url,
responses=[
httpretty.Response(body=request_callback)
])
for url in MEETUP_RSVPS_URL:
httpretty.register_uri(httpretty.GET,
url,
responses=[
httpretty.Response(body=request_callback)
])
return http_requests
class MockedMeetupClient(MeetupClient):
"""Mocked meetup client for testing"""
def __init__(self, token, max_items, min_rate_to_sleep, sleep_for_rate):
super().__init__(token, max_items=max_items,
min_rate_to_sleep=min_rate_to_sleep,
sleep_for_rate=sleep_for_rate)
self.rate_limit_reset_ts = -1
class TestMeetupBackend(unittest.TestCase):
"""Meetup backend tests"""
def setUp(self):
warnings.simplefilter("ignore")
def test_initialization(self):
"""Test whether attributes are initialized"""
meetup = Meetup('mygroup', 'aaaa', max_items=5, tag='test',
sleep_for_rate=True, min_rate_to_sleep=10, sleep_time=60)
self.assertEqual(meetup.origin, 'https://meetup.com/')
self.assertEqual(meetup.tag, 'test')
self.assertEqual(meetup.group, 'mygroup')
self.assertEqual(meetup.max_items, 5)
self.assertIsNone(meetup.client)
self.assertTrue(meetup.ssl_verify)
# When tag is empty or None it will be set to
# the value in URL
meetup = Meetup('mygroup', 'aaaa', ssl_verify=False)
self.assertEqual(meetup.origin, 'https://meetup.com/')
self.assertEqual(meetup.tag, 'https://meetup.com/')
self.assertFalse(meetup.ssl_verify)
meetup = Meetup('mygroup', 'aaaa', tag='')
self.assertEqual(meetup.origin, 'https://meetup.com/')
self.assertEqual(meetup.tag, 'https://meetup.com/')
def test_has_archiving(self):
"""Test if it returns True when has_archiving is called"""
self.assertTrue(Meetup.has_archiving())
def test_has_resuming(self):
"""Test if it returns True when has_resuming is called"""
self.assertTrue(Meetup.has_resuming())
@httpretty.activate
def test_fetch(self):
"""Test whether it fetches a set of events"""
http_requests = setup_http_server()
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=None)]
expected = [('1', '0d07fe36f994a6c78dfcf60fb73674bcf158cb5a', 1460065164.0, 2, 3),
('2', '24b47b622eb33965676dd951b18eea7689b1d81c', 1465503498.0, 2, 3),
('3', 'a42b7cf556c17b17f05b951e2eb5e07a7cb0a731', 1474842748.0, 2, 3)]
self.assertEqual(len(events), len(expected))
for x in range(len(events)):
event = events[x]
expc = expected[x]
self.assertEqual(event['data']['id'], expc[0])
self.assertEqual(event['uuid'], expc[1])
self.assertEqual(event['origin'], 'https://meetup.com/')
self.assertEqual(event['updated_on'], expc[2])
self.assertEqual(event['category'], 'event')
self.assertEqual(event['tag'], 'https://meetup.com/')
self.assertEqual(event['classified_fields_filtered'], None)
self.assertIn('topics', event['data']['group'])
self.assertEqual(len(event['data']['comments']), expc[3])
self.assertEqual(len(event['data']['rsvps']), expc[4])
# Check requests
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
]
self.assertEqual(len(http_requests), len(expected))
for i in range(len(expected)):
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[i].headers._headers)
self.assertDictEqual(http_requests[i].querystring, expected[i])
@httpretty.activate
def test_fetch_from_date(self):
"""Test whether if fetches a set of events from the given date"""
http_requests = setup_http_server()
from_date = datetime.datetime(2016, 9, 25)
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=from_date)]
expected = [('3', 'a42b7cf556c17b17f05b951e2eb5e07a7cb0a731', 1474842748.0, 2, 3)]
self.assertEqual(len(events), len(expected))
for x in range(len(events)):
event = events[x]
expc = expected[x]
self.assertEqual(event['data']['id'], expc[0])
self.assertEqual(event['uuid'], expc[1])
self.assertEqual(event['origin'], 'https://meetup.com/')
self.assertEqual(event['updated_on'], expc[2])
self.assertEqual(event['category'], 'event')
self.assertEqual(event['tag'], 'https://meetup.com/')
self.assertEqual(event['classified_fields_filtered'], None)
self.assertEqual(len(event['data']['comments']), expc[3])
self.assertEqual(len(event['data']['rsvps']), expc[4])
# Check requests
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:2016-09-25T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
]
self.assertEqual(len(http_requests), len(expected))
for i in range(len(expected)):
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[i].headers._headers)
self.assertDictEqual(http_requests[i].querystring, expected[i])
@httpretty.activate
def test_fetch_to_date(self):
"""Test whether if fetches a set of events updated before the given date"""
http_requests = setup_http_server()
to_date = datetime.datetime(2016, 9, 25)
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(to_date=to_date)]
expected = [('1', '0d07fe36f994a6c78dfcf60fb73674bcf158cb5a', 1460065164.0, 2, 3),
('2', '24b47b622eb33965676dd951b18eea7689b1d81c', 1465503498.0, 2, 3)]
self.assertEqual(len(events), len(expected))
for x in range(len(events)):
event = events[x]
expc = expected[x]
self.assertEqual(event['data']['id'], expc[0])
self.assertEqual(event['uuid'], expc[1])
self.assertEqual(event['origin'], 'https://meetup.com/')
self.assertEqual(event['updated_on'], expc[2])
self.assertEqual(event['category'], 'event')
self.assertEqual(event['tag'], 'https://meetup.com/')
self.assertEqual(event['classified_fields_filtered'], None)
self.assertEqual(len(event['data']['comments']), expc[3])
self.assertEqual(len(event['data']['rsvps']), expc[4])
# Check requests
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
]
self.assertEqual(len(http_requests), len(expected))
for i in range(len(expected)):
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[i].headers._headers)
self.assertDictEqual(http_requests[i].querystring, expected[i])
@httpretty.activate
def test_fetch_date_range(self):
"""Test whether if fetches a set of events updated withing the given range"""
http_requests = setup_http_server()
from_date = datetime.datetime(2016, 4, 8)
to_date = datetime.datetime(2016, 9, 25)
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=from_date,
to_date=to_date)]
self.assertEqual(len(events), 1)
event = events[0]
self.assertEqual(event['data']['id'], '2')
self.assertEqual(event['uuid'], '24b47b622eb33965676dd951b18eea7689b1d81c')
self.assertEqual(event['origin'], 'https://meetup.com/')
self.assertEqual(event['updated_on'], 1465503498.0)
self.assertEqual(event['category'], 'event')
self.assertEqual(event['tag'], 'https://meetup.com/')
self.assertEqual(event['classified_fields_filtered'], None)
self.assertEqual(len(event['data']['comments']), 2)
self.assertEqual(len(event['data']['rsvps']), 3)
# Check requests
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:2016-04-08T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
]
self.assertEqual(len(http_requests), len(expected))
for i in range(len(expected)):
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[i].headers._headers)
self.assertDictEqual(http_requests[i].querystring, expected[i])
@httpretty.activate
def test_fetch_filtering_classified_fields(self):
"""Test it it removes classified fields from a set of fetched items"""
http_requests = setup_http_server()
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=None, filter_classified=True)]
expected = [('1', '0d07fe36f994a6c78dfcf60fb73674bcf158cb5a', 1460065164.0, 2, 3),
('2', '24b47b622eb33965676dd951b18eea7689b1d81c', 1465503498.0, 2, 3),
('3', 'a42b7cf556c17b17f05b951e2eb5e07a7cb0a731', 1474842748.0, 2, 3)]
self.assertEqual(len(events), len(expected))
for x in range(len(events)):
event = events[x]
expc = expected[x]
self.assertEqual(event['data']['id'], expc[0])
self.assertEqual(event['uuid'], expc[1])
self.assertEqual(event['origin'], 'https://meetup.com/')
self.assertEqual(event['updated_on'], expc[2])
self.assertEqual(event['category'], 'event')
self.assertEqual(event['tag'], 'https://meetup.com/')
self.assertEqual(len(event['data']['comments']), expc[3])
# Check classified items filtering
self.assertEqual(event['classified_fields_filtered'],
['group.topics', 'event_hosts', 'rsvps', 'venue'])
self.assertNotIn('topics', event['data']['group'])
self.assertNotIn('event_hosts', event['data'])
self.assertNotIn('rsvps', event['data'])
self.assertNotIn('venue', event['data'])
# Check requests
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
},
{
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z']
},
{
'page': ['2']
},
{
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
]
self.assertEqual(len(http_requests), len(expected))
for i in range(len(expected)):
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[i].headers._headers)
self.assertDictEqual(http_requests[i].querystring, expected[i])
@httpretty.activate
def test_search_fields(self):
"""Test whether the search_fields is properly set"""
http_requests = setup_http_server()
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=None)]
event = events[0]
self.assertEqual(meetup.metadata_id(event['data']), event['search_fields']['item_id'])
self.assertEqual(event['data']['group']['name'], 'sqlpass.es')
self.assertEqual(event['data']['group']['name'], event['search_fields']['group_name'])
self.assertEqual(event['data']['group']['id'], 19734270)
self.assertEqual(event['data']['group']['id'], event['search_fields']['group_id'])
event = events[1]
self.assertEqual(meetup.metadata_id(event['data']), event['search_fields']['item_id'])
self.assertEqual(event['data']['group']['name'], 'sqlpass.es')
self.assertEqual(event['data']['group']['name'], event['search_fields']['group_name'])
self.assertEqual(event['data']['group']['id'], 19734270)
self.assertEqual(event['data']['group']['id'], event['search_fields']['group_id'])
event = events[2]
self.assertEqual(meetup.metadata_id(event['data']), event['search_fields']['item_id'])
self.assertEqual(event['data']['group']['name'], 'sqlpass.es')
self.assertEqual(event['data']['group']['name'], event['search_fields']['group_name'])
self.assertEqual(event['data']['group']['id'], 19734270)
self.assertEqual(event['data']['group']['id'], event['search_fields']['group_id'])
@httpretty.activate
def test_fetch_empty(self):
"""Test if nothing is returned when there are no events"""
http_requests = setup_http_server()
from_date = datetime.datetime(2017, 1, 1)
meetup = Meetup('sqlpass-es', 'aaaa', max_items=2)
events = [event for event in meetup.fetch(from_date=from_date)]
self.assertEqual(len(events), 0)
# Check requests
expected = {
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:2017-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
}
self.assertEqual(len(http_requests), 1)
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), http_requests[0].headers._headers)
self.assertDictEqual(http_requests[0].querystring, expected)
def test_parse_json(self):
"""Test if it parses a JSON stream"""
raw_json = read_file('data/meetup/meetup_events.json')
items = Meetup.parse_json(raw_json)
results = [item for item in items]
self.assertEqual(len(results), 2)
self.assertEqual(results[0]['id'], '1')
self.assertEqual(results[1]['id'], '2')
# Parse a file without results
raw_json = read_file('data/meetup/meetup_events_empty.json')
items = Meetup.parse_json(raw_json)
results = [item for item in items]
self.assertEqual(len(results), 0)
class TestMeetupBackendArchive(TestCaseBackendArchive):
"""Meetup backend tests using an archive"""
def setUp(self):
super().setUp()
self.backend_write_archive = Meetup('sqlpass-es', 'aaaa', max_items=2, archive=self.archive)
self.backend_read_archive = Meetup('sqlpass-es', 'bbbb', max_items=2, archive=self.archive)
@httpretty.activate
def test_fetch_from_archive(self):
"""Test whether it fetches a set of events from archive"""
setup_http_server()
self._test_fetch_from_archive()
@httpretty.activate
def test_fetch_from_date_archive(self):
"""Test whether if fetches a set of events from the given date from archive"""
setup_http_server()
from_date = datetime.datetime(2016, 9, 25)
self._test_fetch_from_archive(from_date=from_date)
@httpretty.activate
def test_fetch_to_date(self):
"""Test whether if fetches a set of events updated before the given date from archive"""
setup_http_server()
to_date = datetime.datetime(2016, 9, 25)
self._test_fetch_from_archive(to_date=to_date)
@httpretty.activate
def test_fetch_date_range_from_archive(self):
"""Test whether if fetches a set of events updated withing the given range from archive"""
setup_http_server()
from_date = datetime.datetime(2016, 4, 8)
to_date = datetime.datetime(2016, 9, 25)
self._test_fetch_from_archive(from_date=from_date, to_date=to_date)
@httpretty.activate
def test_fetch_empty(self):
"""Test if nothing is returned when there are no events in the archive"""
setup_http_server()
from_date = datetime.datetime(2017, 1, 1)
self._test_fetch_from_archive(from_date=from_date)
class TestMeetupCommand(unittest.TestCase):
"""Tests for MeetupCommand class"""
def test_backend_class(self):
"""Test if the backend class is Meetup"""
self.assertIs(MeetupCommand.BACKEND, Meetup)
def test_setup_cmd_parser(self):
"""Test if it parser object is correctly initialized"""
parser = MeetupCommand.setup_cmd_parser()
self.assertIsInstance(parser, BackendCommandArgumentParser)
self.assertEqual(parser._backend, Meetup)
args = ['sqlpass-es',
'--api-token', 'aaaa',
'--max-items', '5',
'--tag', 'test',
'--no-archive',
'--from-date', '1970-01-01',
'--to-date', '2016-01-01',
'--sleep-for-rate',
'--min-rate-to-sleep', '10',
'--sleep-time', '10',
'--filter-classified']
expected_ts = datetime.datetime(2016, 1, 1, 0, 0, 0,
tzinfo=dateutil.tz.tzutc())
parsed_args = parser.parse(*args)
self.assertEqual(parsed_args.group, 'sqlpass-es')
self.assertEqual(parsed_args.api_token, 'aaaa')
self.assertEqual(parsed_args.max_items, 5)
self.assertEqual(parsed_args.tag, 'test')
self.assertTrue(parsed_args.no_archive)
self.assertEqual(parsed_args.from_date, DEFAULT_DATETIME)
self.assertEqual(parsed_args.to_date, expected_ts)
self.assertTrue(parsed_args.sleep_for_rate)
self.assertEqual(parsed_args.min_rate_to_sleep, 10)
self.assertEqual(parsed_args.sleep_time, 10)
self.assertTrue(parsed_args.filter_classified)
self.assertTrue(parsed_args.ssl_verify)
args = ['sqlpass-es',
'--api-token', 'aaaa',
'--max-items', '5',
'--tag', 'test',
'--no-ssl-verify']
parsed_args = parser.parse(*args)
self.assertEqual(parsed_args.group, 'sqlpass-es')
self.assertEqual(parsed_args.api_token, 'aaaa')
self.assertEqual(parsed_args.max_items, 5)
self.assertEqual(parsed_args.tag, 'test')
self.assertFalse(parsed_args.ssl_verify)
class TestMeetupClient(unittest.TestCase):
"""Meetup REST API client tests.
    These tests do not check the body of the response, only whether the call
    was well formed and whether a response was obtained. Because of this, take
    into account that the body returned for each request might not
    match the parameters of the request.
"""
def test_init(self):
"""Test initialization"""
client = MeetupClient('aaaa', max_items=10)
self.assertEqual(client.api_token, 'aaaa')
self.assertEqual(client.max_items, 10)
self.assertFalse(client.sleep_for_rate)
self.assertEqual(client.min_rate_to_sleep, MIN_RATE_LIMIT)
self.assertTrue(client.ssl_verify)
client = MeetupClient('aaaa', max_items=10,
sleep_for_rate=True,
min_rate_to_sleep=4,
ssl_verify=False)
self.assertEqual(client.api_token, 'aaaa')
self.assertEqual(client.max_items, 10)
self.assertTrue(client.sleep_for_rate)
self.assertEqual(client.min_rate_to_sleep, 4)
self.assertFalse(client.ssl_verify)
# Max rate limit is never overtaken
client = MeetupClient('aaaa', max_items=10,
sleep_for_rate=True,
min_rate_to_sleep=100000000)
self.assertEqual(client.min_rate_to_sleep, client.MAX_RATE_LIMIT)
@httpretty.activate
def test_group_gone(self):
"""Test whether the group gone exception (HTTP 410) is properly handled"""
httpretty.register_uri(httpretty.GET,
MEETUP_EVENTS_URL,
body="",
status=410)
client = MeetupClient('aaaa', max_items=2)
events = client.events('sqlpass-es')
with self.assertRaises(RepositoryError):
_ = [event for event in events]
@httpretty.activate
def test_events_error(self):
"""Test whether HTTP errors different from 410 are thrown when fetching event pages"""
httpretty.register_uri(httpretty.GET,
MEETUP_EVENTS_URL,
body="",
status=401)
client = MeetupClient('aaaa', max_items=2)
events = client.events('sqlpass-es')
with self.assertRaises(requests.exceptions.HTTPError):
_ = [event for event in events]
@httpretty.activate
def test_events(self):
"""Test events API call"""
http_requests = setup_http_server()
client = MeetupClient('aaaa', max_items=2)
from_date = datetime.datetime(2016, 1, 1)
# Call API
events = client.events('sqlpass-es', from_date=from_date)
result = [event for event in events]
self.assertEqual(len(result), 2)
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:2016-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'order': ['updated'],
'page': ['2'],
'scroll': ['since:2016-01-01T00:00:00.000Z']
}
]
self.assertEqual(len(http_requests), 2)
for x in range(0, len(http_requests)):
req = http_requests[x]
self.assertEqual(req.method, 'GET')
self.assertRegex(req.path, '/sqlpass-es/events')
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), req.headers._headers)
self.assertDictEqual(req.querystring, expected[x])
@httpretty.activate
def test_comments(self):
"""Test comments API call"""
http_requests = setup_http_server()
client = MeetupClient('aaaa', max_items=2)
# Call API
comments = client.comments('sqlpass-es', '1')
result = [comment for comment in comments]
self.assertEqual(len(result), 1)
expected = {
'page': ['2']
}
self.assertEqual(len(http_requests), 1)
req = http_requests[0]
self.assertEqual(req.method, 'GET')
self.assertRegex(req.path, '/sqlpass-es/events/1/comments')
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), req.headers._headers)
self.assertDictEqual(req.querystring, expected)
@httpretty.activate
def test_rsvps(self):
"""Test rsvps API call"""
http_requests = setup_http_server()
client = MeetupClient('aaaa', max_items=2)
# Call API
rsvps = client.rsvps('sqlpass-es', '1')
result = [rsvp for rsvp in rsvps]
self.assertEqual(len(result), 1)
expected = {
'fields': ['attendance_status'],
'page': ['2'],
'response': ['yes,no']
}
self.assertEqual(len(http_requests), 1)
req = http_requests[0]
self.assertEqual(req.method, 'GET')
self.assertRegex(req.path, '/sqlpass-es/events/1/rsvps')
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), req.headers._headers)
self.assertDictEqual(req.querystring, expected)
def test_calculate_time_to_reset(self):
"""Test whether the time to reset is zero if the sleep time is negative"""
client = MockedMeetupClient('aaaa',
max_items=2,
min_rate_to_sleep=2,
sleep_for_rate=True)
time_to_reset = client.calculate_time_to_reset()
self.assertEqual(time_to_reset, 0)
@httpretty.activate
def test_sleep_for_rate(self):
""" Test if the clients sleeps when the rate limit is reached"""
wait_to_reset = 1
http_requests = setup_http_server(rate_limit=0,
reset_rate_limit=wait_to_reset)
client = MeetupClient('aaaa', max_items=2,
min_rate_to_sleep=2,
sleep_for_rate=True)
# Call API
before = float(time.time())
events = client.events('sqlpass-es')
results = [event for event in events]
after = float(time.time())
diff = after - before
self.assertGreaterEqual(diff, wait_to_reset)
self.assertEqual(len(results), 2)
expected = [
{
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
},
{
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z']
}
]
self.assertEqual(len(http_requests), 2)
for x in range(0, len(http_requests)):
req = http_requests[x]
self.assertEqual(req.method, 'GET')
self.assertRegex(req.path, '/sqlpass-es/events')
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), req.headers._headers)
self.assertDictEqual(req.querystring, expected[x])
@httpretty.activate
def test_rate_limit_error(self):
"""Test if a rate limit error is raised when rate is exhausted"""
http_requests = setup_http_server(rate_limit=0,
reset_rate_limit=1)
client = MeetupClient('aaaa', max_items=2)
# Call API
events = client.events('sqlpass-es')
with self.assertRaises(RateLimitError):
_ = [event for event in events]
expected = {
'fields': ['event_hosts,featured,group_topics,plain_text_description,rsvpable,series'],
'order': ['updated'],
'page': ['2'],
'scroll': ['since:1970-01-01T00:00:00.000Z'],
'status': ['cancelled,upcoming,past,proposed,suggested']
}
self.assertEqual(len(http_requests), 1)
req = http_requests[0]
self.assertEqual(req.method, 'GET')
self.assertRegex(req.path, '/sqlpass-es/events')
self.assertIn((MeetupClient.PKEY_OAUTH2, 'Bearer aaaa'), req.headers._headers)
self.assertDictEqual(req.querystring, expected)
@httpretty.activate
def test_too_many_requests(self):
"""Test if a Retry error is raised"""
httpretty.register_uri(httpretty.GET,
MEETUP_EVENTS_URL,
status=429)
client = MeetupClient('aaaa', max_items=2, sleep_time=0.1)
start = float(time.time())
expected = start + (sum([i * client.sleep_time for i in range(client.MAX_RETRIES)]))
events = client.events('sqlpass-es')
with self.assertRaises(requests.exceptions.RetryError):
_ = [event for event in events]
end = float(time.time())
self.assertGreater(end, expected)
def test_sanitize_for_archive(self):
"""Test whether the sanitize method works properly"""
url = "http://example.com"
headers = {
MeetupClient.PKEY_OAUTH2: 'Bear aaaa'
}
payload = {
'page': 2,
'order': 'updated',
'scroll': 'since:2016-01-01T00:00:00.000Z'
}
s_url, s_headers, s_payload = MeetupClient.sanitize_for_archive(url, copy.deepcopy(headers), payload)
headers.pop(MeetupClient.PKEY_OAUTH2)
self.assertEqual(url, s_url)
self.assertEqual(headers, s_headers)
self.assertEqual(payload, s_payload)
if __name__ == "__main__":
unittest.main(warnings='ignore')
| grimoirelab/perceval | tests/test_meetup.py | Python | gpl-3.0 | 37,799 | 0.001587 |
from PIL import ImageFile, Image
class CSGOInventoryCacheFile(ImageFile.ImageFile):
format = "IIC"
format_description = "CS:GO Inventory Image Cache"
def _open(self):
self.mode = "RGBA"
self.size = 512, 384
self.tile = [
("raw", (0, 0) + self.size, 0, ("BGRA", 0, 1))
]
def convert_cache_to_image(original_location, new_location):
Image.register_open("IIC", CSGOInventoryCacheFile)
Image.register_extension("IIC", ".iic")
try:
with open(original_location, "rb") as original_img:
img = Image.open(original_img)
img.save(new_location)
except Exception as e:
raise Exception("Originating file does not exist: ", e) | nelsonw2014/CSGOInvCacheConverter | cicc/image.py | Python | mit | 729 | 0.001372 |
#! /usr/bin/env python3
# Copyright 2007 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
#
"""Classes enabling definition and composition of caches.
This file defines caches used to speed up the does-this-file-exist
test that forms the basis of the C preprocessor's include-file
handling, and takes most of its time.
When the preprocessor sees a line like "#include <foo/bar.h>" it looks
for a file named "bar.h" in many directories: /usr/include/foo/bar.h,
./foo/bar.h, and so forth. More precisely, the preprocessor is given
a "search path", which is a list of directory-names. (By default, the
search-path looks like ['/usr/include', '/usr/local/include', ...],
but it's often extended via gcc flags like -I, -isystem, -iprefix,
etc.) To resolve a single #include like "#include <foo/bar.h>", the
preprocessor goes through every directory in the search path, running
os.stat(os.path.join(current_working_dir, search_dir, 'foo/bar.h'))
until the stat call succeeds. With dozens of search-dirs to look
through, dozens of #include lines per source file, and hundreds of
source files per compilation, this can add up to millions of stat
calls. Many of these calls are exactly the same, so caching is a big
win.
The cache of stat calls takes a filename as input and produces a bool
as output, saying if the filename exists. For reasons that will
become clear in a moment, we actually represent the input filename as
a triple that breaks the filename into its three components:
1) currdir: the current working directory (usually os.path.absdir('.'))
2) searchdir: an element of the search path (eg '/usr/include', 'base')
3) includepath: the thing that comes after "#include" in source files
("foo/bar.h" in our examples above).
Why do we break the input into three parts? Consider what cache-lookups
we have to do for a single source file:
cache[os.path.join(currdir, searchdir1, includepath1)] # #include <ipath1>
cache[os.path.join(currdir, searchdir2, includepath1)] # #include <ipath1>
cache[os.path.join(currdir, searchdir3, includepath1)] # #include <ipath1>
[etc...until the cache-lookup returns True]
cache[os.path.join(currdir, searchdir1, includepath2)] # #include <ipath2>
cache[os.path.join(currdir, searchdir2, includepath2)] # #include <ipath2>
cache[os.path.join(currdir, searchdir3, includepath2)] # #include <ipath2>
[etc]
By having the key be a triple, we avoid all those unnecessary
os.path.join calls. But even if we do this, we notice bigger fish
to fry: the Python interpreter still has to do a string-hash of
currdir for every lookup, and also has to string-hash searchdirX and
includepathX many times. It would be much more efficient if we did
those hashes ourselves, reducing the number of string-hashes from
O(|search-path| * |#include lines|) to
O(|search-path| + |#include lines|).
This motivates (finally!) the data structures in this file. We have
three string-to-number maps, for mapping each currdir, searchdir, and
includepath to a small integer. We put that all together in a cache,
that takes a triple of integers as its key and produces True if the
file exists, False if it does not, or None if its status is unknown.
The String-to-number Map(s)
---------------------------
The basic map that converts a filepath-part -- a currdir, searchdir,
or includepath -- to a small integer is called MapToIndex. MapToIndex
provides mapping in both directions:
index: a dictionary mapping paths (strings) to indices in 1..N, and
string: an array of size N + 1 that implements the reverse mapping
So:
obj.string[obj.index[path_as_string]] == path_as_string
obj.index[obj.string[path_as_number]] == path_as_number
Note we map from 1..N, and not 0..N-1, which leaves us 0 free to use as
a synonym for None or False.
There are also classes that specialize MapToIndex for specific purposes.
DirectoryMapToIndex assumes the input is a directory, and in
particular a directory that does not have a slash at the end of it (eg
"/etc"). It adds the trailing slash before inserting into the map.
This is useful because it allows us to use + to join this directory
with a relative filename, rather than the slower os.path.join().
RelpathMapToIndex assumes the input is a relative filepath, that is,
one that does not start with /. When combined with DirectoryMapToIndex
entries, + can be used as a fast alternative to os.path.join().
CanonicalMapToIndex is a MapToIndex that canonicalizes its input before
inserting it into the map: resolving symlinks, getting rid of ..'s,
etc. It takes an absolute path as input.
Other Caches
------------
Besides the maps from strings to integers, there are three other caches.
One is the realpath-cache, that takes a filename and returns
os.path.realpath(filename). We cache this because os.path.realpath()
is very slow. This is called CanonicalPath.
The second cache, the DirnameCache, maps an arbitrary pathname to
dirname(pathname), that is, the directory the pathname is in. The
input pathname is represented by a (currdir_idx, searchdir_idx,
includepath_idx) triple. The output is likewise represented as a
number: an index into the DirectoryMapToIndex structure.
The third cache is called SystemdirPrefixCache. It tells you, for a
given absolute filepath, whether it is prefixed by a systemdir (that
is, one of the searchdirs that's built into cpp, such as /usr/include).
This is useful to cache because there are several systemdirs, and it's
expensive to check them all each time.
Naming Conventions
------------------
currdir: the current working dir.
searchdir: an element of the search-path (places cpp looks for .h files).
includepath: the string a source file #includes.
realpath: a full filepath with all its symlinks resolved:
os.path.realpath(os.path.join(currdir, searchdir, includepath))
FOO_idx: the small integer associated with the string FOO.
includepath_map: the map that takes includepaths to their idx and back
(a RelpathMapToIndex).
directory_map: the map that takes currdirs and searchdirs to their
idx and back. It also is used to store dirname(filepath) for arbitrary
filepaths -- basically, anything we know is a directory (a
DirectoryMapToIndex).
realpath_map: the map that takes full filepaths to their idx and back,
canonicalizing them first (by resolving symlinks) (a
CanonicalMapToIndex).
searchlist: a list of searchdirs. In gcc/cpp documentation, this is
called the "search path", but for consistency, in this code we reserve
the name "path" to mean "filesystem component," never "list of dirs".
(A list of strings).
systemdir: a searchdir that's built into cpp, rather than set via -I.
(A string.)
resolved_filepath: given an includepath, and a (possibly implicit)
currdir and searchlist, the resolved_filepath is
os.path.join(currdir, searchdir, includepath)
for the first searchdir in searchlist for which the joined string
exists. This path can be represented in many ways: 1) a string like
"foo/bar/baz.h" (if so, this string has been canonicalized to resolve
symlinks and the like); 2) an index into realpath_map associated with
that string; 3) a triple of indices; or 4) a pair of indices plus an
assumption that os.getcwd() == currdir.
Pair Representation of Filepaths
--------------------------------
A file is uniquely determined by the triple
(currdir_idx, searchdir_idx, includepath_idx)
For a single compilation unit, the code will often start with a
chdir(currdir). After that, we often refer to a file by the pair
(searchdir_idx, includepath_idx)
which might be either an absolute filename or relative to $PWD.
We refer to this pair as a filepath_pair.
TODO(csilvers): find a better name?
The function IsFilepathPair(x) tests whether x is a pair that could
plausibly have a searchdir_idx as its first element and an
includepath_idx as its second.
Tests
-----
This code is currently only tested by regression tests of modules
using this one.
"""
__author__ = "opensource@google.com (Nils Klarlund, Craig Silverstein)"
import os
import os.path
import sys
import basics
import statistics
import compiler_defaults
DIR_ARRAY_SIZE = 500
# We currently use the GNU libc stat and realpath. They are about an
# order of magnitude faster than their Python counterparts, even when
# called through the Python/C interface.
try:
import distcc_pump_c_extensions
_OsPathExists = distcc_pump_c_extensions.OsPathExists
_OsPathIsFile = distcc_pump_c_extensions.OsPathIsFile
_PathRealpath = distcc_pump_c_extensions.Realpath
_path_realpath_works = True
except ImportError:
_OsPathExists = os.path.exists
_OsPathIsFile = os.path.isfile
_PathRealpath = os.path.realpath
# os.path.realpath might have some bugs. TODO(csilvers): check that here
_path_realpath_works = False
Debug = basics.Debug
DEBUG_TRACE = basics.DEBUG_TRACE
DEBUG_TRACE1 = basics.DEBUG_TRACE1
DEBUG_TRACE2 = basics.DEBUG_TRACE2
DEBUG_WARNING = basics.DEBUG_WARNING
NotCoveredError = basics.NotCoveredError
####
#### SIMPLE CACHES
####
class CanonicalPath(object):
"""Memoizing calculation of realpaths. realpath(x) is the 'canonical'
version of x, with all symbolic links eliminated.
"""
def __init__(self):
self.cache = {}
def Canonicalize(self, filepath):
"""Find a really canonical path, possibly memoized.
Arguments:
filepath: a filepath (string)
Returns:
the realpath of filepath (string)
The following is irrelevant if we always use the distcc_pump_c_extensions
realpath function.
---
Apparently, in some versions of Python 2.4 at least, realpath does
*not* resolve the last component of a filepath if it is a link:
https://sourceforge.net/tracker/?func=detail&atid=105470&aid=1213894&group_id=5470
Make up for that: follow that final link until a real realpath has
been found.
Also, realpath is not idempotent.
Solution (?): turn filepath into abspath before applying realpath;
    then we can cache results as well (without worrying about the value of
current directory).
The final problem -- that os.path.realpath is very slow, at least
an order of magnitude slower than the gnu libc one --- is solved
through caching all uses through an object of the present class.
"""
assert isinstance(filepath, str)
try:
return self.cache[filepath]
except KeyError:
if _path_realpath_works:
r = _PathRealpath(filepath)
self.cache[filepath] = r
return r
      # Fix for os.path.realpath idempotency bug (Python 2.4).
filepath_ = os.path.abspath(filepath)
filepath_ = _PathRealpath(filepath_)
# Fix for os.path.realpath bug (Python 2.4): symlinks at end not
# resolved.
for unused_i in range(10):
if not os.path.islink(filepath_):
break
filepath_ = os.path.join(os.path.dirname(filepath_),
os.readlink(filepath_))
else:
raise NotCoveredError("Too many symlinks in '%s'." % filepath)
self.cache[filepath] = filepath_
return filepath_
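# An illustrative sketch (hypothetical, not part of the original code) of how
# the memoizing cache above is meant to be used: only the first lookup of a
# path pays the cost of the slow realpath call, later lookups hit the dict.
#
#   canonical = CanonicalPath()
#   p1 = canonical.Canonicalize("/usr/include/stdio.h")  # does the real work
#   p2 = canonical.Canonicalize("/usr/include/stdio.h")  # served from cache
#   assert p1 == p2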
class DirnameCache(object):
"""Cache the mapping from filepath pairs to index of their directory names.
The key is a triple (currdir_idx, searchdir_idx, includepath_idx). The
value is
(dir_idx, dir_realpath_idx)
where dir_idx is the index of dirname of the corresponding filepath, which
possibly is relative, and dir_realpath_idx is the realpath index of the
absolute location of the dirname. The value currdir_idx is of possible
  importance for determining dir_realpath_idx, but plays no role in determining
dir_idx."""
def __init__(self, includepath_map, directory_map, realpath_map):
"""Constructor.
Arguments:
includepath_map: the map used to construct the includepath_idx
that will be passed in as arguments to Lookup().
directory_map: the map used to construct both the currdir_idx
and searchdir_idx that will be passed in as arguments to
Lookup(). It's also the data structure that produces dir_idx.
realpath_map: a string-to-int map of canonicalized filepaths
"""
self.includepath_map = includepath_map
self.directory_map = directory_map
self.realpath_map = realpath_map
self.cache = {}
def Lookup(self, currdir_idx, searchdir_idx, includepath_idx):
"""Return the directory and realpath indices of the dirname of the input.
Arguments:
currdir_idx: the directory index of the current directory
searchdir_idx: a directory_map index
includepath_idx: an includepath index
Returns:
a pair (directory map index, realpath index)
See class documentation.
Example: if the strings of the arguments indices put together make
'/usr/include/foo/bar.h', then this routine will insert '/usr/include/foo/'
into self.directory_map, and then return the corresponding pair (directory
index of /usr/include/foo/, real path index of /usr/include/foo/). If the
arguments put together form "foo.h", then the directory index returned is
that of "", the current directory, and the realpath index is that of
currdir.
"""
try:
return self.cache[(currdir_idx, searchdir_idx, includepath_idx)]
except KeyError:
directory = os.path.dirname(os.path.join(
self.directory_map.string[searchdir_idx],
self.includepath_map.string[includepath_idx]))
dir_idx = self.directory_map.Index(directory)
rp_idx = self.realpath_map.Index(
os.path.join(self.directory_map.string[currdir_idx],
directory))
self.cache[(currdir_idx, searchdir_idx, includepath_idx)] = (dir_idx,
rp_idx)
return (dir_idx, rp_idx)
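# A hypothetical sketch of the Lookup() example from the docstring above,
# assuming the three maps have already been constructed as in SetUpCaches:
#
#   cd_idx = directory_map.Index(os.getcwd())
#   sd_idx = directory_map.Index("/usr/include")
#   ip_idx = includepath_map.Index("foo/bar.h")
#   dir_idx, rp_idx = dirname_cache.Lookup(cd_idx, sd_idx, ip_idx)
#   # directory_map.string[dir_idx] == "/usr/include/foo/"
#   # realpath_map.string[rp_idx] is the canonicalized form of that directory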
class SystemdirPrefixCache(object):
"""A cache of information about whether a file exists in a systemdir.
A systemdir is a searchdir that is built in to the C/C++
preprocessor. That is, when the preprocessor is figuring out what
directory an #include is in, these are the directories it's
hard-coded in to check (you can add other directories via -I). This
cache records, for a given filepath, whether it starts with a
systemdir. This is useful to identify whether the path is likely to
correspond to a system include-file (such as stdio.h). Such files are
unlikely to change, and are likely to already exist on the distcc
servers, both of which are useful things to know for optimization.
For speed, users can access self.cache directly, rather than going
through the StartsWithSystemdir API. Be sure to call FillCache() to
make sure the cache is populated, before accessing it!
"""
def __init__(self, systemdirs):
"""Constructor.
Argument:
systemdirs: the list of system-directories the preprocessor
uses. It's a list of strings, probably extracted from the
preprocessor itself. Each systemdir should end in a slash.
In practice, systemdirs will start empty, and later some routine
(in parse_command.py) will magically fill it. So be sure to wait
for that before calling FillCache!
TODO(csilvers): normalize this; ideally pass systemdirs in to FillCache.
"""
self.systemdirs = systemdirs
# self.cache[i] will be True, False, or None for not-yet-checked.
self.cache = [None]
def FillCache(self, realpath_map):
"""Ensures that there's a cache entry for every index in realpath_map.
Argument:
realpath_map: a string-to-int map of canonicalized filepaths we know.
After this function is called, the cache entry is True iff
realpath.startswith(systemdir) is True for any of the systemdirs
passed in to our constructor.
"""
if len(self.cache) >= realpath_map.Length():
return # we're already all full
for realpath_idx in range(len(self.cache), realpath_map.Length()):
realpath = realpath_map.string[realpath_idx]
for systemdir in self.systemdirs:
if realpath.startswith(systemdir):
self.cache.append(True)
break
else: # we get here if the for never 'break'ed
self.cache.append(False)
assert len(self.cache) == realpath_map.Length()
def StartsWithSystemdir(self, realpath_idx, realpath_map):
"""Return True iff realpath starts with a systemdir.
Arguments:
realpath_idx: the index of the realpath we want to check.
realpath_map: the map from realpath_idx to a string.
Return True iff realpath.startswith(systemdir) for any of the
systemdirs passed in to our constructor. (For speed, you can
access self.cache directly instead of calling this, but make
sure FillCache() has been called first!)
"""
self.FillCache(realpath_map)
return self.cache[realpath_idx]
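# A brief, hypothetical usage sketch mirroring the class docstring above,
# assuming realpath_map is the CanonicalMapToIndex defined further below:
#
#   prefix_cache = SystemdirPrefixCache(["/usr/include/"])
#   rp_idx = realpath_map.Index("/usr/include/stdio.h")
#   if prefix_cache.StartsWithSystemdir(rp_idx, realpath_map):
#       pass  # likely a system header: stable and probably already on servers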
####
#### MAP_TO_INDEX AND ITS SPECIALIZATIONS
####
class MapToIndex(object):
"""Maps every object it sees to a unique small integer. In
practice, this class is used to map path-components (which are strings).
"""
def __init__(self):
"""Constructor.
Instance variables:
map: a dictionary such that map[path] is the index of path
string: a list satisfying: string[i] is the path such that map[path] = i
"""
# Do not make the mistake of letting a real index be 0. (Hint:
# because "if path:" then does not distinguish between 0 and None.)
self.index = {None:None}
self.string = [None]
def _Invariant_(self):
return len(self.index) == len(self.string)
def Index(self, path):
"""Returns the index i > 0 of path."""
assert self._Invariant_()
try:
return self.index[path]
except KeyError:
self.index[path] = len(self.string)
self.string.append(path)
return len(self.string) - 1
def String(self, i):
"""Returns the path such that Index(path) == i."""
assert self._Invariant_()
assert 0 < i < self.Length()
return self.string[i]
def Length(self):
"""One more than the number of elements indexed."""
assert self._Invariant_()
return len(self.string)
class DirectoryMapToIndex(MapToIndex):
"""Like a normal MapToIndex, but assumes the keys are directories,
and in particular, directories without a trailing slash (eg "/etc").
It stores the directories in the map, but appends the trailing slash
first. This is another type of normalization, and useful for cheap
path-joining (eg using + instead of os.path.join).
"""
def Index(self, directory):
"""Return index d > 0 of normalized directory.
Argument:
      directory: a string that is empty, '/', or does not end in '/'.
    The empty string and '/' are stored unchanged; other strings are
    stored with a '/' appended.
"""
if directory != "" and directory != "/":
assert directory[-1] != '/', directory
directory = directory + '/'
return MapToIndex.Index(self, directory)
class RelpathMapToIndex(MapToIndex):
"""Like a normal MapToIndex, but assumes the keys are relative
filesystem paths, that is, filesystem paths not starting with /.
This is useful for "cheap" normalization: this invariant ensures that
os.path.join(some-directorymap-string, some-relpathmap-string) can
be implemented using +.
We actually do allow storing absolute paths if option
--unsafe_absolute_includes is in use. But, then, we're careful in Resolve
(below) to bail out.
"""
def Index(self, relpath, ignore_absolute_path_warning=False):
"""Return index d > 0 of relative path.
Args:
      relpath: a string not starting with /.
ignore_absolute_path_warning: a Boolean
The variable ignore_absolute_path_warning is set to True in order to
override the requirement that filepaths are relative. This is useful for the
compilation unit filepath and filepaths of -include's: they are permitted to
be absolute because the command line can still be rewritten on the server.
The server tweaks their location to become relative to the server root.
"""
if os.path.isabs(relpath) and not ignore_absolute_path_warning:
if basics.opt_unsafe_absolute_includes:
Debug(DEBUG_WARNING,
"absolute filepath '%s' was IGNORED"
" (correctness of build may be affected)", relpath)
else:
raise NotCoveredError("Filepath must be relative but isn't: '%s'."
" Consider setting INCLUDE_SERVER_ARGS='--"
"unsafe_absolute_includes'."
% relpath,
send_email=False)
# Now, remove leading "./" so as not to start an infinite regression when
# say foo.c contains:
#
# #include "./foo.c"
#
    # which might seduce a recursive include analyzer down the forbidden path:
#
# "foo.c", # "./foo.c", "././foo.c." etc.
while relpath.startswith("./"):
relpath = relpath[2:]
return MapToIndex.Index(self, relpath)
class CanonicalMapToIndex(MapToIndex):
"""Like a normal MapToIndex, but assumes the keys are absolute
filepaths, and canonicalizes them before inserting into the map.
'Canonicalize' means to do the equivalent of os.path.realpath(),
which mostly involves resolving symlinks in the filepath.
"""
def __init__(self, canonicalize):
"""Constructor.
Argument:
canonicalize: an instance of the CanonicalPath cache."""
MapToIndex.__init__(self)
self.canonicalize = canonicalize
def Index(self, filepath):
"""Return the realpath index r of filepath. filepath should be
an absolute filename.
"""
return MapToIndex.Index(self, self.canonicalize(filepath))
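# An illustrative (hypothetical) sketch of why DirectoryMapToIndex stores a
# trailing '/' and RelpathMapToIndex forbids a leading '/': concatenating the
# two stored strings with '+' is then equivalent to the slower os.path.join().
#
#   d_idx = directory_map.Index("/usr/include")   # stored as "/usr/include/"
#   i_idx = includepath_map.Index("sys/types.h")
#   path = directory_map.string[d_idx] + includepath_map.string[i_idx]
#   # path == "/usr/include/sys/types.h"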
def RetrieveDirectoriesExceptSys(directory_map, realpath_map,
systemdir_prefix_cache, directory_idxs):
"""Calculate the set of non-system directories of an index list.
Arguments:
directory_map: a DirectoryMapToIndex cache
realpath_map: a CanonicalMapToIndex cache
directory_idxs: a list or tuple of directory_map indices
Returns:
the corresponding tuple of directories except for those whose
realpath has a prefix that is a sysdir
The directories in the returned list have their trailing '/'
stripped.
"""
result = []
for dir_idx in directory_idxs:
# Index the absolute path; this will let us know whether dir_idx is under a
# default systemdir of the compiler.
rp_idx = realpath_map.Index(os.path.join(
os.getcwd(), directory_map.string[dir_idx]))
systemdir_prefix_cache.FillCache(realpath_map)
if not systemdir_prefix_cache.cache[rp_idx]:
result.append(directory_map.string[dir_idx].rstrip('/'))
return tuple(result)
####
#### THE STAT CACHES
####
class SimpleBuildStat(object):
"""Stat cache that works with strings, not indices."""
def __init__(self):
self.cache = {}
def Lookup(self, filepath):
"""Returns true if filepath exists."""
try:
return self.cache[filepath]
except KeyError:
result = self.cache[filepath] = _OsPathExists(filepath)
return result
class BuildStatCache(object):
"""A highly optimized mechanism for stat queries of filepaths,
as represented by a triple of indexes: currdir_idx, searchdir_idx,
filepath_idx. Given this input, we can say whether a regular file
represented by this triple exists on the filesystem, and if so,
what its canonical pathname is: that is, the pathname after all
symlinks have been resolved.
  The hash table is a three-level structure:
- build_stat[currdir_idx] contains an array for each includepath_idx
- build_stat[currdir_idx][includepath_idx] is this array, and
- build_stat[currdir_idx][includepath_idx][searchdir_idx] is either
* False if os.path.join(currdir, searchdir, includepath) does not exist
* True if it does
* None when it is not known whether it exists or not
In addition, we keep a parallel structure for the realpath, that lets us
quickly map from a filepath to os.path.realpath(filepath).
   - real_stat[currdir_idx] contains an array for each includepath_idx
- real_stat[currdir_idx][includepath_idx] is this array, and
- real_stat[currdir_idx][includepath_idx][searchdir_idx] is either
* realpath_idx, such that realpath_map.string[realpath_idx] =
os.path.realpath(os.path.join(currdir, searchdir, includepath))
when build_stat[currdir_idx][includepath_idx][searchdir_idx] = True
* None, otherwise
"""
def __init__(self, includepath_map, directory_map, realpath_map):
self.build_stat = {}
self.real_stat = {}
self.includepath_map = includepath_map
self.directory_map = directory_map
self.realpath_map = realpath_map
self.path_observations = []
def _Verify(self, currdir_idx, searchdir_idx, includepath_idx):
"""Verify that the cached result is the same as obtained by stat call.
Prerequisite: we've done a chdir(currdir) before this call.
"""
assert 1 <= includepath_idx < self.includepath_map.Length()
assert 1 <= searchdir_idx < self.directory_map.Length()
if __debug__: statistics.sys_stat_counter += 1
# Since we know directory_map entries end in /, and includepaths don't
# start with / (who does "#include </usr/include/string.h>"??), we can
# use + instead of the more expensive os.path.join().
# Make sure $PWD is currdir, so we don't need to include it in our stat().
assert os.getcwd() + '/' == self.directory_map.string[currdir_idx]
really_exists = _OsPathIsFile(
self.directory_map.string[searchdir_idx]
+ self.includepath_map.string[includepath_idx])
cache_exists = self.build_stat[currdir_idx][includepath_idx][searchdir_idx]
assert isinstance(cache_exists, bool)
if cache_exists != really_exists:
filepath = os.path.join(self.directory_map.string[currdir_idx],
self.directory_map.string[searchdir_idx],
self.includepath_map.string[includepath_idx])
sys.exit("FATAL ERROR: "
"Cache inconsistency: '%s' %s, but earlier this path %s." % (
filepath,
really_exists and "exists" or "does not exist",
cache_exists and "existed" or "did not exist"))
def WarnAboutPathObservations(self, translation_unit):
"""Print new paths found according to path observation expression option.
Args:
translation_unit: a string embedded in warning
"""
for (includepath, relpath, realpath) in self.path_observations:
Debug(DEBUG_WARNING,
"For translation unit '%s',"
" lookup of file '%s' resolved to '%s' whose realpath is '%s'.",
translation_unit, includepath, relpath, realpath)
self.path_observations = []
def Resolve(self, includepath_idx, currdir_idx, searchdir_idx,
searchlist_idxs):
"""Says whether (currdir_idx, searchdir_idx, includepath_idx) exists,
and if so what its canonicalized form is (with symlinks resolved).
TODO(csilvers): rearrange the order of the arguments.
Args:
includepath_idx: The index of an includepath, from e.g. "#include <foo>"
currdir_idx: The index of the current working dir. Note that we
require os.getcwd() == currdir before calling Resolve!
searchdir_idx: A single searchdir, which is prepended to searchlist,
or None to not prepend to the searchlist.
searchlist_idxs: A list of directory indices.
Returns:
1) (None, None) if, for all sl_idx in [searchdir_idx] + searchlist_idxs,
os.path.join(currdir, sp, includepath) does not exist.
2) ((sl_idx, includepath_idx), realpath_idx)
if, for some sl_idx in [searchdir_idx] + searchlist_idxs,
os.path.join(currdir, sp, includepath) does exist. In this case,
sl_idx is the index of the first searchlist entry for which the
exists-test succeeds, and realpath_idx is the index into the
realpath_map of os.path.join(currdir, sp, includepath).
    Again, we require as a prerequisite that os.getcwd() must equal currdir:
os.getcwd() + '/' == self.directory_map.string[currdir_idx]
"""
includepath = self.includepath_map.string[includepath_idx]
if includepath.startswith('/'):
# We really don't want to start exploring absolute includepaths; what's
# the sl_idx to return for example? And what about the use of '+'
# (as an optimization) below instead of os.path.join.
return (None, None)
dir_map_string = self.directory_map.string # memoize the fn pointer
build_stat = self.build_stat
real_stat = self.real_stat
if __debug__:
dir_map = self.directory_map
assert 0 < includepath_idx < self.includepath_map.Length()
assert 0 < currdir_idx < dir_map.Length()
assert searchdir_idx is None or 1 <= searchdir_idx < dir_map.Length()
for sl_idx in searchlist_idxs:
assert sl_idx < dir_map.Length()
assert os.getcwd() + '/' == dir_map_string[currdir_idx]
Debug(DEBUG_TRACE2, "Resolve: includepath: '%s', currdir: '%s', "
"searchdir: '%s', searchlist: %s" %
(includepath,
dir_map_string[currdir_idx],
searchdir_idx and dir_map_string[searchdir_idx],
" \n".join([dir_map_string[idx] for idx in searchlist_idxs])))
try:
# Locate the array (list) relative to currdir_idx and includepath_idx
searchdir_stats = build_stat[currdir_idx][includepath_idx]
# Locate the corresponding array of realpath names
searchdir_realpaths = real_stat[currdir_idx][includepath_idx]
except KeyError: # We'll need to grow the relevant arrays
currdir_stats = build_stat.setdefault(currdir_idx, {})
currdir_realpaths = real_stat.setdefault(currdir_idx, {})
searchdir_stats = currdir_stats[includepath_idx] = \
[None] * DIR_ARRAY_SIZE
searchdir_realpaths = currdir_realpaths[includepath_idx] = \
[None] * DIR_ARRAY_SIZE
# Try searchdir_idx if not None, then try every index in searchlist_idxs.
# This inner loop may be executed tens of millions of times.
# Do not try to form [searchdir_idx] + searchlist_idxs -- too expensive!
for searchlist in (searchdir_idx and [searchdir_idx] or [],
searchlist_idxs):
for sl_idx in searchlist:
if __debug__:
statistics.search_counter += 1
statistics.build_stat_counter += 1
try:
# We expect that searchdir_stats[sl_idx] == False, because
# we've usually seen sl_idx before for our includepath and
# our currdir --- and includepath does not usually exist
# relative to the sp directory. We're optimizing for this
# case of course. That should give us a rate of a couple of
# million iterations per second (for this case).
if searchdir_stats[sl_idx] == False:
if __debug__: self._Verify(currdir_idx, sl_idx, includepath_idx)
continue
if searchdir_stats[sl_idx]:
if __debug__: self._Verify(currdir_idx, sl_idx, includepath_idx)
return ((sl_idx, includepath_idx), searchdir_realpaths[sl_idx])
except IndexError: # DIR_ARRAY_SIZE wasn't big enough; let's double
searchdir_stats.extend([None] * max(sl_idx, len(searchdir_stats)))
searchdir_realpaths.extend([None] * max(sl_idx, len(searchdir_stats)))
# If we get here, result is not cached yet.
if __debug__: statistics.sys_stat_counter += 1
        # We do not explicitly take into account currdir_idx, because
# of the check above that os.getcwd is set to current_dir.
relpath = dir_map_string[sl_idx] + includepath
if _OsPathIsFile(relpath):
searchdir_stats[sl_idx] = True
rpath = os.path.join(dir_map_string[currdir_idx], relpath)
realpath_idx = searchdir_realpaths[sl_idx] = (
self.realpath_map.Index(rpath))
# This is the place to catch errant files according to user defined
# regular expression path_observation_re.
if basics.opt_path_observation_re:
realpath = self.realpath_map.string[realpath_idx]
if basics.opt_path_observation_re.search(realpath):
self.path_observations.append((includepath, relpath, realpath))
return ((sl_idx, includepath_idx), realpath_idx)
else:
searchdir_stats[sl_idx] = False
if __debug__: Debug(DEBUG_TRACE2, "Resolve: failed")
return (None, None)
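# A hedged, illustrative sketch (names are hypothetical) of how Resolve() is
# meant to be called once the caches below are set up and os.getcwd() matches
# the directory behind cd_idx:
#
#   ip_idx = caches.includepath_map.Index("stdio.h")
#   cd_idx = caches.directory_map.Index(os.getcwd())
#   filepath_pair, rp_idx = caches.build_stat_cache.Resolve(
#       ip_idx, cd_idx, None, searchlist_idxs)
#   if filepath_pair is None:
#       pass  # not found along the search list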
class SetUpCaches(object):
"""Erect the edifice of caches.
Instance variables:
includepath_map: RelpathMapToIndex
directory_map: DirectoryMapToIndex
realpath_map: CanonicalMapToIndex
canonical_path: CanonicalPath
build_stat_cache: BuildStatCache
dirname_cache: DirnameCache
simple_build_stat: SimpleBuildStat
client_root: a path such as /dev/shm/tmpX.include_server-X-1
(used during default system dir determination)
    IsIncludepathIndex: test for includepath index
    IsSearchdirIndex, IsCurrdirIndex: tests for directory index
IsRealpathIndex: test for realpath index
IsFilepathPair: test for filepath pair
"""
def __init__(self, client_root):
# A memoizing (caching) class to canonicalize a path: mostly by
# resolving any symlinks in the path-component.
self.canonical_path = CanonicalPath()
# The index-map for includepath names: things seen after '#include'.
self.includepath_map = RelpathMapToIndex()
# The index-map for searchdir names and currdir as well. Also used any
# other time we have something we know is a directory (eg dirname(foo)).
self.directory_map = DirectoryMapToIndex()
# The index-map for realpaths: the full pathname of an include, with
# symlinks resolved and such (hence the name realpath).
self.realpath_map = CanonicalMapToIndex(self.canonical_path.Canonicalize)
# A cache of the directory part of filepaths. Note it uses the
# directory_map to actually store the mapping.
self.dirname_cache = DirnameCache(self.includepath_map, self.directory_map,
self.realpath_map)
# A cache of whether a realpath starts with a system searchdir or
# not. Note: at this time, system_dirs_default_all will be empty.
# It will get filled via processing in parse_command.py. This is
# why we need to store the compiler_defaults instance, to make
# sure "our" system_dirs_default_all is updated.
# TODO(csilvers): get rid of this once prefix_cache TODO is cleaned up
self.compiler_defaults = compiler_defaults.CompilerDefaults(
self.canonical_path.Canonicalize, client_root)
self.systemdir_prefix_cache = SystemdirPrefixCache(
self.compiler_defaults.system_dirs_default_all)
# The main caches, that say whether a file exists or not. We have
# two: a simple one that takes a filepath (string) as an argument,
# and the complicated one that works with index-triples.
self.simple_build_stat = SimpleBuildStat()
self.build_stat_cache = BuildStatCache(self.includepath_map,
self.directory_map,
self.realpath_map)
# Convenient function closures to test for various semantic datatypes.
self.IsIncludepathIndex = (lambda x:
isinstance(x, int)
and 0 < x < self.includepath_map.Length())
self.IsSearchdirIndex = (lambda x:
isinstance(x, int)
and 0 < x < self.directory_map.Length())
self.IsCurrdirIndex = (lambda x:
isinstance(x, int)
and 0 < x < self.directory_map.Length())
self.IsFilepathPair = (lambda x:
isinstance(x, tuple)
and len(x) == 2
and self.IsSearchdirIndex(x[0])
and self.IsIncludepathIndex(x[1]))
self.IsRealpathIndex = (lambda x:
isinstance(x, int)
and 0 < x < self.realpath_map.Length())
| itensionanders/distcc | include_server/cache_basics.py | Python | gpl-2.0 | 37,214 | 0.004407 |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
The Image class provides the interface which should be used
by users at the application level. The image provides a coordinate map,
and the data itself.
"""
from __future__ import absolute_import
__docformat__ = 'restructuredtext'
# You'd usually use nipy.core.api for these
from . import image
from .image import Image
from nipy.testing import Tester
test = Tester().test
bench = Tester().bench
| alexis-roche/nipy | nipy/core/image/__init__.py | Python | bsd-3-clause | 518 | 0.001931 |
from django import template
from django.conf import settings
from ccpages.models import Page
register = template.Library()
@register.inclusion_tag('ccpages/_js.html')
def ccpages_js():
return {
'STATIC_URL': settings.STATIC_URL,
}
@register.inclusion_tag('ccpages/_css.html')
def ccpages_css():
return {
'STATIC_URL': settings.STATIC_URL,
}
@register.inclusion_tag('ccpages/_nav_breadcrumb.html')
def ccpages_nav_breadcrumbs(page):
"""returns a breadcrumb"""
return {
'pages': Page.objects.nav_breadcrumbs(page),
'page': page,
}
@register.inclusion_tag('ccpages/_nav_local.html')
def ccpages_nav_local(page):
"""returns the local nav for a given page's root"""
return {
'pages': Page.objects.nav_local(page)
}
@register.assignment_tag
def ccpages_nav_global():
"""returns the global pages"""
return Page.objects.nav_global()
| designcc/django-ccpages | ccpages/templatetags/ccpages_tags.py | Python | bsd-3-clause | 921 | 0.005429 |
from imgurpython import ImgurClient
import webbrowser
import credentials
client = ImgurClient(credentials.imgur['client_id'], credentials.imgur['client_secret'])
# Authorization flow, pin example (see docs for other auth types)
authorization_url = client.get_auth_url('pin')
print authorization_url
webbrowser.open(authorization_url)
pin = raw_input("Enter pin : ")
credentials = client.authorize(pin, "pin")
print "Imgur Access token : %s" % credentials["access_token"]
print "Imgur Refresh token : %s" % credentials["refresh_token"] | hexagonist/RedditDaltonizerBot | get_imgur_tokens.py | Python | mit | 540 | 0.003704 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_is_vcs_installed
---------------------
"""
from cookiecutter import vcs
def test_existing_repo_type():
assert vcs.is_vcs_installed("git")
def test_non_existing_repo_type():
assert not vcs.is_vcs_installed("stringthatisntashellcommand")
| tylerdave/cookiecutter | tests/test_is_vcs_installed.py | Python | bsd-3-clause | 305 | 0 |
#
# Copyright (C) 2013-2015 RoboIME
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
import numpy as np
from numpy import math
_trigonom_ = ['sin', 'cos', 'tan']
_invtrigonom_ = ['a' + f for f in _trigonom_] + ['atan2']
_restricted_ = ['trunc']
for fun in dir(math):
if fun in _restricted_:
pass
elif fun in _trigonom_:
exec '{0} = lambda x: math.{0}(math.radians(x))'.format(fun) in globals()
elif fun == 'atan2':
exec '{0} = lambda y, x: math.degrees(math.{0}(y, x))'.format(fun) in globals()
elif fun in _invtrigonom_:
exec '{0} = lambda x: math.degrees(math.{0}(x))'.format(fun) in globals()
else:
exec '{0} = math.{0}'.format(fun)
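# After the loop above, this module exposes degree-based wrappers for the
# trigonometric functions (illustrative examples: sin(90) yields 1.0,
# cos(0) yields 1.0, atan2(1, 0) yields 90.0), while the remaining math
# functions such as sqrt are re-exported unchanged.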
def norm(vector):
""" Returns the norm (length) of the vector."""
# note: this is a very hot function, hence the odd optimization
# Unoptimized it is: return np.sqrt(np.sum(np.square(vector)))
return np.sqrt(np.dot(vector, vector))
def unit_vector(vector):
""" Returns the unit vector of the vector. """
return vector / norm(vector)
def angle_between(v1, v2):
""" Returns the angle in radians between vectors 'v1' and 'v2'::
>>> angle_between((1, 0, 0), (0, 1, 0))
1.5707963267948966
>>> angle_between((1, 0, 0), (1, 0, 0))
0.0
>>> angle_between((1, 0, 0), (-1, 0, 0))
3.141592653589793
"""
v1_u = unit_vector(v1)
v2_u = unit_vector(v2)
angle = np.arccos(np.dot(v1_u, v2_u))
if math.isnan(angle):
if (v1_u == v2_u).all():
return 0.0
else:
return 180
return math.degrees(angle)
| roboime/pyroboime | roboime/utils/mathutils.py | Python | agpl-3.0 | 2,108 | 0.001423 |
from math import isnan
import warnings
import unittest
from unittest.mock import MagicMock
import numpy as np
from numpy.testing import assert_array_equal
from Orange.data import \
Instance, Domain, Unknown, Value, \
DiscreteVariable, ContinuousVariable, StringVariable
class TestInstance(unittest.TestCase):
attributes = ["Feature %i" % i for i in range(10)]
class_vars = ["Class %i" % i for i in range(1)]
metas = [DiscreteVariable("Meta 1", values="XYZ"),
ContinuousVariable("Meta 2"),
StringVariable("Meta 3")]
def mock_domain(self, with_classes=False, with_metas=False):
attributes = self.attributes
class_vars = self.class_vars if with_classes else []
metas = self.metas if with_metas else []
variables = attributes + class_vars
return MagicMock(Domain,
attributes=attributes,
class_vars=class_vars,
metas=metas,
variables=variables)
def create_domain(self, attributes=(), classes=(), metas=()):
attr_vars = [ContinuousVariable(name=a) if isinstance(a, str) else a
for a in attributes]
class_vars = [ContinuousVariable(name=c) if isinstance(c, str) else c
for c in classes]
meta_vars = [DiscreteVariable(name=m, values=map(str, range(5)))
if isinstance(m, str) else m
for m in metas]
domain = Domain(attr_vars, class_vars, meta_vars)
return domain
def test_init_x_no_data(self):
domain = self.mock_domain()
inst = Instance(domain)
self.assertIsInstance(inst, Instance)
self.assertIs(inst.domain, domain)
self.assertEqual(inst._x.shape, (len(self.attributes), ))
self.assertEqual(inst._y.shape, (0, ))
self.assertEqual(inst._metas.shape, (0, ))
self.assertTrue(all(isnan(x) for x in inst._x))
def test_init_xy_no_data(self):
domain = self.mock_domain(with_classes=True)
inst = Instance(domain)
self.assertIsInstance(inst, Instance)
self.assertIs(inst.domain, domain)
self.assertEqual(inst._x.shape, (len(self.attributes), ))
self.assertEqual(inst._y.shape, (len(self.class_vars), ))
self.assertEqual(inst._metas.shape, (0, ))
self.assertTrue(all(isnan(x) for x in inst._x))
self.assertTrue(all(isnan(x) for x in inst._y))
def test_init_xym_no_data(self):
domain = self.mock_domain(with_classes=True, with_metas=True)
inst = Instance(domain)
self.assertIsInstance(inst, Instance)
self.assertIs(inst.domain, domain)
self.assertEqual(inst._x.shape, (len(self.attributes), ))
self.assertEqual(inst._y.shape, (len(self.class_vars), ))
self.assertEqual(inst._metas.shape, (3, ))
self.assertTrue(all(isnan(x) for x in inst._x))
self.assertTrue(all(isnan(x) for x in inst._y))
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
assert_array_equal(inst._metas, np.array([Unknown, Unknown, None]))
def test_init_x_arr(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")])
vals = np.array([42, 0])
inst = Instance(domain, vals)
assert_array_equal(inst._x, vals)
self.assertEqual(inst._y.shape, (0, ))
self.assertEqual(inst._metas.shape, (0, ))
domain = self.create_domain()
inst = Instance(domain, np.empty((0,)))
self.assertEqual(inst._x.shape, (0, ))
self.assertEqual(inst._y.shape, (0, ))
self.assertEqual(inst._metas.shape, (0, ))
def test_init_x_list(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")])
lst = [42, 0]
vals = np.array(lst)
inst = Instance(domain, vals)
assert_array_equal(inst._x, vals)
self.assertEqual(inst._y.shape, (0, ))
self.assertEqual(inst._metas.shape, (0, ))
domain = self.create_domain()
inst = Instance(domain, [])
self.assertEqual(inst._x.shape, (0, ))
self.assertEqual(inst._y.shape, (0, ))
self.assertEqual(inst._metas.shape, (0, ))
def test_init_xy_arr(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")])
vals = np.array([42, 0, 1])
inst = Instance(domain, vals)
assert_array_equal(inst._x, vals[:2])
self.assertEqual(inst._y.shape, (1, ))
self.assertEqual(inst._y[0], 1)
self.assertEqual(inst._metas.shape, (0, ))
def test_init_xy_list(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")])
lst = [42, "M", "C"]
vals = np.array([42, 0, 2])
inst = Instance(domain, vals)
assert_array_equal(inst._x, vals[:2])
self.assertEqual(inst._y.shape, (1, ))
self.assertEqual(inst._y[0], 2)
self.assertEqual(inst._metas.shape, (0, ))
def test_init_xym_arr(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = np.array([42, "M", "B", "X", 43, "Foo"], dtype=object)
inst = Instance(domain, vals)
self.assertIsInstance(inst, Instance)
self.assertIs(inst.domain, domain)
self.assertEqual(inst._x.shape, (2, ))
self.assertEqual(inst._y.shape, (1, ))
self.assertEqual(inst._metas.shape, (3, ))
assert_array_equal(inst._x, np.array([42, 0]))
self.assertEqual(inst._y[0], 1)
assert_array_equal(inst._metas, np.array([0, 43, "Foo"], dtype=object))
def test_init_xym_list(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
self.assertIsInstance(inst, Instance)
self.assertIs(inst.domain, domain)
self.assertEqual(inst._x.shape, (2, ))
self.assertEqual(inst._y.shape, (1, ))
self.assertEqual(inst._metas.shape, (3, ))
assert_array_equal(inst._x, np.array([42, 0]))
self.assertEqual(inst._y[0], 1)
assert_array_equal(inst._metas, np.array([0, 43, "Foo"], dtype=object))
def test_init_inst(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
inst2 = Instance(domain, inst)
assert_array_equal(inst2._x, np.array([42, 0]))
self.assertEqual(inst2._y[0], 1)
assert_array_equal(inst2._metas, np.array([0, 43, "Foo"], dtype=object))
domain2 = self.create_domain(["z", domain[1], self.metas[1]],
domain.class_vars,
[self.metas[0], "w", domain[0]])
inst2 = Instance(domain2, inst)
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
assert_array_equal(inst2._x, np.array([Unknown, 0, 43]))
self.assertEqual(inst2._y[0], 1)
assert_array_equal(inst2._metas, np.array([0, Unknown, 42],
dtype=object))
def test_get_item(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
val = inst[0]
self.assertIsInstance(val, Value)
self.assertEqual(inst[0], 42)
self.assertEqual(inst["x"], 42)
self.assertEqual(inst[domain[0]], 42)
val = inst[1]
self.assertIsInstance(val, Value)
self.assertEqual(inst[1], "M")
self.assertEqual(inst["g"], "M")
self.assertEqual(inst[domain[1]], "M")
val = inst[2]
self.assertIsInstance(val, Value)
self.assertEqual(inst[2], "B")
self.assertEqual(inst["y"], "B")
self.assertEqual(inst[domain.class_var], "B")
val = inst[-2]
self.assertIsInstance(val, Value)
self.assertEqual(inst[-2], 43)
self.assertEqual(inst["Meta 2"], 43)
self.assertEqual(inst[self.metas[1]], 43)
with self.assertRaises(ValueError):
inst["asdf"] = 42
with self.assertRaises(ValueError):
inst[ContinuousVariable("asdf")] = 42
def test_list(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
l = inst.list
self.assertIsInstance(l, list)
self.assertEqual(l, [42, "M", "B", "X", 43, "Foo"])
self.assertGreater(len(l), len(inst))
self.assertEqual(len(l), 6)
def test_set_item(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
inst[0] = 43
self.assertEqual(inst[0], 43)
inst["x"] = 44
self.assertEqual(inst[0], 44)
inst[domain[0]] = 45
self.assertEqual(inst[0], 45)
inst[1] = "F"
self.assertEqual(inst[1], "F")
inst["g"] = "M"
self.assertEqual(inst[1], "M")
with self.assertRaises(ValueError):
inst[1] = "N"
with self.assertRaises(ValueError):
inst["asdf"] = 42
inst[2] = "C"
self.assertEqual(inst[2], "C")
inst["y"] = "A"
self.assertEqual(inst[2], "A")
inst[domain.class_var] = "B"
self.assertEqual(inst[2], "B")
inst[-1] = "Y"
self.assertEqual(inst[-1], "Y")
inst["Meta 1"] = "Z"
self.assertEqual(inst[-1], "Z")
inst[domain.metas[0]] = "X"
self.assertEqual(inst[-1], "X")
def test_str(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")])
inst = Instance(domain, [42, 0])
self.assertEqual(str(inst), "[42.000, M]")
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")])
inst = Instance(domain, [42, "M", "B"])
self.assertEqual(str(inst), "[42.000, M | B]")
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
inst = Instance(domain, [42, "M", "B", "X", 43, "Foo"])
self.assertEqual(str(inst), "[42.000, M | B] {X, 43.000, Foo}")
domain = self.create_domain([],
[DiscreteVariable("y", values="ABC")],
self.metas)
inst = Instance(domain, ["B", "X", 43, "Foo"])
self.assertEqual(str(inst), "[ | B] {X, 43.000, Foo}")
domain = self.create_domain([],
[],
self.metas)
inst = Instance(domain, ["X", 43, "Foo"])
self.assertEqual(str(inst), "[] {X, 43.000, Foo}")
domain = self.create_domain(self.attributes)
inst = Instance(domain, range(len(self.attributes)))
self.assertEqual(
str(inst),
"[{}]".format(", ".join("{:.3f}".format(x)
for x in range(len(self.attributes)))))
for attr in domain:
attr.number_of_decimals = 0
self.assertEqual(
str(inst),
"[{}]".format(", ".join("{}".format(x)
for x in range(len(self.attributes)))))
def test_repr(self):
domain = self.create_domain(self.attributes)
inst = Instance(domain, range(len(self.attributes)))
self.assertEqual(repr(inst), "[0.000, 1.000, 2.000, 3.000, 4.000, ...]")
for attr in domain:
attr.number_of_decimals = 0
self.assertEqual(repr(inst), "[0, 1, 2, 3, 4, ...]")
def test_eq(self):
domain = self.create_domain(["x", DiscreteVariable("g", values="MF")],
[DiscreteVariable("y", values="ABC")],
self.metas)
vals = [42, "M", "B", "X", 43, "Foo"]
inst = Instance(domain, vals)
inst2 = Instance(domain, vals)
self.assertTrue(inst == inst2)
self.assertTrue(inst2 == inst)
inst2[0] = 43
self.assertFalse(inst == inst2)
inst2[0] = Unknown
self.assertFalse(inst == inst2)
inst2 = Instance(domain, vals)
inst2[2] = "C"
self.assertFalse(inst == inst2)
inst2 = Instance(domain, vals)
inst2[-1] = "Y"
self.assertFalse(inst == inst2)
inst2 = Instance(domain, vals)
inst2[-2] = "33"
self.assertFalse(inst == inst2)
inst2 = Instance(domain, vals)
inst2[-3] = "Bar"
self.assertFalse(inst == inst2)
def test_instance_id(self):
domain = self.create_domain(["x"])
vals = [42]
inst = Instance(domain, vals, id=42)
self.assertEqual(inst.id, 42)
inst2 = Instance(domain, vals)
inst3 = Instance(domain, vals)
self.assertNotEqual(inst2.id, inst3.id)
| hugobuddel/orange3 | Orange/tests/test_instance.py | Python | gpl-3.0 | 14,298 | 0.00028 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-25 10:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('explorer', '0004_district_shapefile_link'),
]
operations = [
migrations.AlterField(
model_name='district',
name='shapefile_link',
field=models.URLField(blank=True, null=True),
),
]
| asterix135/whoshouldivotefor | explorer/migrations/0005_auto_20170625_0617.py | Python | mit | 474 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import six
from sys import platform
import locale
import os.path
from pelican.tests.support import unittest, get_settings
from pelican.contents import Page, Article, Static, URLWrapper, Author, Category
from pelican.settings import DEFAULT_CONFIG
from pelican.utils import path_to_url, truncate_html_words, SafeDatetime, posix_join
from pelican.signals import content_object_init
from jinja2.utils import generate_lorem_ipsum
# generate one paragraph, enclosed with <p>
TEST_CONTENT = str(generate_lorem_ipsum(n=1))
TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
class TestPage(unittest.TestCase):
def setUp(self):
super(TestPage, self).setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, str('C'))
self.page_kwargs = {
'content': TEST_CONTENT,
'context': {
'localsiteurl': '',
},
'metadata': {
'summary': TEST_SUMMARY,
'title': 'foo bar',
'author': Author('Blogger', DEFAULT_CONFIG),
},
'source_path': '/path/to/file/foo.ext'
}
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_use_args(self):
# Creating a page with arguments passed to the constructor should use
# them to initialise object's attributes.
metadata = {'foo': 'bar', 'foobar': 'baz', 'title': 'foobar', }
page = Page(TEST_CONTENT, metadata=metadata,
context={'localsiteurl': ''})
for key, value in metadata.items():
self.assertTrue(hasattr(page, key))
self.assertEqual(value, getattr(page, key))
self.assertEqual(page.content, TEST_CONTENT)
def test_mandatory_properties(self):
# If the title is not set, must throw an exception.
page = Page('content')
with self.assertRaises(NameError):
page.check_properties()
page = Page('content', metadata={'title': 'foobar'})
page.check_properties()
def test_summary_from_metadata(self):
# If a :summary: metadata is given, it should be used
page = Page(**self.page_kwargs)
self.assertEqual(page.summary, TEST_SUMMARY)
def test_summary_max_length(self):
# If a :SUMMARY_MAX_LENGTH: is set, and there is no other summary,
# generated summary should not exceed the given length.
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
page_kwargs['settings'] = settings
del page_kwargs['metadata']['summary']
settings['SUMMARY_MAX_LENGTH'] = None
page = Page(**page_kwargs)
self.assertEqual(page.summary, TEST_CONTENT)
settings['SUMMARY_MAX_LENGTH'] = 10
page = Page(**page_kwargs)
self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10))
settings['SUMMARY_MAX_LENGTH'] = 0
page = Page(**page_kwargs)
self.assertEqual(page.summary, '')
def test_slug(self):
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
page_kwargs['settings'] = settings
settings['SLUGIFY_SOURCE'] = "title"
page = Page(**page_kwargs)
self.assertEqual(page.slug, 'foo-bar')
settings['SLUGIFY_SOURCE'] = "basename"
page = Page(**page_kwargs)
self.assertEqual(page.slug, 'foo')
def test_defaultlang(self):
# If no lang is given, default to the default one.
page = Page(**self.page_kwargs)
self.assertEqual(page.lang, DEFAULT_CONFIG['DEFAULT_LANG'])
# it is possible to specify the lang in the metadata infos
self.page_kwargs['metadata'].update({'lang': 'fr', })
page = Page(**self.page_kwargs)
self.assertEqual(page.lang, 'fr')
def test_save_as(self):
# If a lang is not the default lang, save_as should be set
# accordingly.
# if a title is defined, save_as should be set
page = Page(**self.page_kwargs)
self.assertEqual(page.save_as, "pages/foo-bar.html")
# if a language is defined, save_as should include it accordingly
self.page_kwargs['metadata'].update({'lang': 'fr', })
page = Page(**self.page_kwargs)
self.assertEqual(page.save_as, "pages/foo-bar-fr.html")
def test_metadata_url_format(self):
# Arbitrary metadata should be passed through url_format()
page = Page(**self.page_kwargs)
self.assertIn('summary', page.url_format.keys())
page.metadata['directory'] = 'test-dir'
page.settings = get_settings(PAGE_SAVE_AS='{directory}/{slug}')
self.assertEqual(page.save_as, 'test-dir/foo-bar')
def test_datetime(self):
# If DATETIME is set to a tuple, it should be used to override LOCALE
dt = SafeDatetime(2015, 9, 13)
page_kwargs = self._copy_page_kwargs()
# set its date to dt
page_kwargs['metadata']['date'] = dt
page = Page(**page_kwargs)
# page.locale_date is a unicode string in both python2 and python3
dt_date = dt.strftime(DEFAULT_CONFIG['DEFAULT_DATE_FORMAT'])
# dt_date is a byte string in python2, and a unicode string in python3
# Let's make sure it is a unicode string (relies on python 3.3 supporting the u prefix)
if type(dt_date) != type(u''):
# python2:
dt_date = unicode(dt_date, 'utf8')
self.assertEqual(page.locale_date, dt_date )
page_kwargs['settings'] = get_settings()
# I doubt this can work on all platforms ...
if platform == "win32":
locale = 'jpn'
else:
locale = 'ja_JP.utf8'
page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale,
'%Y-%m-%d(%a)')}
page_kwargs['metadata']['lang'] = 'jp'
import locale as locale_module
try:
page = Page(**page_kwargs)
self.assertEqual(page.locale_date, '2015-09-13(\u65e5)')
except locale_module.Error:
# The constructor of ``Page`` will try to set the locale to
# ``ja_JP.utf8``. But this attempt will failed when there is no
# such locale in the system. You can see which locales there are
# in your system with ``locale -a`` command.
#
# Until we find some other method to test this functionality, we
# will simply skip this test.
            self.skipTest("There is no locale %s in this system." % locale)
def test_template(self):
# Pages default to page, metadata overwrites
default_page = Page(**self.page_kwargs)
self.assertEqual('page', default_page.template)
page_kwargs = self._copy_page_kwargs()
page_kwargs['metadata']['template'] = 'custom'
custom_page = Page(**page_kwargs)
self.assertEqual('custom', custom_page.template)
def _copy_page_kwargs(self):
# make a deep copy of page_kwargs
page_kwargs = dict([(key, self.page_kwargs[key]) for key in
self.page_kwargs])
for key in page_kwargs:
if not isinstance(page_kwargs[key], dict):
break
page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey])
for subkey in page_kwargs[key]])
return page_kwargs
def test_signal(self):
# If a title is given, it should be used to generate the slug.
def receiver_test_function(sender, instance):
pass
content_object_init.connect(receiver_test_function, sender=Page)
Page(**self.page_kwargs)
self.assertTrue(content_object_init.has_receivers_for(Page))
def test_get_content(self):
# Test that the content is updated with the relative links to
# filenames, tags and categories.
settings = get_settings()
args = self.page_kwargs.copy()
args['settings'] = settings
# Tag
args['content'] = ('A simple test, with a '
'<a href="|tag|tagname">link</a>')
page = Page(**args)
content = page.get_content('http://notmyidea.org')
self.assertEqual(
content,
('A simple test, with a '
'<a href="http://notmyidea.org/tag/tagname.html">link</a>'))
# Category
args['content'] = ('A simple test, with a '
'<a href="|category|category">link</a>')
page = Page(**args)
content = page.get_content('http://notmyidea.org')
self.assertEqual(
content,
('A simple test, with a '
'<a href="http://notmyidea.org/category/category.html">link</a>'))
def test_intrasite_link(self):
# type does not take unicode in PY2 and bytes in PY3, which in
        # combination with unicode literals leads to the following insane line:
cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle'
article = type(cls_name, (object,), {'url': 'article.html'})
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {'article.rst': article}
# Classic intrasite link via filename
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html">link</a>'
)
# fragment
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst#section-2">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html#section-2">link</a>'
)
# query
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html'
'?utm_whatever=234&highlight=word">link</a>'
)
# combination
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word#section-2">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html'
'?utm_whatever=234&highlight=word#section-2">link</a>'
)
def test_intrasite_link_more(self):
# type does not take unicode in PY2 and bytes in PY3, which in
        # combination with unicode literals leads to the following insane line:
cls_name = '_DummyAsset' if six.PY3 else b'_DummyAsset'
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {
'images/poster.jpg': type(cls_name, (object,), {'url': 'images/poster.jpg'}),
'assets/video.mp4': type(cls_name, (object,), {'url': 'assets/video.mp4'}),
'images/graph.svg': type(cls_name, (object,), {'url': 'images/graph.svg'}),
'reference.rst': type(cls_name, (object,), {'url': 'reference.html'}),
}
# video.poster
args['content'] = (
'There is a video with poster '
'<video controls poster="{filename}/images/poster.jpg">'
'<source src="|filename|/assets/video.mp4" type="video/mp4">'
'</video>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a video with poster '
'<video controls poster="http://notmyidea.org/images/poster.jpg">'
'<source src="http://notmyidea.org/assets/video.mp4" type="video/mp4">'
'</video>'
)
# object.data
args['content'] = (
'There is a svg object '
'<object data="{filename}/images/graph.svg" type="image/svg+xml"></object>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a svg object '
'<object data="http://notmyidea.org/images/graph.svg" type="image/svg+xml"></object>'
)
# blockquote.cite
args['content'] = (
'There is a blockquote with cite attribute '
'<blockquote cite="{filename}reference.rst">blah blah</blockquote>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a blockquote with cite attribute '
'<blockquote cite="http://notmyidea.org/reference.html">blah blah</blockquote>'
)
def test_intrasite_link_markdown_spaces(self):
        # Markdown introduces %20 instead of spaces; this tests that
# we support markdown doing this.
cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle'
article = type(cls_name, (object,), {'url': 'article-spaces.html'})
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {'article spaces.rst': article}
# An intrasite link via filename with %20 as a space
args['content'] = (
'A simple test, with a '
'<a href="|filename|article%20spaces.rst">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article-spaces.html">link</a>'
)
def test_multiple_authors(self):
"""Test article with multiple authors."""
args = self.page_kwargs.copy()
content = Page(**args)
assert content.authors == [content.author]
args['metadata'].pop('author')
args['metadata']['authors'] = [Author('First Author', DEFAULT_CONFIG),
Author('Second Author', DEFAULT_CONFIG)]
content = Page(**args)
assert content.authors
assert content.author == content.authors[0]
class TestArticle(TestPage):
def test_template(self):
# Articles default to article, metadata overwrites
default_article = Article(**self.page_kwargs)
self.assertEqual('article', default_article.template)
article_kwargs = self._copy_page_kwargs()
article_kwargs['metadata']['template'] = 'custom'
custom_article = Article(**article_kwargs)
self.assertEqual('custom', custom_article.template)
def test_slugify_category_author(self):
settings = get_settings()
settings['SLUG_SUBSTITUTIONS'] = [ ('C#', 'csharp') ]
settings['ARTICLE_URL'] = '{author}/{category}/{slug}/'
settings['ARTICLE_SAVE_AS'] = '{author}/{category}/{slug}/index.html'
article_kwargs = self._copy_page_kwargs()
article_kwargs['metadata']['author'] = Author("O'Brien", settings)
article_kwargs['metadata']['category'] = Category('C# & stuff', settings)
article_kwargs['metadata']['title'] = 'fnord'
article_kwargs['settings'] = settings
article = Article(**article_kwargs)
self.assertEqual(article.url, 'obrien/csharp-stuff/fnord/')
self.assertEqual(article.save_as, 'obrien/csharp-stuff/fnord/index.html')
class TestStatic(unittest.TestCase):
def setUp(self):
self.settings = get_settings(
STATIC_SAVE_AS='{path}',
STATIC_URL='{path}',
PAGE_SAVE_AS=os.path.join('outpages', '{slug}.html'),
PAGE_URL='outpages/{slug}.html')
self.context = self.settings.copy()
self.static = Static(content=None, metadata={}, settings=self.settings,
source_path=posix_join('dir', 'foo.jpg'), context=self.context)
self.context['filenames'] = {self.static.source_path: self.static}
def tearDown(self):
pass
def test_attach_to_same_dir(self):
"""attach_to() overrides a static file's save_as and url.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_parent_dir(self):
"""attach_to() preserves dirs inside the linking document dir.
"""
page = Page(content="fake page", metadata={'title': 'fakepage'},
settings=self.settings, source_path='fakepage.md')
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_other_dir(self):
"""attach_to() ignores dirs outside the linking document dir.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'))
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_ignores_subsequent_calls(self):
"""attach_to() does nothing when called a second time.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
otherdir_settings = self.settings.copy()
otherdir_settings.update(dict(
PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'),
PAGE_URL='otherpages/{slug}.html'))
otherdir_page = Page(content="other page",
metadata={'title': 'otherpage'}, settings=otherdir_settings,
source_path=os.path.join('dir', 'otherpage.md'))
self.static.attach_to(otherdir_page)
otherdir_save_as = os.path.join('otherpages', 'foo.jpg')
self.assertNotEqual(self.static.save_as, otherdir_save_as)
self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as))
def test_attach_to_does_nothing_after_save_as_referenced(self):
"""attach_to() does nothing if the save_as was already referenced.
        (For example, by a {filename} link in a document processed earlier.)
"""
original_save_as = self.static.save_as
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
self.assertEqual(self.static.save_as, original_save_as)
self.assertEqual(self.static.url, path_to_url(original_save_as))
def test_attach_to_does_nothing_after_url_referenced(self):
"""attach_to() does nothing if the url was already referenced.
        (For example, by a {filename} link in a document processed earlier.)
"""
original_url = self.static.url
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
self.assertEqual(self.static.save_as, self.static.source_path)
self.assertEqual(self.static.url, original_url)
def test_attach_to_does_not_override_an_override(self):
"""attach_to() does not override paths that were overridden elsewhere.
(For example, by the user with EXTRA_PATH_METADATA)
"""
customstatic = Static(content=None,
metadata=dict(save_as='customfoo.jpg', url='customfoo.jpg'),
settings=self.settings,
source_path=os.path.join('dir', 'foo.jpg'),
context=self.settings.copy())
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
customstatic.attach_to(page)
self.assertEqual(customstatic.save_as, 'customfoo.jpg')
self.assertEqual(customstatic.url, 'customfoo.jpg')
def test_attach_link_syntax(self):
"""{attach} link syntax triggers output path override & url replacement.
"""
html = '<a href="{attach}../foo.jpg">link</a>'
page = Page(content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html,
"{attach} link syntax did not trigger URL replacement.")
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_tag_link_syntax(self):
"{tag} link syntax triggers url replacement."
html = '<a href="{tag}foo">link</a>'
page = Page(
content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html)
def test_category_link_syntax(self):
"{category} link syntax triggers url replacement."
html = '<a href="{category}foo">link</a>'
page = Page(content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html)
class TestURLWrapper(unittest.TestCase):
def test_comparisons(self):
# URLWrappers are sorted by name
wrapper_a = URLWrapper(name='first', settings={})
wrapper_b = URLWrapper(name='last', settings={})
self.assertFalse(wrapper_a > wrapper_b)
self.assertFalse(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertTrue(wrapper_a < wrapper_b)
wrapper_b.name = 'first'
self.assertFalse(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertTrue(wrapper_a == wrapper_b)
self.assertFalse(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
wrapper_a.name = 'last'
self.assertTrue(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertFalse(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
| goerz/pelican | pelican/tests/test_contents.py | Python | agpl-3.0 | 24,186 | 0.001778 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import six
import six.moves.urllib as urllib
import tabulator
from .resource_file import (
InlineResourceFile,
LocalResourceFile,
RemoteResourceFile,
)
class Resource(object):
'''Base class for all Data Package's resource types.
    These classes will usually be created by :class:`DataPackage`, and not by
you. If you need to create one, use the :func:`Resource.load` factory
method.
The resources' attributes should only be altered through the
:data:`metadata` dict.
'''
@classmethod
def load(cls, metadata, default_base_path=None):
'''Factory method that loads the resource described in ``metadata``.
It'll first try to load the resource defined in ``metadata`` as a
:class:`TabularResource`. If that fails, it'll fall back to loading it
as a :class:`Resource`.
Args:
metadata (dict): The dict with the resource's metadata
default_base_path (str, optional): The base path to be used in case
the resource's data is in the local disk. Usually this would be
the base path of the `datapackage.json` this resource is in.
Returns:
Resource: The returned resource's class will depend on the type of
resource. If it was tabular, a :class:`TabularResource` will be
returned, otherwise, it'll be a :class:`Resource`.
'''
if TabularResource.can_handle(metadata):
resource_class = TabularResource
else:
resource_class = Resource
return resource_class(metadata, default_base_path)
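    # Usage sketch (illustrative only, not part of the original module): given
    # metadata that points at a CSV file, load() picks TabularResource, while a
    # non-tabular file (e.g. a PNG) falls back to the plain Resource class.
    #   tabular = Resource.load({'path': 'data.csv'}, default_base_path='.')
    #   generic = Resource.load({'path': 'logo.png'}, default_base_path='.')
    #   isinstance(tabular, TabularResource)  # True
    #   type(generic) is Resource             # True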
def __init__(self, metadata, default_base_path=None):
self._metadata = metadata
self._base_path = default_base_path
@property
def metadata(self):
'''dict: The metadata this resource was created with.'''
return self._metadata
@property
def data(self):
'''Returns this resource's data.
The data should not be changed.
Returns:
bytes or data's type: This resource's data. If the data was
inlined, the return type will have the data's type. If not,
it'll be bytes.
Raises:
IOError: If there was some problem opening the data file (e.g. it
doesn't exist or we don't have permissions to read it).
'''
if not hasattr(self, '_data') or \
self._metadata_data_has_changed(self.metadata):
self._data = self._parse_data(self.metadata)
return self._data
@property
def local_data_path(self):
'''str: The absolute local path for the data.'''
path = self._absolute_path(self.metadata.get('path'))
if path:
return os.path.abspath(path)
@property
def remote_data_path(self):
'''str: The remote path for the data, if it exists.
The URL will only be returned if it has a scheme (e.g. http, https,
etc.) by itself or when considering the datapackage's or resource's
base path.
'''
url = self.metadata.get('url')
if url:
return url
else:
path = self._absolute_path(self.metadata.get('path'))
if path and _is_url(path):
return path
@property
def _resource_file(self):
if self._metadata_data_has_changed(self.metadata):
resource_file = self._load_resource_file()
else:
try:
resource_file = self.__resource_file
except AttributeError:
resource_file = self._load_resource_file()
self.__resource_file = resource_file
return self.__resource_file
def iter(self):
'''Lazily iterates over the data.
This method is useful when you don't want to load all data in memory at
once. The returned iterator behaviour depends on the type of the data.
If it's a string, it'll iterate over rows **without removing the
newlines**. The returned data type will be bytes, not string. If it's
any other type, the iterator will simply return it.
Returns:
iter: An iterator that yields this resource.
Raises:
IOError: If there was some problem opening the data file (e.g. it
doesn't exist or we don't have permissions to read it).
'''
if self._resource_file:
return iter(self._resource_file)
else:
raise ValueError('Resource has no data')
def _metadata_data_has_changed(self, metadata):
changed = False
metadata_data_ids = self._metadata_data_ids(metadata)
try:
changed = metadata_data_ids != self._original_metadata_data_ids
except AttributeError:
self._original_metadata_data_ids = metadata_data_ids
return changed
def _metadata_data_ids(self, metadata):
return {
'data_id': id(metadata.get('data')),
'data_path_id': id(metadata.get('path')),
'data_url_id': id(metadata.get('url'))
}
def _load_resource_file(self):
inline_data = self.metadata.get('data')
data_path = self.metadata.get('path')
data_url = self.metadata.get('url')
if inline_data:
return InlineResourceFile(inline_data)
if self.local_data_path and os.path.isfile(self.local_data_path):
return LocalResourceFile(self.local_data_path)
elif self.remote_data_path:
try:
return RemoteResourceFile(self.remote_data_path)
except IOError as e:
if data_url:
return RemoteResourceFile(data_url)
raise e
elif data_url:
return RemoteResourceFile(data_url)
if inline_data or data_path or data_url:
raise IOError('Couldn\'t load resource.')
def _parse_data(self, metadata):
return self._load_data()
def _load_data(self):
if self._resource_file:
return self._resource_file.read()
def _absolute_path(self, path):
if path is None or self._base_path is None:
return path
return os.path.join(self._base_path, path)
class TabularResource(Resource):
'''Subclass of :class:`Resource` that deals with tabular data.
It currently supports CSV, TSV, XLS, XLSX and JSON.
'''
@classmethod
def can_handle(cls, metadata):
'''bool: Returns True if this class can handle the resource in
metadata.'''
def get_extension(path_or_url):
path = urllib.parse.urlparse(path_or_url).path
return path.split('.')[-1].lower()
TABULAR_RESOURCE_FORMATS = ('csv', 'tsv', 'xls', 'xlsx', 'json')
metadata_data = metadata.get('data')
if metadata_data:
try:
cls._raise_if_isnt_tabular_data(metadata_data)
return True
except ValueError:
pass
metadata_format = metadata.get('format', '').lower()
metadata_path = metadata.get('path', '')
metadata_url = metadata.get('url', '')
if metadata_format in TABULAR_RESOURCE_FORMATS or \
get_extension(metadata_path) in TABULAR_RESOURCE_FORMATS or \
get_extension(metadata_url) in TABULAR_RESOURCE_FORMATS:
return True
return False
@staticmethod
def _raise_if_isnt_tabular_data(data):
tabular_types = (
list,
tuple,
)
valid = False
for tabular_type in tabular_types:
if isinstance(data, tabular_type):
valid = True
break
if not valid:
types_str = ', '.join([t.__name__ for t in tabular_types])
msg = 'Expected data type to be any of \'{0}\' but it was \'{1}\''
raise ValueError(msg.format(types_str, type(data).__name__))
def iter(self):
        '''Lazily iterates over rows in data.
This method is useful when you don't want to load all data in memory at
once.
Returns:
iter: An iterator that yields each row in this resource.
Raises:
ValueError: If the data isn't tabular, if the resource has
no data, or if its specified encoding is incorrect
IOError: If there was some problem opening the data file (e.g. it
doesn't exist or we don't have permissions to read it).
'''
result = None
inline_data = self.metadata.get('data')
if self.local_data_path and os.path.isfile(self.local_data_path):
data_path_or_url = self.local_data_path
else:
data_path_or_url = self.remote_data_path
if inline_data:
inline_data = self._parse_inline_data()
result = iter(inline_data)
elif data_path_or_url:
dialect = self.metadata.get('dialect', {})
parser_options = {}
parser_class = None
if 'delimiter' in dialect:
parser_options['delimiter'] = dialect['delimiter']
if 'lineTerminator' in dialect:
parser_options['lineterminator'] = dialect['lineTerminator']
if len(dialect) > 0:
parser_class = tabulator.parsers.CSV
try:
table = tabulator.topen(data_path_or_url, with_headers=True,
encoding=self.metadata.get('encoding'),
parser_class=parser_class,
parser_options=parser_options)
result = TabulatorIterator(table)
except tabulator.errors.Error as e:
msg = 'Data at \'{0}\' isn\'t in a known tabular data format'
six.raise_from(ValueError(msg.format(data_path_or_url)), e)
if result is None:
if self.metadata.get('path'):
# FIXME: This is a hack to throw an IOError when local data
# exists but couldn't be loaded for some reason. If "path"
# existed and there were no issues opening it, "result" would
# never be None.
raise IOError('Resource\'s data couldn\'t be loaded.')
raise ValueError('Resource has no data')
return result
def _load_data(self):
return [row for row in self.iter()]
def _parse_inline_data(self):
data = self.metadata.get('data')
self._raise_if_isnt_tabular_data(data)
return data
def _is_url(path):
parts = six.moves.urllib.parse.urlsplit(path)
return bool(parts.scheme and parts.netloc)
class TabulatorIterator(object):
# FIXME: This is a workaround because Tabulator doesn't support returning a
# list of keyed dicts yet. When it does, we can remove this.
def __init__(self, tabulator_iter):
self._tabulator_iter = tabulator_iter
def __iter__(self):
return self
def __next__(self):
row = next(self._tabulator_iter)
return dict(zip(row.headers, row.values))
def next(self):
# For Py27 compatibility
return self.__next__()
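if __name__ == '__main__':
    # Minimal, self-contained sketch (added for illustration; not part of the
    # original module): inline list data is detected as tabular, so load()
    # returns a TabularResource and .data yields the rows unchanged.
    _example_metadata = {
        'name': 'example',
        'data': [
            {'id': 1, 'name': 'first'},
            {'id': 2, 'name': 'second'},
        ],
    }
    _resource = Resource.load(_example_metadata)
    print(type(_resource).__name__)  # TabularResource
    print(_resource.data)            # the two inline rows, unchanged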
| sirex/datapackage-py | datapackage/resource.py | Python | mit | 11,528 | 0.000087 |
# Generated by Django 2.0.1 on 2018-01-19 00:49
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='GameType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=60)),
],
),
migrations.CreateModel(
name='Table',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('game_days', models.CharField(max_length=100)),
('game_address', models.TextField()),
('game_type', models.ForeignKey(on_delete='cascade', to='games.GameType')),
],
),
]
| Ladeia/QueroJogar | games/migrations/0001_initial.py | Python | mpl-2.0 | 1,032 | 0.002907 |
from SceneGenerator import *
import math
import random
def scale(vec, s):
vec[0] *= s
vec[1] *= s
vec[2] *= s
return vec
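# Note (added for clarity): scale() multiplies the first three components in
# place and returns the same list object, e.g.
#   v = [1.0, 2.0, 3.0]
#   scale(v, 2)  # returns [2.0, 4.0, 6.0] and v itself is now [2.0, 4.0, 6.0]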
s = 1
scene = generateScene('CarScene', camPosition=[1,5,20], camLookat=[0,0,0])
addParameters(scene, h=0.005, maxIter=50, maxIterVel=50, contactTolerance=0.01, gravity=[0,-2,0], numberOfStepsPerRenderUpdate=10)
# floor
floorScale=[1000, 1, 1000]
floorScale = scale(floorScale, s)
floorT = [0,-0.5,0]
floorT = scale(floorT, s)
addRigidBody(scene, '../models/cube.obj', 2, coScale=floorScale,
scale=floorScale, translation=floorT,
dynamic=0, rest= 0.5)
carX = [0,2,0]
# chassis
restitution = 0.6
frict = 0.4
chassisX = add_vector(carX, [0,-0.1,0.1])
chassisScale = [2.5,0.4,1]
chassisScale = scale(chassisScale, s)
chassis = addRigidBody(scene, '../models/cube.obj', 0, coScale=chassisScale, scale=chassisScale,
translation=chassisX, dynamic=1, rest=restitution, friction=0.0, density = 200)
# damper bodies
damperScale = [0.2,0.1,0.2]
damperScale = scale(damperScale, s)
damperDensity = 50000;
damperX1 = add_vector(carX, [1.75, -0.7, 0.5])
dBody1 = addRigidBody(scene, '../models/cube.obj', 0, coScale=damperScale, scale=damperScale,
translation=damperX1, dynamic=1, rest=restitution, density = damperDensity)
damperX2 = add_vector(carX, [1.75, -0.7, -0.5])
dBody2 = addRigidBody(scene, '../models/cube.obj', 0, coScale=damperScale, scale=damperScale,
translation=damperX2, dynamic=1, rest=restitution, density = damperDensity)
damperX3 = add_vector(carX, [-1.75, -0.7, 0.5])
dBody3 = addRigidBody(scene, '../models/cube.obj', 0, coScale=damperScale, scale=damperScale,
translation=damperX3, dynamic=1, rest=restitution, density = damperDensity)
damperX4 = add_vector(carX, [-1.75, -0.7, -0.5])
dBody4 = addRigidBody(scene, '../models/cube.obj', 0, coScale=damperScale, scale=damperScale,
translation=damperX4, dynamic=1, rest=restitution, density = damperDensity)
# steering
steeringBodyX = add_vector(carX, [-1.75, -0.15, 0])
steeringBodyScale = [0.2,0.1,1]
steeringBodyScale = scale(steeringBodyScale, s)
steeringBody = addRigidBody(scene, '../models/cube.obj', 0, coScale=steeringBodyScale, scale=steeringBodyScale,
translation=steeringBodyX, dynamic=1, rest=restitution, density = 10000)
steeringMotorX = add_vector(carX, [-1.75, -0.4, 0])
addTargetAngleMotorHingeJoint(scene, chassis, steeringBody, steeringMotorX, [0, 1, 0], 0.707, [0,0, 2, 0.707, 8, 0.707, 12, -0.707, 18, -0.707, 20, 0], 1)
# wheels
wheelScale = [0.3,0.3,0.3]
wheelScale = scale(wheelScale, s)
wheelDensity = 600
wheelX1 = add_vector(carX, [1.75, -0.7, 0.9])
wheel1 = addRigidBody(scene, '../models/sphere.obj', 1, coScale=wheelScale, scale=wheelScale,
translation=wheelX1, dynamic=1, rest=restitution, friction=frict, density=wheelDensity)
wheelX2 = add_vector(carX, [1.75, -0.7, -0.9])
wheel2 = addRigidBody(scene, '../models/sphere.obj', 1, coScale=wheelScale, scale=wheelScale,
translation=wheelX2, dynamic=1, rest=restitution, friction=frict, density=wheelDensity)
wheelX3 = add_vector(carX, [-1.75, -0.7, 0.9])
wheel3 = addRigidBody(scene, '../models/sphere.obj', 1, coScale=wheelScale, scale=wheelScale,
translation=wheelX3, dynamic=1, rest=restitution, friction=frict, density=wheelDensity)
wheelX4 = add_vector(carX, [-1.75, -0.7, -0.9])
wheel4 = addRigidBody(scene, '../models/sphere.obj', 1, coScale=wheelScale, scale=wheelScale,
translation=wheelX4, dynamic=1, rest=restitution, friction=frict, density=wheelDensity)
motorX1 = add_vector(carX, [1.75, -0.7, 0.7])
motorX2 = add_vector(carX, [1.75, -0.7, -0.7])
motorX3 = add_vector(carX, [-1.75, -0.7, 0.7])
motorX4 = add_vector(carX, [-1.75, -0.7, -0.7])
addTargetVelocityMotorHingeJoint(scene, dBody1, wheel1, motorX1, [0, 0, 1], 10.0)
addTargetVelocityMotorHingeJoint(scene, dBody2, wheel2, motorX2, [0, 0, 1], 10.0)
addTargetVelocityMotorHingeJoint(scene, dBody3, wheel3, motorX3, [0, 0, 1], 10.0)
addTargetVelocityMotorHingeJoint(scene, dBody4, wheel4, motorX4, [0, 0, 1], 10.0)
addDamperJoint(scene, chassis, dBody1, [0, 1, 0], 500000.0)
addDamperJoint(scene, chassis, dBody2, [0, 1, 0], 500000.0)
addDamperJoint(scene, steeringBody, dBody3, [0, 1, 0], 500000.0)
addDamperJoint(scene, steeringBody, dBody4, [0, 1, 0], 500000.0)
writeScene(scene, 'CarScene.json')
| janbender/PositionBasedDynamics | data/scenes/CarScene.py | Python | mit | 4,517 | 0.019491 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import time
from datetime import timedelta
import pytest
from airflow import DAG
from airflow.exceptions import AirflowSensorTimeout, AirflowSkipException
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils import timezone
from airflow.utils.decorators import apply_defaults
from airflow.utils.timezone import datetime
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_DAG_ID = 'unit_test_dag'
class TimeoutTestSensor(BaseSensorOperator):
"""
    Sensor whose poke() always returns the provided return_value.
    :param return_value: the value that poke() should return on every call
:type return_value: any
"""
@apply_defaults
def __init__(self,
return_value=False,
*args,
**kwargs):
self.return_value = return_value
super(TimeoutTestSensor, self).__init__(*args, **kwargs)
def poke(self, context):
return self.return_value
def execute(self, context):
started_at = timezone.utcnow()
time_jump = self.params.get('time_jump')
while not self.poke(context):
if time_jump:
started_at -= time_jump
if (timezone.utcnow() - started_at).total_seconds() > self.timeout:
if self.soft_fail:
raise AirflowSkipException('Snap. Time is OUT.')
else:
raise AirflowSensorTimeout('Snap. Time is OUT.')
time.sleep(self.poke_interval)
self.log.info("Success criteria met. Exiting.")
class SensorTimeoutTest(unittest.TestCase):
def setUp(self):
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
self.dag = DAG(TEST_DAG_ID, default_args=args)
@pytest.mark.quarantined
def test_timeout(self):
t = TimeoutTestSensor(
task_id='test_timeout',
execution_timeout=timedelta(days=2),
return_value=False,
poke_interval=5,
params={'time_jump': timedelta(days=2, seconds=1)},
dag=self.dag
)
self.assertRaises(
AirflowSensorTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True
)
| owlabs/incubator-airflow | tests/sensors/test_timeout_sensor.py | Python | apache-2.0 | 3,102 | 0.000322 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-12-09 00:06
from __future__ import unicode_literals
from django.conf import settings
import django.core.files.storage
from django.db import migrations, models
import django.db.migrations.operations.special
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
replaces = [
("crashmanager", "0001_initial"),
("crashmanager", "0002_bugzillatemplate_security"),
("crashmanager", "0003_bucket_frequent"),
("crashmanager", "0004_add_tool"),
("crashmanager", "0005_add_user"),
("crashmanager", "0006_user_defaultproviderid"),
("crashmanager", "0007_bugzillatemplate_comment"),
("crashmanager", "0008_crashentry_crashaddressnumeric"),
("crashmanager", "0009_copy_crashaddress"),
("crashmanager", "0010_bugzillatemplate_security_group"),
("crashmanager", "0011_bucket_permanent"),
("crashmanager", "0012_crashentry_cachedcrashinfo"),
("crashmanager", "0013_init_cachedcrashinfo"),
("crashmanager", "0014_bugzillatemplate_testcase_filename"),
("crashmanager", "0015_crashentry_triagedonce"),
("crashmanager", "0016_auto_20160308_1500"),
("crashmanager", "0017_user_restricted"),
("crashmanager", "0018_auto_20170620_1503"),
("crashmanager", "0019_bucket_optimizedsignature"),
("crashmanager", "0020_add_app_permissions"),
]
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Bucket",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("signature", models.TextField()),
("shortDescription", models.CharField(blank=True, max_length=1023)),
],
),
migrations.CreateModel(
name="Bug",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("externalId", models.CharField(blank=True, max_length=255)),
("closed", models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="BugProvider",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("classname", models.CharField(max_length=255)),
("hostname", models.CharField(max_length=255)),
("urlTemplate", models.CharField(max_length=1023)),
],
),
migrations.CreateModel(
name="BugzillaTemplate",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.TextField()),
("product", models.TextField()),
("component", models.TextField()),
("summary", models.TextField(blank=True)),
("version", models.TextField()),
("description", models.TextField(blank=True)),
("whiteboard", models.TextField(blank=True)),
("keywords", models.TextField(blank=True)),
("op_sys", models.TextField(blank=True)),
("platform", models.TextField(blank=True)),
("priority", models.TextField(blank=True)),
("severity", models.TextField(blank=True)),
("alias", models.TextField(blank=True)),
("cc", models.TextField(blank=True)),
("assigned_to", models.TextField(blank=True)),
("qa_contact", models.TextField(blank=True)),
("target_milestone", models.TextField(blank=True)),
("attrs", models.TextField(blank=True)),
("security", models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name="Client",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name="CrashEntry",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created", models.DateTimeField(default=django.utils.timezone.now)),
("rawStdout", models.TextField(blank=True)),
("rawStderr", models.TextField(blank=True)),
("rawCrashData", models.TextField(blank=True)),
("metadata", models.TextField(blank=True)),
("env", models.TextField(blank=True)),
("args", models.TextField(blank=True)),
("crashAddress", models.CharField(blank=True, max_length=255)),
("shortSignature", models.CharField(blank=True, max_length=255)),
(
"bucket",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bucket",
),
),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Client",
),
),
],
),
migrations.CreateModel(
name="OS",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
("version", models.CharField(blank=True, max_length=127, null=True)),
],
),
migrations.CreateModel(
name="Platform",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
],
),
migrations.CreateModel(
name="Product",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
("version", models.CharField(blank=True, max_length=127, null=True)),
],
),
migrations.CreateModel(
name="TestCase",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"test",
models.FileField(
storage=django.core.files.storage.FileSystemStorage(
location=None
),
upload_to=b"tests",
),
),
("size", models.IntegerField(default=0)),
("quality", models.IntegerField(default=0)),
("isBinary", models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name="crashentry",
name="os",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.OS"
),
),
migrations.AddField(
model_name="crashentry",
name="platform",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Platform"
),
),
migrations.AddField(
model_name="crashentry",
name="product",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Product"
),
),
migrations.AddField(
model_name="crashentry",
name="testcase",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.TestCase",
),
),
migrations.AddField(
model_name="bug",
name="externalType",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.BugProvider",
),
),
migrations.AddField(
model_name="bucket",
name="bug",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bug",
),
),
migrations.AddField(
model_name="bucket",
name="frequent",
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name="Tool",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
],
),
migrations.CreateModel(
name="User",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("defaultTemplateId", models.IntegerField(default=0)),
("defaultToolsFilter", models.ManyToManyField(to="crashmanager.Tool")),
(
"user",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
("defaultProviderId", models.IntegerField(default=1)),
],
),
migrations.AddField(
model_name="crashentry",
name="tool",
field=models.ForeignKey(
default=1,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Tool",
),
preserve_default=False,
),
migrations.AddField(
model_name="bugzillatemplate",
name="comment",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="crashentry",
name="crashAddressNumeric",
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="bugzillatemplate",
name="security_group",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="bucket",
name="permanent",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="crashentry",
name="cachedCrashInfo",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="bugzillatemplate",
name="testcase_filename",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="crashentry",
name="triagedOnce",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="user",
name="restricted",
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name="BucketWatch",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("lastCrash", models.IntegerField(default=0)),
(
"bucket",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bucket",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.User",
),
),
],
),
migrations.AddField(
model_name="user",
name="bucketsWatching",
field=models.ManyToManyField(
through="crashmanager.BucketWatch", to="crashmanager.Bucket"
),
),
migrations.AddField(
model_name="bucket",
name="optimizedSignature",
field=models.TextField(blank=True, null=True),
),
migrations.AlterModelOptions(
name="user",
options={
"permissions": (
("view_crashmanager", "Can see CrashManager app"),
("view_covmanager", "Can see CovManager app"),
("view_ec2spotmanager", "Can see EC2SpotManager app"),
)
},
),
]
| MozillaSecurity/FuzzManager | server/crashmanager/migrations/0001_squashed_0020_add_app_permissions.py | Python | mpl-2.0 | 15,946 | 0.000564 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from .models import Group, Firma, User, oauth, OAuthSignIn
from random import randint
from app.utils import fake_firma, fake_user
def populate_db(num_users=5, num_groups=15, num_firms=5):
"""
Fills the data will fake data.
"""
admin_username = 'cburmeister'
admin_email = 'cburmeister@discogs.com'
admin_password = 'test123'
users = []
for _ in range(int(num_users)):
users.append(
fake_user()
)
"""users.append(
User(
admin_username,
admin_email,
admin_password,
fake.ipv4(),
active=True,
is_sadmin=True
)
)"""
for user in users:
db.session.add(user)
firms = []
for _ in range(int(num_firms)):
firms.append(
fake_firma()
)
for firm in firms:
db.session.add(firm)
db.session.commit()
class DataTable(object):
"""
Represents a sortable, filterable, searchable, and paginated set of data,
generated by arguments in the request values.
TODO:
- flask-ext for access to request values?
- throw some custom errors when getting fields, etc
- get rid of the 4 helpers that do the same thing
- should this generate some html to help with visualizing the data?
"""
def __init__(self, model, columns, sortable, searchable, filterable, limits, request):
self.model = model
self.query = self.model.query
self.columns = columns
self.sortable = sortable
self.orders = ['asc', 'desc']
self.searchable = searchable
self.filterable = filterable
self.limits = limits
self.get_selected(request)
for f in self.filterable:
self.selected_filter = request.values.get(f.name, None)
self.filter(f.name, self.selected_filter)
self.search(self.selected_query)
self.sort(self.selected_sort, self.selected_order)
self.paginate(self.selected_page, self.selected_limit)
def get_selected(self, request):
self.selected_sort = request.values.get('sort', self.sortables[0])
self.selected_order = request.values.get('order', self.orders[0])
self.selected_query = request.values.get('query', None)
self.selected_limit = request.values.get('limit', self.limits[1], type=int)
self.selected_page = request.values.get('page', 1, type=int)
@property
def _columns(self):
return [x.name for x in self.columns]
@property
def sortables(self):
return [x.name for x in self.sortable]
@property
def searchables(self):
return [x.name for x in self.searchable]
@property
def filterables(self):
return [x.name for x in self.filterable]
@property
def colspan(self):
"""Length of all columns."""
return len(self.columns) + len(self.sortable) + len(self.searchable)
def sort(self, field, order):
"""Sorts the data based on a field & order."""
if field in self.sortables and order in self.orders:
field = getattr(getattr(self.model, field), order)
self.query = self.query.order_by(field())
def filter(self, field, value):
"""Filters the query based on a field & value."""
if field and value:
field = getattr(self.model, field)
self.query = self.query.filter(field==value)
def search(self, search_query):
"""Filters the query based on a list of fields & search query."""
if search_query:
search_query = '%%%s%%' % search_query
from sqlalchemy import or_
fields = [getattr(self.model, x) for x in self.searchables]
self.query = self.query.filter(or_(*[x.like(search_query) for x in fields]))
def paginate(self, page, limit):
"""Paginate the query based on a page & limit."""
self.query = self.query.paginate(page, limit)
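    # Usage sketch (illustrative only; the column names are assumptions based
    # on the User model imported above): inside a Flask view you would build
    # the table from the incoming request and render query.items / query.pages
    # from the resulting pagination object.
    #   datatable = DataTable(
    #       model=User,
    #       columns=[],
    #       sortable=[User.username, User.email],
    #       searchable=[User.username, User.email],
    #       filterable=[User.active],
    #       limits=[25, 50, 100],
    #       request=request,
    #   )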
| Urumasi/Flask-Bones | app/data/__init__.py | Python | mit | 4,084 | 0.001959 |
# # a=1
# # b=a
# # print(a,b)
# #
# # for i in range(1,10,2):
# # print(i)
#
# def fun():
# a=10
# print(a)
# return a +100
#
# sun = fun()
# # sun + 100
#
# print(fun())
# print(sun)
file = open('my file.txt','r') # open the file in read mode
# content = file.readline()  # reads only the first line
content = file.readlines() # read all lines and store them as a list
content[3]
print(content[3])
| 1065865483/0python_script | Python/01.py | Python | mit | 415 | 0.014164 |
#!/usr/bin/python
# bigcinemas
class InvalidAge(Exception):
def __init__(self,age):
self.age = age
def validate_age(age):
if age < 18:
raise InvalidAge(age)
else:
return "Welcome to the movies!!"
age = int(raw_input("please enter your age:"))
#print validate_age(age)
try:
validate_age(age)
# except Exception as e:
except InvalidAge as e:
print "Buddy!! you are very young at {}!! Grow up a bit.".format(e.age)
else:
print validate_age(age)
| tuxfux-hlp-notes/python-batches | archieves/batch-64/14-oop/sixth.py | Python | gpl-3.0 | 462 | 0.025974 |
# coding=utf-8
from .token import PAYMENT_PROVIDER_TOKEN
from telegram import (LabeledPrice, InlineKeyboardButton, InlineKeyboardMarkup, ParseMode)
from telegram.ext import (MessageHandler, CallbackQueryHandler, Filters, PreCheckoutQueryHandler, ShippingQueryHandler)
import logging
logger = logging.getLogger(__name__)
class TelegramDonation:
def manual_cuantity(self, bot, update):
try:
return 0 # CONVERSATION.END
except: pass
def mostrar_donacion(self, bot, update):
if update.message:
pass # TODO
else:
donation_amount = int(update.callback_query.data.split("-")[1])
if donation_amount - 100:
less = donation_amount - 50
else:
less = donation_amount
more = donation_amount + 50
eur_num_to_text = ("%s" % (donation_amount / 100.0) + "0").replace(".", ",")
msg = "_TEST_ actualmente detrás del código de Piloco hay una única persona trabajando para mejorarlo. " \
"Bla bla bla.\nSi puedes hacer una aportación económica sería de gran ayuda, para mantener los se" \
"rvidores y para poder dedicar más tiempo a Piloco."
keyboard = [[InlineKeyboardButton("Cancelar 💔", callback_data="donation_cancel")],
[InlineKeyboardButton("➖", callback_data="donation_new-%s" % less),
InlineKeyboardButton("%s €" % eur_num_to_text, callback_data="donation_custom"),
InlineKeyboardButton("➕", callback_data="donation_new-%s" % more)],
[InlineKeyboardButton("Donar %s € ❤️" % eur_num_to_text, callback_data="donate-%s" % donation_amount)]]
if update.message:
update.message.reply_text(msg, reply_markup=InlineKeyboardMarkup(keyboard))
else:
update.callback_query.message.edit_text(msg, reply_markup=InlineKeyboardMarkup(keyboard))
update.callback_query.answer()
def start_without_shipping_callback(self, bot, update):
chat_id = update.callback_query.message.chat_id
title = "Donación"
description = "Aportación económica para el mantenimiento y desarrollo de Pilocobot."
payload = "Custom-Payload"
provider_token = PAYMENT_PROVIDER_TOKEN
start_parameter = "test-payment"
currency = "EUR"
price = int(update.callback_query.data.split("-")[1])
prices = [LabeledPrice("Donacion", price)]
update.callback_query.message.edit_reply_markup(reply_markup=InlineKeyboardMarkup([]))
# optionally pass need_name=True, need_phone_number=True,
# need_email=True, need_shipping_address=True, is_flexible=True
bot.sendInvoice(chat_id, title, description, payload,
provider_token, start_parameter, currency, prices)
# after (optional) shipping, it's the pre-checkout
def precheckout_callback(self, bot, update):
query = update.pre_checkout_query
# check the payload, is this from your bot?
if query.invoice_payload != 'Custom-Payload':
# answer False pre_checkout_query
bot.answer_pre_checkout_query(pre_checkout_query_id=query.id, ok=False,
error_message="Parece que ha habido un error")
else:
bot.answer_pre_checkout_query(pre_checkout_query_id=query.id, ok=True)
# finally, after contacting to the payment provider...
def successful_payment_callback(self, bot, update):
# do something after successful receive of payment
update.message.reply_text(
"¡La transacción se ha completado con éxito! Gracias por tu aportación, "
"has recibido %s puntos reputación." % update.message.successful_payment.total_amount)
TelegramDonation = TelegramDonation()
def main():
# Optional handler if your product requires shipping
dp.add_handler(ShippingQueryHandler(shipping_callback))
# Pre-checkout handler to final check
dp.add_handler(PreCheckoutQueryHandler(precheckout_callback))
# Success! Notify your user!
dp.add_handler(MessageHandler(Filters.successful_payment, successful_payment_callback))
dp.add_handler(CallbackQueryHandler(mostrar_donacion, pattern="^donation_new-\d*"))
dp.add_handler(CallbackQueryHandler(start_without_shipping_callback, pattern="^donate-\d*"))
# Start the Bot
updater.start_polling()
| vetu11/piloco | bin/paybot.py | Python | gpl-3.0 | 4,476 | 0.007872 |
#!/usr/bin/env python
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Usage: make-file-arrays.py [--condition=condition-string] --out-h=<header-file-name> --out-cpp=<cpp-file-name> <input-file>...
import os.path
import re
import sys
from optparse import OptionParser
def make_variable_name_and_read(file_name):
result = re.match(r"([\w\d_]+)\.([\w\d_]+)", os.path.basename(file_name))
if not result:
print "Invalid input file name:", os.path.basename(file_name)
sys.exit(1)
variable_name = result.group(1)[0].lower() + result.group(1)[1:] + result.group(2).capitalize()
file = open(file_name, "rb")
content = file.read()
file.close()
return (variable_name, content)
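# Example (added for clarity): for an input file named "InjectedScriptSource.js"
# the regex captures ("InjectedScriptSource", "js"), so the generated variable
# name is "injectedScriptSourceJs" and the file's raw bytes are returned with it.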
def strip_whitespace_and_comments(file_name, content):
result = re.match(r".*\.([^.]+)", file_name)
if not result:
print "The file name has no extension:", file_name
sys.exit(1)
extension = result.group(1).lower()
multi_line_comment = re.compile(r"/\*.*?\*/", re.MULTILINE | re.DOTALL)
single_line_comment = re.compile(r"//.*$", re.MULTILINE)
repeating_space = re.compile(r"[ \t]+", re.MULTILINE)
leading_space = re.compile(r"^[ \t]+", re.MULTILINE)
trailing_space = re.compile(r"[ \t]+$", re.MULTILINE)
empty_line = re.compile(r"\n+")
if extension == "js":
content = multi_line_comment.sub("", content)
content = single_line_comment.sub("", content)
content = repeating_space.sub(" ", content)
content = leading_space.sub("", content)
content = trailing_space.sub("", content)
content = empty_line.sub("\n", content)
elif extension == "css":
content = multi_line_comment.sub("", content)
content = repeating_space.sub(" ", content)
content = leading_space.sub("", content)
content = trailing_space.sub("", content)
content = empty_line.sub("\n", content)
return content
def main():
parser = OptionParser()
parser.add_option("--out-h", dest="out_header")
parser.add_option("--out-cpp", dest="out_cpp")
parser.add_option("--condition", dest="flag")
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error("Need one or more input files")
if not options.out_header:
parser.error("Need to specify --out-h=filename")
if not options.out_cpp:
parser.error("Need to specify --out-cpp=filename")
if options.flag:
options.flag = options.flag.replace(" AND ", " && ")
options.flag = options.flag.replace(" OR ", " || ")
header_file = open(options.out_header, "w")
if options.flag:
header_file.write("#if " + options.flag + "\n")
header_file.write("namespace WebCore {\n")
cpp_file = open(options.out_cpp, "w")
cpp_file.write("#include \"config.h\"\n")
cpp_file.write("#include \"" + os.path.basename(options.out_header) + "\"\n")
if options.flag:
cpp_file.write("#if " + options.flag + "\n")
cpp_file.write("namespace WebCore {\n")
for file_name in args:
(variable_name, content) = make_variable_name_and_read(file_name)
content = strip_whitespace_and_comments(file_name, content)
size = len(content)
header_file.write("extern const char %s[%d];\n" % (variable_name, size))
cpp_file.write("const char %s[%d] = {\n" % (variable_name, size))
for index in range(size):
char_code = ord(content[index])
if char_code < 128:
cpp_file.write("%d" % char_code)
else:
cpp_file.write("'\\x%02x'" % char_code)
cpp_file.write("," if index != len(content) - 1 else "};\n")
if index % 20 == 19:
cpp_file.write("\n")
cpp_file.write("\n")
header_file.write("}\n")
if options.flag:
header_file.write("#endif\n")
header_file.close()
cpp_file.write("}\n")
if options.flag:
cpp_file.write("#endif\n")
cpp_file.close()
if __name__ == "__main__":
main()
| nawawi/wkhtmltopdf | webkit/Source/WebCore/make-file-arrays.py | Python | lgpl-3.0 | 5,529 | 0.000723 |
# -*- coding: utf-8 -*-
# isort: skip_file
from django.contrib.auth.models import User
from django.db import models
# This line cannot move to the below according to the isort linter.
# Resolve it firstly, then apply isort again.
from .base import TCMSContentTypeBaseModel # noqa
from tcms.logs.views import TCMSLog
from tcms.testruns import signals as run_watchers # noqa
from tcms.xmlrpc.serializer import XMLRPCSerializer
from .base import UrlMixin
User._meta.ordering = ["username"]
class TCMSActionModel(models.Model, UrlMixin):
"""
TCMS action models.
Use for global log system.
"""
class Meta:
abstract = True
@classmethod
def to_xmlrpc(cls, query={}):
"""
Convert the query set for XMLRPC
"""
s = XMLRPCSerializer(queryset=cls.objects.filter(**query).order_by("pk"))
return s.serialize_queryset()
def serialize(self):
"""
Convert the model for XMLPRC
"""
s = XMLRPCSerializer(model=self)
return s.serialize_model()
def log(self):
log = TCMSLog(model=self)
return log.list()
def log_action(self, who, new_value, field="", original_value=""):
log = TCMSLog(model=self)
log.make(who=who, field=field, original_value=original_value, new_value=new_value)
return log
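    # Usage sketch (illustrative only): any subclass instance can record an
    # audit entry roughly like
    #   obj.log_action(who=request.user, field='summary',
    #                  original_value=old_summary, new_value=new_summary)
    # and read the accumulated entries back through obj.log().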
def clean(self):
strip_types = (
models.CharField,
models.TextField,
models.URLField,
models.EmailField,
models.IPAddressField,
models.GenericIPAddressField,
models.SlugField,
)
# FIXME: reconsider alternative solution
# It makes no sense to add field name each time when a new field is
# added and it accepts values containing either \t, \r and \n.
ignored_fields = ("notes", "issue_report_params", "issue_report_templ")
for field in self._meta.fields:
# TODO: hardcode 'notes' here
if field.name not in ignored_fields and isinstance(field, strip_types):
value = getattr(self, field.name)
if value:
setattr(
self,
field.name,
value.replace("\t", " ").replace("\n", " ").replace("\r", " "),
)
| Nitrate/Nitrate | src/tcms/core/models/__init__.py | Python | gpl-2.0 | 2,359 | 0.001696 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: file
version_added: historical
short_description: Manage files and file properties
extends_documentation_fragment: files
description:
- Set attributes of files, symlinks or directories.
- Alternatively, remove files, symlinks or directories.
- Many other modules support the same options as the C(file) module - including M(copy), M(template), and M(assemble).
- For Windows targets, use the M(win_file) module instead.
options:
path:
description:
- Path to the file being managed.
type: path
required: yes
aliases: [ dest, name ]
state:
description:
- If C(absent), directories will be recursively deleted, and files or symlinks will
be unlinked. In the case of a directory, if C(diff) is declared, you will see the files and folders deleted listed
under C(path_contents). Note that C(absent) will not cause C(file) to fail if the C(path) does
not exist as the state did not change.
- If C(directory), all intermediate subdirectories will be created if they
do not exist. Since Ansible 1.7 they will be created with the supplied permissions.
- If C(file), without any other options this works mostly as a 'stat' and will return the current state of C(path).
Even with other options (i.e C(mode)), the file will be modified but will NOT be created if it does not exist;
see the C(touch) value or the M(copy) or M(template) module if you want that behavior.
- If C(hard), the hard link will be created or changed.
- If C(link), the symbolic link will be created or changed.
- If C(touch) (new in 1.4), an empty file will be created if the C(path) does not
exist, while an existing file or directory will receive updated file access and
modification times (similar to the way C(touch) works from the command line).
type: str
default: file
choices: [ absent, directory, file, hard, link, touch ]
src:
description:
- Path of the file to link to.
- This applies only to C(state=link) and C(state=hard).
- For C(state=link), this will also accept a non-existing path.
- Relative paths are relative to the file being created (C(path)) which is how
the Unix command C(ln -s SRC DEST) treats relative paths.
type: path
recurse:
description:
- Recursively set the specified file attributes on directory contents.
- This applies only when C(state) is set to C(directory).
type: bool
default: no
version_added: '1.1'
force:
description:
- >
Force the creation of the symlinks in two cases: the source file does
not exist (but will appear later); the destination exists and is a file (so, we need to unlink the
C(path) file and create symlink to the C(src) file in place of it).
type: bool
default: no
follow:
description:
- This flag indicates that filesystem links, if they exist, should be followed.
- Previous to Ansible 2.5, this was C(no) by default.
type: bool
default: yes
version_added: '1.8'
modification_time:
description:
- This parameter indicates the time the file's modification time should be set to.
- Should be C(preserve) when no modification is required, C(YYYYMMDDHHMM.SS) when using default time format, or C(now).
- Default is None meaning that C(preserve) is the default for C(state=[file,directory,link,hard]) and C(now) is default for C(state=touch).
type: str
version_added: "2.7"
modification_time_format:
description:
- When used with C(modification_time), indicates the time format that must be used.
- Based on default Python format (see time.strftime doc).
type: str
default: "%Y%m%d%H%M.%S"
version_added: '2.7'
access_time:
description:
- This parameter indicates the time the file's access time should be set to.
- Should be C(preserve) when no modification is required, C(YYYYMMDDHHMM.SS) when using default time format, or C(now).
- Default is C(None) meaning that C(preserve) is the default for C(state=[file,directory,link,hard]) and C(now) is default for C(state=touch).
type: str
version_added: '2.7'
access_time_format:
description:
- When used with C(access_time), indicates the time format that must be used.
- Based on default Python format (see time.strftime doc).
type: str
default: "%Y%m%d%H%M.%S"
version_added: '2.7'
seealso:
- module: assemble
- module: copy
- module: stat
- module: template
- module: win_file
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Change file ownership, group and permissions
file:
path: /etc/foo.conf
owner: foo
group: foo
mode: '0644'
- name: Give insecure permissions to an existing file
file:
path: /work
owner: root
group: root
mode: '1777'
- name: Create a symbolic link
file:
src: /file/to/link/to
dest: /path/to/symlink
owner: foo
group: foo
state: link
- name: Create two hard links
file:
src: '/tmp/{{ item.src }}'
dest: '{{ item.dest }}'
state: hard
loop:
- { src: x, dest: y }
- { src: z, dest: k }
- name: Touch a file, using symbolic modes to set the permissions (equivalent to 0644)
file:
path: /etc/foo.conf
state: touch
mode: u=rw,g=r,o=r
- name: Touch the same file, but add/remove some permissions
file:
path: /etc/foo.conf
state: touch
mode: u+rw,g-wx,o-rwx
- name: Touch again the same file, but don't change times; this makes the task idempotent
file:
path: /etc/foo.conf
state: touch
mode: u+rw,g-wx,o-rwx
modification_time: preserve
access_time: preserve
- name: Create a directory if it does not exist
file:
path: /etc/some_directory
state: directory
mode: '0755'
- name: Update modification and access time of given file
file:
path: /etc/some_file
state: file
modification_time: now
access_time: now
- name: Set access time based on seconds from epoch value
file:
path: /etc/another_file
state: file
access_time: '{{ "%Y%m%d%H%M.%S" | strftime(stat_var.stat.atime) }}'
- name: Recursively change ownership of a directory
file:
path: /etc/foo
state: directory
recurse: yes
owner: foo
group: foo
- name: Remove file (delete file)
file:
path: /etc/foo.txt
state: absent
- name: Recursively remove directory
file:
path: /etc/foo
state: absent
'''
RETURN = r'''
'''
import errno
import os
import shutil
import sys
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes, to_native
# There will only be a single AnsibleModule object per module
module = None
class AnsibleModuleError(Exception):
def __init__(self, results):
self.results = results
def __repr__(self):
print('AnsibleModuleError(results={0})'.format(self.results))
class ParameterError(AnsibleModuleError):
pass
class Sentinel(object):
def __new__(cls, *args, **kwargs):
return cls
def _ansible_excepthook(exc_type, exc_value, tb):
# Using an exception allows us to catch it if the calling code knows it can recover
if issubclass(exc_type, AnsibleModuleError):
module.fail_json(**exc_value.results)
else:
sys.__excepthook__(exc_type, exc_value, tb)
def additional_parameter_handling(params):
"""Additional parameter validation and reformatting"""
# When path is a directory, rewrite the pathname to be the file inside of the directory
# TODO: Why do we exclude link? Why don't we exclude directory? Should we exclude touch?
# I think this is where we want to be in the future:
# when isdir(path):
# if state == absent: Remove the directory
# if state == touch: Touch the directory
# if state == directory: Assert the directory is the same as the one specified
# if state == file: place inside of the directory (use _original_basename)
# if state == link: place inside of the directory (use _original_basename. Fallback to src?)
# if state == hard: place inside of the directory (use _original_basename. Fallback to src?)
if (params['state'] not in ("link", "absent") and os.path.isdir(to_bytes(params['path'], errors='surrogate_or_strict'))):
basename = None
if params['_original_basename']:
basename = params['_original_basename']
elif params['src']:
basename = os.path.basename(params['src'])
if basename:
params['path'] = os.path.join(params['path'], basename)
# state should default to file, but since that creates many conflicts,
# default state to 'current' when it exists.
prev_state = get_state(to_bytes(params['path'], errors='surrogate_or_strict'))
if params['state'] is None:
if prev_state != 'absent':
params['state'] = prev_state
elif params['recurse']:
params['state'] = 'directory'
else:
params['state'] = 'file'
# make sure the target path is a directory when we're doing a recursive operation
if params['recurse'] and params['state'] != 'directory':
raise ParameterError(results={"msg": "recurse option requires state to be 'directory'",
"path": params["path"]})
# Fail if 'src' but no 'state' is specified
if params['src'] and params['state'] not in ('link', 'hard'):
raise ParameterError(results={'msg': "src option requires state to be 'link' or 'hard'",
'path': params['path']})
def get_state(path):
''' Find out current state '''
b_path = to_bytes(path, errors='surrogate_or_strict')
try:
if os.path.lexists(b_path):
if os.path.islink(b_path):
return 'link'
elif os.path.isdir(b_path):
return 'directory'
elif os.stat(b_path).st_nlink > 1:
return 'hard'
# could be many other things, but defaulting to file
return 'file'
return 'absent'
except OSError as e:
if e.errno == errno.ENOENT: # It may already have been removed
return 'absent'
else:
raise
# This should be moved into the common file utilities
def recursive_set_attributes(b_path, follow, file_args, mtime, atime):
changed = False
try:
for b_root, b_dirs, b_files in os.walk(b_path):
for b_fsobj in b_dirs + b_files:
b_fsname = os.path.join(b_root, b_fsobj)
if not os.path.islink(b_fsname):
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
else:
# Change perms on the link
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
if follow:
b_fsname = os.path.join(b_root, os.readlink(b_fsname))
# The link target could be nonexistent
if os.path.exists(b_fsname):
if os.path.isdir(b_fsname):
# Link is a directory so change perms on the directory's contents
changed |= recursive_set_attributes(b_fsname, follow, file_args, mtime, atime)
# Change perms on the file pointed to by the link
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
except RuntimeError as e:
# on Python3 "RecursionError" is raised which is derived from "RuntimeError"
        # TODO once this function is moved into the common file utilities, this should probably raise a more general exception
raise AnsibleModuleError(
results={'msg': "Could not recursively set attributes on %s. Original error was: '%s'" % (to_native(b_path), to_native(e))}
)
return changed
def initial_diff(path, state, prev_state):
diff = {'before': {'path': path},
'after': {'path': path},
}
if prev_state != state:
diff['before']['state'] = prev_state
diff['after']['state'] = state
if state == 'absent' and prev_state == 'directory':
walklist = {
'directories': [],
'files': [],
}
b_path = to_bytes(path, errors='surrogate_or_strict')
for base_path, sub_folders, files in os.walk(b_path):
for folder in sub_folders:
folderpath = os.path.join(base_path, folder)
walklist['directories'].append(folderpath)
for filename in files:
filepath = os.path.join(base_path, filename)
walklist['files'].append(filepath)
diff['before']['path_content'] = walklist
return diff
#
# States
#
def get_timestamp_for_time(formatted_time, time_format):
if formatted_time == 'preserve':
return None
elif formatted_time == 'now':
return Sentinel
else:
try:
struct = time.strptime(formatted_time, time_format)
struct_time = time.mktime(struct)
except (ValueError, OverflowError) as e:
raise AnsibleModuleError(results={'msg': 'Error while obtaining timestamp for time %s using format %s: %s'
% (formatted_time, time_format, to_native(e, nonstring='simplerepr'))})
return struct_time
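# Sketch of the three cases handled above (hypothetical values):
#
#   get_timestamp_for_time('preserve', '%Y%m%d%H%M.%S')        -> None (keep existing time)
#   get_timestamp_for_time('now', '%Y%m%d%H%M.%S')             -> Sentinel (use current time)
#   get_timestamp_for_time('202002291230.00', '%Y%m%d%H%M.%S') -> epoch seconds from
#       time.mktime(time.strptime(...)); a malformed value raises AnsibleModuleError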
def update_timestamp_for_file(path, mtime, atime, diff=None):
b_path = to_bytes(path, errors='surrogate_or_strict')
try:
# When mtime and atime are set to 'now', rely on utime(path, None) which does not require ownership of the file
# https://github.com/ansible/ansible/issues/50943
if mtime is Sentinel and atime is Sentinel:
# It's not exact but we can't rely on os.stat(path).st_mtime after setting os.utime(path, None) as it may
# not be updated. Just use the current time for the diff values
mtime = atime = time.time()
previous_mtime = os.stat(b_path).st_mtime
previous_atime = os.stat(b_path).st_atime
set_time = None
else:
            # If both parameters are None ('preserve'), there is nothing to do
if mtime is None and atime is None:
return False
previous_mtime = os.stat(b_path).st_mtime
previous_atime = os.stat(b_path).st_atime
if mtime is None:
mtime = previous_mtime
elif mtime is Sentinel:
mtime = time.time()
if atime is None:
atime = previous_atime
elif atime is Sentinel:
atime = time.time()
# If both timestamps are already ok, nothing to do
if mtime == previous_mtime and atime == previous_atime:
return False
set_time = (atime, mtime)
os.utime(b_path, set_time)
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
if 'after' not in diff:
diff['after'] = {}
if mtime != previous_mtime:
diff['before']['mtime'] = previous_mtime
diff['after']['mtime'] = mtime
if atime != previous_atime:
diff['before']['atime'] = previous_atime
diff['after']['atime'] = atime
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while updating modification or access time: %s'
% to_native(e, nonstring='simplerepr'), 'path': path})
return True
def keep_backward_compatibility_on_timestamps(parameter, state):
if state in ['file', 'hard', 'directory', 'link'] and parameter is None:
return 'preserve'
elif state == 'touch' and parameter is None:
return 'now'
else:
return parameter
def execute_diff_peek(path):
"""Take a guess as to whether a file is a binary file"""
b_path = to_bytes(path, errors='surrogate_or_strict')
appears_binary = False
try:
with open(b_path, 'rb') as f:
head = f.read(8192)
except Exception:
# If we can't read the file, we're okay assuming it's text
pass
else:
if b"\x00" in head:
appears_binary = True
return appears_binary
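# Example (hypothetical paths): execute_diff_peek('/bin/ls') -> True, since an ELF
# binary normally contains b"\x00" within its first 8 KiB, while a plain UTF-8 text
# file such as '/etc/hosts' -> False; unreadable files are assumed to be text.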
def ensure_absent(path):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
result = {}
if prev_state != 'absent':
diff = initial_diff(path, 'absent', prev_state)
if not module.check_mode:
if prev_state == 'directory':
try:
shutil.rmtree(b_path, ignore_errors=False)
except Exception as e:
raise AnsibleModuleError(results={'msg': "rmtree failed: %s" % to_native(e)})
else:
try:
os.unlink(b_path)
except OSError as e:
if e.errno != errno.ENOENT: # It may already have been removed
raise AnsibleModuleError(results={'msg': "unlinking failed: %s " % to_native(e),
'path': path})
result.update({'path': path, 'changed': True, 'diff': diff, 'state': 'absent'})
else:
result.update({'path': path, 'changed': False, 'state': 'absent'})
return result
def execute_touch(path, follow, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
changed = False
result = {'dest': path}
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
if not module.check_mode:
if prev_state == 'absent':
# Create an empty file if the filename did not already exist
try:
open(b_path, 'wb').close()
changed = True
except (OSError, IOError) as e:
raise AnsibleModuleError(results={'msg': 'Error, could not touch target: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
# Update the attributes on the file
diff = initial_diff(path, 'touch', prev_state)
file_args = module.load_file_common_arguments(module.params)
try:
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
except SystemExit as e:
if e.code:
# We take this to mean that fail_json() was called from
# somewhere in basic.py
if prev_state == 'absent':
# If we just created the file we can safely remove it
os.remove(b_path)
raise
result['changed'] = changed
result['diff'] = diff
return result
def ensure_file_attributes(path, follow, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
if prev_state != 'file':
if follow and prev_state == 'link':
# follow symlink and operate on original
b_path = os.path.realpath(b_path)
path = to_native(b_path, errors='strict')
prev_state = get_state(b_path)
file_args['path'] = path
if prev_state not in ('file', 'hard'):
# file is not absent and any other state is a conflict
raise AnsibleModuleError(results={'msg': 'file (%s) is %s, cannot continue' % (path, prev_state),
'path': path})
diff = initial_diff(path, 'file', prev_state)
changed = module.set_fs_attributes_if_different(file_args, False, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'path': path, 'changed': changed, 'diff': diff}
def ensure_directory(path, follow, recurse, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
# For followed symlinks, we need to operate on the target of the link
if follow and prev_state == 'link':
b_path = os.path.realpath(b_path)
path = to_native(b_path, errors='strict')
file_args['path'] = path
prev_state = get_state(b_path)
changed = False
diff = initial_diff(path, 'directory', prev_state)
if prev_state == 'absent':
# Create directory and assign permissions to it
if module.check_mode:
return {'changed': True, 'diff': diff}
curpath = ''
try:
# Split the path so we can apply filesystem attributes recursively
# from the root (/) directory for absolute paths or the base path
# of a relative path. We can then walk the appropriate directory
# path to apply attributes.
# Something like mkdir -p with mode applied to all of the newly created directories
for dirname in path.strip('/').split('/'):
curpath = '/'.join([curpath, dirname])
# Remove leading slash if we're creating a relative path
if not os.path.isabs(path):
curpath = curpath.lstrip('/')
b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
if not os.path.exists(b_curpath):
try:
os.mkdir(b_curpath)
changed = True
except OSError as ex:
# Possibly something else created the dir since the os.path.exists
# check above. As long as it's a dir, we don't need to error out.
if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
raise
tmp_file_args = file_args.copy()
tmp_file_args['path'] = curpath
changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
except Exception as e:
raise AnsibleModuleError(results={'msg': 'There was an issue creating %s as requested:'
' %s' % (curpath, to_native(e)),
'path': path})
return {'path': path, 'changed': changed, 'diff': diff}
elif prev_state != 'directory':
# We already know prev_state is not 'absent', therefore it exists in some form.
raise AnsibleModuleError(results={'msg': '%s already exists as a %s' % (path, prev_state),
'path': path})
#
# previous state == directory
#
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
if recurse:
changed |= recursive_set_attributes(b_path, follow, file_args, mtime, atime)
return {'path': path, 'changed': changed, 'diff': diff}
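# Creation sketch for the 'absent' branch above (hypothetical task values): with
# path='/opt/app/logs' and only '/opt' existing, the loop creates '/opt/app' and
# then '/opt/app/logs', applying the task's owner/group/mode and timestamps to
# each newly created component -- roughly `mkdir -p` plus chown/chmod on the new
# directories.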
def ensure_symlink(path, src, follow, force, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
prev_state = get_state(b_path)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
    # src is either the source of a symlink or an informational value passed along by the template
    # or copy modules; even if this module never uses it, it is needed to key off some behaviors
if src is None:
if follow:
# use the current target of the link as the source
src = to_native(os.path.realpath(b_path), errors='strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
if not os.path.islink(b_path) and os.path.isdir(b_path):
relpath = path
else:
b_relpath = os.path.dirname(b_path)
relpath = to_native(b_relpath, errors='strict')
absrc = os.path.join(relpath, src)
b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
if not force and not os.path.exists(b_absrc):
raise AnsibleModuleError(results={'msg': 'src file does not exist, use "force=yes" if you'
' really want to create the link: %s' % absrc,
'path': path, 'src': src})
if prev_state == 'directory':
if not force:
raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s'
% (prev_state, path),
'path': path})
elif os.listdir(b_path):
# refuse to replace a directory that has files in it
raise AnsibleModuleError(results={'msg': 'the directory %s is not empty, refusing to'
' convert it' % path,
'path': path})
elif prev_state in ('file', 'hard') and not force:
raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s'
% (prev_state, path),
'path': path})
diff = initial_diff(path, 'link', prev_state)
changed = False
if prev_state in ('hard', 'file', 'directory', 'absent'):
changed = True
elif prev_state == 'link':
b_old_src = os.readlink(b_path)
if b_old_src != b_src:
diff['before']['src'] = to_native(b_old_src, errors='strict')
diff['after']['src'] = src
changed = True
else:
raise AnsibleModuleError(results={'msg': 'unexpected position reached', 'dest': path, 'src': src})
if changed and not module.check_mode:
if prev_state != 'absent':
# try to replace atomically
b_tmppath = to_bytes(os.path.sep).join(
[os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
)
try:
if prev_state == 'directory':
os.rmdir(b_path)
os.symlink(b_src, b_tmppath)
os.rename(b_tmppath, b_path)
except OSError as e:
if os.path.exists(b_tmppath):
os.unlink(b_tmppath)
raise AnsibleModuleError(results={'msg': 'Error while replacing: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
else:
try:
os.symlink(b_src, b_path)
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while linking: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
if module.check_mode and not os.path.exists(b_path):
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
# Now that we might have created the symlink, get the arguments.
# We need to do it now so we can properly follow the symlink if needed
# because load_file_common_arguments sets 'path' according
# the value of follow and the symlink existence.
file_args = module.load_file_common_arguments(module.params)
# Whenever we create a link to a nonexistent target we know that the nonexistent target
# cannot have any permissions set on it. Skip setting those and emit a warning (the user
# can set follow=False to remove the warning)
if follow and os.path.islink(b_path) and not os.path.exists(file_args['path']):
module.warn('Cannot set fs attributes on a non-existent symlink target. follow should be'
' set to False to avoid this.')
else:
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
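# The replacement above is made atomic by creating a uniquely named temporary
# symlink next to the destination and rename()-ing it over the existing path, so
# readers never observe a missing link. Rough equivalent (hypothetical paths):
#
#   os.symlink('/srv/app/releases/v2', '/srv/app/.12345.1583000000.0.tmp')
#   os.rename('/srv/app/.12345.1583000000.0.tmp', '/srv/app/current')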
def ensure_hardlink(path, src, follow, force, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
# src is the source of a hardlink. We require it if we are creating a new hardlink.
# We require path in the argument_spec so we know it is present at this point.
if src is None:
raise AnsibleModuleError(results={'msg': 'src is required for creating new hardlinks'})
if not os.path.exists(b_src):
raise AnsibleModuleError(results={'msg': 'src does not exist', 'dest': path, 'src': src})
diff = initial_diff(path, 'hard', prev_state)
changed = False
if prev_state == 'absent':
changed = True
elif prev_state == 'link':
b_old_src = os.readlink(b_path)
if b_old_src != b_src:
diff['before']['src'] = to_native(b_old_src, errors='strict')
diff['after']['src'] = src
changed = True
elif prev_state == 'hard':
if not os.stat(b_path).st_ino == os.stat(b_src).st_ino:
changed = True
if not force:
raise AnsibleModuleError(results={'msg': 'Cannot link, different hard link exists at destination',
'dest': path, 'src': src})
elif prev_state == 'file':
changed = True
if not force:
raise AnsibleModuleError(results={'msg': 'Cannot link, %s exists at destination' % prev_state,
'dest': path, 'src': src})
elif prev_state == 'directory':
changed = True
if os.path.exists(b_path):
if os.stat(b_path).st_ino == os.stat(b_src).st_ino:
return {'path': path, 'changed': False}
elif not force:
raise AnsibleModuleError(results={'msg': 'Cannot link: different hard link exists at destination',
'dest': path, 'src': src})
else:
raise AnsibleModuleError(results={'msg': 'unexpected position reached', 'dest': path, 'src': src})
if changed and not module.check_mode:
if prev_state != 'absent':
# try to replace atomically
b_tmppath = to_bytes(os.path.sep).join(
[os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
)
try:
if prev_state == 'directory':
if os.path.exists(b_path):
try:
os.unlink(b_path)
except OSError as e:
if e.errno != errno.ENOENT: # It may already have been removed
raise
os.link(b_src, b_tmppath)
os.rename(b_tmppath, b_path)
except OSError as e:
if os.path.exists(b_tmppath):
os.unlink(b_tmppath)
raise AnsibleModuleError(results={'msg': 'Error while replacing: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
else:
try:
os.link(b_src, b_path)
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while linking: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
if module.check_mode and not os.path.exists(b_path):
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
def main():
global module
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', choices=['absent', 'directory', 'file', 'hard', 'link', 'touch']),
path=dict(type='path', required=True, aliases=['dest', 'name']),
_original_basename=dict(type='str'), # Internal use only, for recursive ops
recurse=dict(type='bool', default=False),
force=dict(type='bool', default=False), # Note: Should not be in file_common_args in future
follow=dict(type='bool', default=True), # Note: Different default than file_common_args
_diff_peek=dict(type='bool'), # Internal use only, for internal checks in the action plugins
src=dict(type='path'), # Note: Should not be in file_common_args in future
modification_time=dict(type='str'),
modification_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
access_time=dict(type='str'),
access_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
),
add_file_common_args=True,
supports_check_mode=True,
)
# When we rewrite basic.py, we will do something similar to this on instantiating an AnsibleModule
sys.excepthook = _ansible_excepthook
additional_parameter_handling(module.params)
params = module.params
state = params['state']
recurse = params['recurse']
force = params['force']
follow = params['follow']
path = params['path']
src = params['src']
timestamps = {}
timestamps['modification_time'] = keep_backward_compatibility_on_timestamps(params['modification_time'], state)
timestamps['modification_time_format'] = params['modification_time_format']
timestamps['access_time'] = keep_backward_compatibility_on_timestamps(params['access_time'], state)
timestamps['access_time_format'] = params['access_time_format']
# short-circuit for diff_peek
if params['_diff_peek'] is not None:
appears_binary = execute_diff_peek(to_bytes(path, errors='surrogate_or_strict'))
module.exit_json(path=path, changed=False, appears_binary=appears_binary)
if state == 'file':
result = ensure_file_attributes(path, follow, timestamps)
elif state == 'directory':
result = ensure_directory(path, follow, recurse, timestamps)
elif state == 'link':
result = ensure_symlink(path, src, follow, force, timestamps)
elif state == 'hard':
result = ensure_hardlink(path, src, follow, force, timestamps)
elif state == 'touch':
result = execute_touch(path, follow, timestamps)
elif state == 'absent':
result = ensure_absent(path)
module.exit_json(**result)
if __name__ == '__main__':
main()
| Shaps/ansible | lib/ansible/modules/files/file.py | Python | gpl-3.0 | 37,706 | 0.003739 |
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for manipulating revision chains in the database."""
import d1_common.types.exceptions
import d1_gmn.app
import d1_gmn.app.did
import d1_gmn.app.model_util
import d1_gmn.app.models
def create_or_update_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
chain_model = _get_chain_by_pid(pid)
if chain_model:
_set_chain_sid(chain_model, sid)
else:
_add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid)
_update_sid_to_last_existing_pid_map(pid)
def delete_chain(pid):
pid_to_chain_model = d1_gmn.app.models.ChainMember.objects.get(pid__did=pid)
chain_model = pid_to_chain_model.chain
pid_to_chain_model.delete()
if not d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model).exists():
if chain_model.sid:
# Cascades back to chain_model.
d1_gmn.app.models.IdNamespace.objects.filter(
did=chain_model.sid.did
).delete()
else:
chain_model.delete()
def cut_from_chain(sciobj_model):
"""Remove an object from a revision chain.
The object can be at any location in the chain, including the head or tail.
Preconditions:
    - The object with the pid is verified to exist and to be a member of a
revision chain. E.g., with:
d1_gmn.app.views.asserts.is_existing_object(pid)
d1_gmn.app.views.asserts.is_in_revision_chain(pid)
Postconditions:
- The given object is a standalone object with empty obsoletes, obsoletedBy and
seriesId fields.
- The previously adjacent objects in the chain are adjusted to close any gap that
      was created, or to remove a dangling reference at the head or tail.
- If the object was the last object in the chain and the chain has a SID, the SID
reference is shifted over to the new last object in the chain.
"""
if _is_head(sciobj_model):
old_pid = sciobj_model.obsoletes.did
_cut_head_from_chain(sciobj_model)
elif _is_tail(sciobj_model):
old_pid = sciobj_model.obsoleted_by.did
_cut_tail_from_chain(sciobj_model)
else:
old_pid = sciobj_model.obsoleted_by.did
_cut_embedded_from_chain(sciobj_model)
_update_sid_to_last_existing_pid_map(old_pid)
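# Usage sketch (assumes the preconditions above were already checked by the
# caller, e.g. a view implementing MNStorage.delete()):
#
#   sciobj_model = d1_gmn.app.model_util.get_sci_model(pid)
#   cut_from_chain(sciobj_model)  # adjacent revisions are relinked and saved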
def get_all_pid_by_sid(sid):
return [c.pid.did for c in _get_all_chain_member_queryset_by_sid(sid)]
# def set_revision(pid, obsoletes_pid=None, obsoleted_by_pid=None):
# sciobj_model = d1_gmn.app.util.get_sci_model(pid)
# set_revision_links(sciobj_model, obsoletes_pid, obsoleted_by_pid)
# sciobj_model.save()
def resolve_sid(sid):
"""Get the PID to which the ``sid`` currently maps.
Preconditions:
- ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
"""
return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did
def get_sid_by_pid(pid):
"""Given the ``pid`` of the object in a chain, return the SID for the chain.
Return None if there is no SID for the chain. This operation is also valid
for standalone objects which may or may not have a SID.
    This is the reverse of resolve_sid().
All known PIDs are associated with a chain.
Preconditions:
- ``pid`` is verified to exist. E.g., with
d1_gmn.app.views.asserts.is_existing_object().
"""
return d1_gmn.app.did.get_did_by_foreign_key(_get_chain_by_pid(pid).sid)
def set_revision_links(sciobj_model, obsoletes_pid=None, obsoleted_by_pid=None):
if obsoletes_pid:
sciobj_model.obsoletes = d1_gmn.app.did.get_or_create_did(obsoletes_pid)
_set_revision_reverse(sciobj_model.pid.did, obsoletes_pid, is_obsoletes=False)
if obsoleted_by_pid:
sciobj_model.obsoleted_by = d1_gmn.app.did.get_or_create_did(obsoleted_by_pid)
_set_revision_reverse(sciobj_model.pid.did, obsoleted_by_pid, is_obsoletes=True)
sciobj_model.save()
def is_obsoletes_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletes field of any object.
This will return True even if the PID is in the obsoletes field of an object that
    does not exist on the local MN, such as a replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid).exists()
def is_obsoleted_by_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletedBy field of any object.
    This will return True even if the PID is in the obsoletedBy field of an object that
    does not exist on the local MN, such as a replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(
obsoleted_by__did=pid
).exists()
def is_revision(pid):
"""Return True if ``pid`` is referenced in the obsoletes or obsoletedBy field of any
object.
    This will return True even if the PID is in the obsoletes or obsoletedBy field of an
    object that does not exist on the local MN, such as a replica in an incomplete chain.
"""
return is_obsoletes_pid(pid) or is_obsoleted_by_pid(pid)
def _add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid):
is_added = _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid)
if not is_added:
# if not obsoletes_pid and not obsoleted_by_pid:
_add_standalone(pid, sid)
# else:
def _add_standalone(pid, sid):
# assert_sid_unused(sid)
_create_chain(pid, sid)
def _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
_assert_sid_is_in_chain(sid, obsoletes_pid)
_assert_sid_is_in_chain(sid, obsoleted_by_pid)
obsoletes_chain_model = _get_chain_by_pid(obsoletes_pid)
obsoleted_by_chain_model = _get_chain_by_pid(obsoleted_by_pid)
sid_chain_model = _get_chain_by_sid(sid) if sid else None
chain_model = obsoletes_chain_model or obsoleted_by_chain_model or sid_chain_model
if not chain_model:
return False
if obsoletes_chain_model and obsoletes_chain_model != chain_model:
_merge_chains(chain_model, obsoletes_chain_model)
if obsoleted_by_chain_model and obsoleted_by_chain_model != chain_model:
_merge_chains(chain_model, obsoleted_by_chain_model)
_add_pid_to_chain(chain_model, pid)
_set_chain_sid(chain_model, sid)
return True
def _merge_chains(chain_model_a, chain_model_b):
"""Merge two chains.
For use when it becomes known that two chains that were created separately
    are actually separate sections of the same chain.
E.g.:
- A obsoleted by X is created. A has no SID. X does not exist yet. A chain is
created for A.
- B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created
for B.
- C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the
same chain, which means that A and B are in the same chain. At this point, the
two chains need to be merged. Merging the chains causes A to take on the SID of
B.
"""
_set_chain_sid(
chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid)
)
for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b):
member_model.chain = chain_model_a
member_model.save()
chain_model_b.delete()
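# Merging is triggered from _add_to_chain() above: when the chains resolved via
# obsoletes_pid, obsoleted_by_pid and sid are not all the same row, the extra
# chain is folded into the one that was found first. Rough sketch (hypothetical
# identifiers):
#
#   chain_a = _get_chain_by_pid('X')  # chain holding the obsoleted revision
#   chain_b = _get_chain_by_pid('Y')  # separately created chain for the successor
#   if chain_a and chain_b and chain_a != chain_b:
#       _merge_chains(chain_a, chain_b)  # chain_b's members and SID move to chain_a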
def _add_pid_to_chain(chain_model, pid):
chain_member_model = d1_gmn.app.models.ChainMember(
chain=chain_model, pid=d1_gmn.app.did.get_or_create_did(pid)
)
chain_member_model.save()
def _set_chain_sid(chain_model, sid):
"""Set or update SID for chain.
If the chain already has a SID, ``sid`` must either be None or match the existing
SID.
"""
if not sid:
return
if chain_model.sid and chain_model.sid.did != sid:
raise d1_common.types.exceptions.ServiceFailure(
0,
"Attempted to modify existing SID. "
'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
)
chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
chain_model.save()
def _assert_sid_is_in_chain(sid, pid):
if not sid or not pid:
return
chain_model = _get_chain_by_pid(pid)
if not chain_model or not chain_model.sid:
return
if chain_model.sid.did != sid:
raise d1_common.types.exceptions.ServiceFailure(
0,
"Attempted to create object in chain with non-matching SID. "
'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
)
def _find_head_or_latest_connected(pid, last_pid=None):
"""Find latest existing sciobj that can be reached by walking towards the head from
``pid``
If ``pid`` does not exist, return None. If chain is connected all the way to head
and head exists, return the head. If chain ends in a dangling obsoletedBy, return
the last existing object.
"""
try:
sci_model = d1_gmn.app.model_util.get_sci_model(pid)
except d1_gmn.app.models.ScienceObject.DoesNotExist:
return last_pid
if sci_model.obsoleted_by is None:
return pid
return _find_head_or_latest_connected(sci_model.obsoleted_by.did, pid)
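# Walk example (hypothetical chain A -> B -> C where C is not present locally):
# _find_head_or_latest_connected('A') follows obsoleted_by from A to B; B's
# obsoleted_by points at the missing C, so 'B' is returned. With a fully
# connected chain the head PID itself is returned.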
def _get_chain_by_pid(pid):
"""Find chain by pid.
Return None if not found.
"""
try:
return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
except d1_gmn.app.models.ChainMember.DoesNotExist:
pass
def _get_chain_by_sid(sid):
"""Return None if not found."""
try:
return d1_gmn.app.models.Chain.objects.get(sid__did=sid)
except d1_gmn.app.models.Chain.DoesNotExist:
pass
def _update_sid_to_last_existing_pid_map(pid):
"""Set chain head PID to the last existing object in the chain to which ``pid``
    belongs. If a SID has been set for the chain, it resolves to the chain head PID.
    Intended to be called in MNStorage.delete() and other chain manipulations.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.app.views.asserts.is_existing_object()
"""
last_pid = _find_head_or_latest_connected(pid)
chain_model = _get_chain_by_pid(last_pid)
if not chain_model:
return
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
chain_model.save()
def _create_chain(pid, sid):
"""Create the initial chain structure for a new standalone object. Intended to be
called in MNStorage.create().
Preconditions:
- ``sid`` must be verified to be available to be assigned to a new standalone
object. E.g., with is_valid_sid_for_new_standalone().
"""
chain_model = d1_gmn.app.models.Chain(
# sid=d1_gmn.app.models.did(sid) if sid else None,
head_pid=d1_gmn.app.did.get_or_create_did(pid)
)
chain_model.save()
_add_pid_to_chain(chain_model, pid)
_set_chain_sid(chain_model, sid)
return chain_model
# def _get_or_create_chain_for_pid(pid):
# try:
# return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
# except d1_gmn.app.models.ChainMember.DoesNotExist:
# return _create_chain(pid, None)
def _map_sid_to_pid(chain_model, sid, pid):
if sid is not None:
chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(pid)
chain_model.save()
def _get_all_chain_member_queryset_by_sid(sid):
return d1_gmn.app.models.ChainMember.objects.filter(
chain=d1_gmn.app.models.Chain.objects.get(sid__did=sid)
)
def _get_all_chain_member_queryset_by_chain(chain_model):
return d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model)
def _cut_head_from_chain(sciobj_model):
new_head_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
new_head_model.obsoleted_by = None
sciobj_model.obsoletes = None
sciobj_model.save()
new_head_model.save()
def _cut_tail_from_chain(sciobj_model):
new_tail_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
new_tail_model.obsoletes = None
sciobj_model.obsoleted_by = None
sciobj_model.save()
new_tail_model.save()
def _cut_embedded_from_chain(sciobj_model):
prev_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
next_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
prev_model.obsoleted_by = next_model.pid
next_model.obsoletes = prev_model.pid
sciobj_model.obsoletes = None
sciobj_model.obsoleted_by = None
sciobj_model.save()
prev_model.save()
next_model.save()
def _is_head(sciobj_model):
return sciobj_model.obsoletes and not sciobj_model.obsoleted_by
def _is_tail(sciobj_model):
return sciobj_model.obsoleted_by and not sciobj_model.obsoletes
def _set_revision_reverse(to_pid, from_pid, is_obsoletes):
try:
sciobj_model = d1_gmn.app.model_util.get_sci_model(from_pid)
except d1_gmn.app.models.ScienceObject.DoesNotExist:
return
if not d1_gmn.app.did.is_existing_object(to_pid):
return
did_model = d1_gmn.app.did.get_or_create_did(to_pid)
if is_obsoletes:
sciobj_model.obsoletes = did_model
else:
sciobj_model.obsoleted_by = did_model
sciobj_model.save()
# def assert_sid_unused(sid):
# if not sid:
# return
# if find_chain_by_sid(sid):
# raise d1_common.types.exceptions.ServiceFailure(
# 0, u'Attempted to create standalone object with SID already in use. '
# 'sid="{}"'.format(sid)
# )
# def upd_sid_resolve(pid, sid=None, obsoletes_pid=None, obsoleted_by_pid=None):
# """Set SID to resolve to the newest object that exists locally for a chain"""
#
# last_pid = find_head_or_latest_connected(pid)
# def has_chain(pid):
# return d1_gmn.app.models.ChainMember.objects.filter(pid__did=pid).exists()
# def create_chain(sid, pid):
# """Create the initial chain structure for a new standalone object. Intended to
# be called in MNStorage.create().
#
# Preconditions:
# - ``sid`` must either be None or be previously unused.
# d1_gmn.app.views.asserts.is_unused()
# - ``pid`` must exist and be verified to be a PID.
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(pid)
# _map_sid_to_pid(chain_model, sid, pid)
# def add_pid_to_chain(sid, old_pid, new_pid):
# """Add a new revision ``new_pid`` to the chain that ``old_pid`` belongs to and
# update any SID to resolve to the new PID. Intended to be called in
# MNStorage.update().
#
# Preconditions:
# - ``sid`` must either be None or match the SID already assigned to the chain.
# - Both ``old_pid`` and ``new_pid`` must exist and be verified to be PIDs
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(old_pid)
# _add_pid_to_chain(chain_model, new_pid)
# _map_sid_to_pid(chain_model, sid, new_pid)
# def is_sid_in_revision_chain(sid, pid):
# """Determine if ``sid`` resolves to an object in the revision chain to which
# ``pid`` belongs.
#
# Preconditions:
# - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
# """
# chain_pid_list = get_pids_in_revision_chain(pid)
# resolved_pid = resolve_sid(sid)
# return resolved_pid in chain_pid_list
# def update_or_create_sid_to_pid_map(sid, pid):
# """Update existing or create a new ``sid`` to ``pid`` association. Then create
# or update the ``sid`` to resolve to the ``pid``.
#
# Preconditions:
# - ``sid`` is verified to be unused if creating a standalone object (that may later become
# the first object in a chain).
# - ``sid`` is verified to belong to the given chain updating.
# - ``pid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
# """
# d1_gmn.app.models.sid_to_pid(sid, pid)
# d1_gmn.app.models.sid_to_head_pid(sid, pid)
# def get_sid_by_pid(pid):
# """Get the SID to which the ``pid`` maps.
# Return None if there is no SID maps to ``pid``.
# """
# try:
# return d1_gmn.app.models.SeriesIdToPersistentId.objects.get(
# pid__did=pid
# ).sid.did
# except d1_gmn.app.models.SeriesIdToPersistentId.DoesNotExist:
# return None
# def move_sid_to_last_object_in_chain(pid):
# """Move SID to the last object in a chain to which ``pid`` belongs.
#
# - If the chain does not have a SID, do nothing.
# - If the SID already maps to the last object in the chain, do nothing.
#
# A SID always resolves to the last object in its chain. So System Metadata XML
# docs are used for introducing SIDs and setting initial mappings, but the
# database maintains the current mapping going forward.
#
# Preconditions:
# - PID is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
#
# Postconditions:
# - The SID maps to the last object in the chain.
# """
# sid = sysmeta_db.get_sid_by_pid(pid)
# if sid:
# chain_pid_list = sysmeta_db.get_pids_in_revision_chain(pid)
# update_sid(sid, chain_pid_list[-1])
# def update_revision_chain(pid, obsoletes_pid, obsoleted_by_pid, sid):
# with sysmeta_file.SysMetaFile(pid) as sysmeta_pyxb:
# sysmeta_file.update_revision_chain(
# sysmeta_pyxb, obsoletes_pid, obsoleted_by_pid, sid
# )
# sysmeta_db.update_revision_chain(sysmeta_pyxb)
# if sysmeta.obsoletes is not None:
# chain_pid_list = [pid]
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoletes:
# obsoletes_pid = sysmeta_pyxb.obsoletes.value()
# chain_pid_list.append(obsoletes_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoletes_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoleted_by:
# obsoleted_by_pid = sysmeta_pyxb.obsoleted_by.value()
# chain_pid_list.append(obsoleted_by_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoleted_by_pid)
# return chain_pid_list
| DataONEorg/d1_python | gmn/src/d1_gmn/app/revision.py | Python | apache-2.0 | 18,605 | 0.002687 |
import decimal
import os
from contextlib import contextmanager
from django.test import TestCase
from django.core.exceptions import ImproperlyConfigured
from mock import patch
from configurations.values import (Value, BooleanValue, IntegerValue,
FloatValue, DecimalValue, ListValue,
TupleValue, SetValue, DictValue,
URLValue, EmailValue, IPValue,
RegexValue, PathValue, SecretValue,
DatabaseURLValue, EmailURLValue,
CacheURLValue, BackendsValue,
CastingMixin, SearchURLValue)
@contextmanager
def env(**kwargs):
with patch.dict(os.environ, clear=True, **kwargs):
yield
class FailingCasterValue(CastingMixin, Value):
caster = 'non.existing.caster'
class ValueTests(TestCase):
def test_value(self):
value = Value('default', environ=False)
self.assertEqual(value.setup('TEST'), 'default')
with env(DJANGO_TEST='override'):
self.assertEqual(value.setup('TEST'), 'default')
@patch.dict(os.environ, clear=True, DJANGO_TEST='override')
def test_env_var(self):
value = Value('default')
self.assertEqual(value.setup('TEST'), 'override')
self.assertNotEqual(value.setup('TEST'), value.default)
self.assertEqual(value.to_python(os.environ['DJANGO_TEST']),
value.setup('TEST'))
def test_value_reuse(self):
value1 = Value('default')
value2 = Value(value1)
self.assertEqual(value1.setup('TEST1'), 'default')
self.assertEqual(value2.setup('TEST2'), 'default')
with env(DJANGO_TEST1='override1', DJANGO_TEST2='override2'):
self.assertEqual(value1.setup('TEST1'), 'override1')
self.assertEqual(value2.setup('TEST2'), 'override2')
def test_env_var_prefix(self):
with patch.dict(os.environ, clear=True, ACME_TEST='override'):
value = Value('default', environ_prefix='ACME')
self.assertEqual(value.setup('TEST'), 'override')
with patch.dict(os.environ, clear=True, TEST='override'):
value = Value('default', environ_prefix='')
self.assertEqual(value.setup('TEST'), 'override')
def test_boolean_values_true(self):
value = BooleanValue(False)
for truthy in value.true_values:
with env(DJANGO_TEST=truthy):
self.assertTrue(value.setup('TEST'))
def test_boolean_values_faulty(self):
self.assertRaises(ValueError, BooleanValue, 'false')
def test_boolean_values_false(self):
value = BooleanValue(True)
for falsy in value.false_values:
with env(DJANGO_TEST=falsy):
self.assertFalse(value.setup('TEST'))
def test_boolean_values_nonboolean(self):
value = BooleanValue(True)
with env(DJANGO_TEST='nonboolean'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_integer_values(self):
value = IntegerValue(1)
with env(DJANGO_TEST='2'):
self.assertEqual(value.setup('TEST'), 2)
with env(DJANGO_TEST='noninteger'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_float_values(self):
value = FloatValue(1.0)
with env(DJANGO_TEST='2.0'):
self.assertEqual(value.setup('TEST'), 2.0)
with env(DJANGO_TEST='noninteger'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_decimal_values(self):
value = DecimalValue(decimal.Decimal(1))
with env(DJANGO_TEST='2'):
self.assertEqual(value.setup('TEST'), decimal.Decimal(2))
with env(DJANGO_TEST='nondecimal'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_failing_caster(self):
self.assertRaises(ImproperlyConfigured, FailingCasterValue)
def test_list_values_default(self):
value = ListValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ['2', '2'])
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), ['2', '2'])
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), [])
def test_list_values_separator(self):
value = ListValue(separator=':')
with env(DJANGO_TEST='/usr/bin:/usr/sbin:/usr/local/bin'):
self.assertEqual(value.setup('TEST'),
['/usr/bin', '/usr/sbin', '/usr/local/bin'])
    def test_list_values_converter(self):
value = ListValue(converter=int)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), [2, 2])
value = ListValue(converter=float)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), [2.0, 2.0])
def test_list_values_custom_converter(self):
value = ListValue(converter=lambda x: x * 2)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ['22', '22'])
def test_list_values_converter_exception(self):
value = ListValue(converter=int)
with env(DJANGO_TEST='2,b'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_tuple_values_default(self):
value = TupleValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ('2', '2'))
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), ('2', '2'))
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), ())
def test_set_values_default(self):
value = SetValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), set(['2', '2']))
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), set(['2', '2']))
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), set())
def test_dict_values_default(self):
value = DictValue()
with env(DJANGO_TEST='{2: 2}'):
self.assertEqual(value.setup('TEST'), {2: 2})
expected = {2: 2, '3': '3', '4': [1, 2, 3]}
with env(DJANGO_TEST="{2: 2, '3': '3', '4': [1, 2, 3]}"):
self.assertEqual(value.setup('TEST'), expected)
with env(DJANGO_TEST="""{
2: 2,
'3': '3',
'4': [1, 2, 3],
}"""):
self.assertEqual(value.setup('TEST'), expected)
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), {})
with env(DJANGO_TEST='spam'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_email_values(self):
value = EmailValue('spam@eg.gs')
with env(DJANGO_TEST='spam@sp.am'):
self.assertEqual(value.setup('TEST'), 'spam@sp.am')
with env(DJANGO_TEST='spam'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_url_values(self):
value = URLValue('http://eggs.spam')
with env(DJANGO_TEST='http://spam.eggs'):
self.assertEqual(value.setup('TEST'), 'http://spam.eggs')
with env(DJANGO_TEST='httb://spam.eggs'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_ip_values(self):
value = IPValue('0.0.0.0')
with env(DJANGO_TEST='127.0.0.1'):
self.assertEqual(value.setup('TEST'), '127.0.0.1')
with env(DJANGO_TEST='::1'):
self.assertEqual(value.setup('TEST'), '::1')
with env(DJANGO_TEST='spam.eggs'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_regex_values(self):
value = RegexValue('000--000', regex=r'\d+--\d+')
with env(DJANGO_TEST='123--456'):
self.assertEqual(value.setup('TEST'), '123--456')
with env(DJANGO_TEST='123456'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_path_values_with_check(self):
value = PathValue()
with env(DJANGO_TEST='/'):
self.assertEqual(value.setup('TEST'), '/')
with env(DJANGO_TEST='~/'):
self.assertEqual(value.setup('TEST'), os.path.expanduser('~'))
with env(DJANGO_TEST='/does/not/exist'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_path_values_no_check(self):
value = PathValue(check_exists=False)
with env(DJANGO_TEST='/'):
self.assertEqual(value.setup('TEST'), '/')
with env(DJANGO_TEST='~/spam/eggs'):
self.assertEqual(value.setup('TEST'),
os.path.join(os.path.expanduser('~'),
'spam', 'eggs'))
with env(DJANGO_TEST='/does/not/exist'):
self.assertEqual(value.setup('TEST'), '/does/not/exist')
def test_secret_value(self):
self.assertRaises(ValueError, SecretValue, 'default')
value = SecretValue()
self.assertRaises(ValueError, value.setup, 'TEST')
with env(DJANGO_SECRET_KEY='123'):
self.assertEqual(value.setup('SECRET_KEY'), '123')
value = SecretValue(environ_name='FACEBOOK_API_SECRET',
environ_prefix=None)
self.assertRaises(ValueError, value.setup, 'TEST')
with env(FACEBOOK_API_SECRET='123'):
self.assertEqual(value.setup('TEST'), '123')
def test_database_url_value(self):
value = DatabaseURLValue()
self.assertEqual(value.default, {})
with env(DATABASE_URL='sqlite://'):
self.assertEqual(value.setup('DATABASE_URL'), {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'HOST': None,
'NAME': ':memory:',
'PASSWORD': None,
'PORT': None,
'USER': None,
}})
def test_email_url_value(self):
value = EmailURLValue()
self.assertEqual(value.default, {})
with env(EMAIL_URL='smtps://user@domain.com:password@smtp.example.com:587'):
self.assertEqual(value.setup('EMAIL_URL'), {
'EMAIL_BACKEND': 'django.core.mail.backends.smtp.EmailBackend',
'EMAIL_FILE_PATH': '',
'EMAIL_HOST': 'smtp.example.com',
'EMAIL_HOST_PASSWORD': 'password',
'EMAIL_HOST_USER': 'user@domain.com',
'EMAIL_PORT': 587,
'EMAIL_USE_TLS': True})
with env(EMAIL_URL='console://'):
self.assertEqual(value.setup('EMAIL_URL'), {
'EMAIL_BACKEND': 'django.core.mail.backends.console.EmailBackend',
'EMAIL_FILE_PATH': '',
'EMAIL_HOST': None,
'EMAIL_HOST_PASSWORD': None,
'EMAIL_HOST_USER': None,
'EMAIL_PORT': None,
'EMAIL_USE_TLS': False})
with env(EMAIL_URL='smtps://user@domain.com:password@smtp.example.com:wrong'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_cache_url_value(self):
cache_setting = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'KEY_PREFIX': '',
'LOCATION': 'user@host:port:1'
}
}
cache_url = 'redis://user@host:port/1'
value = CacheURLValue(cache_url)
self.assertEqual(value.default, cache_setting)
value = CacheURLValue()
self.assertEqual(value.default, {})
with env(CACHE_URL='redis://user@host:port/1'):
self.assertEqual(value.setup('CACHE_URL'), cache_setting)
with env(CACHE_URL='wrong://user@host:port/1'):
self.assertRaises(KeyError, value.setup, 'TEST')
def test_search_url_value(self):
value = SearchURLValue()
self.assertEqual(value.default, {})
with env(SEARCH_URL='elasticsearch://127.0.0.1:9200/index'):
self.assertEqual(value.setup('SEARCH_URL'), {
'default': {
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
'URL': 'http://127.0.0.1:9200',
'INDEX_NAME': 'index',
}})
def test_backend_list_value(self):
backends = ['django.middleware.common.CommonMiddleware']
value = BackendsValue(backends)
self.assertEqual(value.setup('TEST'), backends)
backends = ['non.existing.Backend']
self.assertRaises(ValueError, BackendsValue, backends)
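# Minimal sketch of how these Value classes are typically declared on a
# django-configurations settings class (assumes the package's Configuration
# base class; not part of this test module):
#
#   from configurations import Configuration, values
#
#   class Dev(Configuration):
#       DEBUG = values.BooleanValue(True)                # DJANGO_DEBUG
#       ALLOWED_HOSTS = values.ListValue(['localhost'])  # DJANGO_ALLOWED_HOSTS
#       SECRET_KEY = values.SecretValue()                # requires DJANGO_SECRET_KEY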
| luzfcb/django-configurations | tests/test_values.py | Python | bsd-3-clause | 12,772 | 0.000313 |